repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
reyduar/ijdomingo-webapp | src/app/services/alumno.service.js | angular.module('app').factory('AlumnoService', function AlumnoService($http, __env) {
var service = {
obtenerAlumnos: obtenerAlumnos,
guardarAlumno: guardarAlumno,
editarAlumno: editarAlumno,
buscarAlumnoPorDni: buscarAlumnoPorDni,
borrarAlumno: borrarAlumno,
buscarAlumnoPorId: buscarAlumnoPorId
};
return service;
// Metodo para traer todos los alumnos
/**
 * Fetches the full list of students.
 * @returns {Promise} $http promise resolving with the full response object.
 */
function obtenerAlumnos() {
    var uri = __env.apiUrl + 'alumnos';
    // The trailing .then(function (response) { return response; }) was an
    // identity wrapper and has been dropped: the promise resolves with the
    // same response either way.
    return $http({
        url: uri,
        method: "GET",
        headers: { "Content-Type": "application/json" }
    });
}
// Metodo para guardar alumno
/**
 * Creates a new student record.
 * @param {Object} body student payload, sent as JSON.
 * @returns {Promise} $http promise resolving with the full response object.
 */
function guardarAlumno(body) {
    var uri = __env.apiUrl + 'alumno/agregar';
    // Identity .then wrapper removed: it only re-returned the response.
    return $http({
        url: uri,
        method: "POST",
        data: body,
        headers: { "Content-Type": "application/json" }
    });
}
// Metodo para editar alumno
/**
 * Updates an existing student record.
 * @param {Object} body updated student payload, sent as JSON.
 * @param {*} id identifier of the student to update (appended to the URL).
 * @returns {Promise} $http promise resolving with the full response object.
 */
function editarAlumno(body, id) {
    var uri = __env.apiUrl + 'alumno/editar/' + id;
    // Identity .then wrapper removed: it only re-returned the response.
    return $http({
        url: uri,
        method: "PUT",
        data: body,
        headers: { "Content-Type": "application/json" }
    });
}
// Metodo para buscar alumno por dni
/**
 * Looks up a student by DNI (national ID number).
 * @param {*} dni DNI value to search for (appended to the URL).
 * @returns {Promise} $http promise resolving with the full response object.
 */
function buscarAlumnoPorDni(dni) {
    var uri = __env.apiUrl + 'alumno/buscar/dni/' + dni;
    // Identity .then wrapper removed: it only re-returned the response.
    return $http({
        url: uri,
        method: "GET",
        headers: { "Content-Type": "application/json" }
    });
}
// Metodo para buscar alumno por id
/**
 * Looks up a student by its identifier.
 * @param {*} id student identifier (appended to the URL).
 * @returns {Promise} $http promise resolving with the full response object.
 */
function buscarAlumnoPorId(id) {
    var uri = __env.apiUrl + 'alumno/buscar/id/' + id;
    // Identity .then wrapper removed: it only re-returned the response.
    return $http({
        url: uri,
        method: "GET",
        headers: { "Content-Type": "application/json" }
    });
}
// Metodo para borrar alumno
/**
 * Deletes a student record.
 * @param {*} id identifier of the student to delete (appended to the URL).
 * @returns {Promise} $http promise resolving with the full response object.
 */
function borrarAlumno(id) {
    var uri = __env.apiUrl + 'alumno/borrar/' + id;
    // Identity .then wrapper removed: it only re-returned the response.
    return $http({
        url: uri,
        method: "DELETE",
        headers: { "Content-Type": "application/json" }
    });
}
}); |
intergrate-dev/xy-wan-xzw | src/com/founder/mobileinternet/cmsinterface/util/ApiExtract.java | package com.founder.mobileinternet.cmsinterface.util;
import java.io.File;
import java.io.FileFilter;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Scanner;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.springframework.core.LocalVariableTableParameterNameDiscoverer;
import org.springframework.core.io.ClassPathResource;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
/**
* 外网接口自动识别
* @author han.xf
*
*/
public class ApiExtract{
public static void main(String[] args) throws Exception{
JSONObject apis = new JSONObject() ;
JSONArray arr = new JSONArray() ;
// 扫描指定包名称下的所有定义的controller
List<Class<?>> beans = new ArrayList<>() ;
String packageName = "com.founder.mobileinternet.cmsinterface.ui.controller" ;
beans = findBeanClassByPackageName(packageName,beans) ;
// 处理每个controller
Iterator<Class<?>> iter = beans.iterator() ;
while(iter.hasNext()) {
arr.add(dealBean(iter.next())) ;
}
apis.put("groups", arr) ;
String result = formatJson("var api_groups = " + apis.toString());
String savePath = "../../WEB/xy/system/script/api.json" ;
print(savePath, result) ;
System.out.println("Done!");
}
/**
* 递归扫描指定包名称下的 *.class 文件,得到所有定义的Controller
* @param packageName 包名称
* @param beans 存储扫描到的Controller
* @return 返回List集合
*/
public static List<Class<?>> findBeanClassByPackageName(String filePath,List<Class<?>> beans){
File packageFile ;
try {
packageFile = new ClassPathResource(filePath.replace(".", File.separator)).getFile() ;
File[] dirfiles = packageFile.listFiles(new FileFilter() {
public boolean accept(File file) {
return (true && file.isDirectory())
|| (file.getName().endsWith(".class")) ;
}
}) ;
for (File file : dirfiles) {
if (file.isDirectory()) {
findBeanClassByPackageName(filePath + "." + file.getName(),beans) ;
}else{
String className = file.getName().substring(0,file.getName().length() - 6) ;
//Class<?> cls =Thread.currentThread().getContextClassLoader().loadClass(packageName + '.' + className) ;
Class<?> cls = Class.forName(filePath + "." + className) ;
if(cls.isAnnotationPresent(Controller.class)) {
beans.add(cls) ;
}
}
}
} catch (ClassNotFoundException e) {
e.printStackTrace() ;
} catch (IOException e1) {
e1.printStackTrace() ;
}
return beans ;
}
/**
* 处理单个 bean
*/
public static JSONObject dealBean(Class<?> cls){
JSONObject obj = new JSONObject() ;
if(cls.isAnnotationPresent(XYComment.class)){
XYComment xyApi = cls.getAnnotation(XYComment.class) ;
obj.put("name", xyApi.name()) ; //控制器描述
if(xyApi.comment() != null && !"".equals(xyApi.comment())){
obj.put("comment", xyApi.comment()) ;
}
}else{
obj.put("name", cls.getSimpleName()) ; //控制器描述,如果没使用注解,先使用类名
}
obj.put("className", cls.getSimpleName()) ;
JSONArray arr = new JSONArray() ;
Method[] methods = cls.getMethods() ;
for(Method method : methods) {
if(method.isAnnotationPresent(RequestMapping.class)) {
JSONObject _obj = dealMethod(method) ;
arr.add(_obj) ;
}
}
obj.put("list", arr) ;
return obj ;
}
/**
* 处理bean中的单个方法
*/
public static JSONObject dealMethod(Method method){
JSONObject obj = new JSONObject() ;
//判断是否使用XYApi注解
if(method.isAnnotationPresent(XYComment.class)) {
XYComment xyApi = method.getAnnotation(XYComment.class) ;
obj.put("name", xyApi.name()) ; //方法描述
}else{
obj.put("name", method.getName()) ; //方法描述,如果没有使用XYComment注解,暂时使用方法名称
}
obj.put("methodName", method.getName()) ; //方法名称
//取得方法路径,请求方式
RequestMapping mapping = method.getAnnotation(RequestMapping.class) ;
obj.put("url",JSONArray.fromObject(mapping.value())) ; //方法路径
//方法请求方式
if(mapping.method().length > 0) {
obj.put("method", JSONArray.fromObject(mapping.method())) ;
}
//java反射包没有提供得到方法参数列表的方法,使用spring提供的方法,扫描的class文件生成的时候需要开启debug
LocalVariableTableParameterNameDiscoverer parameterNameDiscoverer= new LocalVariableTableParameterNameDiscoverer() ;
String[] paramterNames = parameterNameDiscoverer.getParameterNames(method) ; // 参数名称
Class<?>[] paramterTypes = method.getParameterTypes() ; // 参数类型
Annotation[][] paramterAnnotations = method.getParameterAnnotations() ; //方法参数上的注解
//处理方法参数
JSONArray arr = new JSONArray() ;
for(int i = 0 ; i < paramterTypes.length ; i++) {
//不记录request,response
if(HttpServletRequest.class == paramterTypes[i] || HttpServletResponse.class == paramterTypes[i]) continue ;
dealParamter(arr,paramterTypes[i],paramterNames[i],paramterAnnotations[i]) ;
}
obj.put("params", arr) ;
return obj ;
}
/**
* 处理方法参数
* @param paramterType 参数类型
* @param paramterName 参数形参名称
* @param paramterAnnotations 参数注解
* @return
*/
public static void dealParamter(JSONArray arr,Class<?> paramterType,String paramterName,Annotation[] paramterAnnotations){
//参数类型是VO类,扫描项目工程的包名
if(paramterType.getName().contains("com.founder")){
dealPOJO(arr, paramterType);
}else{
JSONObject obj = new JSONObject() ;
obj.put("name", paramterName) ; // 参数 形参 名称
obj.put("value", paramterName) ; //实参 参数 名
obj.put("type", format(paramterType)) ; //参数类型
obj.put("required", true) ; //必须传递参数
//处理参数注解
for(int i = 0 ; i < paramterAnnotations.length ; i++) {
if(paramterAnnotations[i] instanceof RequestParam){
RequestParam requestParam = (RequestParam)paramterAnnotations[i] ;
obj.put("required", requestParam.required()) ;
if(requestParam.value() != null && !"".equals(requestParam.value())) {
obj.put("value", requestParam.value()) ; //指定实参参数名
}
//如果没有定义默认值,必须要传递参数
if(requestParam.defaultValue() != null && !requestParam.defaultValue().contains("\t")) {
obj.put("default", requestParam.defaultValue()) ; //默认值
obj.put("required", false) ;
}
}else if(paramterAnnotations[i] instanceof XYComment){
XYComment xyComment = (XYComment)paramterAnnotations[i] ;
obj.put("comment", xyComment.comment()) ; //参数的中文描述
}
}
arr.add(obj) ;
}
}
public static void dealPOJO(JSONArray arr,Class<?> paramterType){
Field[] fields = paramterType.getDeclaredFields() ;
for(int n = 0 ; n < fields.length ; n++) {
JSONObject _obj = new JSONObject() ;
Field field = fields[n] ;
_obj.put("name", field.getName()) ; // 参数名称
if(field.isAnnotationPresent(XYComment.class)){
XYComment xyComment = field.getAnnotation(XYComment.class) ;
//有注解时都读到comment中,因为不能覆盖参数名
_obj.put("comment", xyComment.name() + " " + xyComment.comment()) ; //参数的中文描述
}
_obj.put("type", format(field)) ; //参数类型
arr.add(_obj) ;
}
}
/**
*
*/
public static String format(Field field){
Type type = field.getGenericType() ;
if(type instanceof ParameterizedType){ // 是泛型参数的类型
ParameterizedType pt = (ParameterizedType) type ;
String name = pt.toString();
name = name.replace("java.util.", "");
name = name.replace("java.lang.", "");
return name ;
}
return format(field.getType()) ;
}
/**
* 1.包装类字符串类型 类型名称转换为基础类型
* 2.字符串类型去掉包名称
*/
public static String format(Class<?> paramterType){
// 基础类型
if(paramterType.isPrimitive() || paramterType.equals(String.class)){
return paramterType.getSimpleName() ;
}
// 包装类
if(paramterType.equals(Byte.class)
|| paramterType.equals(Short.class)
|| paramterType.equals(Integer.class)
|| paramterType.equals(Long.class)
|| paramterType.equals(Float.class)
|| paramterType.equals(Character.class)
|| paramterType.equals(Boolean.class)){
try {
Field field = paramterType.getDeclaredField("TYPE") ;
return field.get(null).toString() ;
} catch (Exception e) {
return paramterType.getName() ;
}
}
return paramterType.getName();
}
/**
* 格式化json
*/
public static String formatJson(String jsonStr) {
if (null == jsonStr || "".equals(jsonStr)) return "";
StringBuilder sb = new StringBuilder();
char last = '\0';
char current = '\0';
int indent = 0;
for (int i = 0; i < jsonStr.length(); i++) {
last = current;
current = jsonStr.charAt(i);
switch (current) {
case '{':
case '[':
sb.append(current);
sb.append('\n');
indent++;
addIndentBlank(sb, indent);
break;
case '}':
case ']':
sb.append('\n');
indent--;
addIndentBlank(sb, indent);
sb.append(current);
break;
case ',':
sb.append(current);
if (last != '\\') {
sb.append('\n');
addIndentBlank(sb, indent);
}
break;
default:
sb.append(current);
}
}
return sb.toString();
}
private static void addIndentBlank(StringBuilder sb, int indent) {
for (int i = 0; i < indent; i++) {
sb.append('\t');
}
}
/**
* 输出到指定文件
* @param json
*/
public static void print(String savePath,String json){
PrintStream ps = null ;
try {
ps = new PrintStream(new File(savePath)) ;
ps.print(json) ;
} catch (FileNotFoundException e) {
e.printStackTrace() ;
}finally{
if(ps != null)
ps.close() ;
}
}
public static JSONObject scanner(String savePath) {
Scanner scanner = null ;
StringBuffer buffer = new StringBuffer() ;
try {
scanner = new Scanner(new File(savePath)) ;
if(scanner.hasNextLine()){
buffer.append(scanner.nextLine()) ;
}
} catch (FileNotFoundException e) {
e.printStackTrace() ;
}finally{
if(scanner != null)
scanner.close() ;
}
return JSONObject.fromObject(buffer.toString()) ;
}
}
|
afxcn/unit | src/nxt_job_file.h |
/*
* Copyright (C) <NAME>
* Copyright (C) NGINX, Inc.
*/
#ifndef _NXT_JOB_FILE_H_INCLUDED_
#define _NXT_JOB_FILE_H_INCLUDED_
/*
* nxt_job_file_read() allows to open a file, to get its type, size, and
* modification time, to read or map file content to memory, and to close
* the file. It can be done as one operation for small file or as several
* operations for large file. On each operation completion ready_handler
* or error_handler completion handlers are called. Since they are job
* operations, they can be run by a thread pool.
*
* If a file is not opened then it is opened and its type, size, and
* modification time are got. Then file content starting from given offset
* is read or mapped in memory if there is a buffer supplied. The offset
* field is correspondingly updated.
*
* If there is no buffer but the read_ahead flag is set then the first
* byte is read to initiate read ahead operation.
*
* If the close flag is set then file descriptor is closed when the file
* is completely read.
*
* The complete flag is set by nxt_job_file_read() when the file is
* completely read.
*
* The test_before_open flag allows to save syscalls in some case, for
* example, not to open and then not to close a directory. It calls
* nxt_file_info() to get file type, size, and modification time before
* opening the file. A custom read_required() callback combined with this
* flag can also omit opening and reading on some conditions. However,
* if the callback forces opening then additional nxt_file_info() is
* called after opening. The default read_required() callback always
* forces opening and reading.
*/
typedef struct nxt_job_file_s nxt_job_file_t;
/*
 * State for a file-read job (see the description above): the file is opened,
 * its metadata fetched, and its content read or mapped into the buffer,
 * possibly over several job operations run by a thread pool.
 */
struct nxt_job_file_s {
nxt_job_t job; /* base job; completion handlers run as job operations */
nxt_file_t file; /* the file being opened/read */
nxt_off_t offset; /* read offset; updated as content is read or mapped */
nxt_buf_t *buffer; /* destination buffer; may be NULL (see read_ahead) */
nxt_work_handler_t ready_handler; /* called on each operation's completion */
nxt_work_handler_t error_handler; /* called on operation failure */
nxt_int_t (*read_required)(nxt_job_file_t *jbf); /* may omit open/read; default always forces both */
uint16_t directory_end; /* NOTE(review): not covered by the comment above — presumably the offset of the directory part of the name; confirm in nxt_job_file.c */
uint16_t close_before_open:1; /* NOTE(review): semantics not documented here — verify in the implementation */
uint16_t test_before_open:1; /* call nxt_file_info() before opening to save syscalls */
uint16_t read_ahead:1; /* with no buffer, read the first byte to start read-ahead */
uint16_t close:1; /* close the descriptor once the file is completely read */
uint16_t complete:1; /* set by nxt_job_file_read() when the file is fully read */
};
NXT_EXPORT nxt_job_file_t *nxt_job_file_create(nxt_mp_t *mp);
NXT_EXPORT void nxt_job_file_init(nxt_job_file_t *jbf);
NXT_EXPORT void nxt_job_file_read(nxt_task_t *task, nxt_job_t *job);
#endif /* _NXT_JOB_FILE_H_INCLUDED_ */
|
robjporter/go-library | _examples/xenv.go | <gh_stars>0
package main
import (
"fmt"
"github.com/robjporter/go-library/xenv"
)
// Demo of the xenv helper: reads and writes environment variables through the
// wrapper and prints a series of build/runtime facts it exposes. Output order
// matters, so the statement sequence is preserved as-is.
func main() {
env := xenv.New()
// Size printed before and after a lookup — presumably GetString caches the
// value into the wrapper, changing Size(); TODO confirm against xenv docs.
fmt.Println("SIZE: ", env.Size())
fmt.Println(env.GetString("GOPATH"))
fmt.Println("SIZE: ", env.Size())
// AddString stores a new variable, growing the wrapper's map again.
env.AddString("TEST", "TESTING")
fmt.Println("SIZE: ", env.Size())
fmt.Println("DATA: ", env.GetAll())
fmt.Println("")
// System / toolchain information reported by xenv.
fmt.Println("SYSTEM ENVIRONMENTS *******************************************************")
fmt.Println("ENVIRONMENT GO PATH BIN: >", env.GOPATHBIN())
fmt.Println("ENVIRONMENT GO PATH: >", env.GOPATH())
fmt.Println("ENVIRONMENT PATH SEPARATOR: >", env.PathSeparator())
fmt.Println("ENVIRONMENT LIST SEPARATOR: >", env.ListSeparator())
fmt.Println("ENVIRONMENT IS COMPILED: >", env.IsCompiled())
fmt.Println("ENVIRONMENT BUILD DEBUG: >", env.BuildDebug())
fmt.Println("ENVIRONMENT CHECK ARCH: >", env.CheckArchitecture())
fmt.Println("ENVIRONMENT BUILD STAMP: >", env.BuildStamp())
fmt.Println("ENVIRONMENT COMPILER: >", env.Compiler())
fmt.Println("ENVIRONMENT GO ARCH: >", env.GOARCH())
fmt.Println("ENVIRONMENT GO OS: >", env.GOOS())
fmt.Println("ENVIRONMENT GO ROOT: >", env.GOROOT())
fmt.Println("ENVIRONMENT GO VERSION: >", env.GOVER())
fmt.Println("ENVIRONMENT NUMBER CPU: >", env.NumCPU())
fmt.Println("ENVIRONMENT FORMATTED TIME: >", env.GetFormattedTime())
fmt.Println("ENVIRONMENT USERNAME: >", env.GetUsername())
}
|
npocmaka/Windows-Server-2003 | multimedia/directx/dxvb/dx7vb/dpsessiondescobj.h | //+-------------------------------------------------------------------------
//
// Microsoft Windows
//
// Copyright (C) Microsoft Corporation, 1998 - 1998
//
// File: dpsessiondescobj.h
//
//--------------------------------------------------------------------------
#include "resource.h"
// ATL COM object exposing a DirectPlay session description (DPSessionDesc)
// through the I_dxj_DPSessionDesc interface.
class C_dxj_DPSessionDescObject :
public I_dxj_DPSessionDesc,
public CComCoClass<C_dxj_DPSessionDescObject, &CLSID__dxj_DPSessionDesc>,
public CComObjectRoot
{
public:
C_dxj_DPSessionDescObject() ;
virtual ~C_dxj_DPSessionDescObject() ;
BEGIN_COM_MAP(C_dxj_DPSessionDescObject)
COM_INTERFACE_ENTRY( I_dxj_DPSessionDesc)
END_COM_MAP()
DECLARE_AGGREGATABLE(C_dxj_DPSessionDescObject)
// NOTE(review): the registry declaration below uses CLSID__dxj_D3dDeviceDesc
// while CComCoClass above uses CLSID__dxj_DPSessionDesc — this looks like a
// copy/paste slip; confirm which CLSID should actually be registered.
DECLARE_REGISTRY(CLSID__dxj_D3dDeviceDesc, "DIRECT.DPSessionDesc.5", "DIRECT.DPSessionDesc.5", IDS_GENERIC_DESC, THREADFLAGS_BOTH)
public:
// Presumably copy m_desc out to / in from *desc — implementation is in the
// corresponding .cpp file; confirm there.
HRESULT STDMETHODCALLTYPE getDescription( DPSessionDesc *desc);
HRESULT STDMETHODCALLTYPE setDescription( DPSessionDesc *desc);
private:
DPSessionDesc m_desc; // stored session description value
};
|
Nikunj3121994/magda | magda-web-client/src/Components/ErrorPage/index.js | <gh_stars>0
import React from "react";
import * as queryString from "query-string";
import ErrorHandler from "../ErrorHandler";
import Error404 from "./Error404";
import Error500 from "./Error500";
export default function ErrorPage({ location }) {
const query = queryString.parse(location.search);
const errorCode = query.errorCode;
switch (errorCode) {
case "404":
return <Error404 errorData={query} />;
case "500":
return <Error500 errorData={query} />;
default:
return (
<ErrorHandler
error={{
title: "Unkown Error:",
detail: `An error with unrecoginised error code: \`${errorCode}\` has ocurred.`
}}
/>
);
}
}
|
DurtyFree/RageMP-NativeUI | examples/nativeUI-example/includes/NativeUI/enums/Control.js | <gh_stars>10-100
var Control;
(function (Control) {
Control[Control["NextCamera"] = 0] = "NextCamera";
Control[Control["LookLeftRight"] = 1] = "LookLeftRight";
Control[Control["LookUpDown"] = 2] = "LookUpDown";
Control[Control["LookUpOnly"] = 3] = "LookUpOnly";
Control[Control["LookDownOnly"] = 4] = "LookDownOnly";
Control[Control["LookLeftOnly"] = 5] = "LookLeftOnly";
Control[Control["LookRightOnly"] = 6] = "LookRightOnly";
Control[Control["CinematicSlowMo"] = 7] = "CinematicSlowMo";
Control[Control["FlyUpDown"] = 8] = "FlyUpDown";
Control[Control["FlyLeftRight"] = 9] = "FlyLeftRight";
Control[Control["ScriptedFlyZUp"] = 10] = "ScriptedFlyZUp";
Control[Control["ScriptedFlyZDown"] = 11] = "ScriptedFlyZDown";
Control[Control["WeaponWheelUpDown"] = 12] = "WeaponWheelUpDown";
Control[Control["WeaponWheelLeftRight"] = 13] = "WeaponWheelLeftRight";
Control[Control["WeaponWheelNext"] = 14] = "WeaponWheelNext";
Control[Control["WeaponWheelPrev"] = 15] = "WeaponWheelPrev";
Control[Control["SelectNextWeapon"] = 16] = "SelectNextWeapon";
Control[Control["SelectPrevWeapon"] = 17] = "SelectPrevWeapon";
Control[Control["SkipCutscene"] = 18] = "SkipCutscene";
Control[Control["CharacterWheel"] = 19] = "CharacterWheel";
Control[Control["MultiplayerInfo"] = 20] = "MultiplayerInfo";
Control[Control["Sprint"] = 21] = "Sprint";
Control[Control["Jump"] = 22] = "Jump";
Control[Control["Enter"] = 23] = "Enter";
Control[Control["Attack"] = 24] = "Attack";
Control[Control["Aim"] = 25] = "Aim";
Control[Control["LookBehind"] = 26] = "LookBehind";
Control[Control["Phone"] = 27] = "Phone";
Control[Control["SpecialAbility"] = 28] = "SpecialAbility";
Control[Control["SpecialAbilitySecondary"] = 29] = "SpecialAbilitySecondary";
Control[Control["MoveLeftRight"] = 30] = "MoveLeftRight";
Control[Control["MoveUpDown"] = 31] = "MoveUpDown";
Control[Control["MoveUpOnly"] = 32] = "MoveUpOnly";
Control[Control["MoveDownOnly"] = 33] = "MoveDownOnly";
Control[Control["MoveLeftOnly"] = 34] = "MoveLeftOnly";
Control[Control["MoveRightOnly"] = 35] = "MoveRightOnly";
Control[Control["Duck"] = 36] = "Duck";
Control[Control["SelectWeapon"] = 37] = "SelectWeapon";
Control[Control["Pickup"] = 38] = "Pickup";
Control[Control["SniperZoom"] = 39] = "SniperZoom";
Control[Control["SniperZoomInOnly"] = 40] = "SniperZoomInOnly";
Control[Control["SniperZoomOutOnly"] = 41] = "SniperZoomOutOnly";
Control[Control["SniperZoomInSecondary"] = 42] = "SniperZoomInSecondary";
Control[Control["SniperZoomOutSecondary"] = 43] = "SniperZoomOutSecondary";
Control[Control["Cover"] = 44] = "Cover";
Control[Control["Reload"] = 45] = "Reload";
Control[Control["Talk"] = 46] = "Talk";
Control[Control["Detonate"] = 47] = "Detonate";
Control[Control["HUDSpecial"] = 48] = "HUDSpecial";
Control[Control["Arrest"] = 49] = "Arrest";
Control[Control["AccurateAim"] = 50] = "AccurateAim";
Control[Control["Context"] = 51] = "Context";
Control[Control["ContextSecondary"] = 52] = "ContextSecondary";
Control[Control["WeaponSpecial"] = 53] = "WeaponSpecial";
Control[Control["WeaponSpecial2"] = 54] = "WeaponSpecial2";
Control[Control["Dive"] = 55] = "Dive";
Control[Control["DropWeapon"] = 56] = "DropWeapon";
Control[Control["DropAmmo"] = 57] = "DropAmmo";
Control[Control["ThrowGrenade"] = 58] = "ThrowGrenade";
Control[Control["VehicleMoveLeftRight"] = 59] = "VehicleMoveLeftRight";
Control[Control["VehicleMoveUpDown"] = 60] = "VehicleMoveUpDown";
Control[Control["VehicleMoveUpOnly"] = 61] = "VehicleMoveUpOnly";
Control[Control["VehicleMoveDownOnly"] = 62] = "VehicleMoveDownOnly";
Control[Control["VehicleMoveLeftOnly"] = 63] = "VehicleMoveLeftOnly";
Control[Control["VehicleMoveRightOnly"] = 64] = "VehicleMoveRightOnly";
Control[Control["VehicleSpecial"] = 65] = "VehicleSpecial";
Control[Control["VehicleGunLeftRight"] = 66] = "VehicleGunLeftRight";
Control[Control["VehicleGunUpDown"] = 67] = "VehicleGunUpDown";
Control[Control["VehicleAim"] = 68] = "VehicleAim";
Control[Control["VehicleAttack"] = 69] = "VehicleAttack";
Control[Control["VehicleAttack2"] = 70] = "VehicleAttack2";
Control[Control["VehicleAccelerate"] = 71] = "VehicleAccelerate";
Control[Control["VehicleBrake"] = 72] = "VehicleBrake";
Control[Control["VehicleDuck"] = 73] = "VehicleDuck";
Control[Control["VehicleHeadlight"] = 74] = "VehicleHeadlight";
Control[Control["VehicleExit"] = 75] = "VehicleExit";
Control[Control["VehicleHandbrake"] = 76] = "VehicleHandbrake";
Control[Control["VehicleHotwireLeft"] = 77] = "VehicleHotwireLeft";
Control[Control["VehicleHotwireRight"] = 78] = "VehicleHotwireRight";
Control[Control["VehicleLookBehind"] = 79] = "VehicleLookBehind";
Control[Control["VehicleCinCam"] = 80] = "VehicleCinCam";
Control[Control["VehicleNextRadio"] = 81] = "VehicleNextRadio";
Control[Control["VehiclePrevRadio"] = 82] = "VehiclePrevRadio";
Control[Control["VehicleNextRadioTrack"] = 83] = "VehicleNextRadioTrack";
Control[Control["VehiclePrevRadioTrack"] = 84] = "VehiclePrevRadioTrack";
Control[Control["VehicleRadioWheel"] = 85] = "VehicleRadioWheel";
Control[Control["VehicleHorn"] = 86] = "VehicleHorn";
Control[Control["VehicleFlyThrottleUp"] = 87] = "VehicleFlyThrottleUp";
Control[Control["VehicleFlyThrottleDown"] = 88] = "VehicleFlyThrottleDown";
Control[Control["VehicleFlyYawLeft"] = 89] = "VehicleFlyYawLeft";
Control[Control["VehicleFlyYawRight"] = 90] = "VehicleFlyYawRight";
Control[Control["VehiclePassengerAim"] = 91] = "VehiclePassengerAim";
Control[Control["VehiclePassengerAttack"] = 92] = "VehiclePassengerAttack";
Control[Control["VehicleSpecialAbilityFranklin"] = 93] = "VehicleSpecialAbilityFranklin";
Control[Control["VehicleStuntUpDown"] = 94] = "VehicleStuntUpDown";
Control[Control["VehicleCinematicUpDown"] = 95] = "VehicleCinematicUpDown";
Control[Control["VehicleCinematicUpOnly"] = 96] = "VehicleCinematicUpOnly";
Control[Control["VehicleCinematicDownOnly"] = 97] = "VehicleCinematicDownOnly";
Control[Control["VehicleCinematicLeftRight"] = 98] = "VehicleCinematicLeftRight";
Control[Control["VehicleSelectNextWeapon"] = 99] = "VehicleSelectNextWeapon";
Control[Control["VehicleSelectPrevWeapon"] = 100] = "VehicleSelectPrevWeapon";
Control[Control["VehicleRoof"] = 101] = "VehicleRoof";
Control[Control["VehicleJump"] = 102] = "VehicleJump";
Control[Control["VehicleGrapplingHook"] = 103] = "VehicleGrapplingHook";
Control[Control["VehicleShuffle"] = 104] = "VehicleShuffle";
Control[Control["VehicleDropProjectile"] = 105] = "VehicleDropProjectile";
Control[Control["VehicleMouseControlOverride"] = 106] = "VehicleMouseControlOverride";
Control[Control["VehicleFlyRollLeftRight"] = 107] = "VehicleFlyRollLeftRight";
Control[Control["VehicleFlyRollLeftOnly"] = 108] = "VehicleFlyRollLeftOnly";
Control[Control["VehicleFlyRollRightOnly"] = 109] = "VehicleFlyRollRightOnly";
Control[Control["VehicleFlyPitchUpDown"] = 110] = "VehicleFlyPitchUpDown";
Control[Control["VehicleFlyPitchUpOnly"] = 111] = "VehicleFlyPitchUpOnly";
Control[Control["VehicleFlyPitchDownOnly"] = 112] = "VehicleFlyPitchDownOnly";
Control[Control["VehicleFlyUnderCarriage"] = 113] = "VehicleFlyUnderCarriage";
Control[Control["VehicleFlyAttack"] = 114] = "VehicleFlyAttack";
Control[Control["VehicleFlySelectNextWeapon"] = 115] = "VehicleFlySelectNextWeapon";
Control[Control["VehicleFlySelectPrevWeapon"] = 116] = "VehicleFlySelectPrevWeapon";
Control[Control["VehicleFlySelectTargetLeft"] = 117] = "VehicleFlySelectTargetLeft";
Control[Control["VehicleFlySelectTargetRight"] = 118] = "VehicleFlySelectTargetRight";
Control[Control["VehicleFlyVerticalFlightMode"] = 119] = "VehicleFlyVerticalFlightMode";
Control[Control["VehicleFlyDuck"] = 120] = "VehicleFlyDuck";
Control[Control["VehicleFlyAttackCamera"] = 121] = "VehicleFlyAttackCamera";
Control[Control["VehicleFlyMouseControlOverride"] = 122] = "VehicleFlyMouseControlOverride";
Control[Control["VehicleSubTurnLeftRight"] = 123] = "VehicleSubTurnLeftRight";
Control[Control["VehicleSubTurnLeftOnly"] = 124] = "VehicleSubTurnLeftOnly";
Control[Control["VehicleSubTurnRightOnly"] = 125] = "VehicleSubTurnRightOnly";
Control[Control["VehicleSubPitchUpDown"] = 126] = "VehicleSubPitchUpDown";
Control[Control["VehicleSubPitchUpOnly"] = 127] = "VehicleSubPitchUpOnly";
Control[Control["VehicleSubPitchDownOnly"] = 128] = "VehicleSubPitchDownOnly";
Control[Control["VehicleSubThrottleUp"] = 129] = "VehicleSubThrottleUp";
Control[Control["VehicleSubThrottleDown"] = 130] = "VehicleSubThrottleDown";
Control[Control["VehicleSubAscend"] = 131] = "VehicleSubAscend";
Control[Control["VehicleSubDescend"] = 132] = "VehicleSubDescend";
Control[Control["VehicleSubTurnHardLeft"] = 133] = "VehicleSubTurnHardLeft";
Control[Control["VehicleSubTurnHardRight"] = 134] = "VehicleSubTurnHardRight";
Control[Control["VehicleSubMouseControlOverride"] = 135] = "VehicleSubMouseControlOverride";
Control[Control["VehiclePushbikePedal"] = 136] = "VehiclePushbikePedal";
Control[Control["VehiclePushbikeSprint"] = 137] = "VehiclePushbikeSprint";
Control[Control["VehiclePushbikeFrontBrake"] = 138] = "VehiclePushbikeFrontBrake";
Control[Control["VehiclePushbikeRearBrake"] = 139] = "VehiclePushbikeRearBrake";
Control[Control["MeleeAttackLight"] = 140] = "MeleeAttackLight";
Control[Control["MeleeAttackHeavy"] = 141] = "MeleeAttackHeavy";
Control[Control["MeleeAttackAlternate"] = 142] = "MeleeAttackAlternate";
Control[Control["MeleeBlock"] = 143] = "MeleeBlock";
Control[Control["ParachuteDeploy"] = 144] = "ParachuteDeploy";
Control[Control["ParachuteDetach"] = 145] = "ParachuteDetach";
Control[Control["ParachuteTurnLeftRight"] = 146] = "ParachuteTurnLeftRight";
Control[Control["ParachuteTurnLeftOnly"] = 147] = "ParachuteTurnLeftOnly";
Control[Control["ParachuteTurnRightOnly"] = 148] = "ParachuteTurnRightOnly";
Control[Control["ParachutePitchUpDown"] = 149] = "ParachutePitchUpDown";
Control[Control["ParachutePitchUpOnly"] = 150] = "ParachutePitchUpOnly";
Control[Control["ParachutePitchDownOnly"] = 151] = "ParachutePitchDownOnly";
Control[Control["ParachuteBrakeLeft"] = 152] = "ParachuteBrakeLeft";
Control[Control["ParachuteBrakeRight"] = 153] = "ParachuteBrakeRight";
Control[Control["ParachuteSmoke"] = 154] = "ParachuteSmoke";
Control[Control["ParachutePrecisionLanding"] = 155] = "ParachutePrecisionLanding";
Control[Control["Map"] = 156] = "Map";
Control[Control["SelectWeaponUnarmed"] = 157] = "SelectWeaponUnarmed";
Control[Control["SelectWeaponMelee"] = 158] = "SelectWeaponMelee";
Control[Control["SelectWeaponHandgun"] = 159] = "SelectWeaponHandgun";
Control[Control["SelectWeaponShotgun"] = 160] = "SelectWeaponShotgun";
Control[Control["SelectWeaponSmg"] = 161] = "SelectWeaponSmg";
Control[Control["SelectWeaponAutoRifle"] = 162] = "SelectWeaponAutoRifle";
Control[Control["SelectWeaponSniper"] = 163] = "SelectWeaponSniper";
Control[Control["SelectWeaponHeavy"] = 164] = "SelectWeaponHeavy";
Control[Control["SelectWeaponSpecial"] = 165] = "SelectWeaponSpecial";
Control[Control["SelectCharacterMichael"] = 166] = "SelectCharacterMichael";
Control[Control["SelectCharacterFranklin"] = 167] = "SelectCharacterFranklin";
Control[Control["SelectCharacterTrevor"] = 168] = "SelectCharacterTrevor";
Control[Control["SelectCharacterMultiplayer"] = 169] = "SelectCharacterMultiplayer";
Control[Control["SaveReplayClip"] = 170] = "SaveReplayClip";
Control[Control["SpecialAbilityPC"] = 171] = "SpecialAbilityPC";
Control[Control["PhoneUp"] = 172] = "PhoneUp";
Control[Control["PhoneDown"] = 173] = "PhoneDown";
Control[Control["PhoneLeft"] = 174] = "PhoneLeft";
Control[Control["PhoneRight"] = 175] = "PhoneRight";
Control[Control["PhoneSelect"] = 176] = "PhoneSelect";
Control[Control["PhoneCancel"] = 177] = "PhoneCancel";
Control[Control["PhoneOption"] = 178] = "PhoneOption";
Control[Control["PhoneExtraOption"] = 179] = "PhoneExtraOption";
Control[Control["PhoneScrollForward"] = 180] = "PhoneScrollForward";
Control[Control["PhoneScrollBackward"] = 181] = "PhoneScrollBackward";
Control[Control["PhoneCameraFocusLock"] = 182] = "PhoneCameraFocusLock";
Control[Control["PhoneCameraGrid"] = 183] = "PhoneCameraGrid";
Control[Control["PhoneCameraSelfie"] = 184] = "PhoneCameraSelfie";
Control[Control["PhoneCameraDOF"] = 185] = "PhoneCameraDOF";
Control[Control["PhoneCameraExpression"] = 186] = "PhoneCameraExpression";
Control[Control["FrontendDown"] = 187] = "FrontendDown";
Control[Control["FrontendUp"] = 188] = "FrontendUp";
Control[Control["FrontendLeft"] = 189] = "FrontendLeft";
Control[Control["FrontendRight"] = 190] = "FrontendRight";
Control[Control["FrontendRdown"] = 191] = "FrontendRdown";
Control[Control["FrontendRup"] = 192] = "FrontendRup";
Control[Control["FrontendRleft"] = 193] = "FrontendRleft";
Control[Control["FrontendRright"] = 194] = "FrontendRright";
Control[Control["FrontendAxisX"] = 195] = "FrontendAxisX";
Control[Control["FrontendAxisY"] = 196] = "FrontendAxisY";
Control[Control["FrontendRightAxisX"] = 197] = "FrontendRightAxisX";
Control[Control["FrontendRightAxisY"] = 198] = "FrontendRightAxisY";
Control[Control["FrontendPause"] = 199] = "FrontendPause";
Control[Control["FrontendPauseAlternate"] = 200] = "FrontendPauseAlternate";
Control[Control["FrontendAccept"] = 201] = "FrontendAccept";
Control[Control["FrontendCancel"] = 202] = "FrontendCancel";
Control[Control["FrontendX"] = 203] = "FrontendX";
Control[Control["FrontendY"] = 204] = "FrontendY";
Control[Control["FrontendLb"] = 205] = "FrontendLb";
Control[Control["FrontendRb"] = 206] = "FrontendRb";
Control[Control["FrontendLt"] = 207] = "FrontendLt";
Control[Control["FrontendRt"] = 208] = "FrontendRt";
Control[Control["FrontendLs"] = 209] = "FrontendLs";
Control[Control["FrontendRs"] = 210] = "FrontendRs";
Control[Control["FrontendLeaderboard"] = 211] = "FrontendLeaderboard";
Control[Control["FrontendSocialClub"] = 212] = "FrontendSocialClub";
Control[Control["FrontendSocialClubSecondary"] = 213] = "FrontendSocialClubSecondary";
Control[Control["FrontendDelete"] = 214] = "FrontendDelete";
Control[Control["FrontendEndscreenAccept"] = 215] = "FrontendEndscreenAccept";
Control[Control["FrontendEndscreenExpand"] = 216] = "FrontendEndscreenExpand";
Control[Control["FrontendSelect"] = 217] = "FrontendSelect";
Control[Control["ScriptLeftAxisX"] = 218] = "ScriptLeftAxisX";
Control[Control["ScriptLeftAxisY"] = 219] = "ScriptLeftAxisY";
Control[Control["ScriptRightAxisX"] = 220] = "ScriptRightAxisX";
Control[Control["ScriptRightAxisY"] = 221] = "ScriptRightAxisY";
Control[Control["ScriptRUp"] = 222] = "ScriptRUp";
Control[Control["ScriptRDown"] = 223] = "ScriptRDown";
Control[Control["ScriptRLeft"] = 224] = "ScriptRLeft";
Control[Control["ScriptRRight"] = 225] = "ScriptRRight";
Control[Control["ScriptLB"] = 226] = "ScriptLB";
Control[Control["ScriptRB"] = 227] = "ScriptRB";
Control[Control["ScriptLT"] = 228] = "ScriptLT";
Control[Control["ScriptRT"] = 229] = "ScriptRT";
Control[Control["ScriptLS"] = 230] = "ScriptLS";
Control[Control["ScriptRS"] = 231] = "ScriptRS";
Control[Control["ScriptPadUp"] = 232] = "ScriptPadUp";
Control[Control["ScriptPadDown"] = 233] = "ScriptPadDown";
Control[Control["ScriptPadLeft"] = 234] = "ScriptPadLeft";
Control[Control["ScriptPadRight"] = 235] = "ScriptPadRight";
Control[Control["ScriptSelect"] = 236] = "ScriptSelect";
Control[Control["CursorAccept"] = 237] = "CursorAccept";
Control[Control["CursorCancel"] = 238] = "CursorCancel";
Control[Control["CursorX"] = 239] = "CursorX";
Control[Control["CursorY"] = 240] = "CursorY";
Control[Control["CursorScrollUp"] = 241] = "CursorScrollUp";
Control[Control["CursorScrollDown"] = 242] = "CursorScrollDown";
Control[Control["EnterCheatCode"] = 243] = "EnterCheatCode";
Control[Control["InteractionMenu"] = 244] = "InteractionMenu";
Control[Control["MpTextChatAll"] = 245] = "MpTextChatAll";
Control[Control["MpTextChatTeam"] = 246] = "MpTextChatTeam";
Control[Control["MpTextChatFriends"] = 247] = "MpTextChatFriends";
Control[Control["MpTextChatCrew"] = 248] = "MpTextChatCrew";
Control[Control["PushToTalk"] = 249] = "PushToTalk";
Control[Control["CreatorLS"] = 250] = "CreatorLS";
Control[Control["CreatorRS"] = 251] = "CreatorRS";
Control[Control["CreatorLT"] = 252] = "CreatorLT";
Control[Control["CreatorRT"] = 253] = "CreatorRT";
Control[Control["CreatorMenuToggle"] = 254] = "CreatorMenuToggle";
Control[Control["CreatorAccept"] = 255] = "CreatorAccept";
Control[Control["CreatorDelete"] = 256] = "CreatorDelete";
Control[Control["Attack2"] = 257] = "Attack2";
Control[Control["RappelJump"] = 258] = "RappelJump";
Control[Control["RappelLongJump"] = 259] = "RappelLongJump";
Control[Control["RappelSmashWindow"] = 260] = "RappelSmashWindow";
Control[Control["PrevWeapon"] = 261] = "PrevWeapon";
Control[Control["NextWeapon"] = 262] = "NextWeapon";
Control[Control["MeleeAttack1"] = 263] = "MeleeAttack1";
Control[Control["MeleeAttack2"] = 264] = "MeleeAttack2";
Control[Control["Whistle"] = 265] = "Whistle";
Control[Control["MoveLeft"] = 266] = "MoveLeft";
Control[Control["MoveRight"] = 267] = "MoveRight";
Control[Control["MoveUp"] = 268] = "MoveUp";
Control[Control["MoveDown"] = 269] = "MoveDown";
Control[Control["LookLeft"] = 270] = "LookLeft";
Control[Control["LookRight"] = 271] = "LookRight";
Control[Control["LookUp"] = 272] = "LookUp";
Control[Control["LookDown"] = 273] = "LookDown";
Control[Control["SniperZoomIn"] = 274] = "SniperZoomIn";
Control[Control["SniperZoomOut"] = 275] = "SniperZoomOut";
Control[Control["SniperZoomInAlternate"] = 276] = "SniperZoomInAlternate";
Control[Control["SniperZoomOutAlternate"] = 277] = "SniperZoomOutAlternate";
Control[Control["VehicleMoveLeft"] = 278] = "VehicleMoveLeft";
Control[Control["VehicleMoveRight"] = 279] = "VehicleMoveRight";
Control[Control["VehicleMoveUp"] = 280] = "VehicleMoveUp";
Control[Control["VehicleMoveDown"] = 281] = "VehicleMoveDown";
Control[Control["VehicleGunLeft"] = 282] = "VehicleGunLeft";
Control[Control["VehicleGunRight"] = 283] = "VehicleGunRight";
Control[Control["VehicleGunUp"] = 284] = "VehicleGunUp";
Control[Control["VehicleGunDown"] = 285] = "VehicleGunDown";
Control[Control["VehicleLookLeft"] = 286] = "VehicleLookLeft";
Control[Control["VehicleLookRight"] = 287] = "VehicleLookRight";
Control[Control["ReplayStartStopRecording"] = 288] = "ReplayStartStopRecording";
Control[Control["ReplayStartStopRecordingSecondary"] = 289] = "ReplayStartStopRecordingSecondary";
Control[Control["ScaledLookLeftRight"] = 290] = "ScaledLookLeftRight";
Control[Control["ScaledLookUpDown"] = 291] = "ScaledLookUpDown";
Control[Control["ScaledLookUpOnly"] = 292] = "ScaledLookUpOnly";
Control[Control["ScaledLookDownOnly"] = 293] = "ScaledLookDownOnly";
Control[Control["ScaledLookLeftOnly"] = 294] = "ScaledLookLeftOnly";
Control[Control["ScaledLookRightOnly"] = 295] = "ScaledLookRightOnly";
Control[Control["ReplayMarkerDelete"] = 296] = "ReplayMarkerDelete";
Control[Control["ReplayClipDelete"] = 297] = "ReplayClipDelete";
Control[Control["ReplayPause"] = 298] = "ReplayPause";
Control[Control["ReplayRewind"] = 299] = "ReplayRewind";
Control[Control["ReplayFfwd"] = 300] = "ReplayFfwd";
Control[Control["ReplayNewmarker"] = 301] = "ReplayNewmarker";
Control[Control["ReplayRecord"] = 302] = "ReplayRecord";
Control[Control["ReplayScreenshot"] = 303] = "ReplayScreenshot";
Control[Control["ReplayHidehud"] = 304] = "ReplayHidehud";
Control[Control["ReplayStartpoint"] = 305] = "ReplayStartpoint";
Control[Control["ReplayEndpoint"] = 306] = "ReplayEndpoint";
Control[Control["ReplayAdvance"] = 307] = "ReplayAdvance";
Control[Control["ReplayBack"] = 308] = "ReplayBack";
Control[Control["ReplayTools"] = 309] = "ReplayTools";
Control[Control["ReplayRestart"] = 310] = "ReplayRestart";
Control[Control["ReplayShowhotkey"] = 311] = "ReplayShowhotkey";
Control[Control["ReplayCycleMarkerLeft"] = 312] = "ReplayCycleMarkerLeft";
Control[Control["ReplayCycleMarkerRight"] = 313] = "ReplayCycleMarkerRight";
Control[Control["ReplayFOVIncrease"] = 314] = "ReplayFOVIncrease";
Control[Control["ReplayFOVDecrease"] = 315] = "ReplayFOVDecrease";
Control[Control["ReplayCameraUp"] = 316] = "ReplayCameraUp";
Control[Control["ReplayCameraDown"] = 317] = "ReplayCameraDown";
Control[Control["ReplaySave"] = 318] = "ReplaySave";
Control[Control["ReplayToggletime"] = 319] = "ReplayToggletime";
Control[Control["ReplayToggletips"] = 320] = "ReplayToggletips";
Control[Control["ReplayPreview"] = 321] = "ReplayPreview";
Control[Control["ReplayToggleTimeline"] = 322] = "ReplayToggleTimeline";
Control[Control["ReplayTimelinePickupClip"] = 323] = "ReplayTimelinePickupClip";
Control[Control["ReplayTimelineDuplicateClip"] = 324] = "ReplayTimelineDuplicateClip";
Control[Control["ReplayTimelinePlaceClip"] = 325] = "ReplayTimelinePlaceClip";
Control[Control["ReplayCtrl"] = 326] = "ReplayCtrl";
Control[Control["ReplayTimelineSave"] = 327] = "ReplayTimelineSave";
Control[Control["ReplayPreviewAudio"] = 328] = "ReplayPreviewAudio";
Control[Control["VehicleDriveLook"] = 329] = "VehicleDriveLook";
Control[Control["VehicleDriveLook2"] = 330] = "VehicleDriveLook2";
Control[Control["VehicleFlyAttack2"] = 331] = "VehicleFlyAttack2";
Control[Control["RadioWheelUpDown"] = 332] = "RadioWheelUpDown";
Control[Control["RadioWheelLeftRight"] = 333] = "RadioWheelLeftRight";
Control[Control["VehicleSlowMoUpDown"] = 334] = "VehicleSlowMoUpDown";
Control[Control["VehicleSlowMoUpOnly"] = 335] = "VehicleSlowMoUpOnly";
Control[Control["VehicleSlowMoDownOnly"] = 336] = "VehicleSlowMoDownOnly";
Control[Control["MapPointOfInterest"] = 337] = "MapPointOfInterest";
Control[Control["ReplaySnapmaticPhoto"] = 338] = "ReplaySnapmaticPhoto";
Control[Control["VehicleCarJump"] = 339] = "VehicleCarJump";
Control[Control["VehicleRocketBoost"] = 340] = "VehicleRocketBoost";
Control[Control["VehicleParachute"] = 341] = "VehicleParachute";
Control[Control["VehicleBikeWings"] = 342] = "VehicleBikeWings";
Control[Control["VehicleFlyBombBay"] = 343] = "VehicleFlyBombBay";
Control[Control["VehicleFlyCounter"] = 344] = "VehicleFlyCounter";
Control[Control["VehicleFlyTransform"] = 345] = "VehicleFlyTransform";
})(Control || (Control = {}));
export default Control;
|
ubntc/go | metrics/watch.go | <gh_stars>0
package metrics
import (
"context"
"log"
"time"
"github.com/prometheus/client_golang/prometheus"
)
// Metrics represents a set of registerable metrics.
type Metrics interface {
Register(reg prometheus.Registerer)
}
// Watch registers the metrics and continuously logs them to the console.
// The reporter runs on its own goroutine and terminates once ctx is done.
func Watch(ctx context.Context, m Metrics) {
	go func() {
		ticker := time.NewTicker(time.Second)
		defer ticker.Stop()

		log.Print("start watching metrics")
		defer log.Print("stopped watching metrics")

		// A pedantic registry rejects duplicate or inconsistent collectors
		// at registration time rather than at gather time.
		registry := prometheus.NewPedanticRegistry()
		m.Register(registry)

		for {
			select {
			case <-ctx.Done():
				return
			case <-ticker.C:
				printMetrics(registry)
			}
		}
	}()
}
// printMetrics gathers and prints registered metrics.
//
// Each non-zero metric is logged as "name[label] value", using the first
// label value when present. Zero-valued metrics are suppressed to keep the
// console output short.
func printMetrics(reg prometheus.Gatherer) {
	// Gather may return partial results alongside a non-nil error; log the
	// error instead of silently dropping it, then print whatever was
	// collected.
	metrics, err := reg.Gather()
	if err != nil {
		log.Printf("failed to gather metrics: %v", err)
	}
	for _, mf := range metrics {
		for _, m := range mf.GetMetric() {
			val := 0.0
			count := m.GetCounter().GetValue()
			gauge := m.GetGauge().GetValue()
			buckets := m.GetHistogram().GetBucket()
			switch {
			case gauge != 0:
				val = gauge
			case count != 0:
				val = count
			case len(buckets) > 0:
				// NOTE(review): for histograms this reports the first
				// bucket's exemplar value, not the histogram sum or count —
				// confirm this is the intended console summary.
				val = buckets[0].GetExemplar().GetValue()
			}
			if val != 0 {
				labels := m.GetLabel()
				label := ""
				if len(labels) > 0 {
					label = labels[0].GetValue()
				}
				log.Printf("%s[%s] %f", mf.GetName(), label, val)
			}
		}
	}
}
|
tinochinamora/iw_imdb | accls/RBM/SysCV/Cmodel/predict_mod/subfun_cppUpdateFun_upc.cpp | #include "common.hpp"
#include "model_predict_class.hpp"
// Computes the next value of the `upc` register of the predict unit.
//
// NOTE(review): this appears to be machine-generated from a hardware model
// (Cmodel/SysCV); most parameters are part of a uniform generated signature
// and are not referenced in this function.
//
// Behavior, as visible in the branches below:
//   - predict_upc == 0 or 1: upc is returned unchanged.
//   - predict_upc == 2: once the pending write has finished
//     (wr_request == 0 && wr_complete == 1), upc becomes 3 when the last
//     test user has been processed on the final loop, otherwise 0; until
//     then upc is returned unchanged.
//   - any other predict_upc value: upc is returned unchanged.
BIT_VEC model_predict::cppUpdateFun_upc(BIT_VEC conf_done, BIT_VEC conf_num_hidden, BIT_VEC conf_num_loops, BIT_VEC conf_num_movies, BIT_VEC conf_num_testusers, BIT_VEC conf_num_users, BIT_VEC conf_num_visible, BIT_VEC data_in, BIT_VEC rd_grant, BIT_VEC rst, BIT_VEC wr_grant)
{
    // cppVar_1850/cppVar_1851 are declared but never used below —
    // generated-code residue.
    BIT_VEC cppVar_1851;
    BIT_VEC cppVar_1850;
    BIT_VEC cppVar_1830;
    bool cppVar_1831;
    BIT_VEC cppVar_1833;
    bool cppVar_1834;
    BIT_VEC cppVar_1836;
    bool cppVar_1837;
    BIT_VEC cppVar_1839;
    bool cppVar_1840;
    BIT_VEC cppVar_1841;
    bool cppVar_1842;
    bool cppVar_1843;
    BIT_VEC cppVar_1845;
    BIT_VEC cppVar_1846;
    bool cppVar_1847;
    bool cppVar_1848;
    bool cppVar_1849;
    BIT_VEC cppVar_1844;
    BIT_VEC cppVar_1838;
    BIT_VEC cppVar_1835;
    BIT_VEC cppVar_1832;
    BIT_VEC cppVar_1829;
    // State 0: hold upc.
    cppVar_1831 = (predict_upc == 0);
    if (cppVar_1831) {
        cppVar_1829 = upc;
    } else {
        // State 1: hold upc.
        cppVar_1834 = (predict_upc == 1);
        if (cppVar_1834) {
            cppVar_1832 = upc;
        } else {
            // State 2: the only state with a transition.
            cppVar_1837 = (predict_upc == 2);
            if (cppVar_1837) {
                // Wait until the write request has been retired.
                cppVar_1840 = (wr_request == 0);
                cppVar_1842 = (wr_complete == 1);
                cppVar_1843 = cppVar_1840 && cppVar_1842;
                if (cppVar_1843) {
                    // Last test user (index == num_testusers - 1, masked to
                    // 16 bits) on the final loop iteration -> upc = 3,
                    // otherwise restart at upc = 0.
                    cppVar_1846 = num_testusers - 1;
                    cppVar_1846 = (cppVar_1846 & cppMask_un_16_);
                    cppVar_1847 = (index == cppVar_1846);
                    cppVar_1848 = (loop_count == num_loops);
                    cppVar_1849 = cppVar_1847 && cppVar_1848;
                    if (cppVar_1849) {
                        cppVar_1844 = 3;
                    } else {
                        cppVar_1844 = 0;
                    }
                    cppVar_1838 = cppVar_1844;
                } else {
                    cppVar_1838 = upc;
                }
                cppVar_1835 = cppVar_1838;
            } else {
                // Any other state: hold upc.
                cppVar_1835 = upc;
            }
            cppVar_1832 = cppVar_1835;
        }
        cppVar_1829 = cppVar_1832;
    }
    return cppVar_1829;
}
|
matalbec/yoroi-frontend | packages/yoroi-extension/app/components/topbar/NavWalletDetailsRevamp.js | <reponame>matalbec/yoroi-frontend
// @flow
import { Component } from 'react';
import { observer } from 'mobx-react';
import type { Node } from 'react';
import classnames from 'classnames';
import { intlShape } from 'react-intl';
import { splitAmount, truncateToken } from '../../utils/formatters';
import styles from './NavWalletDetailsRevamp.scss';
import IconEyeOpen from '../../assets/images/my-wallets/icon_eye_open.inline.svg';
import IconEyeClosed from '../../assets/images/my-wallets/icon_eye_closed.inline.svg';
import type { $npm$ReactIntl$IntlFormat } from 'react-intl';
import { hiddenAmount } from '../../utils/strings';
import { MultiToken } from '../../api/common/lib/MultiToken';
import type { TokenLookupKey } from '../../api/common/lib/MultiToken';
import { getTokenName } from '../../stores/stateless/tokenHelpers';
import type { TokenRow } from '../../api/ada/lib/storage/database/primitives/tables';
import type { WalletChecksum } from '@emurgo/cip4-js';
import type { ConceptualWallet } from '../../api/ada/lib/storage/models/ConceptualWallet/index';
import WalletAccountIcon from './WalletAccountIcon';
type Props = {|
+onUpdateHideBalance: void => Promise<void>,
+shouldHideBalance: boolean,
+highlightTitle?: boolean,
+showEyeIcon?: boolean,
/**
* undefined => wallet is not a reward wallet
* null => still calculating
* value => done calculating
*/
+rewards: null | void | MultiToken,
+walletAmount: null | MultiToken,
+infoText?: string,
+showDetails?: boolean,
+getTokenInfo: ($ReadOnly<Inexact<TokenLookupKey>>) => $ReadOnly<TokenRow>,
+defaultToken: $ReadOnly<TokenRow>,
+plate: null | WalletChecksum,
+wallet: {|
conceptualWallet: ConceptualWallet,
conceptualWalletName: string,
|},
|};
function constructPlate(
plate: WalletChecksum,
saturationFactor: number,
divClass: string
): [string, React$Element<'div'>] {
return [
plate.TextPart,
<div className={divClass}>
<WalletAccountIcon
iconSeed={plate.ImagePart}
saturationFactor={saturationFactor}
scalePx={6}
/>
</div>,
];
}
// Topbar wallet summary widget: shows the wallet's identicon plate, its
// total balance (wallet amount plus staking rewards, when applicable) and
// an eye button that toggles hiding the amounts.
@observer
export default class NavWalletDetailsRevamp extends Component<Props> {
  static defaultProps: {|
    highlightTitle: boolean,
    infoText: void,
    showDetails: boolean,
    showEyeIcon: boolean,
  |} = {
    highlightTitle: false,
    infoText: undefined,
    showDetails: true,
    showEyeIcon: true,
  };

  static contextTypes: {| intl: $npm$ReactIntl$IntlFormat |} = {
    intl: intlShape.isRequired,
  };

  render(): Node {
    const {
      shouldHideBalance,
      onUpdateHideBalance,
      highlightTitle,
      showEyeIcon,
      plate,
    } = this.props;

    const totalAmount = this.getTotalAmount();

    const showEyeIconSafe = showEyeIcon != null && showEyeIcon;

    // Only the icon element of the plate is used here; the text part
    // returned by constructPlate is discarded.
    const [, iconComponent] = plate ? constructPlate(plate, 0, styles.icon) : [];

    return (
      <div className={styles.wrapper}>
        <div className={styles.outerWrapper}>
          <div className={classnames([styles.currency])}>{iconComponent}</div>
          <div className={styles.content}>
            <div
              className={classnames([
                styles.amount,
                highlightTitle !== null && highlightTitle === true && styles.highlightAmount,
              ])}
            >
              {this.renderAmountDisplay({
                shouldHideBalance,
                amount: totalAmount,
              })}
            </div>
            <div className={styles.fixedAmount}>
              {/* TODO: fix value to USD */}
              {this.renderAmountDisplay({
                shouldHideBalance,
                amount: totalAmount,
              })}{' '}
              USD
            </div>
          </div>
          {/* Eye toggle is hidden while the balance is still loading. */}
          {totalAmount != null && showEyeIconSafe && (
            <button type="button" className={styles.toggleButton} onClick={onUpdateHideBalance}>
              {shouldHideBalance ? <IconEyeClosed /> : <IconEyeOpen />}
            </button>
          )}
        </div>
      </div>
    );
  }

  // Total = wallet amount + rewards. Per the Props docs above:
  // rewards === undefined means "not a reward wallet" (show wallet amount
  // alone); rewards or walletAmount === null means still calculating, so
  // return null to trigger the loading state.
  getTotalAmount: void => null | MultiToken = () => {
    if (this.props.rewards === undefined) {
      return this.props.walletAmount;
    }
    if (this.props.rewards === null || this.props.walletAmount === null) {
      return null;
    }
    return this.props.rewards.joinAddCopy(this.props.walletAmount);
  };

  // Renders a single balance: a loading placeholder when the amount is not
  // yet known, a masked value when hiding is enabled, otherwise the amount
  // split into integer/decimal parts followed by the token ticker.
  renderAmountDisplay: ({|
    shouldHideBalance: boolean,
    amount: ?MultiToken,
  |}) => Node = request => {
    if (request.amount == null) {
      return <div className={styles.isLoading} />;
    }

    const defaultEntry = request.amount.getDefaultEntry();
    const tokenInfo = this.props.getTokenInfo(defaultEntry);
    // Shift from base units into the token's human-readable denomination.
    const shiftedAmount = defaultEntry.amount.shiftedBy(-tokenInfo.Metadata.numberOfDecimals);

    let balanceDisplay;
    if (request.shouldHideBalance) {
      balanceDisplay = <span>{hiddenAmount}</span>;
    } else {
      const [beforeDecimalRewards, afterDecimalRewards] = splitAmount(
        shiftedAmount,
        tokenInfo.Metadata.numberOfDecimals
      );

      balanceDisplay = (
        <>
          {beforeDecimalRewards}
          <span className={styles.afterDecimal}>{afterDecimalRewards}</span>
        </>
      );
    }

    return (
      <>
        {balanceDisplay} {truncateToken(getTokenName(tokenInfo))}
      </>
    );
  };
}
|
mhuryanov/gitlab | ee/app/helpers/ee/welcome_helper.rb | <reponame>mhuryanov/gitlab<gh_stars>0
# frozen_string_literal: true
module EE
  module WelcomeHelper
    include ::Gitlab::Utils::StrongMemoize

    # The user was redirected here from the new-subscription page.
    def in_subscription_flow?
      new_subscriptions_path == redirect_path
    end

    # The user was redirected here from the new-trial page.
    def in_trial_flow?
      new_trial_path == redirect_path
    end

    def in_trial_onboarding_flow?
      params[:trial_onboarding_flow] == 'true'
    end

    def in_trial_during_signup_flow?
      params[:trial] == 'true'
    end

    def already_showed_trial_activation?
      params[:hide_trial_activation_banner] == 'true'
    end

    # The redirect target is an OAuth authorization URL (nil-safe).
    def in_oauth_flow?
      redirect_path&.starts_with?(oauth_authorization_path)
    end

    # Label for the "who will be using this" question, phrased per flow.
    def setup_for_company_label_text
      return _('Who will be using this GitLab subscription?') if in_subscription_flow?
      return _('Who will be using this GitLab trial?') if in_trial_flow?

      _('Who will be using GitLab?')
    end

    def show_signup_flow_progress_bar?
      return true if in_subscription_flow?

      return false if user_has_memberships?
      return false if in_oauth_flow?
      return false if in_trial_flow?

      signup_onboarding_enabled?
    end

    def welcome_submit_button_text
      if in_subscription_flow? || in_trial_flow?
        _('Continue')
      elsif user_has_memberships? || in_oauth_flow?
        _('Get started!')
      elsif signup_onboarding_enabled?
        _('Continue')
      else
        _('Get started!')
      end
    end

    def data_attributes_for_progress_bar_js_component
      {
        is_in_subscription_flow: in_subscription_flow?.to_s,
        is_signup_onboarding_enabled: signup_onboarding_enabled?.to_s
      }
    end

    # Memoized: whether the current user already belongs to any membership.
    def user_has_memberships?
      strong_memoize(:user_has_memberships) do
        current_user.members.any?
      end
    end

    def signup_onboarding_enabled?
      ::Gitlab.dev_env_or_com?
    end
  end
end
|
LaudateCorpus1/oci-go-sdk | loadbalancer/source_ip_address_condition.go | <reponame>LaudateCorpus1/oci-go-sdk
// Copyright (c) 2016, 2018, 2022, Oracle and/or its affiliates. All rights reserved.
// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
// Code generated. DO NOT EDIT.
// Load Balancing API
//
// API for the Load Balancing service. Use this API to manage load balancers, backend sets, and related items. For more
// information, see Overview of Load Balancing (https://docs.cloud.oracle.com/iaas/Content/Balance/Concepts/balanceoverview.htm).
//
package loadbalancer
import (
"encoding/json"
"fmt"
"github.com/oracle/oci-go-sdk/v58/common"
"strings"
)
// SourceIpAddressCondition A rule condition that checks client source IP against specified IP address or address range.
// Note: this file is code-generated ("DO NOT EDIT" header above) — changes
// should be made in the generator, not by hand.
type SourceIpAddressCondition struct {

	// An IPv4 or IPv6 address range that the source IP address of an incoming packet must match.
	// The service accepts only classless inter-domain routing (CIDR) format (x.x.x.x/y or x:x::x/y) strings.
	// Specify 0.0.0.0/0 or ::/0 to match all incoming traffic.
	AttributeValue *string `mandatory:"true" json:"attributeValue"`
}

// String renders the struct with pointer fields dereferenced, for logging.
func (m SourceIpAddressCondition) String() string {
	return common.PointerString(m)
}
// ValidateEnumValue returns an error when providing an unsupported enum value
// This function is being called during constructing API request process
// Not recommended for calling this function directly
// Note: this type has no enum-typed fields, so errMessage always stays
// empty here and the function unconditionally returns (false, nil).
func (m SourceIpAddressCondition) ValidateEnumValue() (bool, error) {
	errMessage := []string{}

	if len(errMessage) > 0 {
		return true, fmt.Errorf(strings.Join(errMessage, "\n"))
	}
	return false, nil
}
// MarshalJSON marshals to json representation
func (m SourceIpAddressCondition) MarshalJSON() (buff []byte, e error) {
	// The alias type drops this MarshalJSON method, preventing infinite
	// recursion when the wrapper struct below is marshalled.
	type MarshalTypeSourceIpAddressCondition SourceIpAddressCondition
	s := struct {
		// Polymorphic discriminator identifying this rule-condition subtype.
		DiscriminatorParam string `json:"attributeName"`
		MarshalTypeSourceIpAddressCondition
	}{
		"SOURCE_IP_ADDRESS",
		(MarshalTypeSourceIpAddressCondition)(m),
	}

	return json.Marshal(&s)
}
|
watilde/web-platform-tests | FileAPI/FileReaderSync.worker.js | <gh_stars>100-1000
// Worker-scope web-platform-tests for the synchronous FileReaderSync API.
importScripts("/resources/testharness.js");

var blob, readerSync;

setup(function() {
  readerSync = new FileReaderSync();
  // Fixture blob whose text content is "test".
  blob = new Blob(["test"]);
});

// The constructor must produce a FileReaderSync instance.
test(function() {
  assert_true(readerSync instanceof FileReaderSync);
}, "Interface");

test(function() {
  var text = readerSync.readAsText(blob);
  assert_equals(text, "test");
}, "readAsText");

// Only checks the scheme prefix; the payload encoding is covered elsewhere.
test(function() {
  var data = readerSync.readAsDataURL(blob);
  assert_equals(data.indexOf("data:"), 0);
}, "readAsDataURL");

test(function() {
  var data = readerSync.readAsArrayBuffer(blob);
  assert_true(data instanceof ArrayBuffer);
}, "readAsArrayBuffer");

done();
|
tlgtaa/education-backend | src/products/admin/courses/actions.py | <filename>src/products/admin/courses/actions.py
from django import forms
from django.contrib.admin.helpers import ActionForm
from django.utils.translation import gettext_lazy as _
class CourseActionForm(ActionForm):
    """Admin action form adding an optional email-template-id input to the
    course admin's action bar (consumed by send_email_to_all_purchased_users).
    """
    # Optional template id; placeholder text doubles as the label.
    template_id = forms.CharField(
        required=False,
        label='',
        widget=forms.TextInput(attrs={'placeholder': _('Email template id')}),
        max_length=32,
    )

    class Media:
        # Static assets for the action form UI; jquery is listed first,
        # presumably because course_action_form.js depends on it — confirm.
        js = ('admin/js/vendor/jquery/jquery.js', 'js/admin/course_action_form.js')
        css = {
            'all': ['css/admin/course_action_form.css'],
        }
def send_email_to_all_purchased_users(modeladmin, request, queryset):
    """Admin action: email every purchaser of each selected course using the
    template id submitted through CourseActionForm, then report totals."""
    template_id = request.POST['template_id']
    course_count = 0
    purchased_users_count = 0
    for course in queryset.iterator():
        course.send_email_to_all_purchased_users(template_id=template_id)
        course_count += 1
        purchased_users_count += course.get_purchased_users().count()

    modeladmin.message_user(request, f'Sending letter to {purchased_users_count} customers of {course_count} courses')


send_email_to_all_purchased_users.short_description = _('Send email to all purchased_users')
|
dotwebstack/dotwebstack-framework | example/example-rml-proxy/src/main/java/org/dotwebstack/framework/example/rmlproxy/IndicatieFunction.java | <reponame>dotwebstack/dotwebstack-framework
package org.dotwebstack.framework.example.rmlproxy;
import com.taxonic.carml.engine.function.FnoFunction;
import com.taxonic.carml.engine.function.FnoParam;
import java.util.Objects;
/**
 * FnO function used by the RML mapping: converts a boolean string into a
 * single-letter indicator ("J"/"N" — presumably ja/nee; confirm with the
 * mapping vocabulary).
 */
public class IndicatieFunction {

    private static final String BEER_NS = "http://dotwebstack.org/id/mapping/beer#";

    @FnoFunction(BEER_NS + "booleanToIndicatie")
    public static String booleanToIndicatie(@FnoParam(BEER_NS + "valueParam") String value) {
        // "true" maps to "J"; anything else — including null — maps to "N".
        return "true".equals(value) ? "J" : "N";
    }
}
|
HexogenDev/HexogenAPI | src/main/java/net/hexogendev/hexogen/api/management/PlayerManager.java | package net.hexogendev.hexogen.api.management;
import java.util.List;
import java.util.UUID;
import net.hexogendev.hexogen.api.Server;
import net.hexogendev.hexogen.api.entity.alive.player.Player;
import net.hexogendev.hexogen.api.entity.alive.player.PlayerProfile;
import net.hexogendev.hexogen.api.world.World;
/**
 * Manages the players connected to a {@link Server}: joining and leaving,
 * lookups by name or UUID, and broadcasting chat messages.
 *
 * <p>Interface members are implicitly {@code public}, so the redundant
 * modifiers have been dropped.</p>
 */
public interface PlayerManager {

    /** Returns the server that owns this manager. */
    Server getServer();

    /** Registers the given profile as a newly joined player. */
    void joinPlayer(PlayerProfile profile);

    /** Callback invoked when the given player leaves. */
    void onLeavePlayer(Player player);

    /** Returns the currently connected players. */
    List<Player> getPlayers();

    /** Sends the message to every player. */
    void sendMessageForAll(String message);

    /** Sends the message to every player in the given world. */
    void sendMessageForAll(World world, String message);

    /** Looks up a player by name; absent-player behavior is implementation-defined. */
    Player getPlayer(String name);

    /** Looks up a player by UUID; absent-player behavior is implementation-defined. */
    Player getPlayer(UUID uuid);
}
|
tingyu91/snsjf | tools/init-core.js | import cp from 'child_process';
// Configures git hooks and npm settings for this project.
// Invoked as `node init-core.js [core]`: with "core" it points git and npm
// at the current directory; otherwise it reuses the saved core_prj_root.
// NOTE(review): the hard-coded '\\' separators make this Windows-only —
// confirm whether POSIX support is needed before changing them.
const install = () => {
  // cp.exec is asynchronous; without a callback, failures vanish silently.
  // Surface them on the console instead.
  const run = (cmd) => cp.exec(cmd, (err) => {
    if (err) {
      console.error('Command failed: ' + cmd, err);
    }
  });

  if (process.argv[2] === 'core') {
    console.log('Setting new git hooksPath : ', process.cwd() + '\\.githooks');
    run('git config core.hooksPath ' + process.cwd() + '\\.githooks');
    console.log('Setting npm core_prj_root :', process.cwd());
    run('npm config set core_prj_root ' + process.cwd());
    console.log('Need to install dotenv...for now');
    run('npm install dotenv');
  } else {
    console.log('Setting new git hooksPath : %npm_config_core_prj_root%\\.githooks');
    run('git config core.hooksPath %npm_config_core_prj_root%\\.githooks');
  }
  return 1;
}

install();
|
id774/fastladder | spec/lib/fastladder_spec.rb | <reponame>id774/fastladder
require 'spec_helper'
describe Fastladder do
  # Exercises the module-level configuration writers and their paired readers.
  let(:fastladder) { described_class }

  it 'changes http_proxy_except_hosts' do
    # Per the expectation below, only Regexp entries survive the writer.
    fastladder.proxy_except_hosts = [/foo/, :bar, "buz"]
    expect(Fastladder.http_proxy_except_hosts).to eq([/foo/])
  end

  it 'changes http_open_timeout' do
    fastladder.open_timeout = 100
    expect(Fastladder.http_open_timeout).to eq(100)
  end

  it 'changes http_read_timeout' do
    fastladder.read_timeout = 200
    expect(Fastladder.http_read_timeout).to eq(200)
  end

  it 'changes crawler_user_agent' do
    fastladder.crawler_user_agent = "YetAnother FeedFetcher/0.0.3 (http://example.com/)"
    expect(Fastladder.crawler_user_agent).to eq("YetAnother FeedFetcher/0.0.3 (http://example.com/)")
  end
end
|
ameily/cincoconfig | cincoconfig/__init__.py | <reponame>ameily/cincoconfig<gh_stars>1-10
#
# Copyright (C) 2021 <NAME>
#
# This file is subject to the terms and conditions defined in the file 'LICENSE', which is part of
# this source code package.
#
'''
Cincoconfig Public API
'''
from .core import Config, Field, Schema, ValidationError, AnyField, ConfigFormat, ConfigType
from .support import (make_type, validator, get_all_fields, generate_argparse_parser,
item_ref_path, cmdline_args_override, asdict, get_fields, reset_value,
is_value_defined)
from .fields import *
from .encryption import KeyFile
from .stubs import generate_stub
from .version import __version__
# DEPRECATED TYPE ALIASES
# Kept so existing `from cincoconfig import BaseConfig, BaseSchema` imports
# keep working; new code should use Config and Schema directly.
BaseConfig = Config
BaseSchema = Schema
|
ViolaBuddy/EscapeFromPlegia | app/utilities/data.py | from __future__ import annotations
import logging
from ctypes import Union
from typing import Dict, Generic, List, Tuple, TypeVar
from app.utilities.typing import NID
T = TypeVar('T')
class Data(Generic[T]):
    """
    Only accepts data points that have nid attribute
    Generally behaves as a list first and a dictionary second
    """
    # Concrete subclasses override this with the element class so that
    # save()/restore() know whether elements are Prefab-serializable.
    # On this generic base class it is still the bare TypeVar.
    datatype = T

    def __init__(self, vals: List[T] = None):
        if vals:
            self._list: List[T] = vals
            self._dict: Dict[NID, T] = {val.nid: val for val in vals}
        else:
            self._list = []
            self._dict = {}

    def values(self) -> List[T]:
        return self._list

    def keys(self) -> List[NID]:
        return [val.nid for val in self._list]

    def items(self) -> List[Tuple[NID, T]]:
        return [(val.nid, val) for val in self._list]

    def get(self, key: NID, fallback: T = None) -> T:
        return self._dict.get(key, fallback)

    def update_nid(self, val: T, nid: NID, set_nid=True):
        """Re-key `val` under `nid`; when set_nid, also rewrite val.nid."""
        for k, v in self._dict.items():
            if v == val:
                del self._dict[k]
                if set_nid:
                    val.nid = nid
                self._dict[nid] = val
                break

    def find_key(self, val: T) -> NID:
        # Reverse lookup: the key mapped to `val`, or None if absent.
        for k, v in self._dict.items():
            if v == val:
                return k

    def change_key(self, old_key: NID, new_key: NID):
        if old_key in self._dict:
            old_value = self._dict[old_key]
            del self._dict[old_key]
            old_value.nid = new_key
            self._dict[new_key] = old_value
        else:
            logging.error('%s not found in self._dict' % old_key)

    def append(self, val: T):
        if val.nid not in self._dict:
            self._list.append(val)
            self._dict[val.nid] = val
        else:
            logging.warning("%s already present in data" % val.nid)

    def delete(self, val: T):
        # Fails silently
        if val.nid in self._dict:
            self._list.remove(val)
            del self._dict[val.nid]

    def remove_key(self, key: NID):
        val = self._dict[key]
        self._list.remove(val)
        del self._dict[key]

    def pop(self, idx: int = None):
        """Remove and return the element at `idx` (the last element when
        idx is None).

        Returns None (after logging an error) if the list and the nid
        lookup have gotten out of sync; previously the popped element was
        discarded, so returning it is a backward-compatible improvement.
        """
        if idx is None:
            idx = len(self._list) - 1
        r = self._list[idx]
        if r.nid in self._dict:
            r = self._list.pop(idx)
            del self._dict[r.nid]
            return r
        logging.error("Tried to delete %s which wasn't present in data" % r.nid)
        return None

    def insert(self, idx: int, val: T):
        self._list.insert(idx, val)
        self._dict[val.nid] = val

    def clear(self):
        self._list = []
        self._dict = {}

    def index(self, nid: NID) -> int:
        for idx, val in enumerate(self._list):
            if val.nid == nid:
                return idx
        raise ValueError

    def move_index(self, old_index: int, new_index: int):
        if old_index == new_index:
            return
        obj = self._list.pop(old_index)
        self._list.insert(new_index, obj)

    # def begin_insert_row(self, index):
    #     self.drop_to = index

    # Saving functions
    def save(self):
        # On the generic base class `datatype` is a TypeVar, and
        # issubclass() raises TypeError for non-classes — guard with
        # isinstance(..., type) so base-class instances save as plain lists.
        if isinstance(self.datatype, type) and issubclass(self.datatype, Prefab):
            return [elem.save() for elem in self._list]
        else:
            return self._list[:]

    def restore(self, vals: T):
        self.clear()
        # Same TypeVar guard as in save().
        if isinstance(self.datatype, type) and issubclass(self.datatype, Prefab):
            for s_dict in vals:
                new_val = self.datatype.restore(s_dict)
                self.append(new_val)
        else:
            for val in vals:
                self.append(val)
        return self

    # Magic Methods
    def __repr__(self):
        return repr(self._list)

    def __len__(self):
        return len(self._list)

    def __getitem__(self, idx):
        return self._list[idx]

    def __iter__(self):
        return iter(self._list)
class Prefab():
    """Base class providing dict-based save/restore serialization.

    save() walks the instance __dict__; restore() rebuilds an instance via
    default() and writes each saved attribute back through restore_attr.
    """

    def save(self):
        # Serialize every instance attribute through save_attr.
        return {name: self.save_attr(name, value)
                for name, value in self.__dict__.items()}

    def save_attr(self, name, value):
        # Data containers serialize themselves; plain values
        # (int, str, float, list, dict) pass through untouched.
        return value.save() if isinstance(value, Data) else value

    @classmethod
    def restore(cls, s_dict):
        self = cls.default()
        # Only attributes the default instance already has are restored;
        # keys missing from s_dict (value None) are left at their defaults.
        for attr_name in list(self.__dict__.keys()):
            value = self.restore_attr(attr_name, s_dict.get(attr_name))
            if value is not None:
                setattr(self, attr_name, value)
        return self

    def restore_attr(self, name, value):
        # Mirrors save_attr; non-Data values are used as-is.
        return value.restore() if isinstance(value, Data) else value

    @classmethod
    def default(cls):
        return cls()
|
QuocAnh90/Uintah_Aalto | CCA/Components/MPM/Crack/CrackPropagation.cc | <reponame>QuocAnh90/Uintah_Aalto<filename>CCA/Components/MPM/Crack/CrackPropagation.cc
/*
* The MIT License
*
* Copyright (c) 1997-2019 The University of Utah
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to
* deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
/********************************************************************************
Crack.cc
PART FIVE: CRACK PROPAGATION SIMULATION
Created by <NAME> in 2002-2005.
********************************************************************************/
#include "Crack.h"
#include <CCA/Components/MPM/Core/MPMLabel.h>
#include <Core/Math/Matrix3.h>
#include <Core/Math/Short27.h>
#include <Core/Geometry/Vector.h>
#include <Core/Geometry/IntVector.h>
#include <Core/Grid/Grid.h>
#include <Core/Grid/Level.h>
#include <Core/Grid/Variables/NCVariable.h>
#include <Core/Grid/Patch.h>
#include <Core/Grid/Variables/NodeIterator.h>
#include <Core/Grid/MaterialManager.h>
#include <Core/Grid/MaterialManagerP.h>
#include <CCA/Ports/DataWarehouse.h>
#include <Core/Grid/Task.h>
#include <CCA/Components/MPM/Materials/MPMMaterial.h>
#include <CCA/Components/MPM/Materials/ConstitutiveModel/ConstitutiveModel.h>
#include <Core/Grid/Variables/VarTypes.h>
#include <vector>
#include <iostream>
#include <fstream>
using namespace Uintah;
using namespace std;
using std::vector;
using std::string;
#define MAX_BASIS 27
// Declares the task-graph data dependencies for crack-front propagation:
// nodal masses of both velocity fields (gMass/GMass) with two layers of
// ghost cells, plus particle sizes when 27-node interpolation is active.
void Crack::addComputesAndRequiresPropagateCrackFrontPoints(Task* t,
                                const PatchSet* /*patches*/,
                                const MaterialSet* /*matls*/) const
{
  Ghost::GhostType gac = Ghost::AroundCells;
  // Two layers of ghost cells per interpolation node count.
  int NGC=2*NGN;
  t->requires(Task::NewDW, lb->gMassLabel, gac, NGC);
  t->requires(Task::NewDW, lb->GMassLabel, gac, NGC);
  if(n8or27==27)
    t->requires(Task::OldDW,lb->pSizeLabel, Ghost::None);
}
// Propagate the crack-front points for every MPM material on the local
// patches.  For each crack-front node, the fracture parameters (KI, KII,
// crack velocity) stored in cfSegK/cfSegVel decide whether the node
// advances; candidate new positions are smoothed over neighboring
// segments, edge nodes are extended out to the material boundary, sharp
// kinks are pruned, and symmetric BCs are applied.  All ranks execute the
// MPI broadcasts below, so every rank ends with identical cfSegPtsT.
//
// Inputs:  old_dw - particle subsets and (for n8or27==27) particle sizes
//          new_dw - nodal masses gMass/GMass (with 2*NGN ghost cells)
// Output:  cfSegPtsT[m] - tentative post-propagation crack-front points.
void Crack::PropagateCrackFrontPoints(const ProcessorGroup*,
                                      const PatchSubset* patches,
                                      const MaterialSubset* /*matls*/,
                                      DataWarehouse* old_dw,
                                      DataWarehouse* new_dw)
{
  for(int p=0; p<patches->size(); p++){
    const Patch* patch = patches->get(p);
    Vector dx = patch->dCell();
    // Mean cell edge length; the characteristic length for all the
    // distance tolerances used below
    double dx_bar=(dx.x()+dx.y()+dx.z())/3.;
    int pid,patch_size;
    Uintah::MPI::Comm_rank(mpi_crack_comm, &pid);
    Uintah::MPI::Comm_size(mpi_crack_comm, &patch_size);
    // MPI datatype used to broadcast Point values between ranks
    MPI_Datatype MPI_POINT=fun_getTypeDescription((Point*)0)->getMPIType();
    int numMPMMatls=d_materialManager->getNumMatls( "MPM" );
    for(int m=0; m<numMPMMatls; m++) {
      MPMMaterial* mpm_matl = d_materialManager->getMaterial( "MPM", m);
      ConstitutiveModel* cm = mpm_matl->getConstitutiveModel();
      // Cell mass of the material
      double d_cell_mass=mpm_matl->getInitialDensity()*dx.x()*dx.y()*dx.z();
      // Get nodal mass information
      int dwi = mpm_matl->getDWIndex();
      ParticleSubset* pset = old_dw->getParticleSubset(dwi, patch);
      Ghost::GhostType gac = Ghost::AroundCells;
      constNCVariable<double> gmass,Gmass;
      // NGN is presumably the number of ghost nodes used elsewhere in this
      // class -- TODO confirm; requires must match the task declaration
      int NGC=2*NGN;
      new_dw->get(gmass, lb->gMassLabel, dwi, patch, gac, NGC);
      new_dw->get(Gmass, lb->GMassLabel, dwi, patch, gac, NGC);
      constParticleVariable<Matrix3> psize;
      if(n8or27==27) old_dw->get(psize, lb->pSizeLabel, pset);
      if(doCrackPropagation) {
        // Step 1: Detect if crack front nodes propagate (cp)
        //         and propagate them virtually (da) for the active nodes
        // Clear up cfSegPtsT, which stores temporarily the coordinates
        // of crack-front nodes after propagation
        cfSegPtsT[m].clear();
        int cfNodeSize= (int) cfSegNodes[m].size();
        // cp[i]: YES/NO flag, does node i propagate?
        // da[i]: its virtual propagation increment in global coordinates
        short* cp=new short[cfNodeSize];
        Vector* da=new Vector[cfNodeSize];
        for(int i=0; i<cfNodeSize; i++) {
          // preIdx<0 marks the first occurrence of a physical node;
          // duplicates (shared segment endpoints) copy its result below
          int preIdx=cfSegPreIdx[m][i];
          if(preIdx<0) { // a duplicate node, not operated
            // Direction cosines at the node
            Vector v1=cfSegV1[m][i];
            Vector v2=cfSegV2[m][i];
            Vector v3=cfSegV3[m][i];
            // Coordinate transformation matrix from local to global coordinates
            Matrix3 T=Matrix3(v1.x(), v2.x(), v3.x(),
                              v1.y(), v2.y(), v3.y(),
                              v1.z(), v2.z(), v3.z());
            // Get fracture parameters at crack-front nodes
            double KI  = cfSegK[m][i].x();
            double KII = cfSegK[m][i].y();
            double Vc  = cfSegVel[m][i];
            // If KI less than zero, set it to zero.
            // if(KI<0.) KI=0.;
            // Determine if the node propagates and the propagation direction
            cp[i]=NO;
            double theta;
            if(cm->CrackPropagates(Vc,KI,KII,theta)) cp[i]=YES;
            // Propagate the node virtually: increment of length rdadx*dx_bar
            // at angle theta in the local crack plane, then rotate to global
            double dl=rdadx*dx_bar;
            Vector da_local=Vector(dl*cos(theta),dl*sin(theta),0.);
            da[i]=T*da_local;
          } // End of if(!operated)
          else { // if(operated)
            cp[i]=cp[preIdx];
            da[i]=da[preIdx];
          }
        } // End of loop over cfNodeSize
        // Step 2: Propagate crack-front nodes
        for(int i=0; i<cfNodeSize; i++) {
          int node=cfSegNodes[m][i];
          Point pt=cx[m][node];
          // Maximum and minimum indexes of the sub-crack which the node resides
          int maxIdx=cfSegMaxIdx[m][i];
          int minIdx=cfSegMinIdx[m][i];
          int preIdx=cfSegPreIdx[m][i];
          if(preIdx<0) { // a duplicate node, not operated
            // Count the nodes which propagate among (2ns+1) nodes around pt
            int nsegs=(maxIdx-minIdx+1)/2;
            int ns=(nsegs+8)/8;
            // ns=1 for 1-7 segs; ns=2 for 8-15 segs; ...
            int np=0;
            for(int j=-ns; j<=ns; j++) {
              int cIdx=i+2*j;
              // Indices outside the sub-crack count the nearest end node
              if(cIdx<minIdx  && cp[minIdx]) np++;
              if(cIdx>maxIdx  && cp[maxIdx]) np++;
              if(cIdx>=minIdx && cIdx<=maxIdx && cp[cIdx]) np++;
            }
            // New position of the node after virtual propagation:
            // the increment is scaled by the fraction of propagating
            // neighbors, smoothing the advancing front
            double fraction=(double)np/(2*ns+1);
            Point new_pt=pt+fraction*da[i];
            cfSegPtsT[m].push_back(new_pt);
          } // End if(!operated)
          else {
            Point prePt=cfSegPtsT[m][preIdx];
            cfSegPtsT[m].push_back(prePt);
          }
        } // End of loop cfSegNodes
        // Release dynamic arraies
        delete [] cp;
        delete [] da;
        // Step 3: Deal with the propagating edge nodes,
        //         extending new_pt out to material boundary
        for(int i=0; i<cfNodeSize; i++) {
          int node=cfSegNodes[m][i];
          Point pt=cx[m][node];
          Point new_pt=cfSegPtsT[m][i];
          // segments connected by the node
          int segs[2];
          FindSegsFromNode(m,node,segs);
          if((segs[R]<0||segs[L]<0) &&             // Edge nodes
             (new_pt-pt).length()/dx_bar>0.01) {   // It propagates
            // Find the direction of the edge crack-front segment
            Point ptp=cfSegPtsT[m][i];
            Point pt2p;
            if(segs[R]<0) { // right edge nodes
              pt2p=cfSegPtsT[m][i+1];
            }
            else if(segs[L]<0) { // left edge nodes
              pt2p=cfSegPtsT[m][i-1];
            }
            Vector v=TwoPtsDirCos(pt2p,ptp);
            IntVector ni[MAX_BASIS];
            // Task 3a: Extend new_pt to the outside of the material
            short newPtInMat=YES;
            while(newPtInMat) {
              // Detect which patch new_pt resides in.  Each rank marks only
              // its own entry; ranks then take turns broadcasting, so once
              // the owning rank sets newPtInMat=NO the later broadcasts
              // relay that value and all ranks agree at loop exit.
              short* newPtInPatch=new short[patch_size];
              for(int k=0; k<patch_size; k++) newPtInPatch[k]=NO;
              if(patch->containsPointInExtraCells(new_pt)) newPtInPatch[pid]=YES;
              Uintah::MPI::Barrier(mpi_crack_comm);
              // Detect if new_pt is inside material
              for(int k=0; k<patch_size; k++) {
                if(newPtInPatch[k]) {
                  if(n8or27==8)
                    patch->findCellNodes(new_pt, ni);
                  else if(n8or27==27)
                    patch->findCellNodes27(new_pt, ni);
                  for(int j=0; j<n8or27; j++) {
                    double totalMass=gmass[ni[j]]+Gmass[ni[j]];
                    // A cell node carrying less than 1/64 of a cell mass is
                    // treated as outside the material
                    if(totalMass<d_cell_mass/64.) {
                      newPtInMat=NO;
                      break;
                    }
                  } // End of loop over j
                } // End if(newPtInPatch[k])
                Uintah::MPI::Bcast(&newPtInMat,1,MPI_SHORT,k,mpi_crack_comm);
              } // End of loop over k
              delete [] newPtInPatch;
              // If new_pt is inside, extend it out by dx_bar/3 each time
              if(newPtInMat) new_pt+=v*(dx_bar/3.);
            } // End of while(newPtInMat)
            // If new_pt is outside the global grid, trim it
            TrimLineSegmentWithBox(pt2p,new_pt,GLP,GHP);
            // Task 3b: If new_pt is outside, trim it back to MPM material boundary
            if(!newPtInMat) {
              short* newPtInPatch=new short[patch_size];
              for(int k=0; k<patch_size; k++) newPtInPatch[k]=NO;
              if(patch->containsPointInExtraCells(new_pt)) newPtInPatch[pid]=YES;
              Uintah::MPI::Barrier(mpi_crack_comm);
              for(int k=0; k<patch_size; k++) {
                if(newPtInPatch[k]) {
                  // Get cell nodes where new_pt resides
                  if(n8or27==8)
                    patch->findCellNodes(new_pt, ni);
                  else if(n8or27==27)
                    patch->findCellNodes27(new_pt, ni);
                  // Get the lowest and highest points of the cell
                  Point LLP=Point( 9e99, 9e99, 9e99);
                  Point LHP=Point(-9e99,-9e99,-9e99);
                  for(int j=0; j<n8or27; j++) {
                    Point pj=patch->nodePosition(ni[j]);
                    LLP=Min(LLP,pj);
                    LHP=Max(LHP,pj);
                  }
                  // Trim ptp(or pt2p)->new_pt by the cell
                  Point cross_pt=pt2p;
                  TrimLineSegmentWithBox(new_pt,cross_pt,LLP,LHP);
                  // Extend cross_pt a little bit (dx_bar*10%) outside,
                  // which is the need of crack MPM calculation
                  new_pt=cross_pt+v*(dx_bar*0.1);
                } // End of if(newPtInPatch[k])
                // Relay the trimmed point to all ranks (same relay pattern
                // as the newPtInMat broadcast above)
                Uintah::MPI::Bcast(&new_pt,1,MPI_POINT,k,mpi_crack_comm);
              } // End of loop over k
              delete [] newPtInPatch;
            } // End of if(!newPtInMat)
            // Save the eventual position of the edge node after propagation
            cfSegPtsT[m][i]=new_pt;
          }
        } // End of loop cfSegNodes
        // Step 4: Prune crack-front points if the angle of any two adjacent
        //         segments is larger than a critical angle (ca),
        //         moving it to the mass-center of the three points
        double ca=2*csa[m]+15.;
        PruneCrackFrontAfterPropagation(m,ca);
        // Step 5: Apply symmetric BCs to new crack-front points
        for(int i=0; i<(int)cfSegNodes[m].size();i++) {
          Point pt=cx[m][cfSegNodes[m][i]];
          ApplySymmetricBCsToCrackPoints(dx,pt,cfSegPtsT[m][i]);
        }
      } // End of if(doCrackPropagation)
    } // End of loop over matls
  }
}
// Declare the data-warehouse dependencies of the
// ConstructNewCrackFrontElems task.  The task only reads the previous
// time step size and simulation time (used to compute the crack
// propagation velocity); it computes no grid or particle variables here.
void Crack::addComputesAndRequiresConstructNewCrackFrontElems(Task* t,
                                const PatchSet* /*patches*/,
                                const MaterialSet* /*matls*/) const
{
  t->requires(Task::OldDW, lb->delTLabel);
  t->requires(Task::OldDW, lb->simulationTimeLabel);
}
// Rebuild the crack mesh after propagation.  For each old crack-front
// segment (node pair in cfSegNodes) the tentative propagated endpoints in
// cfSegPtsT are classified into one of 8 cases (see the CASE table below),
// new crack points are appended to cx[m], new triangular crack elements
// are appended to ce[m], and a fresh crack-front node list is assembled
// into cfSegNodes[m].
//
// CASE table (l12 = post-propagation segment length, css = target size):
//   0 - neither end moved               : keep segment, no new geometry
//   1 - segment shrank below 0.25*css   : collapse to its midpoint
//   2/3/4 - normal length, first/second/both end(s) moved
//   5/6/7 - grew beyond 2*css, first/second/both end(s) moved: split in two
void Crack::ConstructNewCrackFrontElems(const ProcessorGroup*,
                                        const PatchSubset* patches,
                                        const MaterialSubset* /*matls*/,
                                        DataWarehouse* old_dw,
                                        DataWarehouse* /*new_dw*/)
{
  // double time=d_materialManager->getElapsedSimTime();
  // simTime_vartype simTime;
  // old_dw->get(simTime, lb->simulationTimeLabel);
  // delt_vartype delT;
  // old_dw->get(delT, lb->delTLabel, getLevel(patches) );
  for(int p=0; p<patches->size(); p++) {
    const Patch* patch = patches->get(p);
    Vector dx = patch->dCell();
    // Mean cell edge length (characteristic length for the tolerances below)
    double dx_bar=(dx.x()+dx.y()+dx.z())/3.;
    int numMPMMatls=d_materialManager->getNumMatls( "MPM" );
    for(int m=0; m<numMPMMatls; m++) {
      if(doCrackPropagation) {
        /*
        // Step 1: Combine crack front nodes if they propagates
        //         a little (<10%) or in self-similar way (angle<10 degree)
        for(int i=0; i<(int)cfSegNodes[m].size(); i++) {
          // Crack-front node and normal
          int node=cfSegNodes[m][i];
          Vector v2=cfSegV2[m][i];
          // Crack propagation increment(dis) and direction(vp)
          double dis=(cfSegPtsT[m][i]-cx[m][node]).length();
          Vector vp=TwoPtsDirCos(cx[m][node],cfSegPtsT[m][i]);
          // Crack propa angle(in degree) measured from crack plane
          double angle=90-acos(Dot(vp,v2))*180/3.141592654;
          if(dis<0.1*(rdadx*dx_bar) || fabs(angle)<5)
            cx[m][node]=cfSegPtsT[m][i];
        }
        */
        // Temporary crack-front segment nodes and velocity
        vector<int> cfSegNodesT;
        cfSegNodesT.clear();
        cfSegVel[m].clear();
        // cfSegNodes stores node pairs, so segment count is half its size
        int ncfSegs= (int) cfSegNodes[m].size()/2;
        // Index (into cfSegNodesT) of the first node of an enclosed crack,
        // so the closing segment can reuse it instead of duplicating it
        int preIdxAtMin=-1;
        for(int i=0; i<ncfSegs; i++) {
          // Relations of this seg with the left and right segs
          int preIdx1=cfSegPreIdx[m][2*i];
          int preIdx2=cfSegPreIdx[m][2*i+1];
          // crack-front nodes and coordinates before propagation
          int n1,n2,n1p,n2p,nc,nmc;
          Point p1,p2,p1p,p2p,pc,pmc;
          n1=cfSegNodes[m][2*i];
          n2=cfSegNodes[m][2*i+1];
          p1=cx[m][n1];
          p2=cx[m][n2];
          // crack-front node coordinates after propagaion
          p1p=cfSegPtsT[m][2*i];
          p2p=cfSegPtsT[m][2*i+1];
          pc =p1p+(p2p-p1p)/2.;
          // Detect if it is the first segment of an enclosed crack-front
          short firstSegOfEnclosedCrack=NO;
          int minIdx=cfSegMinIdx[m][2*i];
          int maxIdx=cfSegMaxIdx[m][2*i+1];
          if(cfSegNodes[m][minIdx]==cfSegNodes[m][maxIdx] &&
             minIdx==(2*i)) firstSegOfEnclosedCrack=YES;
          // Detect if this is the last segment of the crack
          short lastSegment=NO;
          if(maxIdx==(2*i+1)) lastSegment=YES;
          // length of crack front segment after propagation
          double l12=(p2p-p1p).length();
          // Step 2: Determine ten cases of propagation of a segment
          // sp/ep: did the start/end point move more than 1% of a cell?
          short sp=YES, ep=YES;
          if((p1p-p1).length()/dx_bar<0.01) sp=NO; // p1 no propagating
          if((p2p-p2).length()/dx_bar<0.01) ep=NO; // p2 no propagating
          // Calculate crack propagation velocity
          // (velocity tracking currently disabled; see commented block)
          double vc1=0.,vc2=0.,vcc=0.;
          /*
          if(sp) { // Record crack incremental and time instant
            cfSegDis[m][2*i]=(p1p-p1).length();
            cfSegTime[m][2*i] =simTime-delT;
          }
          if(ep) { // Record crack incremental and time instant
            cfSegDis[m][2*i+1]=(p2p-p2).length();
            cfSegTime[m][2*i+1]=simTime-delT;
          }
          if(simTime>0.) {
            vc1=cfSegDis[m][2*i]/(simTime-cfSegTime[m][2*i]);
            vc2=cfSegDis[m][2*i+1]/(simTime-cfSegTime[m][2*i+1]);
            vcc=(vc1+vc2)/2.;
          }
          */
          short CASE=0; // No propagation
          if(l12/css[m]<0.25) {
            CASE=1; // Too short segment, drop it
          }
          else if(l12/css[m]>2.) { // Too long segment, break it into two
            if( sp && !ep) CASE=5; // p1 propagates, p2 doesn't
            if(!sp &&  ep) CASE=6; // p2 propagates, p1 doesn't
            if( sp &&  ep) CASE=7; // Both p1 and p2 propagate
          }
          else { // Normal propagation
            if( sp && !ep) CASE=2; // p1 propagates, p2 doesn't
            if(!sp &&  ep) CASE=3; // p2 propagates, p1 doesn't
            if( sp &&  ep) CASE=4; // Both p1 and p2 propagate
          }
          // Step 3: Construct new crack elems and crack-front segments
          // Detect if the segment is the first segment of a crack
          switch(CASE) {
            case 0: // Both ends of the segment do not propagate
              if(firstSegOfEnclosedCrack) preIdxAtMin= (int) cfSegNodesT.size();
              cfSegNodesT.push_back(n1);
              cfSegNodesT.push_back(n2);
              // Velocity of crack-front nodes
              cfSegVel[m].push_back(vc1);
              cfSegVel[m].push_back(vc2);
              break;
            case 1: // The segment becomes too short (<25%) after propagation
              // Set the new position of both ends after propagation to pc
              if(preIdx1<0) { // Not generated
                n1p=(int)cx[m].size();
                cx[m].push_back(pc);
              }
              else { // Change p1p to pc
                n1p=(int)cx[m].size()-1;
                cx[m][n1p]=pc;
              }
              // Accordingly, chnage p1p of the next seg to pc if it is not
              // the last segment of a crack
              if(!lastSegment) cfSegPtsT[m][2*(i+1)]=pc;
              // A new crack elem generated, but no new crack-front segment
              ce[m].push_back(IntVector(n1,n1p,n2));
              break;
            case 2: // The first end propagates, but the second does not
              // The first end after propagation
              if(preIdx1<0) { // Not generated
                n1p=(int)cx[m].size();
                cx[m].push_back(p1p);
              }
              else { // Just generated
                n1p=(int)cx[m].size()-1;
              }
              // The new crack element
              ce[m].push_back(IntVector(n1,n1p,n2));
              // The new crack-front segment
              if(firstSegOfEnclosedCrack) preIdxAtMin= (int) cfSegNodesT.size();
              cfSegNodesT.push_back(n1p);
              cfSegNodesT.push_back(n2);
              // Velocity of crack-front nodes
              cfSegVel[m].push_back(vc1);
              cfSegVel[m].push_back(vc2);
              break;
            case 3: // The second end propagates, but the first does not
              // The second end after propagation
              if(preIdx2<0) { // Not generated
                n2p=(int)cx[m].size();
                cx[m].push_back(p2p);
              }
              else { // The last segment of an enclosed crack, p2p has been generated
                n2p=cfSegNodesT[preIdxAtMin];
              }
              // The new crack element
              ce[m].push_back(IntVector(n1,n2p,n2));
              // The new crack-front segment
              if(firstSegOfEnclosedCrack) preIdxAtMin= (int) cfSegNodesT.size();
              cfSegNodesT.push_back(n1);
              cfSegNodesT.push_back(n2p);
              // Velocity of crack-front nodes
              cfSegVel[m].push_back(vc1);
              cfSegVel[m].push_back(vc2);
              break;
            case 4: // Both ends of the segment propagate
              // Three new crack points
              // 1. The first end of the segment
              if(preIdx1<0) { // Not generated
                n1p=(int)cx[m].size();
                cx[m].push_back(p1p);
              }
              else { // Just generated
                n1p=(int)cx[m].size()-1;
              }
              // 2. The mass center of the quad
              nmc=n1p+1;
              pmc=p1+(p1p-p1)/4.+(p2-p1)/4.+(p2p-p1)/4.;
              cx[m].push_back(pmc);
              // 3. The second end of the segment
              if(preIdx2<0) { // Not generated
                n2p=n1p+2;
                cx[m].push_back(p2p);
              }
              else { // The last segment of an enclosed crack, p2p has been generated
                n2p=cfSegNodesT[preIdxAtMin];
              }
              // Four new crack elements (fan around the quad's mass center)
              ce[m].push_back(IntVector(nmc,n2,n1));
              ce[m].push_back(IntVector(nmc,n2p,n2));
              ce[m].push_back(IntVector(nmc,n1,n1p));
              ce[m].push_back(IntVector(nmc,n1p,n2p));
              // The new crack-front segment
              if(firstSegOfEnclosedCrack) preIdxAtMin= (int) cfSegNodesT.size();
              cfSegNodesT.push_back(n1p);
              cfSegNodesT.push_back(n2p);
              // Velocity of crack-front nodes
              cfSegVel[m].push_back(vc1);
              cfSegVel[m].push_back(vc2);
              break;
            case 5: // Too long segment with only the first end propagating
              // New crack points
              // 1. The first end after propagation
              if(preIdx1<0) { // Not generated
                n1p=(int)cx[m].size();
                cx[m].push_back(p1p);
              }
              else { // Just generated
                n1p=(int)cx[m].size()-1;
              }
              // 2. The center of the segment after propagation
              nc=n1p+1;
              cx[m].push_back(pc);
              // Two new crack elements
              ce[m].push_back(IntVector(n1,n1p,nc));
              ce[m].push_back(IntVector(n1,nc,n2));
              // Two new crack-front segment
              if(firstSegOfEnclosedCrack) preIdxAtMin= (int) cfSegNodesT.size();
              cfSegNodesT.push_back(n1p);
              cfSegNodesT.push_back(nc);
              cfSegNodesT.push_back(nc);
              cfSegNodesT.push_back(n2);
              // Velocity of crack-front nodes
              cfSegVel[m].push_back(vc1);
              cfSegVel[m].push_back(vcc);
              cfSegVel[m].push_back(vcc);
              cfSegVel[m].push_back(vc2);
              break;
            case 6: // Too long segment with only the second end propagating
              // Two new crack points
              // 1. The center of the sgement after propagation
              nc=(int)cx[m].size();
              cx[m].push_back(pc);
              // 2. The second end after propagation
              if(preIdx2<0) { // Not generated
                n2p=nc+1;
                cx[m].push_back(p2p);
              }
              else { // The last seg of an enclosed crack, p2p has been generated
                n2p=cfSegNodesT[preIdxAtMin];
              }
              // Two new crack elements
              ce[m].push_back(IntVector(n1,nc,n2));
              ce[m].push_back(IntVector(n2,nc,n2p));
              // Two new crack-front segments
              if(firstSegOfEnclosedCrack) preIdxAtMin= (int) cfSegNodesT.size();
              cfSegNodesT.push_back(n1);
              cfSegNodesT.push_back(nc);
              cfSegNodesT.push_back(nc);
              cfSegNodesT.push_back(n2p);
              // Velocity of crack-front nodes
              cfSegVel[m].push_back(vc1);
              cfSegVel[m].push_back(vcc);
              cfSegVel[m].push_back(vcc);
              cfSegVel[m].push_back(vc2);
              break;
            case 7: // Too long segment with both ends propagating
              // Four new crack points
              // 1. The first end of the segment after propagation
              if(preIdx1<0) { // Not generated
                n1p=(int)cx[m].size();
                cx[m].push_back(p1p);
              }
              else { // Just generated
                n1p=(int)cx[m].size()-1;
              }
              // 2. The center of segment after propagation
              nc=n1p+1;
              cx[m].push_back(pc);
              // 3. The mass center of the quad
              nmc=n1p+2;
              pmc=p1+(p1p-p1)/4.+(p2-p1)/4.+(p2p-p1)/4.;
              cx[m].push_back(pmc);
              // 4. The second end of the segment after propagation
              if(preIdx2<0) { // Not generated
                n2p=n1p+3;
                cx[m].push_back(p2p);
              }
              else { // The last segment of an enclosed crack, p2p has been generated
                n2p=cfSegNodesT[preIdxAtMin];
              }
              // Five new crack elements
              ce[m].push_back(IntVector(nmc,n2,n1));
              ce[m].push_back(IntVector(nmc,n2p,n2));
              ce[m].push_back(IntVector(nmc,n1,n1p));
              ce[m].push_back(IntVector(nmc,nc,n2p));
              ce[m].push_back(IntVector(nmc,n1p,nc));
              // Two new crack-front segments
              if(firstSegOfEnclosedCrack) preIdxAtMin= (int) cfSegNodesT.size();
              cfSegNodesT.push_back(n1p);
              cfSegNodesT.push_back(nc);
              cfSegNodesT.push_back(nc);
              cfSegNodesT.push_back(n2p);
              // Velocity of crack-front nodes
              cfSegVel[m].push_back(vc1);
              cfSegVel[m].push_back(vcc);
              cfSegVel[m].push_back(vcc);
              cfSegVel[m].push_back(vc2);
              break;
          }
        } // End of loop over crack-front segs
        Uintah::MPI::Barrier(mpi_crack_comm);
        // Reset crack-front segment nodes after crack propagation
        cfSegNodes[m].clear();
        for(int i=0; i<(int)cfSegNodesT.size(); i++) {
          cfSegNodes[m].push_back(cfSegNodesT[i]);
        }
        cfSegNodesT.clear();
      } // End of if(doCrackPropagation)
    } // End of loop over matls
  } // End of loop over patches
}
// Find the intersection between a line-segment (p1->p2) and a box
void Crack::TrimLineSegmentWithBox(const Point& p1, Point& p2,
const Point& lp, const Point& hp)
{
// For a box with the lowest and highest points (lp & hp) and
// a line-seement (p1->p2), p1 is inside the box. If p2 is outside,
// find the intersection between the line-segment (p1->p2) and the box,
// and store the intersection in p2.
Vector v;
double l,m,n;
// Make sure p1!=p2
if(p1==p2) {
cout << "Error: p1=p2=" << p1 << " in Crack::TrimLineSegmentWithBox(...)."
<< " Program is terminated." << endl;
exit(1);
}
else {
v=TwoPtsDirCos(p1,p2);
l=v.x(); m=v.y(); n=v.z();
}
double xl=lp.x(), yl=lp.y(), zl=lp.z();
double xh=hp.x(), yh=hp.y(), zh=hp.z();
double x1=p1.x(), y1=p1.y(), z1=p1.z();
double x2=p2.x(), y2=p2.y(), z2=p2.z();
// one-millionth of the diagonal length of the box
double d=(hp-lp).length()*1.e-6;
// Detect if p1 is inside the box
short p1Outside=YES;
if(x1>xl-d && x1<xh+d && y1>yl-d && y1<yh+d && z1>zl-d && z1<zh+d) p1Outside=NO;
if(p1Outside) {
cout << "Error: p1=" << p1
<< " is outside of the box in Crack::TrimLineSegmentWithBox(): "
<< lp << "-->" << hp << ", where p2=" << p2 << endl;
cout << " Program terminated." << endl;
exit(1);
}
// If p2 is outside the box, find the intersection
short p2Outside=YES;
if(x2>xl-d && x2<xh+d && y2>yl-d && y2<yh+d && z2>zl-d && z2<zh+d) p2Outside=NO;
while(p2Outside) {
if(x2>xh || x2<xl) {
if(x2>xh) x2=xh;
if(x2<xl) x2=xl;
if(l>1.e-6) {
y2=y1+m*(x2-x1)/l;
z2=z1+n*(x2-x1)/l;
}
}
else if(y2>yh || y2<yl) {
if(y2>yh) y2=yh;
if(y2<yl) y2=yl;
if(m>1.e-6) {
x2=x1+l*(y2-y1)/m;
z2=z1+n*(y2-y1)/m;
}
}
else if(z2>zh || z2<zl) {
if(z2>zh) z2=zh;
if(z2<zl) z2=zl;
if(n>1.e-6) {
x2=x1+l*(z2-z1)/n;
y2=y1+m*(z2-z1)/n;
}
}
if(x2>xl-d && x2<xh+d && y2>yl-d && y2<yh+d && z2>zl-d && z2<zh+d) p2Outside=NO;
} // End of while(!p2Inside)
p2=Point(x2,y2,z2);
}
// Smooth sharp kinks in the propagated crack front of material m.
// If the angle between the two line-segments meeting at a crack-front
// point exceeds ca (degrees), the point is moved to the mass center of
// the triangle formed with its two neighbors.  Results are computed into
// a scratch copy first so every angle test sees the un-pruned positions,
// then written back to cfSegPtsT[m].
void Crack::PruneCrackFrontAfterPropagation(const int& m, const double& ca)
{
  // If the angle between two line-segments connected by
  // a point is larger than a certain value (ca), move the point to
  // the mass center of the triangle
  int num=(int)cfSegNodes[m].size();
  vector<Point> cfSegPtsPruned;
  cfSegPtsPruned.resize(num);
  for(int i=0; i<num; i++) {
    cfSegPtsPruned[i]=cfSegPtsT[m][i];
  }
  for(int i=0; i<(int)cfSegNodes[m].size(); i++) {
    // preIdx<0 marks the first occurrence of a physical node; duplicates
    // (shared segment endpoints) copy its result in the else-branch below
    int preIdx=cfSegPreIdx[m][i];
    if(preIdx<0) { // not operated
      if(i>cfSegMinIdx[m][i] && i<cfSegMaxIdx[m][i]) {
        Point p =cfSegPtsT[m][i];
        // NOTE(review): neighbors are taken at i-1 and i+2 -- presumably
        // because crack-front nodes are stored in segment pairs, so index
        // i+1 duplicates the same physical point as i; confirm against
        // the cfSegNodes layout before changing.
        Point p1=cfSegPtsT[m][i-1];
        Point p2=cfSegPtsT[m][i+2];
        Vector v1=TwoPtsDirCos(p1,p);
        Vector v2=TwoPtsDirCos(p,p2);
        // Turning angle (degrees) between the two adjacent segments
        double theta=acos(Dot(v1,v2))*180/3.141592654;
        if(fabs(theta)>ca) {
          // Relocate to the mass center of triangle (p, p1, p2)
          cfSegPtsPruned[i]=p+(p1-p)/3.+(p2-p)/3.;
        }
      } // End of if(i>minIdx && i<maxIdx)
    }
    else { // operated
      cfSegPtsPruned[i]=cfSegPtsPruned[preIdx];
    }
  } // End of loop over i
  for(int i=0; i<num; i++) {
    cfSegPtsT[m][i]=cfSegPtsPruned[i];
  }
  cfSegPtsPruned.clear();
}
|
npocmaka/Windows-Server-2003 | printscan/print/drivers/usermode/gpdres/pagesres/pagesres.c | /*++
Copyright (c) 1996-1999 Microsoft Corporation
Module Name:
cmdcb.c
Abstract:
Implementation of GPD command callback for "test.gpd":
OEMCommandCallback
Environment:
Windows NT Unidrv driver
Revision History:
// NOTICE-2002/03/19-v-sueyas-
// 04/07/97 -zhanw-
// Created it.
--*/
#include <windows.h>
#include "pdev.h"
#include "compress.h"
//////////////////////////////////////////////////////////////////////////
// Function:    BInitOEMExtraData
//
// Description: Resets every field of the OEM private devmode to its
//              default value and stamps the header with the structure
//              size, signature, and version so Unidrv can validate it
//              later.
//
// Parameters:
//
//      pOEMExtra   Pointer to the OEM Extra data to initialize.
//
// Returns:     TRUE always.
//
// History:
//      // NOTICE-2002/03/19-v-sueyas-
//      // 02/11/97        APresley Created.
//
//////////////////////////////////////////////////////////////////////////
BOOL BInitOEMExtraData(POEM_EXTRADATA pOEMExtra)
{
    // Header fields used by Unidrv to recognize this private devmode.
    pOEMExtra->dmExtraHdr.dwSignature = OEM_SIGNATURE;
    pOEMExtra->dmExtraHdr.dwVersion   = OEM_VERSION;
    pOEMExtra->dmExtraHdr.dwSize      = sizeof(OEM_EXTRADATA);

    // Rendering state: nothing has been rendered yet.
    pOEMExtra->fCallback     = FALSE;
    pOEMExtra->wCurrentRes   = 0;
    pOEMExtra->lWidthBytes   = 0;
    pOEMExtra->lHeightPixels = 0;
#ifdef FONTPOS
    pOEMExtra->wFontHeight   = 0;
    pOEMExtra->wYPos         = 0;
#endif
    // #278517: RectFill state.
    pOEMExtra->wRectWidth  = 0;
    pOEMExtra->wRectHeight = 0;
    pOEMExtra->wUnit       = 1;     // master-unit scale; 1 = identity

    return TRUE;
}
//////////////////////////////////////////////////////////////////////////
// Function:    BMergeOEMExtraData
//
// Description: Validates and merges OEM Extra data: copies the private
//              fields of the input devmode over the defaults in the
//              output devmode.  A NULL input leaves the defaults intact.
//
// Parameters:
//
//      pdmIn   input OEM private devmode containing the settings to be
//              validated and merged; may be NULL.
//
//      pdmOut  output OEM private devmode pre-filled with the defaults.
//
// Returns:     TRUE always.
//
// History:
//      //NOTICE-2002/03/19-v-sueyas-
//      // 02/11/97        APresley Created.
//      // 04/08/97        ZhanW    Modified the interface
//
//////////////////////////////////////////////////////////////////////////
BOOL BMergeOEMExtraData(
    POEM_EXTRADATA pdmIn,
    POEM_EXTRADATA pdmOut
    )
{
    // Nothing to merge: keep the defaults already in pdmOut.
    if(!pdmIn)
        return TRUE;

    //
    // copy over the private fields, if they are valid
    //
    pdmOut->fCallback     = pdmIn->fCallback;
    pdmOut->wCurrentRes   = pdmIn->wCurrentRes;
    pdmOut->lWidthBytes   = pdmIn->lWidthBytes;
    pdmOut->lHeightPixels = pdmIn->lHeightPixels;
#ifdef FONTPOS
    pdmOut->wFontHeight   = pdmIn->wFontHeight;
    pdmOut->wYPos         = pdmIn->wYPos;
#endif
    // #278517: RectFill
    pdmOut->wRectWidth  = pdmIn->wRectWidth;
    pdmOut->wRectHeight = pdmIn->wRectHeight;
    pdmOut->wUnit       = pdmIn->wUnit;

    return TRUE;
}
// #######
// Convenience macros used throughout this module.

// Send n bytes at s to the spooler via the Unidrv driver-procs table.
#define WRITESPOOLBUF(p, s, n) \
((p)->pDrvProcs->DrvWriteSpoolBuf(p, s, n))

// Fetch the n-th DWORD from a command-callback parameter array p.
#define PARAM(p,n) \
(*((p)+(n)))
/*********************************************************/
/* RL_ECmd : main function                               */
/* ARGS : PBYTE - pointer to image                       */
/*        PBYTE - pointer to BRL code buffer             */
/*        DWORD - size of image                          */
/*        DWORD - capacity of the BRL code buffer        */
/* RET  : DWORD - size of BRL Code                       */
/*        0     - COMPRESSION FAILED                     */
/*********************************************************/
DWORD RL_ECmd(PBYTE iptr, PBYTE cptr, DWORD isize, DWORD osize)
{
    COMP_DATA CompData;

    /* Fix: bail out before touching CompData if initialization fails;
       the old code fell through and read BUF_OVERFLOW/RL_CodeSize from
       an uninitialized structure. */
    if (VALID != RL_Init(iptr, cptr, isize, osize, &CompData))
        return 0;

    RL_Enc( &CompData );

    /* An overflowed output buffer means compression did not pay off. */
    if (CompData.BUF_OVERFLOW)
        return 0;
    else
        return CompData.RL_CodeSize;
}
/*********************************************************/
/* RL_Init : set up a COMP_DATA context for one run      */
/* ARGS : iptr  - source image buffer                    */
/*        cptr  - destination (BRL code) buffer          */
/*        isize - number of source bytes                 */
/*        osize - capacity of the destination buffer     */
/*        pCompData - context to fill in                 */
/* RET  : BYTE - VALID always                            */
/*********************************************************/
BYTE RL_Init(PBYTE iptr, PBYTE cptr, DWORD isize, DWORD osize,
PCOMP_DATA pCompData)
{
    /* Record buffers and bounds, and clear the overflow flag. */
    pCompData->BUF_OVERFLOW = 0;
    pCompData->RL_ImageSize = isize;
    pCompData->RL_ImagePtr  = iptr;
    pCompData->RL_CodePtr   = cptr;
    pCompData->RL_BufEnd    = cptr + osize; /* one past last writable byte */
    return VALID;
}
/*********************************************************/
/* RL_Enc : Encoder                                      */
/* ARGS : void                                           */
/* RET  : char COMP_SUCC or COMP_FAIL                    */
/*                                                       */
/* Byte run-length encoder: emits (count-1, byte) pairs, */
/* so one pair covers 1..256 identical bytes.  Sets      */
/* BUF_OVERFLOW and fails if the output buffer fills up. */
/*********************************************************/
char RL_Enc(PCOMP_DATA pCompData)
{
// #313252: RLE compressed data doesn't match with length.
// Rewrite RLE compression algorithm.
    int count;
    BYTE rdata;
    PBYTE pdata, pcomp, pend;
    DWORD i;

    pdata = pCompData->RL_ImagePtr;
    pcomp = pCompData->RL_CodePtr;
    pend = pCompData->RL_BufEnd;
    count = 0;
    for (i = 0; i < pCompData->RL_ImageSize; i++, pdata++) {
        if (count == 0) {
            /* Start a new run */
            rdata = *pdata;
            count = 1;
        } else if (*pdata != rdata) {
            /* Run broken: flush (count-1, rdata) and start a new run */
            if (pcomp + 2 >= pend)
                goto overflow;
            *pcomp++ = count - 1;
            *pcomp++ = rdata;
            rdata = *pdata;
            count = 1;
        } else if (++count >= 256) {
            /* Run hit the 256-byte maximum: flush and reset */
            if (pcomp + 2 >= pend)
                goto overflow;
            *pcomp++ = count - 1;
            *pcomp++ = rdata;
            count = 0;
        }
    }
    /* Flush the final pending run, if any */
    if (count) {
        if (pcomp + 2 >= pend)
            goto overflow;
        *pcomp++ = count - 1;
        *pcomp++ = rdata;
    }
    pCompData->RL_CodeSize = (DWORD)(pcomp - pCompData->RL_CodePtr);
    pCompData->RL_CodePtr = pcomp;
    return COMP_SUCC;

overflow:
    pCompData->BUF_OVERFLOW = 1;
    return COMP_FAIL;
}
//---------------------------*OEMSendFontCmd*----------------------------------
// Action: send Pages-style font selection command.
//
// Expands the '#V' / '#H' / '#P' / '#L' placeholders embedded in the GPD
// font-invocation string into big-endian 16-bit size values derived from
// the Unidrv standard variables (font height/width), then spools the
// resulting command.  At 300/600 dpi the values are rescaled by 1440/600
// -- presumably converting device pixels to 1/1440-inch units expected by
// the printer; confirm against the GPD before changing.
//
// Parameters: pdevobj - Unidrv device object; pUFObj - current font
//             object; pFInv - font invocation template from the GPD.
// Returns:    TRUE on success, FALSE on invalid parameters or failed
//             standard-variable query.
//-----------------------------------------------------------------------------
// NTRAID#NTBUG9-581704-2002/03/19-v-sueyas-: Error handling
BOOL APIENTRY bOEMSendFontCmd(pdevobj, pUFObj, pFInv)
PDEVOBJ pdevobj;
PUNIFONTOBJ pUFObj;
PFINVOCATION pFInv;
{
    DWORD i, ocmd;
    BYTE rgcmd[CCHMAXCMDLEN];
    PGETINFO_STDVAR pSV;
//#287800 ->
    // Raw storage for GETINFO_STDVAR with 3 variable slots
    DWORD dwStdVariable[2 + 2 * 3];
    DWORD dwTxtRes ;
//#287800 <-
//#319705
    WORD wAscend, wDescend ;
    POEM_EXTRADATA pOEM;
    VERBOSE(("OEMSendFontCmd entry.\n"));
    // NTRAID#NTBUG9-581700-2002/03/19-v-sueyas-: Check for illegal parameters
    if (NULL == pdevobj)
    {
        ERR(("bOEMSendFontCmd: Invalid parameter(s).\n"));
        return FALSE;
    }
    pOEM = (POEM_EXTRADATA)(pdevobj->pOEMDM);
    // NTRAID#NTBUG9-581700-2002/03/19-v-sueyas-: Check for null pointers
    if (NULL == pOEM)
    {
        ERR(("bOEMSendFontCmd: pdevobj->pOEMDM = 0.\n"));
        return FALSE;
    }
    ASSERT(VALID_PDEVOBJ(pdevobj));
    if(!pUFObj || !pFInv)
    {
        ERR(("OEMSendFontCmd: pUFObj or pFInv is NULL."));
        return FALSE;
    }
//#287800 ->
    // Query font height, font width and text Y resolution from Unidrv
    pSV = (PGETINFO_STDVAR)dwStdVariable;
    pSV->dwSize = sizeof(GETINFO_STDVAR) + 2 * sizeof(DWORD) * (3 - 1);
    pSV->dwNumOfVariable = 3;
    pSV->StdVar[0].dwStdVarID = FNT_INFO_FONTHEIGHT;
    pSV->StdVar[1].dwStdVarID = FNT_INFO_FONTWIDTH;
    pSV->StdVar[2].dwStdVarID = FNT_INFO_TEXTYRES;
//#287800 <-
    if (!pUFObj->pfnGetInfo(pUFObj, UFO_GETINFO_STDVARIABLE, pSV, pSV->dwSize, NULL))
    {
        ERR(("UFO_GETINFO_STDVARIABLE failed.\n"));
        return FALSE;
    }
#ifdef FONTPOS
    pOEM->wFontHeight = (WORD)pSV->StdVar[0].lStdVariable;
//#287800 ->
    dwTxtRes = pSV->StdVar[2].lStdVariable ;
    // NTRAID#NTBUG9-581703-2002/03/19-v-sueyas-: Check for deviding by zero
    if (0 == dwTxtRes)
    {
        ERR(("dwTxtRes = 0.\n"));
        return FALSE;
    }
    // Rescale font height from text-resolution units to device resolution
    // (rounded to nearest)
    pOEM->wFontHeight = (WORD)((pOEM->wFontHeight * pOEM->wCurrentRes
        + dwTxtRes / 2) / dwTxtRes) ;
//#287800 <-
//#319705 For TTFS positioning ->
    // NTRAID#NTBUG9-581700-2002/03/19-v-sueyas-: Check for null pointers
    if (NULL == pUFObj->pIFIMetrics)
    {
        ERR(("pUFObj->pIFIMetrics = NULL.\n"));
        return FALSE;
    }
    wAscend = pUFObj->pIFIMetrics->fwdWinAscender ;
    wDescend = pUFObj->pIFIMetrics->fwdWinDescender ;
    // NTRAID#NTBUG9-581703-2002/03/19-v-sueyas-: Check for deviding by zero
    if (0 == (wAscend + wDescend))
    {
        ERR(("pUFObj->pIFIMetrics = NULL.\n"));
        return FALSE;
    }
    // Drop the descender portion so wFontHeight measures the ascent only
    wDescend = pOEM->wFontHeight * wDescend / (wAscend + wDescend) ;
    pOEM->wFontHeight -= wDescend ;
#endif
#define SV_HEIGHT (pSV->StdVar[0].lStdVariable)
#define SV_WIDTH (pSV->StdVar[1].lStdVariable)
    // Expand the invocation template into rgcmd, substituting the
    // 2-byte '#x' placeholders with big-endian 16-bit values
    ocmd = 0;
    for (i = 0; i < pFInv->dwCount && ocmd < CCHMAXCMDLEN; )
    {
        WORD wTemp;
        if (pFInv->pubCommand[i] == '#')
        {
            if (pFInv->pubCommand[i+1] == 'V')
            {
                // character height setting
                wTemp = (WORD)SV_HEIGHT;
                if (pOEM->wCurrentRes == 300 || pOEM->wCurrentRes == 600)
                    wTemp = wTemp * 1440 / 600;
                rgcmd[ocmd++] = HIBYTE(wTemp);
                rgcmd[ocmd++] = LOBYTE(wTemp);
                i += 2;
            }
            else if (pFInv->pubCommand[i+1] == 'H')
            {
                // (DBCS) character width setting
                wTemp = (WORD)(SV_WIDTH * 2);
                if (pOEM->wCurrentRes == 300 || pOEM->wCurrentRes == 600)
                    wTemp = wTemp * 1440 / 600;
                rgcmd[ocmd++] = HIBYTE(wTemp);
                rgcmd[ocmd++] = LOBYTE(wTemp);
                i += 2;
            }
            else if (pFInv->pubCommand[i+1] == 'P')
            {
                // (DBCS) character pitch setting
                wTemp = (WORD)(SV_WIDTH * 2);
                if (pOEM->wCurrentRes == 300 || pOEM->wCurrentRes == 600)
                    wTemp = wTemp * 1440 / 600;
                rgcmd[ocmd++] = HIBYTE(wTemp);
                rgcmd[ocmd++] = LOBYTE(wTemp);
                i += 2;
            }
            else if (pFInv->pubCommand[i+1] == 'L')
            {
                // Line pitch (spacing) setting
                wTemp = (WORD)SV_HEIGHT;
                if(pOEM->wCurrentRes == 300 || pOEM->wCurrentRes == 600)
                    wTemp = wTemp * 1440 / 600;
                rgcmd[ocmd++] = HIBYTE(wTemp);
                rgcmd[ocmd++] = LOBYTE(wTemp);
                i += 2;
            }
            else {
                // '#' followed by an unknown letter: copy '#' literally
                rgcmd[ocmd++] = pFInv->pubCommand[i++];
            }
            continue;
        }
        // just copy others
        rgcmd[ocmd++] = pFInv->pubCommand[i++];
    }
    WRITESPOOLBUF(pdevobj, rgcmd, ocmd);
    return TRUE;
}
// NTRAID#NTBUG9-581704-2002/03/19-v-sueyas-: Error handling
//
// bOEMOutputCharStr
//
// Unidrv character-string output hook.  Converts the incoming glyph
// handles into TRANSDATA via UFO_GETINFO_GLYPHSTRING and spools each
// glyph as raw printer bytes, toggling the printer's vertical-writing
// mode around runs of DBCS characters when a vertical font is selected.
// When FONTPOS is defined it first repositions the baseline with an
// absolute-Y escape derived from wYPos/wFontHeight.
//
// Parameters:
//      pdevobj  Unidrv device object (must be valid).
//      pUFObj   font object of the currently selected font.
//      dwType   type of pGlyph (unused; glyph handles are assumed).
//      dwCount  number of glyphs in pGlyph.
//      pGlyph   array of glyph handles.
//
// Returns: TRUE on success; FALSE on invalid parameters or a failed
//          allocation/glyph conversion.
//
// Fixes in this revision:
//   * After the second UFO_GETINFO_GLYPHSTRING call failed, the code
//     freed tempBuf but fell through and dereferenced it via
//     GStr.pGlyphOut (use after free).  It now returns FALSE there.
//   * The pOEM NULL check referenced pOEM, which is declared only when
//     FONTPOS is defined; the check now lives under the same #ifdef so
//     non-FONTPOS builds compile.
//   * Removed unused locals (rSize, fLeadByteFlag, fDBCS, j).
BOOL APIENTRY bOEMOutputCharStr(
    PDEVOBJ pdevobj,
    PUNIFONTOBJ pUFObj,
    DWORD dwType,
    DWORD dwCount,
    PVOID pGlyph)
{
    GETINFO_GLYPHSTRING GStr;
    PBYTE tempBuf;
    PTRANSDATA pTrans;
    DWORD i;
    BYTE ESC_VERT_ON[] = "\x1B\x7E\x0E\x00\x01\x0B";
    BYTE ESC_VERT_OFF[] = "\x1B\x7E\x0E\x00\x01\x0C";
#ifdef FONTPOS
    POEM_EXTRADATA pOEM;
    BYTE ESC_Y_ABS[] = "\x1b\x7e\x1d\x00\x03\x05\x00\x00";
#endif
    BOOL bVFont, bDBChar;
    BYTE *pTemp;
    WORD wLen;
    VERBOSE(("OEMOutputCharStr() entry.\n"));
    // NTRAID#NTBUG9-581700-2002/03/19-v-sueyas-: Check for illegal parameters
    if(NULL == pdevobj || NULL == pUFObj)
    {
        ERR(("bOEMOutputCharStr: Invalid parameter(s).\n"));
        return FALSE;
    }
#ifdef FONTPOS
    pOEM = (POEM_EXTRADATA)(pdevobj->pOEMDM);
    // NTRAID#NTBUG9-581700-2002/03/19-v-sueyas-: Check for null pointers
    if(NULL == pOEM)
    {
        ERR(("bOEMOutputCharStr: pdevobj->pOEMDM = 0.\n"));
        return FALSE;
    }
#endif
    ASSERT(VALID_PDEVOBJ(pdevobj));
    // NTRAID#NTBUG9-751233-2002/12/05-yasuho-: Memory leak in pagesres.dll
    // Delete the redundant memory allocation.
    GStr.dwSize = sizeof(GETINFO_GLYPHSTRING);
    GStr.dwCount = dwCount;
    GStr.dwTypeIn = TYPE_GLYPHHANDLE;
    GStr.pGlyphIn = pGlyph;
    GStr.dwTypeOut = TYPE_TRANSDATA;
    GStr.pGlyphOut = NULL;
    GStr.dwGlyphOutSize = 0;
    /* Get TRANSDATA buffer size.  The size query is expected to fail
       (pGlyphOut is NULL) while filling in dwGlyphOutSize; an unexpected
       success, or a zero size, is treated as an error. */
    if (FALSE != pUFObj->pfnGetInfo(pUFObj,
            UFO_GETINFO_GLYPHSTRING, &GStr, 0, NULL)
        || 0 == GStr.dwGlyphOutSize )
    {
        ERR(("Get Glyph String error\n"));
        return FALSE;
    }
    /* Alloc TRANSDATA buffer */
    if(!(tempBuf = (PBYTE)MemAllocZ(GStr.dwGlyphOutSize) ))
    {
        ERR(("Mem alloc failed.\n"));
        return FALSE;
    }
    /* Get actual TRANSDATA */
    GStr.pGlyphOut = tempBuf;
    if (!pUFObj->pfnGetInfo(pUFObj,
            UFO_GETINFO_GLYPHSTRING, &GStr, 0, NULL))
    {
        ERR(("GetInfo failed.\n"));
        // NTRAID#NTBUG9-751233-2002/12/05-yasuho-: Memory leak in pagesres.dll
        MemFree(tempBuf);
        // Fix: must bail out here -- the buffer was just freed and the
        // loop below would otherwise read it through GStr.pGlyphOut.
        return FALSE;
    }
    pTrans = (PTRANSDATA)GStr.pGlyphOut;
#ifdef FONTPOS
    if(pOEM->wCurrentRes == 300 || pOEM->wCurrentRes == 600 )
        ESC_Y_ABS[5] = 0x25;
    // ntbug9#406475: Font printed the different position.
    if((pOEM->wYPos - pOEM->wFontHeight) >= 0)
    {
        ESC_Y_ABS[6] = HIBYTE((pOEM->wYPos - pOEM->wFontHeight));
        ESC_Y_ABS[7] = LOBYTE((pOEM->wYPos - pOEM->wFontHeight));
        WRITESPOOLBUF(pdevobj, ESC_Y_ABS, 8);
    }
#endif //FONTPOS
    bVFont = BVERTFONT(pUFObj);
    bDBChar = FALSE;
    for(i = 0; i < dwCount; i++, pTrans++)
    {
        switch((pTrans->ubType & MTYPE_FORMAT_MASK))
        {
        case MTYPE_DIRECT: // SBCS character
            if (bVFont && bDBChar)
            {
                // Leaving a DBCS run: turn vertical writing off first
                WRITESPOOLBUF(pdevobj, ESC_VERT_OFF, sizeof(ESC_VERT_OFF));
                bDBChar = FALSE;
            }
            WRITESPOOLBUF(pdevobj, &pTrans->uCode.ubCode, 1);
            break;
        case MTYPE_PAIRED: // DBCS character
            if (bVFont && !bDBChar)
            {
                // Entering a DBCS run: turn vertical writing on
                WRITESPOOLBUF(pdevobj, ESC_VERT_ON, sizeof(ESC_VERT_ON));
                bDBChar = TRUE;
            }
            WRITESPOOLBUF(pdevobj, pTrans->uCode.ubPairs, 2);
            break;
        case MTYPE_COMPOSE:
            if (bVFont && bDBChar)
            {
                WRITESPOOLBUF(pdevobj, ESC_VERT_OFF, sizeof(ESC_VERT_OFF));
                bDBChar = FALSE;
            }
            // sCode is an offset into the TRANSDATA buffer;
            // first two bytes there are the little-endian string length
            pTemp = (BYTE *)(GStr.pGlyphOut) + pTrans->uCode.sCode;
            wLen = *pTemp + (*(pTemp + 1) << 8);
            pTemp += 2;
            WRITESPOOLBUF(pdevobj, pTemp, wLen);
        }
    }
    if (bDBChar)
    {
        // Don't leave the printer in vertical-writing mode
        WRITESPOOLBUF(pdevobj, ESC_VERT_OFF, sizeof(ESC_VERT_OFF));
    }
    MemFree(tempBuf);
    return TRUE;
}
// OEMFilterGraphics
//
// Unidrv graphics-filter hook.  When the callback mode is active, the
// raster band in pBuf is RLE-compressed (RL_ECmd) and sent in one or
// more ESX86 image-block escapes, advancing the printer's Y position by
// a relative-move escape after each sub-band; otherwise the buffer is
// passed through to the spooler unchanged.
//
// Band geometry comes from pOEM->lWidthBytes / lHeightPixels, which are
// presumably recorded by an earlier raster callback -- confirm against
// the rest of the minidriver.
//
// Returns TRUE on success, FALSE on bad parameters or allocation failure.
BOOL APIENTRY OEMFilterGraphics(
    PDEVOBJ pdevobj,    // Points to private data required by the Unidriver.dll
    PBYTE pBuf,         // points to buffer of graphics data
    DWORD dwLen)        // length of buffer in bytes
{
    DWORD dwCompLen;
    LONG lHorzPixel;
    DWORD dwLength;      // Let's use a temporary LEN
    PBYTE pCompImage;
    POEM_EXTRADATA pOEM;
    BYTE ESC_ESX86[] = "\x1B\x7E\x86\x00\x00\x01\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01";
// #291170: Image data is not printed partly
    LONG li, lHeightPixel, lPixels, lBytes, lRemain, lSize;
    PBYTE pTemp;
    BYTE ESC_Y_REL[] = "\x1b\x7e\x1d\x00\x03\x06\x00\x00";
    // NTRAID#NTBUG9-581700-2002/03/19-v-sueyas-: Check for illegal parameters
    if (NULL == pdevobj || NULL == pBuf || 0 == dwLen)
    {
        ERR(("OEMFilterGraphics: Invalid parameter(s).\n"));
        return FALSE;
    }
    pOEM = (POEM_EXTRADATA)(pdevobj->pOEMDM);
    // NTRAID#NTBUG9-581700-2002/03/19-v-sueyas-: Check for null pointers
    if (NULL == pOEM)
    {
        ERR(("OEMFilterGraphics: pdevobj->pOEMDM = 0.\n"));
        return FALSE;
    }
    // Callback mode off: pass the raw band straight through
    if(!pOEM->fCallback)
    {
        WRITESPOOLBUF(pdevobj, pBuf, dwLen);
        return TRUE;
    }
    if(!(pCompImage = (BYTE *)MemAllocZ(MAXIMGSIZE)))
    {
        ERR(("Memory alloc error\n"));
        return FALSE;
    }
// #291170: Image data is not printed partly
// Sent 'SendBlock' command separately if necessary.
#define RLE_THRESH (MAXIMGSIZE / 2 - 2) // threshold for RLE should success
    /*_ Calculate i-axis direction size of the iage ISIZ */
    lBytes = pOEM->lWidthBytes;
    lHorzPixel = lBytes * 8;
    lHeightPixel = pOEM->lHeightPixels;
    // Byte 5 selects the resolution; presumably 0x10 = 300dpi, 0x40 = 600dpi
    if(pOEM->wCurrentRes == 300 || pOEM->wCurrentRes == 600)
        ESC_ESX86[5] = (pOEM->wCurrentRes == 300 ? 0x10 : 0x40);
    pTemp = pBuf;
    lRemain = lBytes * lHeightPixel;
    li = 0;
    // Emit the band in sub-bands small enough that worst-case RLE output
    // (2 bytes per input byte) still fits in pCompImage
    while (li < lHeightPixel) {
        // NTRAID#NTBUG9-581703-2002/03/19-v-sueyas-: Check for deviding by zero
        if (0 == lBytes)
        {
            ERR(("OEMFilterGraphics: pOEM->lWidthBytes = 0.\n"));
            // NTRAID#NTBUG9-751233-2002/12/05-yasuho-: Memory leak in pagesres.dll
            MemFree(pCompImage);
            return FALSE;
        }
        /*_ Compress image data using Byte Run Length Algorithm */
        lPixels = min(lRemain, RLE_THRESH) / lBytes;   // whole rows only
        lSize = lBytes * lPixels;
        dwCompLen = RL_ECmd(pTemp, pCompImage, lSize, MAXIMGSIZE);
        pTemp += lSize;
        lRemain -= lSize;
        li += lPixels;
        /*_ Set ISIZ of ESX86 command (width x height, big-endian) */
        ESC_ESX86[17] = HIBYTE(lHorzPixel);
        ESC_ESX86[18] = LOBYTE(lHorzPixel);
        ESC_ESX86[21] = HIBYTE(lPixels);
        ESC_ESX86[22] = LOBYTE(lPixels);
        /*_ Add parameter length to the data length after compression */
        // 18 = number of parameter bytes following the LEN field
        dwLength = dwCompLen + 18;
        /*_ Set LEN of ESX86 command */
        ESC_ESX86[3] = HIBYTE(dwLength);
        ESC_ESX86[4] = LOBYTE(dwLength);
        /*_ Output ESX86 command */
        WRITESPOOLBUF(pdevobj, (PBYTE)ESC_ESX86, 23);
        /*_ Output compressed data */
        WRITESPOOLBUF(pdevobj, pCompImage, dwCompLen);
        /* Move Y position to the next graphics portion */
        if(pOEM->wCurrentRes == 300 || pOEM->wCurrentRes == 600)
            ESC_Y_REL[5] = 0x26;
        dwLength = lPixels * pOEM->wUnit;   // Convert to MasterUnit
        ESC_Y_REL[6] = HIBYTE(dwLength);
        ESC_Y_REL[7] = LOBYTE(dwLength);
        WRITESPOOLBUF(pdevobj, ESC_Y_REL, 8);
    }
    MemFree(pCompImage);
    return TRUE;
}
/*****************************************************************************/
/* */
/* INT APIENTRY OEMCommandCallback( */
/* PDEVOBJ pdevobj */
/* DWORD dwCmdCbId */
/* DWORD dwCount */
/* PDWORD pdwParams */
/* */
/*****************************************************************************/
/*
 * OEMCommandCallback
 *
 * Unidrv command callback: translates generic driver command IDs into
 * printer escape sequences (resolution selection, cursor moves, rectangle
 * fills) and records block-transfer parameters for OEMFilterGraphics.
 *
 * Returns the cursor distance moved (for cursor commands) or 0.
 *
 * Fix: CMD_RECT_WIDTH / CMD_RECT_HEIGHT / CMD_RECT_GRAY(_2) dereferenced
 * *pdwParams without the dwCount/NULL validation that every other
 * parameterized case performs (NTRAID#NTBUG9-581700 pattern); the same
 * guard is now applied there too.
 */
INT APIENTRY
OEMCommandCallback(
    PDEVOBJ pdevobj,    // Points to private data required by the Unidriver.dll
    DWORD dwCmdCbId,    // Callback ID
    DWORD dwCount,      // Counts of command parameter
    PDWORD pdwParams)   // points to values of command params
{
    POEM_EXTRADATA pOEM;
    WORD wTemp = 0;
    // #278517: Support RectFill
    WORD wUnit;
    // Cursor-move command templates; bytes [6..7] carry the distance.
    BYTE ESC_X_ABS_NP[] = "\x1b\x7e\x1c\x00\x03\x25\x00\x00";
    BYTE ESC_X_REL_NP[] = "\x1b\x7e\x1c\x00\x03\x26\x00\x00";
    BYTE ESC_Y_ABS[] = "\x1b\x7e\x1d\x00\x03\x05\x00\x00";
    BYTE ESC_Y_REL[] = "\x1b\x7e\x1d\x00\x03\x06\x00\x00";
    // #278517: RectFill; [7]=shade, [9..10]=width, [11..12]=height.
    BYTE ESC_RECT_FILL[] =
        "\x1b\x7e\x32\x00\x08\x80\x40\x00\x02\x00\x00\x00\x00";
    BYTE ESC_BEGIN_RECT[] =
        "\x1b\x7e\x52\x00\x06\x00\x00\x17\x70\x17\x70";
    BYTE ESC_END_RECT[] =
        "\x1b\x7e\x52\x00\x06\x00\x00\x38\x40\x38\x40";

    // NTRAID#NTBUG9-581700-2002/03/19-v-sueyas-: Check for illegal parameters
    if (NULL == pdevobj)
    {
        ERR(("OEMCommandCallback: Invalid parameter(s).\n"));
        return 0;
    }
    pOEM = (POEM_EXTRADATA)(pdevobj->pOEMDM);
    // NTRAID#NTBUG9-581700-2002/03/19-v-sueyas-: Check for null pointers
    if (NULL == pOEM)
    {
        ERR(("OEMCommandCallback: pdevobj->pOEMDM = 0.\n"));
        return 0;
    }

    switch(dwCmdCbId)
    {
    case GRXFILTER_ON:
        pOEM->fCallback = TRUE;
        break;
    // Resolution selection also fixes the device-dot -> MasterUnit factor.
    case CMD_SELECT_RES_300:
        pOEM->wCurrentRes = 300;
        pOEM->wUnit = 2;
        break;
    case CMD_SELECT_RES_600:
        pOEM->wCurrentRes = 600;
        pOEM->wUnit = 1;
        break;
    // #278517: Support RectFill
    case CMD_SELECT_RES_240:
        pOEM->wCurrentRes = 240;
        pOEM->wUnit = 6;
        break;
    case CMD_SELECT_RES_360:
        pOEM->wCurrentRes = 360;
        pOEM->wUnit = 4;
        break;
    case CMD_SEND_BLOCKDATA:
        // Stash the block geometry for the next OEMFilterGraphics call.
        if( !pdwParams || dwCount != 2)
            break;
        pOEM->fCallback = TRUE;
        pOEM->lHeightPixels = (LONG)PARAM(pdwParams, 0);
        pOEM->lWidthBytes = (LONG)PARAM(pdwParams, 1);
        break;
    case CURSOR_Y_ABS_MOVE:
        // NTRAID#NTBUG9-581700-2002/03/19-v-sueyas-: Check for illegal parameters
        if (dwCount < 1 || !pdwParams)
            break;
        wTemp = (WORD)*pdwParams;
#ifdef FONTPOS
        pOEM->wYPos = wTemp;
#endif
        if(pOEM->wCurrentRes == 300 || pOEM->wCurrentRes == 600)
            ESC_Y_ABS[5] = 0x25;
        ESC_Y_ABS[6] = HIBYTE(wTemp);
        ESC_Y_ABS[7] = LOBYTE(wTemp);
        WRITESPOOLBUF(pdevobj, ESC_Y_ABS, 8);
        return (INT)wTemp;
    case CURSOR_Y_REL_DOWN:
        // NTRAID#NTBUG9-581700-2002/03/19-v-sueyas-: Check for illegal parameters
        if (dwCount < 1 || !pdwParams)
            break;
        wTemp = (WORD)*pdwParams;
#ifdef FONTPOS
        pOEM->wYPos += wTemp;
#endif
        if(pOEM->wCurrentRes == 300 || pOEM->wCurrentRes == 600)
            ESC_Y_REL[5] = 0x26;
        ESC_Y_REL[6] = HIBYTE(wTemp);
        ESC_Y_REL[7] = LOBYTE(wTemp);
        WRITESPOOLBUF(pdevobj, ESC_Y_REL, 8);
        return (INT)wTemp;
    case CURSOR_X_ABS_MOVE:
        // NTRAID#NTBUG9-581700-2002/03/19-v-sueyas-: Check for illegal parameters
        if (dwCount < 1 || !pdwParams)
            break;
        wTemp = (WORD)*pdwParams;
        ESC_X_ABS_NP[6] = HIBYTE(wTemp);
        ESC_X_ABS_NP[7] = LOBYTE(wTemp);
        WRITESPOOLBUF(pdevobj, ESC_X_ABS_NP, 8);
        return (INT)wTemp;
    case CURSOR_X_REL_RIGHT:
        // NTRAID#NTBUG9-581700-2002/03/19-v-sueyas-: Check for illegal parameters
        if (dwCount < 1 || !pdwParams)
            break;
        wTemp = (WORD)*pdwParams;
        ESC_X_REL_NP[6] = HIBYTE(wTemp);
        ESC_X_REL_NP[7] = LOBYTE(wTemp);
        WRITESPOOLBUF(pdevobj, ESC_X_REL_NP, 8);
        return (INT)wTemp;
    // #278517: RectFill
    case CMD_RECT_WIDTH:
        // Fix: validate parameters before dereferencing (was unchecked).
        if (dwCount < 1 || !pdwParams)
            break;
        pOEM->wRectWidth = (WORD)*pdwParams;
        break;
    case CMD_RECT_HEIGHT:
        // Fix: validate parameters before dereferencing (was unchecked).
        if (dwCount < 1 || !pdwParams)
            break;
        pOEM->wRectHeight = (WORD)*pdwParams;
        break;
    case CMD_RECT_BLACK:
    case CMD_RECT_BLACK_2:
        //#292316
        // ESC_RECT_FILL[6] = 0x60;
        ESC_RECT_FILL[7] = 0x00;    // Black
        goto fill;
    case CMD_RECT_WHITE:
    case CMD_RECT_WHITE_2:
        //#292316
        // ESC_RECT_FILL[6] = 0x40;
        ESC_RECT_FILL[7] = 0x0F;    // White
        goto fill;
    case CMD_RECT_GRAY:
    case CMD_RECT_GRAY_2:
        // Fix: validate parameters before dereferencing (was unchecked).
        if (dwCount < 1 || !pdwParams)
            break;
        //#292316
        // ESC_RECT_FILL[6] = 0x60;
        // NOTE(review): maps *pdwParams (gray %) onto the device shade scale;
        // the /1111 divisor predates this change — confirm against #292316.
        ESC_RECT_FILL[7] = (BYTE)((100 - *pdwParams) * 100 / 1111);  // Gray
        goto fill;
    fill:
        if (dwCmdCbId >= CMD_RECT_BLACK_2)
            WRITESPOOLBUF(pdevobj, ESC_BEGIN_RECT, 11);
        wUnit = pOEM->wUnit ? pOEM->wUnit : 1;  // for our safety
        //#292316
        // wTemp = pOEM->wRectWidth - 1;
        // Round the extent up to a multiple of the device unit.
        wTemp = pOEM->wRectWidth;
        wTemp = (WORD)(((LONG)wTemp + wUnit - 1) / wUnit * wUnit);
        ESC_RECT_FILL[9] = HIBYTE(wTemp);
        ESC_RECT_FILL[10] = LOBYTE(wTemp);
        //#292316
        // wTemp = pOEM->wRectHeight - 1;
        wTemp = pOEM->wRectHeight;
        wTemp = (WORD)(((LONG)wTemp + wUnit - 1) / wUnit * wUnit);
        ESC_RECT_FILL[11] = HIBYTE(wTemp);
        ESC_RECT_FILL[12] = LOBYTE(wTemp);
        WRITESPOOLBUF(pdevobj, ESC_RECT_FILL, 13);
        if (dwCmdCbId >= CMD_RECT_BLACK_2)
            WRITESPOOLBUF(pdevobj, ESC_END_RECT, 11);
        break;
    default:
        break;
    }
    return 0;
}
|
zoidbergwill/rumors-line-bot | src/liff/lib.js | <gh_stars>0
import { writable } from 'svelte/store';
import { t } from 'ttag';
// Query-string parameters of the LIFF page URL at load time.
const params = new URLSearchParams(location.search);

/**
 * Boolean value indicating if we are in the middle of LIFF redirect.
 * Ref: https://www.facebook.com/groups/linebot/permalink/2380490388948200/?comment_id=2380868955577010
 */
export const isDuringLiffRedirect = !!params.get('liff.state');

/**
 * Current page. Initialized from URL param `p`.
 */
export const page = writable(params.get('p'));

/**
 * Original JWT token from URL param `token`; used as the preferred
 * Authorization credential by `gql` and checked by `assertSameSearchSession`.
 */
const urlToken = params.get('token');
/**
 * Usage: gql`query {...}`(variables)
 *
 * Builds a tagged-template GraphQL executor. Authorization uses the URL JWT
 * when present, otherwise the LINE ID token from the LIFF SDK.
 *
 * @returns {(variables: object): Promise<object>}
 */
export const gql = (query, ...substitutions) => async variables => {
  const queryAndVariable = {
    query: String.raw(query, ...substitutions),
  };
  if (variables) queryAndVariable.variables = variables;

  let lineIDToken;
  if (!urlToken) {
    lineIDToken = liff.getIDToken();
    // Rejects with the same string value as the original implementation.
    if (!lineIDToken) throw 'gql Error: token not set.';
  }
  const token = urlToken ? `Bearer ${urlToken}` : `line ${lineIDToken}`;

  const response = await fetch('/graphql', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: token,
    },
    body: JSON.stringify(queryAndVariable),
  });
  const status = response.status;
  const resp = await response.json();

  if (status === 400) {
    throw new Error(
      `GraphQL Error: ${resp.errors.map(({ message }) => message).join('\n')}`
    );
  }
  if (resp.errors) {
    // When status is 200 but have error, just print them out.
    console.error('GraphQL operation contains error:', resp.errors);
    rollbar.error(
      'GraphQL error',
      { body: JSON.stringify(queryAndVariable) },
      { resp }
    );
  }
  return resp;
};
/**
 * Prevent users from proceeding with external browsers.
 * Useful when the following process involves functions only available within
 * LINE client, such as invoking `liff.sendMessage()`.
 */
export const assertInClient = () => {
  // Skip entirely in debug builds; note liff.isInClient() is not even
  // consulted then (short-circuit), matching the original behavior.
  if (DEBUG_LIFF || liff.isInClient()) {
    return;
  }
  alert(
    t`Sorry, the function is not applicable on desktop.` +
      '\n' +
      t`Please proceed on your mobile phone.` +
      ' 📲 '
  );
  liff.closeWindow();
};
/**
 * Checks if still in the same search session.
 * This checks URL token for expiracy and try retrieving sessionId from GraphQL server.
 *
 * Closes LIFF when GraphQL server rejects.
 */
export const assertSameSearchSession = async () => {
  if (!urlToken) {
    alert(t`Cannot get token from URL`);
    liff.closeWindow();
    return;
  }
  // Decode the JWT payload without verifying the signature; the server
  // re-validates the token in the query below.
  // NOTE(review): the ternary is redundant — urlToken is known truthy here.
  const parsedToken = urlToken
    ? JSON.parse(atob(urlToken.split('.')[1]))
    : null;
  // A payload with no `exp` falls back to -Infinity and is thus treated as
  // expired — presumably intentional; confirm against token issuer.
  if ((parsedToken.exp || -Infinity) < Date.now() / 1000) {
    alert(t`Sorry, the button is expired.`);
    liff.closeWindow();
    return;
  }
  // Ask the server for the session bound to this token.
  const { data, errors } = await gql`
    query CheckSessionId {
      context {
        data {
          sessionId
        }
      }
    }
  `();
  if (errors && errors[0].message === 'Invalid authentication header') {
    alert(t`This button was for previous search and is now expired.`);
    liff.closeWindow();
    return;
  }
  if (
    !data ||
    !data.context ||
    !data.context.data ||
    !data.context.data.sessionId
  ) {
    alert(
      /* t: In LIFF, should not happen */ t`Unexpected error, no search session data is retrieved.`
    );
    liff.closeWindow();
    return;
  }
};
/**
 * Fetches the given articles from the Cofacts API in one aliased GraphQL
 * request (one `GetArticle` field per id).
 *
 * @param {string[]} articleIds
 * @returns {Article} Article object from Cofacts API
 */
export const getArticlesFromCofacts = async articleIds => {
  if (articleIds.length === 0) return [];

  // Build one variable per article id: { a0: id0, a1: id1, ... }
  const variables = {};
  articleIds.forEach((articleId, idx) => {
    variables[`a${idx}`] = articleId;
  });
  const variableKeys = Object.keys(variables);

  const query = `
    query GetArticlesLinkedToUser(
      ${variableKeys.map(k => `$${k}: String!`).join('\n')}
    ) {
      ${variableKeys
        .map(
          k =>
            `${k}: GetArticle(id: $${k}) {
              id
              text
              articleReplies(status: NORMAL) {
                createdAt
              }
            }`
        )
        .join('\n')}
    }
  `;

  const response = await fetch(COFACTS_API_URL, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'x-app-id': APP_ID,
    },
    body: JSON.stringify({ query, variables }),
  });
  const status = response.status;
  const resp = await response.json();

  if (status === 400) {
    throw new Error(
      `getArticlesFromCofacts Error: ${resp.errors
        .map(({ message }) => message)
        .join('\n')}`
    );
  }
  if (resp.errors) {
    // When status is 200 but have error, just print them out.
    console.error(
      'getArticlesFromCofacts operation contains error:',
      resp.errors
    );
    rollbar.error(
      'getArticlesFromCofacts error',
      { body: JSON.stringify({ query, variables }) },
      { resp }
    );
  }
  // Preserve input order via the alias keys.
  return variableKeys.map(key => resp.data[key]);
};
/**
 * Sends messages through the LIFF SDK, translating the "permission denied"
 * error (code 403) into a user-facing hint instead of rethrowing it.
 *
 * @param {Object} messages
 */
export const sendMessages = async messages => {
  try {
    await liff.sendMessages(messages);
  } catch (e) {
    // Loose comparison kept on purpose (code may be number or string).
    if (e.code != 403) {
      alert(e);
      throw e;
    }
    alert(
      t`Please retry and allow the permission 'send messages to chats', so that you can interact with chatbot while clicking the buttons.`
    );
  }
};
|
DeckieHQ/deckie-api | spec/requests/events/delete_spec.rb | <reponame>DeckieHQ/deckie-api
require 'rails_helper'
# Request specs for DELETE /events/:id.
RSpec.describe 'Event delete', :type => :request do
  let(:event) { FactoryGirl.create(:event) }

  # Every example performs the delete up front; assertions inspect the result.
  before do
    delete event_path(event), headers: json_headers
  end

  it_behaves_like 'an action requiring authentication'

  context 'when user is authenticated' do
    context 'when event belongs to the user' do
      let(:authenticate) { event.host.user }

      it { is_expected.to return_status_code 204 }

      it 'deletes the event' do
        expect(Event.find_by(id: event.id)).to be_nil
      end

      # A 'cancel' action record is created for the host's profile.
      it { is_expected.to have_created_action(authenticate.profile, event, 'cancel') }

      context 'when event is closed' do
        let(:event) { FactoryGirl.create(:event_closed) }

        it { is_expected.to return_authorization_error(:event_closed) }

        it "doesn't delete the event" do
          expect(event.reload).to be_persisted
        end
      end
    end

    context "when event doesn't exists" do
      # A bare hash stands in for a missing record id.
      let(:event) { { id: 0 } }
      let(:authenticate) { FactoryGirl.create(:user) }

      it { is_expected.to return_not_found }
    end

    context "when event doesn't belong to the user" do
      let(:authenticate) { FactoryGirl.create(:user) }

      it { is_expected.to return_forbidden }

      it "doesn't destroy the event" do
        expect(event.reload).to be_persisted
      end
    end
  end
end
|
aavcc/taiga-openshift | tests/integration/resources_permissions/test_webhooks_resources.py | <filename>tests/integration/resources_permissions/test_webhooks_resources.py
# -*- coding: utf-8 -*-
# Copyright (C) 2014-2017 <NAME> <<EMAIL>>
# Copyright (C) 2014-2017 <NAME> <<EMAIL>>
# Copyright (C) 2014-2017 <NAME> <<EMAIL>>
# Copyright (C) 2014-2017 <NAME> <<EMAIL>>
# Copyright (C) 2014-2017 <NAME> <<EMAIL>>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core.urlresolvers import reverse
from taiga.base.utils import json
from taiga.projects import choices as project_choices
from taiga.webhooks.serializers import WebhookSerializer
from taiga.webhooks.models import Webhook
from tests import factories as f
from tests.utils import helper_test_http_method, disconnect_signals, reconnect_signals
from unittest import mock
import pytest
pytestmark = pytest.mark.django_db
def setup_module(module):
    # Signal handlers (notifications etc.) are irrelevant to permission checks
    # and would slow the suite down.
    disconnect_signals()
def teardown_module(module):
    # Restore the signal handlers disabled in setup_module.
    reconnect_signals()
@pytest.fixture
def data():
    """Builds the permission matrix fixtures.

    - project1: private; ``project_owner`` is an admin member; has a webhook/log.
    - project2: private; owned by ``project_owner`` but with NO membership,
      so even the owner gets 403/404 on its webhooks.
    - blocked_project: like project1 but blocked by staff (writes return 451).
    """
    m = type("Models", (object,), {})

    m.registered_user = f.UserFactory.create()
    m.project_owner = f.UserFactory.create()

    m.project1 = f.ProjectFactory(is_private=True,
                                  anon_permissions=[],
                                  public_permissions=[],
                                  owner=m.project_owner)
    m.project2 = f.ProjectFactory(is_private=True,
                                  anon_permissions=[],
                                  public_permissions=[],
                                  owner=m.project_owner)
    m.blocked_project = f.ProjectFactory(is_private=True,
                                         anon_permissions=[],
                                         public_permissions=[],
                                         owner=m.project_owner,
                                         blocked_code=project_choices.BLOCKED_BY_STAFF)

    # Admin memberships exist only for project1 and the blocked project.
    f.MembershipFactory(project=m.project1,
                        user=m.project_owner,
                        is_admin=True)
    f.MembershipFactory(project=m.blocked_project,
                        user=m.project_owner,
                        is_admin=True)

    m.webhook1 = f.WebhookFactory(project=m.project1)
    m.webhooklog1 = f.WebhookLogFactory(webhook=m.webhook1)
    m.webhook2 = f.WebhookFactory(project=m.project2)
    m.webhooklog2 = f.WebhookLogFactory(webhook=m.webhook2)
    m.blocked_webhook = f.WebhookFactory(project=m.blocked_project)
    m.blocked_webhooklog = f.WebhookLogFactory(webhook=m.blocked_webhook)

    return m
def test_webhook_retrieve(client, data):
    """GET webhook detail: anon 401; non-member 403; admin 200 (even if blocked)."""
    url1 = reverse('webhooks-detail', kwargs={"pk": data.webhook1.pk})
    url2 = reverse('webhooks-detail', kwargs={"pk": data.webhook2.pk})
    blocked_url = reverse('webhooks-detail', kwargs={"pk": data.blocked_webhook.pk})

    users = [
        None,
        data.registered_user,
        data.project_owner
    ]

    results = helper_test_http_method(client, 'get', url1, None, users)
    assert results == [401, 403, 200]
    # project2 has no membership, so even its owner is denied.
    results = helper_test_http_method(client, 'get', url2, None, users)
    assert results == [401, 403, 403]
    # Reads on blocked projects are still allowed.
    results = helper_test_http_method(client, 'get', blocked_url, None, users)
    assert results == [401, 403, 200]
def test_webhook_update(client, data):
    """PUT webhook: anon 401; non-member 403; admin 200; blocked project 451."""
    url1 = reverse('webhooks-detail', kwargs={"pk": data.webhook1.pk})
    url2 = reverse('webhooks-detail', kwargs={"pk": data.webhook2.pk})
    blocked_url = reverse('webhooks-detail', kwargs={"pk": data.blocked_webhook.pk})

    users = [
        None,
        data.registered_user,
        data.project_owner
    ]

    webhook_data = WebhookSerializer(data.webhook1).data
    webhook_data["key"] = "test"
    webhook_data = json.dumps(webhook_data)
    results = helper_test_http_method(client, 'put', url1, webhook_data, users)
    assert results == [401, 403, 200]

    webhook_data = WebhookSerializer(data.webhook2).data
    webhook_data["key"] = "test"
    webhook_data = json.dumps(webhook_data)
    results = helper_test_http_method(client, 'put', url2, webhook_data, users)
    assert results == [401, 403, 403]

    # Writes on blocked projects yield 451 (blocked by staff).
    webhook_data = WebhookSerializer(data.blocked_webhook).data
    webhook_data["key"] = "test"
    webhook_data = json.dumps(webhook_data)
    results = helper_test_http_method(client, 'put', blocked_url, webhook_data, users)
    assert results == [401, 403, 451]
def test_webhook_delete(client, data):
    """DELETE webhook: anon 401; non-member 403; admin 204; blocked project 451."""
    url1 = reverse('webhooks-detail', kwargs={"pk": data.webhook1.pk})
    url2 = reverse('webhooks-detail', kwargs={"pk": data.webhook2.pk})
    blocked_url = reverse('webhooks-detail', kwargs={"pk": data.blocked_webhook.pk})

    users = [
        None,
        data.registered_user,
        data.project_owner
    ]

    results = helper_test_http_method(client, 'delete', url1, None, users)
    assert results == [401, 403, 204]
    results = helper_test_http_method(client, 'delete', url2, None, users)
    assert results == [401, 403, 403]
    results = helper_test_http_method(client, 'delete', blocked_url, None, users)
    assert results == [401, 403, 451]
def test_webhook_list(client, data):
    """List is always 200 but filtered: only the admin sees their 2 webhooks."""
    url = reverse('webhooks-list')

    # Anonymous: empty list.
    response = client.get(url)
    webhooks_data = json.loads(response.content.decode('utf-8'))
    assert len(webhooks_data) == 0
    assert response.status_code == 200

    # Unrelated registered user: empty list.
    client.login(data.registered_user)
    response = client.get(url)
    webhooks_data = json.loads(response.content.decode('utf-8'))
    assert len(webhooks_data) == 0
    assert response.status_code == 200

    # Admin of project1 + blocked_project: sees both of their webhooks.
    client.login(data.project_owner)
    response = client.get(url)
    webhooks_data = json.loads(response.content.decode('utf-8'))
    assert len(webhooks_data) == 2
    assert response.status_code == 200
def test_webhook_create(client, data):
    """POST webhook: anon 401; non-member 403; admin 201; blocked project 451."""
    url = reverse('webhooks-list')

    users = [
        None,
        data.registered_user,
        data.project_owner
    ]

    create_data = json.dumps({
        "name": "Test",
        "url": "http://test.com",
        "key": "test",
        "project": data.project1.pk,
    })
    # The lambda wipes webhooks between attempts so each POST starts clean.
    results = helper_test_http_method(client, 'post', url, create_data, users, lambda: Webhook.objects.all().delete())
    assert results == [401, 403, 201]

    create_data = json.dumps({
        "name": "Test",
        "url": "http://test.com",
        "key": "test",
        "project": data.project2.pk,
    })
    results = helper_test_http_method(client, 'post', url, create_data, users, lambda: Webhook.objects.all().delete())
    assert results == [401, 403, 403]

    create_data = json.dumps({
        "name": "Test",
        "url": "http://test.com",
        "key": "test",
        "project": data.blocked_project.pk,
    })
    results = helper_test_http_method(client, 'post', url, create_data, users, lambda: Webhook.objects.all().delete())
    assert results == [401, 403, 451]
def test_webhook_patch(client, data):
    """PATCH webhook: same permission matrix as PUT (200 / 403 / 451)."""
    url1 = reverse('webhooks-detail', kwargs={"pk": data.webhook1.pk})
    url2 = reverse('webhooks-detail', kwargs={"pk": data.webhook2.pk})
    blocked_url = reverse('webhooks-detail', kwargs={"pk": data.blocked_webhook.pk})

    users = [
        None,
        data.registered_user,
        data.project_owner
    ]

    patch_data = json.dumps({"key": "test"})
    results = helper_test_http_method(client, 'patch', url1, patch_data, users)
    assert results == [401, 403, 200]

    patch_data = json.dumps({"key": "test"})
    results = helper_test_http_method(client, 'patch', url2, patch_data, users)
    assert results == [401, 403, 403]

    patch_data = json.dumps({"key": "test"})
    results = helper_test_http_method(client, 'patch', blocked_url, patch_data, users)
    assert results == [401, 403, 451]
def test_webhook_action_test(client, data):
    """POST webhooks-test: hidden as 404 for non-admins; admin gets 200/451.

    The outbound HTTP request is mocked so no network traffic occurs; the
    mock-called flag proves the request is only attempted when authorized.
    """
    url1 = reverse('webhooks-test', kwargs={"pk": data.webhook1.pk})
    url2 = reverse('webhooks-test', kwargs={"pk": data.webhook2.pk})
    blocked_url = reverse('webhooks-test', kwargs={"pk": data.blocked_webhook.pk})

    users = [
        None,
        data.registered_user,
        data.project_owner
    ]

    with mock.patch('taiga.webhooks.tasks._send_request') as _send_request_mock:
        _send_request_mock.return_value = data.webhooklog1
        results = helper_test_http_method(client, 'post', url1, None, users)
        assert results == [404, 404, 200]
        assert _send_request_mock.called is True

    with mock.patch('taiga.webhooks.tasks._send_request') as _send_request_mock:
        _send_request_mock.return_value = data.webhooklog1
        results = helper_test_http_method(client, 'post', url2, None, users)
        assert results == [404, 404, 404]
        assert _send_request_mock.called is False

    with mock.patch('taiga.webhooks.tasks._send_request') as _send_request_mock:
        _send_request_mock.return_value = data.webhooklog1
        results = helper_test_http_method(client, 'post', blocked_url, None, users)
        assert results == [404, 404, 451]
        assert _send_request_mock.called is False
def test_webhooklogs_list(client, data):
    """Webhook-log list is 200 for everyone but filtered to admin-visible logs."""
    url = reverse('webhooklogs-list')

    response = client.get(url)
    webhooklogs_data = json.loads(response.content.decode('utf-8'))
    assert len(webhooklogs_data) == 0
    assert response.status_code == 200

    client.login(data.registered_user)
    response = client.get(url)
    webhooklogs_data = json.loads(response.content.decode('utf-8'))
    assert len(webhooklogs_data) == 0
    assert response.status_code == 200

    # Admin sees the logs of project1 and the blocked project.
    client.login(data.project_owner)
    response = client.get(url)
    webhooklogs_data = json.loads(response.content.decode('utf-8'))
    assert len(webhooklogs_data) == 2
    assert response.status_code == 200
def test_webhooklogs_retrieve(client, data):
    """GET webhook-log detail: anon 401; non-member 403; admin 200."""
    url1 = reverse('webhooklogs-detail', kwargs={"pk": data.webhooklog1.pk})
    url2 = reverse('webhooklogs-detail', kwargs={"pk": data.webhooklog2.pk})
    # NOTE(review): this targets the *webhook* detail endpoint with the
    # blocked webhook, not 'webhooklogs-detail' with blocked_webhooklog —
    # looks like a copy-paste slip; confirm intended coverage.
    blocked_url = reverse('webhooks-detail', kwargs={"pk": data.blocked_webhook.pk})

    users = [
        None,
        data.registered_user,
        data.project_owner
    ]

    results = helper_test_http_method(client, 'get', url1, None, users)
    assert results == [401, 403, 200]
    results = helper_test_http_method(client, 'get', url2, None, users)
    assert results == [401, 403, 403]
    results = helper_test_http_method(client, 'get', blocked_url, None, users)
    assert results == [401, 403, 200]
def test_webhooklogs_create(client, data):
    """Webhook logs are read-only: POST to the list endpoint is 405 for all."""
    # All three names resolve to the same list URL; kept parallel with the
    # sibling tests for symmetry.
    url1 = reverse('webhooklogs-list')
    url2 = reverse('webhooklogs-list')
    blocked_url = reverse('webhooklogs-list')

    users = [
        None,
        data.registered_user,
        data.project_owner
    ]

    results = helper_test_http_method(client, 'post', url1, None, users)
    assert results == [405, 405, 405]
    results = helper_test_http_method(client, 'post', url2, None, users)
    assert results == [405, 405, 405]
    results = helper_test_http_method(client, 'post', blocked_url, None, users)
    assert results == [405, 405, 405]
def test_webhooklogs_delete(client, data):
    """Webhook logs are read-only: DELETE is 405 regardless of user."""
    url1 = reverse('webhooklogs-detail', kwargs={"pk": data.webhooklog1.pk})
    url2 = reverse('webhooklogs-detail', kwargs={"pk": data.webhooklog2.pk})
    blocked_url = reverse('webhooklogs-detail', kwargs={"pk": data.blocked_webhooklog.pk})

    users = [
        None,
        data.registered_user,
        data.project_owner
    ]

    results = helper_test_http_method(client, 'delete', url1, None, users)
    assert results == [405, 405, 405]
    results = helper_test_http_method(client, 'delete', url2, None, users)
    assert results == [405, 405, 405]
    results = helper_test_http_method(client, 'delete', blocked_url, None, users)
    assert results == [405, 405, 405]
def test_webhooklogs_update(client, data):
    """Webhook logs are read-only: PUT and PATCH are 405 regardless of user."""
    url1 = reverse('webhooklogs-detail', kwargs={"pk": data.webhooklog1.pk})
    url2 = reverse('webhooklogs-detail', kwargs={"pk": data.webhooklog2.pk})
    blocked_url = reverse('webhooklogs-detail', kwargs={"pk": data.blocked_webhooklog.pk})

    users = [
        None,
        data.registered_user,
        data.project_owner
    ]

    results = helper_test_http_method(client, 'put', url1, None, users)
    assert results == [405, 405, 405]
    results = helper_test_http_method(client, 'put', url2, None, users)
    assert results == [405, 405, 405]
    results = helper_test_http_method(client, 'put', blocked_url, None, users)
    assert results == [405, 405, 405]

    results = helper_test_http_method(client, 'patch', url1, None, users)
    assert results == [405, 405, 405]
    results = helper_test_http_method(client, 'patch', url2, None, users)
    assert results == [405, 405, 405]
    results = helper_test_http_method(client, 'patch', blocked_url, None, users)
    assert results == [405, 405, 405]
def test_webhooklogs_action_resend(client, data):
    """POST webhooklogs-resend: hidden as 404 for non-admins; admin 200/451."""
    url1 = reverse('webhooklogs-resend', kwargs={"pk": data.webhooklog1.pk})
    url2 = reverse('webhooklogs-resend', kwargs={"pk": data.webhooklog2.pk})
    blocked_url = reverse('webhooklogs-resend', kwargs={"pk": data.blocked_webhooklog.pk})

    users = [
        None,
        data.registered_user,
        data.project_owner
    ]

    results = helper_test_http_method(client, 'post', url1, None, users)
    assert results == [404, 404, 200]
    results = helper_test_http_method(client, 'post', url2, None, users)
    assert results == [404, 404, 404]
    results = helper_test_http_method(client, 'post', blocked_url, None, users)
    assert results == [404, 404, 451]
|
stephandruskat/hexatomic | tests/org.corpus_tools.hexatomic.core.tests/src/main/java/org/corpus_tools/hexatomic/core/DummySync.java | <reponame>stephandruskat/hexatomic
package org.corpus_tools.hexatomic.core;
import org.eclipse.e4.ui.di.UISynchronize;
/**
 * A {@link UISynchronize} for unit tests: every runnable is executed
 * immediately and synchronously on the calling thread, so tests need no
 * running UI event loop.
 */
public final class DummySync extends UISynchronize {

  @Override
  public void syncExec(Runnable task) {
    task.run();
  }

  @Override
  public void asyncExec(Runnable task) {
    // Intentionally synchronous: keeps test execution deterministic.
    task.run();
  }

  @Override
  protected boolean isUIThread(Thread candidate) {
    // No UI thread exists in tests.
    return false;
  }

  @Override
  protected void showBusyWhile(Runnable task) {
    // Not implemented in dummy
  }

  @Override
  protected boolean dispatchEvents() {
    // Nothing to dispatch without an event loop.
    return false;
  }
}
|
Histler/Infodota | app/src/main/java/com/badr/infodota/cosmetic/api/price/ItemClass.java | <gh_stars>1-10
package com.badr.infodota.cosmetic.api.price;
import java.io.Serializable;
/**
 * Mutable name/value pair describing a cosmetic item class in the price API
 * response.
 *
 * User: ABadretdinov
 * Date: 31.03.14
 * Time: 16:26
 */
public class ItemClass implements Serializable {

    /**
     * Explicit serialVersionUID so serialized instances stay compatible
     * across recompiles (a Serializable class without one gets a fragile
     * compiler-generated id).
     */
    private static final long serialVersionUID = 1L;

    private String name;
    private String value;

    /** @return the class name, or {@code null} if never set */
    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    /** @return the class value, or {@code null} if never set */
    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }
}
|
HyunWooBro/Capitalism-Online | CapitalismMobile/src_core/core/framework/graphics/batch/Batch.java | <filename>CapitalismMobile/src_core/core/framework/graphics/batch/Batch.java
package core.framework.graphics.batch;
import core.framework.graphics.Color4;
import core.framework.graphics.Form;
import core.framework.graphics.texture.Texture;
import core.framework.graphics.texture.TextureRegion;
import core.framework.graphics.utils.ShaderProgram;
import core.math.Matrix3;
import core.math.Matrix4;
import core.utils.Disposable;
/**
 * Batching draw calls is fundamental for gaining performance during
 * rendering.
 *
 * This interface defines the basic method signatures for batch-rendering
 * regions of textures.
 *
 * @author 김현우
 */
public interface Batch extends Disposable {

    /**
     * Begins batch rendering. Once started, it must not be called again
     * until {@link #end()} has been called.
     */
    public void begin();

    /**
     * Ends batch rendering. Must not be called before {@link #begin()}.
     */
    public void end();

    /**
     * Performs the actual rendering using the vertex data accumulated so far.
     */
    public void flush();

    public float getAlpha();

    public void setAlpha(float alpha);

    public Color4 getColor();

    public void setColor(Color4 color);

    public void setColor(float a, float r, float g, float b);

    public void setColor(int a, int r, int g, int b);

    public void setColor(int color);

    public void draw(Texture texture, float srcX, float srcY, float srcWidth, float srcHeight,
            float dstX, float dstY, float dstWidth, float dstHeight);

    public void draw(Texture texture, float srcX, float srcY, float srcWidth, float srcHeight,
            float dstX, float dstY, float dstWidth, float dstHeight, boolean flipX, boolean flipY);

    public void draw(TextureRegion textureRegion, float dstX, float dstY);

    public void draw(TextureRegion textureRegion, float dstX, float dstY,
            float dstWidth, float dstHeight);

    public void draw(TextureRegion textureRegion, float dstX,
            float dstY, float dstWidth, float dstHeight, boolean flipX, boolean flipY);

    public void draw(TextureRegion textureRegion, float dstX,
            float dstY, float dstWidth, float dstHeight, boolean flipX, boolean flipY, boolean clockwise);

    public void draw(TextureRegion textureRegion, Form dstForm);

    /**
     * Pushes a transform matrix onto the stack. The reference itself is
     * stored, so modifying the matrix externally affects the batch.
     */
    public void pushTransformMatrix(Matrix3 matrix);

    public Matrix3 peekTransformMatrix();

    public Matrix3 popTransformMatrix();

    public void pushTransformColor(Color4 color);

    public Color4 peekTransformColor();

    public Color4 popTransformColor();

    public Matrix4 getProjectionMatrix();

    public void setProjectionMatrix(Matrix4 projectionMatrix);

    public boolean isBlendEnabled();

    public void setBlendEnabled(boolean blendEnabled);

    public int getBlendSrcFactor();

    public int getBlendDstFactor();

    public void setBlendFunc(int blendSrcFactor, int blendDstFactor);

    public ShaderProgram getShaderProgram();

    public void setShaderProgram(ShaderProgram program);

    public int getMaxSprites();

    public int getSpriteCount();
}
NoNews/NoPaginate | sample/src/main/java/ru/alexbykov/pagination/utils/MockUtils.java | <reponame>NoNews/NoPaginate<gh_stars>100-1000
package ru.alexbykov.pagination.utils;
import android.os.Handler;
import java.util.ArrayList;
import java.util.List;
/**
* Created by <NAME> on 14.08.2017.
* You can contact me at: <EMAIL>.
*/
/** Static helpers producing fake data and fake network latency for the demo. */
public class MockUtils {

    private MockUtils() {
        // Utility holder — never instantiated.
    }

    /**
     * @return a fresh mutable list containing the integers 0..19, used as
     *         placeholder feed items.
     */
    public static List<Integer> getMockItems() {
        final List<Integer> items = new ArrayList<>();
        for (int value = 0; value < 20; value++) {
            items.add(value);
        }
        return items;
    }

    /**
     * Simulates an HTTP round trip: after a 2-second delay, randomly reports
     * either success or failure through the callback.
     */
    public static void mockHttpRequest(final NetworkCallback callback) {
        new Handler().postDelayed(() -> {
            if (RandomUtils.getRandomBoolean()) {
                callback.onSuccess();
            } else {
                callback.onError();
            }
        }, 2000);
    }

    /** Outcome receiver for {@link #mockHttpRequest(NetworkCallback)}. */
    public interface NetworkCallback {
        void onSuccess();

        void onError();
    }
}
|
Clayful/clayful-js | lib/models-js/store.js | const assign = require('../util/assign');
module.exports = request => {
const Store = {
name: 'Store',
path: 'store',
get: function() {
return request(assign(Store._get(), { args: Array.prototype.slice.call(arguments) }));
},
};
Store._get = function() {
return {
modelName: Store.name,
methodName: 'get',
httpMethod: 'GET',
path: '/v1/store',
params: [],
};
};
return Store;
}; |
joeyudongs/UoloNet | Uolo-Net/UNet/lambda_rad/static/dwv/io/memoryLoader.js | <reponame>joeyudongs/UoloNet<filename>Uolo-Net/UNet/lambda_rad/static/dwv/io/memoryLoader.js
// namespaces
var dwv = dwv || {};
dwv.io = dwv.io || {};

/**
 * Memory loader: drives the configured dwv loaders over a list of in-memory
 * buffers, tracking progress and completion.
 *
 * Fix: `abort` previously called `this.clearStoredLoaders()` — a method that
 * does not exist (the defined one is `clearStoredLoader`) — so every abort
 * threw a TypeError. It also now tolerates being called before any loader
 * was stored.
 * @constructor
 */
dwv.io.MemoryLoader = function ()
{
    /**
     * Closure to self.
     * @private
     * @type Object
     */
    var self = this;
    /**
     * Launched loader (used in abort).
     * @private
     * @type Object
     */
    var runningLoader = null;
    /**
     * Number of data to load.
     * @private
     * @type Number
     */
    var nToLoad = 0;
    /**
     * Number of loaded data.
     * @private
     * @type Number
     */
    var nLoaded = 0;
    /**
     * The default character set (optional).
     * @private
     * @type String
     */
    var defaultCharacterSet;

    /**
     * Get the default character set.
     * @return {String} The default character set.
     */
    this.getDefaultCharacterSet = function () {
        return defaultCharacterSet;
    };
    /**
     * Set the default character set.
     * @param {String} characterSet The character set.
     */
    this.setDefaultCharacterSet = function (characterSet) {
        defaultCharacterSet = characterSet;
    };
    /**
     * Store a launched loader.
     * @param {Object} loader The launched loader.
     */
    this.storeLoader = function (loader) {
        runningLoader = loader;
    };
    /**
     * Clear the stored loader.
     */
    this.clearStoredLoader = function () {
        runningLoader = null;
    };
    /**
     * Abort a memory load: aborts the launched loader (if any) and forgets it.
     */
    this.abort = function () {
        // abort loader (guard: abort may be called before any load started)
        if (runningLoader) {
            runningLoader.abort();
        }
        // was `this.clearStoredLoaders()` (undefined) — fixed to the real method
        this.clearStoredLoader();
    };
    /**
     * Set the number of data to load.
     * @param {Number} n The number of data to load.
     */
    this.setNToLoad = function (n) {
        nToLoad = n;
    };
    /**
     * Increment the number of loaded data
     * and call onloadend if loaded all data.
     */
    this.addLoaded = function () {
        nLoaded++;
        if ( nLoaded === nToLoad ) {
            self.onloadend();
        }
    };
}; // class Memory
/**
 * Handle a load event.
 * Overwrite this on an instance to receive loaded data.
 * @param {Object} event The load event, 'event.target'
 *  should be the loaded data.
 * Default does nothing.
 */
dwv.io.MemoryLoader.prototype.onload = function (/*event*/) {};
/**
 * Handle a load end event (fired once all items have loaded).
 * Default does nothing.
 */
dwv.io.MemoryLoader.prototype.onloadend = function () {};
/**
 * Handle a progress event.
 * @param {Object} event The progress event.
 * Default does nothing.
 */
dwv.io.MemoryLoader.prototype.onprogress = function (/*event*/) {};
/**
 * Handle an error event.
 * @param {Object} event The error event with an
 *  optional 'event.message'.
 * Default does nothing.
 */
dwv.io.MemoryLoader.prototype.onerror = function (/*event*/) {};
/**
 * Handle an abort event.
 * @param {Object} event The abort event with an
 *  optional 'event.message'.
 * Default does nothing.
 */
dwv.io.MemoryLoader.prototype.onabort = function (/*event*/) {};
/**
 * Load a list of buffers.
 * @param {Array} ioArray The list of buffers to load; each element must
 *  provide 'data' and 'filename' properties.
 */
dwv.io.MemoryLoader.prototype.load = function (ioArray)
{
    // forget any previously stored loader
    this.clearStoredLoader();
    // closure to self for handlers
    var self = this;
    // progress bookkeeping
    this.setNToLoad( ioArray.length );
    var progressHandler = new dwv.utils.MultiProgressHandler(self.onprogress);
    progressHandler.setNToLoad( ioArray.length );
    // instantiate every registered loader and wire our callbacks to it
    var loaders = dwv.io.loaderList.map(function (loaderName) {
        return new dwv.io[loaderName]();
    });
    loaders.forEach(function (loader) {
        loader.onload = self.onload;
        loader.onloadend = self.addLoaded;
        loader.onerror = self.onerror;
        loader.onabort = self.onabort;
        loader.setOptions({
            'defaultCharacterSet': self.getDefaultCharacterSet()
        });
        loader.onprogress = progressHandler.getUndefinedMonoProgressHandler(1);
    });
    // dispatch each buffer to the first loader that accepts its filename
    for (var i = 0; i < ioArray.length; ++i)
    {
        var iodata = ioArray[i];
        var chosen = null;
        for (var l = 0; l < loaders.length; ++l) {
            if (loaders[l].canLoadUrl(iodata.filename)) {
                chosen = loaders[l];
                break;
            }
        }
        if (!chosen) {
            throw new Error("No loader found for file: "+iodata.filename);
        }
        // store loader for abort, then read this item
        this.storeLoader(chosen);
        chosen.load(iodata.data, iodata.filename, i);
    }
};
|
tysm/cpsols | codeforces/contests/round/480-div2/b.cpp | <filename>codeforces/contests/round/480-div2/b.cpp
#include <cpplib/stdinc.hpp>
int32_t main(){
desync();
int n, k;
cin >> n >> k;
char ans[4][n];
for(int i=0; i<n; ++i){
for(int j=0; j<4; ++j)
ans[j][i] = '.';
}
int i = 1;
while(k > 1 and i <= 2){
for(int j=1; j<n/2 and k>1; ++j){
ans[i][j] = ans[i][n-j-1] = '#';
k -= 2;
}
i++;
}
for(i=1; i<=2 and k>0; ++i, k--)
ans[i][n/2] = '#';
cout << "YES" << endl;
for(int i=0; i<4; ++i){
for(int j=0; j<n; ++j)
cout << ans[i][j];
cout << endl;
}
return 0;
}
|
jschwindt/Venganzas-del-Pasado | app/views/torrents/index.rss.builder | <reponame>jschwindt/Venganzas-del-Pasado
# RSS 2.0 feed of torrents for recent posts (La Venganza será Terrible).
xml.instruct! :xml, :version => '1.0'
xml.rss :version => "2.0", 'xmlns:atom' => "http://www.w3.org/2005/Atom" do
  xml.channel do
    xml.title 'Venganzas del Pasado - Torrents de programas recientes'
    xml.description 'Torrents de audios de La Venganza será Terrible'
    xml.link root_url
    # One <item> per torrent; title/date come from the associated post.
    for torrent in @torrents
      xml.item do
        xml.title torrent.post.title
        xml.pubDate torrent.post.created_at.rfc822
        xml.link torrent.torrent_url
        xml.guid post_url(torrent.post), :isPermaLink => "true"
        # NOTE(review): enclosure length is hard-coded to 1974; RSS readers
        # expect the actual file size in bytes -- confirm this is intentional.
        xml.enclosure :url => torrent.torrent_url, :length => 1974, :type => "application/x-bittorrent"
      end
    end
  end
end
|
devon-ye/demos-parent | framework/spring-app/src/main/java/org/devon/spring/bean/autowiring/BeanAutoWiringDao.java | package org.devon.spring.bean.autowiring;
/**
* Created by lenovo on 2017/12/5.
*/
/**
 * Demo DAO used in the Spring autowiring sample; {@link #print()} simply
 * announces on stdout that the bean was wired in.
 */
public class BeanAutoWiringDao {

    /** Writes a fixed confirmation line to standard output. */
    public void print() {
        final String message = "print() method say:BeanAutoWiringDao object is autowiring!";
        System.out.println(message);
    }
}
|
AshlynMarie22/Test | client/src/context/AlertContext.js | import React from "react";
const AlertContext = React.createContext({
message: "",
type: "",
setAlert: () => {},
});
export default AlertContext;
|
qh4r/nodeFun | nodeschool/functionalJs/p16.js | <gh_stars>0
// Walks an npm-style dependency tree and returns a sorted, de-duplicated
// list of "name@version" strings for every direct and transitive dependency.
// Returns [] for a missing tree or a tree with no `dependencies` key.
function getDependencies(tree) {
    // Keys double as a set for de-duplication across branches.
    var seen = {};
    if (!tree || !tree.dependencies) return [];
    // (fix: removed the unused outer `var child = []` that shadowed the
    // recursion result below)
    Object.keys(tree.dependencies).forEach(function (name) {
        var dep = tree.dependencies[name];
        seen[name + '@' + dep.version] = 1;
        getDependencies(dep).forEach(function (id) {
            seen[id] = 1;
        });
    });
    return Object.keys(seen).sort();
}
module.exports = getDependencies;
ScalablyTyped/SlinkyTyped | d/dialogflow/src/main/scala/typingsSlinky/dialogflow/mod/google/cloud/dialogflow/v2beta1/DeleteKnowledgeBaseRequest.scala | <filename>d/dialogflow/src/main/scala/typingsSlinky/dialogflow/mod/google/cloud/dialogflow/v2beta1/DeleteKnowledgeBaseRequest.scala
package typingsSlinky.dialogflow.mod.google.cloud.dialogflow.v2beta1
import org.scalablytyped.runtime.StringDictionary
import typingsSlinky.protobufjs.mod.IConversionOptions
import typingsSlinky.protobufjs.mod.Reader
import typingsSlinky.protobufjs.mod.Writer
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
/** Represents a DeleteKnowledgeBaseRequest. */
// NOTE: this appears to be ScalablyTyped generator output (facade over the
// protobufjs-generated Dialogflow message); prefer regenerating over
// editing by hand.
@JSImport("dialogflow/protos/protos", "google.cloud.dialogflow.v2beta1.DeleteKnowledgeBaseRequest")
@js.native
/**
  * Constructs a new DeleteKnowledgeBaseRequest.
  * @param [properties] Properties to set
  */
class DeleteKnowledgeBaseRequest () extends IDeleteKnowledgeBaseRequest {
  def this(properties: IDeleteKnowledgeBaseRequest) = this()
  
  /** DeleteKnowledgeBaseRequest force. */
  @JSName("force")
  var force_DeleteKnowledgeBaseRequest: Boolean = js.native
  
  /** DeleteKnowledgeBaseRequest name. */
  @JSName("name")
  var name_DeleteKnowledgeBaseRequest: String = js.native
  
  /**
    * Converts this DeleteKnowledgeBaseRequest to JSON.
    * @returns JSON object
    */
  def toJSON(): StringDictionary[js.Any] = js.native
}
// Companion with the static protobufjs helpers (create/encode/decode/
// verify/convert). Generated facade -- see note on the class above is not
// needed here: each overload simply binds one JS static.
object DeleteKnowledgeBaseRequest {
  
  /**
    * Creates a new DeleteKnowledgeBaseRequest instance using the specified properties.
    * @param [properties] Properties to set
    * @returns DeleteKnowledgeBaseRequest instance
    */
  /* static member */
  @JSImport("dialogflow/protos/protos", "google.cloud.dialogflow.v2beta1.DeleteKnowledgeBaseRequest.create")
  @js.native
  def create(): DeleteKnowledgeBaseRequest = js.native
  @JSImport("dialogflow/protos/protos", "google.cloud.dialogflow.v2beta1.DeleteKnowledgeBaseRequest.create")
  @js.native
  def create(properties: IDeleteKnowledgeBaseRequest): DeleteKnowledgeBaseRequest = js.native
  
  @JSImport("dialogflow/protos/protos", "google.cloud.dialogflow.v2beta1.DeleteKnowledgeBaseRequest.decode")
  @js.native
  def decode(reader: js.typedarray.Uint8Array): DeleteKnowledgeBaseRequest = js.native
  @JSImport("dialogflow/protos/protos", "google.cloud.dialogflow.v2beta1.DeleteKnowledgeBaseRequest.decode")
  @js.native
  def decode(reader: js.typedarray.Uint8Array, length: Double): DeleteKnowledgeBaseRequest = js.native
  /**
    * Decodes a DeleteKnowledgeBaseRequest message from the specified reader or buffer.
    * @param reader Reader or buffer to decode from
    * @param [length] Message length if known beforehand
    * @returns DeleteKnowledgeBaseRequest
    * @throws {Error} If the payload is not a reader or valid buffer
    * @throws {$protobuf.util.ProtocolError} If required fields are missing
    */
  /* static member */
  @JSImport("dialogflow/protos/protos", "google.cloud.dialogflow.v2beta1.DeleteKnowledgeBaseRequest.decode")
  @js.native
  def decode(reader: Reader): DeleteKnowledgeBaseRequest = js.native
  @JSImport("dialogflow/protos/protos", "google.cloud.dialogflow.v2beta1.DeleteKnowledgeBaseRequest.decode")
  @js.native
  def decode(reader: Reader, length: Double): DeleteKnowledgeBaseRequest = js.native
  
  @JSImport("dialogflow/protos/protos", "google.cloud.dialogflow.v2beta1.DeleteKnowledgeBaseRequest.decodeDelimited")
  @js.native
  def decodeDelimited(reader: js.typedarray.Uint8Array): DeleteKnowledgeBaseRequest = js.native
  /**
    * Decodes a DeleteKnowledgeBaseRequest message from the specified reader or buffer, length delimited.
    * @param reader Reader or buffer to decode from
    * @returns DeleteKnowledgeBaseRequest
    * @throws {Error} If the payload is not a reader or valid buffer
    * @throws {$protobuf.util.ProtocolError} If required fields are missing
    */
  /* static member */
  @JSImport("dialogflow/protos/protos", "google.cloud.dialogflow.v2beta1.DeleteKnowledgeBaseRequest.decodeDelimited")
  @js.native
  def decodeDelimited(reader: Reader): DeleteKnowledgeBaseRequest = js.native
  
  /**
    * Encodes the specified DeleteKnowledgeBaseRequest message. Does not implicitly {@link google.cloud.dialogflow.v2beta1.DeleteKnowledgeBaseRequest.verify|verify} messages.
    * @param message DeleteKnowledgeBaseRequest message or plain object to encode
    * @param [writer] Writer to encode to
    * @returns Writer
    */
  /* static member */
  @JSImport("dialogflow/protos/protos", "google.cloud.dialogflow.v2beta1.DeleteKnowledgeBaseRequest.encode")
  @js.native
  def encode(message: IDeleteKnowledgeBaseRequest): Writer = js.native
  @JSImport("dialogflow/protos/protos", "google.cloud.dialogflow.v2beta1.DeleteKnowledgeBaseRequest.encode")
  @js.native
  def encode(message: IDeleteKnowledgeBaseRequest, writer: Writer): Writer = js.native
  
  /**
    * Encodes the specified DeleteKnowledgeBaseRequest message, length delimited. Does not implicitly {@link google.cloud.dialogflow.v2beta1.DeleteKnowledgeBaseRequest.verify|verify} messages.
    * @param message DeleteKnowledgeBaseRequest message or plain object to encode
    * @param [writer] Writer to encode to
    * @returns Writer
    */
  /* static member */
  @JSImport("dialogflow/protos/protos", "google.cloud.dialogflow.v2beta1.DeleteKnowledgeBaseRequest.encodeDelimited")
  @js.native
  def encodeDelimited(message: IDeleteKnowledgeBaseRequest): Writer = js.native
  @JSImport("dialogflow/protos/protos", "google.cloud.dialogflow.v2beta1.DeleteKnowledgeBaseRequest.encodeDelimited")
  @js.native
  def encodeDelimited(message: IDeleteKnowledgeBaseRequest, writer: Writer): Writer = js.native
  
  /**
    * Creates a DeleteKnowledgeBaseRequest message from a plain object. Also converts values to their respective internal types.
    * @param object Plain object
    * @returns DeleteKnowledgeBaseRequest
    */
  /* static member */
  @JSImport("dialogflow/protos/protos", "google.cloud.dialogflow.v2beta1.DeleteKnowledgeBaseRequest.fromObject")
  @js.native
  def fromObject(`object`: StringDictionary[js.Any]): DeleteKnowledgeBaseRequest = js.native
  
  /**
    * Creates a plain object from a DeleteKnowledgeBaseRequest message. Also converts values to other types if specified.
    * @param message DeleteKnowledgeBaseRequest
    * @param [options] Conversion options
    * @returns Plain object
    */
  /* static member */
  @JSImport("dialogflow/protos/protos", "google.cloud.dialogflow.v2beta1.DeleteKnowledgeBaseRequest.toObject")
  @js.native
  def toObject(message: DeleteKnowledgeBaseRequest): StringDictionary[js.Any] = js.native
  @JSImport("dialogflow/protos/protos", "google.cloud.dialogflow.v2beta1.DeleteKnowledgeBaseRequest.toObject")
  @js.native
  def toObject(message: DeleteKnowledgeBaseRequest, options: IConversionOptions): StringDictionary[js.Any] = js.native
  
  /**
    * Verifies a DeleteKnowledgeBaseRequest message.
    * @param message Plain object to verify
    * @returns `null` if valid, otherwise the reason why it is not
    */
  /* static member */
  @JSImport("dialogflow/protos/protos", "google.cloud.dialogflow.v2beta1.DeleteKnowledgeBaseRequest.verify")
  @js.native
  def verify(message: StringDictionary[js.Any]): String | Null = js.native
}
|
arkav/open-oryx | src/main/java/dev/arkav/openoryx/game/models/MapInfo.java | <gh_stars>1-10
package dev.arkav.openoryx.game.models;
/**
 * Immutable description of a game map: its name plus height and width
 * (units are whatever the caller supplies -- tiles vs. pixels is not
 * established here; TODO confirm at the call site).
 */
public class MapInfo {
    // Made final: the class exposes no setters, so the fields are
    // effectively immutable -- declaring them final enforces it.
    private final String name;
    private final int height;
    private final int width;

    /**
     * @param name   map display name
     * @param height map height
     * @param width  map width
     */
    public MapInfo (String name, int height, int width) {
        this.name = name;
        this.height = height;
        this.width = width;
    }

    /** @return the map's name */
    public String getName() {
        return name;
    }

    /** @return the map's height */
    public int getHeight() {
        return height;
    }

    /** @return the map's width */
    public int getWidth() {
        return width;
    }
}
|
mochaaP/ArtPlayer | packages/artplayer/src/contextmenu/aspectRatio.js | import { inverseClass, queryAll } from '../utils';
// Builds the "Aspect ratio" context-menu entry: three selectable ratios,
// with the current selection highlighted via the art-current class.
export default function aspectRatio(option) {
    return (art) => {
        const { i18n, player } = art;
        return {
            ...option,
            html: `${i18n.get('Aspect ratio')}:
                   <span data-ratio="default" class="art-current">${i18n.get('Default')}</span>
                   <span data-ratio="4:3">4:3</span>
                   <span data-ratio="16:9">16:9</span>
                `,
            // Apply the clicked ratio and close the menu; ignore clicks
            // that land outside the ratio spans.
            click: (contextmenu, event) => {
                const selected = event.target.dataset.ratio;
                if (!selected) return;
                player.aspectRatio = selected;
                contextmenu.show = false;
            },
            // Keep the highlighted entry in sync with the player state.
            mounted: ($menu) => {
                art.on('aspectRatio', (ratio) => {
                    const spans = queryAll('span', $menu);
                    const active = spans.find((item) => item.dataset.ratio === ratio);
                    if (active) {
                        inverseClass(active, 'art-current');
                    }
                });
            },
        };
    };
}
|
kirchnerlab/libpipe | include/libpipe/ctc/ModificationTimeManager.hpp | <gh_stars>1-10
/*
*
* Copyright (c) 2011 <NAME>
* Copyright (c) 2010 <NAME>
*
* This file is part of libpipe.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#ifndef __LIBPIPE_INCLUDE_LIBPIPE_CTC_MODIFICATIONTIMEMANAGER_HPP__
#define __LIBPIPE_INCLUDE_LIBPIPE_CTC_MODIFICATIONTIMEMANAGER_HPP__
#include <libpipe/config.hpp>
#include <libpipe/Request.hpp>
#include <libpipe/ctc/Manager.hpp>
namespace libpipe {
namespace ctc {
/** A manager class that makes use of the modification time information of
* the algoritms it manages.
* @ingroup ctc
*/
/** A manager class that makes use of the modification time information of
 * the algorithms it manages (i.e. only reprocesses when an algorithm's
 * modification time indicates its output may be stale).
 * @ingroup ctc
 */
class LIBPIPE_EXPORT ModificationTimeManager : public Manager
{
public:
    /** Constructor.
     */
    ModificationTimeManager();

    /** Destructor.
     * Virtual, to allow subclassing.
     */
    virtual ~ModificationTimeManager();

    /** Processes a request, taking into account the current modification time
     * of the \c Manager's algorithm.
     * @param[in,out] req The request object.
     */
    virtual void processRequest(libpipe::Request& req);
};
} // end namespace ctc
} // end namespace libpipe
#endif //__LIBPIPE_INCLUDE_LIBPIPE_CTC_MODIFICATIONTIMEMANAGER_HPP__
|
VIGameStudio/Platut | core/src/com/platut/scripts/PlayerController.java | package com.platut.scripts;
import com.badlogic.ashley.core.Entity;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Input;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.physics.box2d.Contact;
import com.badlogic.gdx.physics.box2d.Fixture;
import com.badlogic.gdx.physics.box2d.WorldManifold;
import com.engine.core.BaseScript;
import com.engine.core.SceneManager;
import com.engine.core.components.ColliderCmp;
import static com.engine.core.Constants.MeterToPixels;
/**
* Created by conor on 18/07/16.
*/
/**
 * Player movement script: applies keyboard-driven impulses to the player's
 * Box2D body, handles jumping with a short after-ground grace window, and
 * eases the game camera towards the player each frame.
 */
public class PlayerController extends BaseScript {

    public PlayerController(SceneManager sceneManager, Entity entity) {
        super(sceneManager, entity);
    }

    OrthographicCamera gameCamera;
    ColliderCmp collider;

    // Facing direction: 1 = right, -1 = left (used to mirror sprites in draw()).
    int direction = 1;

    // Key bindings.
    int KEY_LEFT = Input.Keys.LEFT;
    int KEY_RIGHT = Input.Keys.RIGHT;
    int KEY_JUMP = Input.Keys.SPACE;

    // Horizontal speed cap and upward jump impulse (Box2D units -- TODO confirm).
    final static float MAX_VELOCITY = 15f;
    final static float JUMP_VELOCITY = 50f;

    boolean jump = false;
    boolean grounded = false;

    // Main body fixture and foot "sensor" fixture, resolved by user-data tag in start().
    Fixture bodyFixture;
    Fixture sensorFixture;

    // Seconds with no horizontal input (drives the high-friction stop below).
    float stillTime = 0;
    // Timestamp (nanoseconds) of the last frame the player counted as grounded.
    long lastGroundTime = 0;

    @Override
    public void start() {
        // Snap the camera to the player on spawn.
        gameCamera = getSceneManager().getCurrentScene().getGameCamera();
        gameCamera.position.set(getTransform().position);
        collider = getComponent(ColliderCmp.class);
        // Cache the fixtures tagged "body" and "sensor" for per-state friction tweaks.
        for (Fixture fixture : collider.body.getFixtureList()) {
            if (fixture.getUserData().equals("body")) {
                bodyFixture = fixture;
            } else if (fixture.getUserData().equals("sensor")) {
                sensorFixture = fixture;
            }
        }
    }

    @Override
    public void update(float deltaTime) {
        //System.out.println ("grounded: "+grounded);
        Vector2 vel = collider.body.getLinearVelocity();
        Vector2 pos = collider.body.getPosition();
        // ~100ms grace window: the player still counts as grounded shortly
        // after leaving the ground, so edge-of-ledge jumps register.
        if (grounded) {
            lastGroundTime = System.nanoTime();
        } else {
            if (System.nanoTime() - lastGroundTime < 100000000) {
                grounded = true;
            }
        }
        // Clamp horizontal speed to the cap.
        if (Math.abs(vel.x) > MAX_VELOCITY) {
            vel.x = Math.signum(vel.x) * MAX_VELOCITY;
            collider.body.setLinearVelocity(vel.x, vel.y);
        }
        // Damp horizontal velocity while no movement key is held.
        if (!Gdx.input.isKeyPressed(KEY_LEFT) && !Gdx.input.isKeyPressed(KEY_RIGHT)) {
            stillTime += Gdx.graphics.getDeltaTime();
            collider.body.setLinearVelocity(vel.x * 0.9f, vel.y);
        } else {
            stillTime = 0;
        }
        // Friction policy: none while airborne (avoids sticking to walls);
        // very high when grounded and idle >0.2s (hard stop); low otherwise.
        if (!grounded) {
            bodyFixture.setFriction(0f);
            sensorFixture.setFriction(0f);
        } else {
            if (!Gdx.input.isKeyPressed(KEY_LEFT) && !Gdx.input.isKeyPressed(KEY_RIGHT) && stillTime > 0.2) {
                bodyFixture.setFriction(100f);
                sensorFixture.setFriction(100f);
            } else {
                bodyFixture.setFriction(0.2f);
                sensorFixture.setFriction(0.2f);
            }
        }
        // Movement impulses, only while below the speed cap.
        if (Gdx.input.isKeyPressed(KEY_LEFT) && vel.x > -MAX_VELOCITY) {
            direction = -1;
            collider.body.applyLinearImpulse(-2f, 0, pos.x, pos.y, true);
        }
        if (Gdx.input.isKeyPressed(KEY_RIGHT) && vel.x < MAX_VELOCITY) {
            direction = 1;
            collider.body.applyLinearImpulse(2f, 0, pos.x, pos.y, true);
        }
        // Jump: zero vertical velocity and nudge the body up slightly first
        // so the impulse yields a consistent jump height.
        if (jump) {
            jump = false;
            collider.body.setLinearVelocity(vel.x, 0);
            collider.body.setTransform(pos.x, pos.y + 0.01f, 0);
            collider.body.applyLinearImpulse(0, JUMP_VELOCITY, pos.x, pos.y, true);
        }
        // Sync the transform from physics (meters -> pixels) and ease the camera.
        getTransform().position.x = collider.body.getPosition().x * MeterToPixels;
        getTransform().position.y = collider.body.getPosition().y * MeterToPixels;
        gameCamera.position.lerp(getTransform().position, 0.1f);
    }

    @Override
    public void draw(SpriteBatch batch) {
        // Sprite drawing is currently disabled; kept for reference.
        /*if (collider.body.getLinearVelocity().isZero(1f)) {
            idle.setX(getTransform().position.x - 8);
            idle.setY(getTransform().position.y - 8);
            idle.setScale(direction, 1);
            idle.draw(batch);
        } else {
            run.setX(getTransform().position.x - 8);
            run.setY(getTransform().position.y - 8);
            run.setScale(direction, 1);
            run.draw(batch);
        }*/
    }

    @Override
    public boolean keyDown(int keycode) {
        if (keycode == KEY_JUMP) jump = true;
        return false;
    }

    @Override
    public boolean keyUp(int keycode) {
        if (keycode == KEY_JUMP) jump = false;
        return false;
    }

    @Override
    public void beginContact(Contact contact, Entity other) {
        checkGrounded(contact);
    }

    @Override
    public void endContact(Contact contact, Entity other) {
        checkGrounded(contact);
    }

    // NOTE(review): this computes the contact normal but never updates
    // `grounded`; as written, `grounded` can only flip true through the
    // grace-window logic in update(). Looks unfinished -- confirm intent.
    private void checkGrounded(Contact contact) {
        WorldManifold manifold = contact.getWorldManifold();
        float normalAngle = manifold.getNormal().angle();
        //System.out.println("normalAngle: "+normalAngle);
    }
}
|
ardyno/nuxt.js | packages/cli/test/fixtures/nuxt.async-error.js | export default () => Promise.reject(new Error('Async Config Error'))
|
rawcliffeisaac/leetcode-1 | algorithms/reverse_bits.rb | # https://leetcode.com/problems/reverse-bits/
#
# Reverse bits of a given 32 bits unsigned integer.
#
# For example, given input 43261596 (represented in binary as
# 00000010100101000001111010011100), return 964176192 (represented in binary
# as 00111001011110000010100101000000).
#
# Credits:
#
# Special thanks to @ts for adding this problem and creating all test
# cases.
# @param {Integer} n, a positive integer
# @return {Integer}
# Pull bits off n least-significant first while shifting them into the
# accumulator; after 32 rounds the bit order is reversed.
def reverse_bits(n)
  result = 0
  32.times do
    result = (result << 1) | (n & 1)
    n >>= 1
  end
  result
end
|
bgoonz/Front-End-Frameworks-Practice | gatsby/gatsby-learning/.cache/fast-refresh-overlay/index.js | import * as React from "react";
import { ErrorBoundary } from "./components/error-boundary";
import { ShadowPortal } from "../shadow-portal";
import { Style } from "./style";
import { BuildError } from "./components/build-error";
import { RuntimeErrors } from "./components/runtime-errors";
import { GraphqlErrors } from "./components/graphql-errors";
import { DevSsrError } from "./components/dev-ssr-error";
// Pure state reducer for the fast-refresh overlay. Unknown actions return
// the state object unchanged (same reference).
const reducer = (state, event) => {
  const { action, payload } = event;
  if (action === `CLEAR_COMPILE_ERROR`) {
    return { ...state, buildError: null };
  }
  if (action === `CLEAR_RUNTIME_ERRORS`) {
    return { ...state, errors: [] };
  }
  if (action === `CLEAR_DEV_SSR_ERROR`) {
    return { ...state, devSsrError: null };
  }
  if (action === `SHOW_COMPILE_ERROR`) {
    return { ...state, buildError: payload };
  }
  if (action === `SHOW_DEV_SSR_ERROR`) {
    return { ...state, devSsrError: payload };
  }
  // Both runtime-error actions append to the accumulated list.
  if (action === `HANDLE_RUNTIME_ERROR` || action === `SHOW_RUNTIME_ERRORS`) {
    return { ...state, errors: state.errors.concat(payload) };
  }
  if (action === `SHOW_GRAPHQL_ERRORS`) {
    return { ...state, graphqlErrors: payload };
  }
  if (action === `CLEAR_GRAPHQL_ERRORS`) {
    return { ...state, graphqlErrors: [] };
  }
  if (action === `DISMISS`) {
    return { ...state, buildError: null, errors: [], graphqlErrors: [] };
  }
  return state;
};
// Initial overlay state: no runtime, build, dev-SSR, or GraphQL errors.
const initialState = {
  errors: [],
  buildError: null,
  devSsrError: null,
  graphqlErrors: [],
};
// Top-level fast-refresh overlay: listens on the global `_gatsbyEvents`
// FAST_REFRESH channel, accumulates errors via `reducer`, and renders the
// highest-priority error panel (build > runtime > graphql > dev-SSR) into
// a shadow-DOM portal over the app.
function DevOverlay({ children }) {
  const [state, dispatch] = React.useReducer(reducer, initialState);

  React.useEffect(() => {
    // Replace the plain event array with a dispatching sink, then replay
    // any events that were queued before this component mounted.
    const gatsbyEvents = window._gatsbyEvents || [];
    window._gatsbyEvents = {
      push: ([channel, event]) => {
        if (channel === `FAST_REFRESH`) {
          dispatch(event);
        }
      },
    };
    gatsbyEvents.forEach(([channel, event]) => {
      if (channel === `FAST_REFRESH`) {
        dispatch(event);
      }
    });
    return () => {
      // Restore a plain queue on unmount.
      window._gatsbyEvents = [];
    };
  }, [dispatch]);

  const dismiss = () => {
    dispatch({ action: `DISMISS` });
    window._gatsbyEvents = [];
  };

  const hasBuildError = state.buildError !== null;
  const hasRuntimeErrors = Boolean(state.errors.length);
  const hasGraphqlErrors = Boolean(state.graphqlErrors.length);
  const hasDevSsrError = state.devSsrError !== null;
  const hasErrors =
    hasBuildError || hasRuntimeErrors || hasGraphqlErrors || hasDevSsrError;

  // This component has a deliberate order (priority)
  const ErrorComponent = () => {
    if (hasBuildError) {
      return <BuildError error={state.buildError} />;
    }
    if (hasRuntimeErrors) {
      return <RuntimeErrors errors={state.errors} dismiss={dismiss} />;
    }
    if (hasGraphqlErrors) {
      return <GraphqlErrors errors={state.graphqlErrors} dismiss={dismiss} />;
    }
    if (hasDevSsrError) {
      return <DevSsrError error={state.devSsrError} />;
    }
    return null;
  };

  return (
    <React.Fragment>
      <ErrorBoundary hasErrors={hasErrors}>{children ?? null}</ErrorBoundary>
      {hasErrors ? (
        <ShadowPortal identifier="gatsby-fast-refresh">
          <Style />
          <ErrorComponent />
        </ShadowPortal>
      ) : undefined}
    </React.Fragment>
  );
}

export default DevOverlay;
|
remia/openexr | docs/src/writeTiled1.cpp | void
writeTiled1 (
const char fileName[],
Array2D<GZ>& pixels,
int width,
int height,
int tileWidth,
int tileHeight)
{
Header header (width, height); // 1
header.channels ().insert ("G", Channel (HALF)); // 2
header.channels ().insert ("Z", Channel (FLOAT)); // 3
header.setTileDescription (
TileDescription (tileWidth, tileHeight, ONE_LEVEL)); // 4
TiledOutputFile out (fileName, header); // 5
FrameBuffer frameBuffer; // 6
frameBuffer.insert (
"G", // name // 7
Slice (
HALF, // type // 8
(char*) &pixels[0][0].g, // base // 9
sizeof (pixels[0][0]) * 1, // xStride // 10
sizeof (pixels[0][0]) * width)); // yStride // 11
frameBuffer.insert (
"Z", // name // 12
Slice (
FLOAT, // type // 13
(char*) &pixels[0][0].z, // base // 14
sizeof (pixels[0][0]) * 1, // xStride // 15
sizeof (pixels[0][0]) * width)); // yStride // 16
out.setFrameBuffer (frameBuffer); // 17
out.writeTiles (0, out.numXTiles () - 1, 0, out.numYTiles () - 1); // 18
}
|
sizhongxia/momv_txpro | tm-pro-picture/src/main/java/org/tm/pro/picture/zimg/model/ZimgSucInfo.java | package org.tm.pro.picture.zimg.model;
import java.io.Serializable;
/**
 * Serializable value holder for an md5/size pair (presumably the payload of
 * a successful zimg upload response -- confirm against the caller).
 */
public class ZimgSucInfo implements Serializable {

	private static final long serialVersionUID = 1L;

	private String md5;
	private Long size;

	public String getMd5() {
		return md5;
	}

	public void setMd5(String md5) {
		this.md5 = md5;
	}

	public Long getSize() {
		return size;
	}

	public void setSize(Long size) {
		this.size = size;
	}

	@Override
	public String toString() {
		// Same rendering as the original concatenation, including "null"
		// for unset fields.
		return String.format("ZimgSucInfo [md5=%s, size=%s]", md5, size);
	}
}
|
sailingfree/Hardware-genetic-programmng | PhD/lilgp-modified/1.1/kernel/protoapp.h | <filename>PhD/lilgp-modified/1.1/kernel/protoapp.h
/* lil-gp Genetic Programming System, version 1.0, 11 July 1995
* Copyright (C) 1995 Michigan State University
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of version 2 of the GNU General Public License as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*
* <NAME> (<EMAIL>)
* Dr. <NAME> (<EMAIL>)
*
* Computer Science Department
* A-714 Wells Hall
* Michigan State University
* East Lansing, Michigan 48824
* USA
*
*/
#ifndef _PROTOAPP_H
#define _PROTOAPP_H

/* Application hooks that a lil-gp application implements; the kernel calls
 * these at the corresponding points of the GP run. */

int app_build_function_sets ( void );                  /* register function/terminal sets */
void app_eval_fitness ( individual * );                /* score one individual */
int app_create_output_streams ( void );                /* open app-specific output files */
int app_initialize ( int );                            /* app setup; nonzero return = failure */
void app_uninitialize ( void );                        /* app teardown */
void app_write_checkpoint ( FILE * );                  /* save app state to a checkpoint */
void app_read_checkpoint ( FILE * );                   /* restore app state from a checkpoint */
int app_end_of_evaluation ( int, multipop *, int, popstats *, popstats * ); /* per-generation hook */
void app_end_of_breeding ( int, multipop * );          /* post-breeding hook */
#endif
|
EvictionLab/eviction-lab-etl | scripts/utils_census.py | <filename>scripts/utils_census.py<gh_stars>1-10
import os
import sys
import csv
import time
import traceback
import pandas as pd
import sys
import json
from utils_validation import (merge_with_stats)
from utils_logging import logger
from census_patch import CensusPatch as Census
from data_constants import (COUNTY_CROSSWALK,
CENSUS_00_SF1_VARS, CENSUS_00_SF1_VAR_MAP,
CENSUS_00_SF3_VARS, CENSUS_00_SF3_VAR_MAP,
CENSUS_10_VARS, CENSUS_10_VAR_MAP, ACS_VARS,
ACS_VAR_MAP, ACS_12_VARS, ACS_12_VAR_MAP, END_YEAR)
# Require a Census API key up front; every query below goes through this client.
if os.getenv('CENSUS_KEY'):
    c = Census(os.getenv('CENSUS_KEY'))
else:
    raise Exception('Environment variable CENSUS_KEY not specified')
# NOTE: the lookups below hit the Census API at import time.
# all state names (except Puerto Rico, FIPS 72)
STATE_FIPS = [
    r for r in c.acs5.get(('NAME'), {'for': 'state:*'}) if r['state'] != '72'
]
# map from state FIPS code to state name
STATE_FIPS_MAP = {s['state']: s['NAME'] for s in STATE_FIPS}
# all county names in the US (except Puerto Rico)
STATE_COUNTY_FIPS = [
    r for r in c.acs5.get(('NAME'), {'for': 'county:*', 'in': 'state:*'})
    if r['state'] != '72'
]
# map from county fips code (2-digit state + 3-digit county) to county name
COUNTY_FIPS_MAP = {
    str(r['state']).zfill(2) + str(r['county']).zfill(3): r['NAME']
    for r in STATE_COUNTY_FIPS
}
# Map of geography level to column names to use for join
CENSUS_JOIN_KEYS = {
    'states': ['state'],
    'counties': ['state', 'county'],
    'cities': ['state', 'place'],
    'tracts': ['state', 'county', 'tract'],
    'block-groups': ['state', 'county', 'tract', 'block group'],
}
def split_geoid(geoid):
    """Split a concatenated FIPS geoid string into its component codes.

    Returns a dict containing whichever of 'state' (2 chars), 'county' (3),
    'tract' (6), 'bg' (1) and 'block' (remainder) the geoid is long enough
    to carry.
    """
    parts = {}
    length = len(geoid)
    if length > 1:
        parts['state'] = geoid[:2]
    if length > 4:
        parts['county'] = geoid[2:5]
    if length > 10:
        parts['tract'] = geoid[5:11]
    if length > 11:
        parts['bg'] = geoid[11:12]
    if length > 12:
        parts['block'] = geoid[12:]
    return parts
def changeBlockGroupsInCensusData(df, map_df, fromField, toField):
    """Remap county/tract/block group values in a Census API dataframe.

    map_df provides source->target block-group geoids in columns
    `fromField` and `toField`; every df row whose (county, tract,
    block group) matches a source geoid is rewritten in place to the
    target's components. Returns the (mutated) df.
    """
    mapping = dict(zip(map_df[fromField], map_df[toField]))
    for src_geoid, dst_geoid in mapping.items():
        src = split_geoid(src_geoid)
        dst = split_geoid(dst_geoid)
        matches = (
            (df['tract'] == src['tract'])
            & (df['block group'] == src['bg'])
            & (df['county'] == src['county'])
        )
        df.loc[matches, ['county', 'tract', 'block group']] = [
            dst['county'], dst['tract'], dst['bg']
        ]
    return df
def changeTractsInCensusData(df, map_df, fromField, toField):
    """Remap county/tract values in a Census API dataframe.

    Same idea as changeBlockGroupsInCensusData, but matching and rewriting
    only at tract granularity. Returns the (mutated) df.
    """
    mapping = dict(zip(map_df[fromField], map_df[toField]))
    for src_geoid, dst_geoid in mapping.items():
        src = split_geoid(src_geoid)
        dst = split_geoid(dst_geoid)
        matches = (df['tract'] == src['tract']) & (df['county'] == src['county'])
        df.loc[matches, ['county', 'tract']] = [dst['county'], dst['tract']]
    return df
def get_tract_crosswalk_09_00_df():
    """Build a trt09 -> trt00 tract crosswalk.

    Derives tract mappings from the block-group 09->00 crosswalk (by
    stripping the trailing block-group digit) and appends the explicit
    tract-level crosswalk file from conf/.
    """
    bg_df = get_block_group_crosswalk_df('changes_09acs_to_00cen.csv')
    bg_df['trt09'] = bg_df['bkg09'].str[:-1]
    bg_df['trt00'] = bg_df['bkg00'].str[:-1]
    bg_df = bg_df.drop(['county', 'bkg09', 'bkg00'], axis=1).drop_duplicates()
    conf_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'conf')
    tract_df = pd.read_csv(
        os.path.join(conf_dir, 'changes_09acs_to_00cen_tract.csv'),
        dtype={'cofips': 'object', 'trt09': 'object', 'trt00': 'object'}
    )
    return pd.concat([tract_df, bg_df])
def get_tract_crosswalk_09_10_df():
    """Build a trt09 -> trt10 tract crosswalk.

    Same construction as get_tract_crosswalk_09_00_df, but against the
    2010-census crosswalk files.
    """
    bg_df = get_block_group_crosswalk_df('changes_09acs_to_10cen.csv')
    bg_df['trt09'] = bg_df['bkg09'].str[:-1]
    bg_df['trt10'] = bg_df['bkg10'].str[:-1]
    bg_df = bg_df.drop(['county', 'bkg09', 'bkg10'], axis=1).drop_duplicates()
    conf_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'conf')
    tract_df = pd.read_csv(
        os.path.join(conf_dir, 'changes_09acs_to_10cen_tract.csv'),
        dtype={'cofips': 'object', 'trt09': 'object', 'trt10': 'object'}
    )
    return pd.concat([tract_df, bg_df])
def get_block_group_crosswalk_df(filename):
    """Load a block-group crosswalk CSV from the `conf` directory.

    All id columns are read as strings to preserve leading zeros. Rows
    flagged with nocompare == 1 are dropped, and the flag column removed.
    """
    conf_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'conf')
    df = pd.read_csv(
        os.path.join(conf_dir, filename),
        dtype={
            'cofips': 'object',
            'bkg00': 'object',
            'bkg09': 'object',
            'bkg10': 'object'
        }
    )
    if 'nocompare' in df.columns:
        df = df.loc[df['nocompare'] == 0]
        df = df.drop(['nocompare'], axis=1)
    return df
# Census tract names follow rules described here:
# https://www.census.gov/geo/reference/gtc/gtc_ct.html
def create_tract_name(tract):
    """Turn a zero-padded tract code into its display name.

    Leading zeros are stripped and the last two digits become an implied
    decimal suffix, which is omitted entirely when it is '00'.
    """
    stripped = str(tract).lstrip('0')
    base, decimals = stripped[:-2], stripped[-2:]
    if decimals == '00':
        return base
    return base + '.' + decimals
# Checks the data frame for any GEOIDs in the COUNTY_CROSSWALK data constant
# If there are matches, update the GEOID, name, parent-location with the
# mapped values.
def crosswalk_county(df):
    """Apply COUNTY_CROSSWALK GEOID re-mappings to `df` (mutated in place).

    Rows whose GEOID equals a crosswalk key receive the mapped GEOID and,
    when the display columns exist, the mapped name and parent-location.
    Returns the same DataFrame for chaining.
    """
    for k, v in COUNTY_CROSSWALK.items():
        if (
            'name' in df.columns.values and
            'parent-location' in df.columns.values
        ):
            # NOTE(review): this branch assumes a 'GEOID' column is present
            # whenever 'name'/'parent-location' are -- confirm with callers,
            # otherwise the .loc lookup below raises KeyError.
            df.loc[df['GEOID'] == k, ['GEOID', 'name', 'parent-location']] = (
                [v['GEOID'], v['name'], v['parent-location']]
            )
        elif 'GEOID' in df.columns.values:
            df.loc[df['GEOID'] == k, 'GEOID'] = v['GEOID']
    return df
# Translate ACS 2009 -> 2000 census block groups if needed.
# NOTE: some entries are also translated from ACS 2009 -> 2010; this happens
# when `convert_00_geo.py` is run with a weight of 1.
def crosswalk_acs_block_groups(df):
    """Rewrite 2009 ACS block-group GEOIDs in `df` to 2000 census GEOIDs."""
    acs_09_00_cw_df = get_block_group_crosswalk_df('changes_09acs_to_00cen.csv')
    if not acs_09_00_cw_df.empty:
        df = changeBlockGroupsInCensusData(df, acs_09_00_cw_df, 'bkg09', 'bkg00')
    return df
# Translate ACS 2009 -> 2000 census tracts if needed.
# NOTE: some entries are also translated from ACS 2009 -> 2010; this happens
# when `convert_00_geo.py` is run with a weight of 1.
def crosswalk_acs_tracts(df):
    """Rewrite 2009 ACS tract GEOIDs in `df` to 2000 census GEOIDs."""
    acs_09_00_cw_df = get_tract_crosswalk_09_00_df()
    if not acs_09_00_cw_df.empty:
        df = changeTractsInCensusData(df, acs_09_00_cw_df, 'trt09', 'trt00')
    return df
def update_acs12_block_groups(df):
    """Map ACS 2012 block-group GEOIDs in `df` onto 2010 census geography."""
    conf_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'conf')
    filename = 'changes_12acs_10cen_bkg.csv'
    # GEOIDs read as strings to preserve leading zeros.
    map_df = pd.read_csv(
        os.path.join(conf_dir, filename),
        dtype={ 'bkg10': 'object', 'bkg12': 'object' }
    )
    if not map_df.empty:
        df = changeBlockGroupsInCensusData(df, map_df, 'bkg12', 'bkg10')
    return df
def update_acs12_tracts(df):
    """Map ACS 2012 tract GEOIDs in `df` onto 2010 census geography."""
    conf_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'conf')
    filename = 'changes_12acs_10cen_tract.csv'
    # GEOIDs read as strings to preserve leading zeros.
    map_df = pd.read_csv(
        os.path.join(conf_dir, filename),
        dtype={ 'trt10': 'object', 'trt12': 'object' }
    )
    if not map_df.empty:
        df = changeTractsInCensusData(df, map_df, 'trt12', 'trt10')
    return df
# Add years column to data frame
def addDataFrameYears(df, start, end):
    """Return one copy of ``df`` per year in ``range(start, end)``.

    Each copy gains a ``year`` column set to that year; ``end`` itself is
    excluded, following ``range`` semantics. An empty range yields ``[]``.
    """
    return [df.assign(year=year) for year in range(start, end)]
# Handles merging and processing data fetched from the census API
# for the years 2000-2010
def postProcessData2000(sf1_df, sf3_df, acs_df, geo_str):
    """Merge 2000 SF1/SF3 census data and 2009 ACS data into year frames.

    Census (SF1 merged with SF3) rows are duplicated for years 2000-2004
    and ACS rows for 2005-2009, then concatenated. Returns None when all
    inputs have no columns or when neither result could be produced.
    """
    # Nothing was fetched at all: bail out early.
    if not len(sf1_df.columns.values) and not len(sf3_df.columns.values) and not len(acs_df.columns.values):
        return
    sf1_df.rename(columns=CENSUS_00_SF1_VAR_MAP, inplace=True)
    sf3_df.rename(columns=CENSUS_00_SF3_VAR_MAP, inplace=True)
    # SF1 already carries the name column; drop SF3's copy before merging.
    if 'name' in sf3_df.columns.values:
        sf3_df.drop('name', axis=1, inplace=True)
    # Merge SF1 and SF3 results, then add years 2000-2005
    if len(sf1_df.columns.values) and len(sf3_df.columns.values):
        sf1_df = crosswalk_county(sf1_df)
        sf3_df = crosswalk_county(sf3_df)
        # merge sf3 results into sf1 results
        log_label = '2000 ' + geo_str + ' sf1 <- sf3'
        census_df = merge_with_stats(log_label, sf1_df, sf3_df, on=CENSUS_JOIN_KEYS.get(geo_str), how='left')
        # Drop Puerto Rico (state FIPS 72).
        census_df = census_df.loc[census_df['state'] != '72'].copy()
        census_df_list = addDataFrameYears(census_df, 2000, 2005)
    else:
        census_df_list = None
    # Crosswalk the ACS data, then add years 2005-2010
    if len(acs_df.columns.values):
        # Sub-county geographies need GEOID translation onto 2000 geography.
        if geo_str == 'block-groups':
            acs_df = crosswalk_acs_block_groups(acs_df)
        elif geo_str == 'tracts':
            acs_df = crosswalk_acs_tracts(acs_df)
        acs_df = crosswalk_county(acs_df)
        acs_df = acs_df.loc[acs_df['state'] != '72'].copy()
        acs_df.rename(columns=ACS_VAR_MAP, inplace=True)
        acs_df_list = addDataFrameYears(acs_df, 2005, 2010)
    else:
        acs_df_list = None
    # return concated data
    if census_df_list and acs_df_list:
        return pd.concat(census_df_list + acs_df_list)
    elif acs_df_list:
        return pd.concat(acs_df_list)
    elif census_df_list:
        return pd.concat(census_df_list)
# Handles merging and processing data fetched from the census API
# for the years 2010-current
def postProcessData2010(sf1_df, acs12_df, acs_df, geo_str):
    """Merge 2010 SF1 data with ACS-2012-only vars and append ACS year frames.

    The SF1 frame is tagged with year 2010; the ACS frame is duplicated for
    years 2011..END_YEAR-1. Returns the concatenation of all year frames.
    """
    sf1_df.rename(columns=CENSUS_10_VAR_MAP, inplace=True)
    acs12_df.rename(columns=ACS_12_VAR_MAP, inplace=True)
    # SF1 already carries the name column; drop the ACS-2012 copy.
    if 'name' in acs12_df.columns.values:
        acs12_df.drop('name', axis=1, inplace=True)
    if len(acs12_df.columns.values) and not acs12_df.empty:
        # update ACS12 entries that map to 2010 geography
        if geo_str == 'block-groups':
            acs12_df = update_acs12_block_groups(acs12_df)
        if geo_str == 'tracts':
            acs12_df = update_acs12_tracts(acs12_df)
    if not sf1_df.empty and not acs12_df.empty:
        # Merge vars that are only in ACS to 2010 census
        log_label = '2010 ' + geo_str + ' sf1_df <- acs12_df'
        sf1_df = merge_with_stats(log_label, sf1_df, acs12_df, on=CENSUS_JOIN_KEYS.get(geo_str), how='left')
    if not sf1_df.empty:
        # Drop Puerto Rico (state FIPS 72) and tag the census frame's year.
        sf1_df = sf1_df.loc[sf1_df['state'] != '72'].copy()
        sf1_df['year'] = 2010
    # BUGFIX: acs_df_list was previously bound only inside the branch below,
    # so an empty ACS frame raised NameError at the concat. Default to [].
    acs_df_list = []
    if not acs_df.empty:
        acs_df = acs_df.loc[acs_df['state'] != '72'].copy()
        acs_df.rename(columns=ACS_VAR_MAP, inplace=True)
        acs_df_list = addDataFrameYears(acs_df, 2011, END_YEAR)
    return pd.concat([sf1_df] + acs_df_list)
class CensusDataStore:
    """Fetches census/ACS data from the Census API at several geography levels.

    Thin wrapper around the module-level census client ``c`` that adds retry
    logic, county/tract/block-group iteration, and post-processing into
    year-tagged DataFrames via postProcessData2000 / postProcessData2010.
    """

    def __init__(self):
        # Crosswalks loaded once and reused across per-county lookups.
        self.crosswalks = {
            'acs_09_00': get_block_group_crosswalk_df('changes_09acs_to_00cen.csv')
        }

    # Get the crosswalk for a specific county
    def getCountyBlockGroupCrosswalk(self, cw_name, county):
        """Return the rows of crosswalk `cw_name` for the given county FIPS."""
        cw_df = self.crosswalks[cw_name]
        return cw_df.loc[cw_df['cofips'] == county]

    # Fetches results from the provided Census API source
    def fetchResults(self, source, items, lookup_dict, year=None):
        """Query the Census API, retrying up to 10 times, 3 minutes apart.

        Returns the raw API response, or None once every attempt has failed.
        """
        for attempt in range(10):
            try:
                logger.debug('fetching ' + (str(year) if year else '') + ' ' + source + ' data ' + str(lookup_dict))
                if year:
                    return getattr(c, source).get(items, lookup_dict, year=year)
                return getattr(c, source).get(items, lookup_dict)
            except Exception:
                # BUGFIX: was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit during the 3-minute sleeps and
                # made the retry loop impossible to interrupt cleanly.
                exctype, value = sys.exc_info()[:2]
                logger.debug('received ' + str(exctype.__name__) + ' fetching ' + str(year) + ' ' + source + ' data ' + str(lookup_dict) + ', will retry shortly')
                logger.debug(value)
                time.sleep(180)
        # could not retrieve data after 10 attempts (~30 min of backoff)
        logger.error("could not retrieve " + str(year) + " " + source + " data for: " + json.dumps(lookup_dict))
        return None

    # Returns a dataframe with the results or empty dataframe if error
    def fetchData(self, source, items, lookup_dict, year):
        """Like fetchResults, but wraps the response in a DataFrame.

        A failed or empty fetch yields an empty DataFrame (and a log line).
        """
        results_df = pd.DataFrame(self.fetchResults(source, items, lookup_dict, year=year))
        if results_df.empty:
            logger.info('received empty result for query: ' + str(year) + ' ' + source + ' data ' + str(lookup_dict))
        return results_df

    # Fetch data for all states in the US
    def fetchStates(self, source, items, year):
        lookup_dict = { 'for': 'state:*' }
        return self.fetchData(source, items, lookup_dict, year)

    # Fetches data for all states for 2000-2009
    def fetchAllStateData2000(self):
        logger.debug('starting fetch for all state level data for 2000-2009')
        census_sf1_df = self.fetchStates('sf1', CENSUS_00_SF1_VARS, 2000)
        census_sf3_df = self.fetchStates('sf3', CENSUS_00_SF3_VARS, 2000)
        acs_df = self.fetchStates('acs5', ACS_VARS, 2009)
        return postProcessData2000(census_sf1_df, census_sf3_df, acs_df, 'states')

    # Fetches data for all states for 2010-current
    def fetchAllStateData2010(self):
        logger.debug('starting fetch for all state level data for 2010-current')
        census_df = self.fetchStates('sf1', CENSUS_10_VARS, 2010)
        acs_12_df = self.fetchStates('acs5', ACS_12_VARS, 2012)
        acs_df = self.fetchStates('acs5', ACS_VARS, 2015)
        return postProcessData2010(census_df, acs_12_df, acs_df, 'states')

    # Fetch data for all counties in the US
    def fetchCounties(self, source, items, year):
        lookup_dict = { 'for': 'county:*', 'in': 'state:*' }
        return self.fetchData(source, items, lookup_dict, year)

    # Fetches data for all counties for 2000-2009
    def fetchAllCountyData2000(self):
        logger.debug('starting fetch for all county level data for 2000-2009')
        census_sf1_df = self.fetchCounties('sf1', CENSUS_00_SF1_VARS, 2000)
        census_sf3_df = self.fetchCounties('sf3', CENSUS_00_SF3_VARS, 2000)
        acs_df = self.fetchCounties('acs5', ACS_VARS, 2009)
        return postProcessData2000(census_sf1_df, census_sf3_df, acs_df, 'counties')

    # Fetches data for all counties for 2010-current
    def fetchAllCountyData2010(self):
        logger.debug('starting fetch for all county level data for 2010-current')
        census_df = self.fetchCounties('sf1', CENSUS_10_VARS, 2010)
        acs_12_df = self.fetchCounties('acs5', ACS_12_VARS, 2012)
        acs_df = self.fetchCounties('acs5', ACS_VARS, 2015)
        return postProcessData2010(census_df, acs_12_df, acs_df, 'counties')

    # Fetch data for all cities in the US
    def fetchCities(self, source, items, year):
        lookup_dict = { 'for': 'place:*', 'in': 'state:*' }
        return self.fetchData(source, items, lookup_dict, year)

    def fetchAllCityData2000(self):
        logger.debug('starting fetch for all city level data for 2000-2009')
        census_sf1_df = self.fetchCities('sf1', CENSUS_00_SF1_VARS, 2000)
        census_sf3_df = self.fetchCities('sf3', CENSUS_00_SF3_VARS, 2000)
        acs_df = self.fetchCities('acs5', ACS_VARS, 2009)
        # Handle ACS var difference: strip the trailing ", <state>" component
        # from the ACS place name so it lines up with the census name.
        acs_df['NAME'] = acs_df['NAME'].apply(
            lambda x: ','.join(x.split(',')[:-1]).strip()
        )
        return postProcessData2000(census_sf1_df, census_sf3_df, acs_df, 'cities')

    def fetchAllCityData2010(self):
        logger.debug('starting fetch for all city level data for 2010-current')
        census_df = self.fetchCities('sf1', CENSUS_10_VARS, 2010)
        acs_12_df = self.fetchCities('acs5', ACS_12_VARS, 2012)
        acs_df = self.fetchCities('acs5', ACS_VARS, 2015)
        # Handle ACS var difference (see fetchAllCityData2000).
        acs_df['NAME'] = acs_df['NAME'].apply(
            lambda x: ','.join(x.split(',')[:-1]).strip()
        )
        return postProcessData2010(census_df, acs_12_df, acs_df, 'cities')

    # Fetch data for tracts within a given county
    def fetchTractsByCounty(self, source, items, county, year):
        # `county` is a 5-digit FIPS: 2-digit state followed by 3-digit county.
        parent = 'county:{} state:{}'.format(county[2:], county[0:2])
        lookup_dict = { 'for': 'tract:*', 'in': parent }
        return self.fetchData(source, items, lookup_dict, year)

    # Fetch data for all tracts in the US by looping through counties and
    # fetching tracts for each. Puerto Rico (state FIPS 72) is skipped.
    def fetchTracts(self, source, items, year):
        geo_df_list = []
        # NOTE(review): ('NAME') is just the string 'NAME', not a tuple --
        # presumably the census client accepts either form; confirm upstream.
        fips_list = [ r for r in self.fetchResults(source, ('NAME'), {'for': 'county:*', 'in': 'state:*'}, year=year) if r['state'] != '72' ]
        for f in fips_list:
            county = f['state'] + f['county']
            geo_df_list.append(self.fetchTractsByCounty(source, items, county, year))
        return pd.concat(geo_df_list)

    def fetchAllTractData2000(self):
        logger.debug('starting fetch for all tract level data for 2000-2009')
        census_sf1_df = self.fetchTracts('sf1', CENSUS_00_SF1_VARS, 2000)
        census_sf3_df = self.fetchTracts('sf3', CENSUS_00_SF3_VARS, 2000)
        acs_df = self.fetchTracts('acs5', ACS_VARS, 2009)
        return postProcessData2000(census_sf1_df, census_sf3_df, acs_df, 'tracts')

    def fetchAllTractData2010(self):
        logger.debug('starting fetch for all tract level data for 2010-current')
        census_df = self.fetchTracts('sf1', CENSUS_10_VARS, 2010)
        acs_12_df = self.fetchTracts('acs5', ACS_12_VARS, 2012)
        acs_df = self.fetchTracts('acs5', ACS_VARS, 2015)
        return postProcessData2010(census_df, acs_12_df, acs_df, 'tracts')

    # Fetch data for block groups within a given tract
    def fetchBlockGroupsByTract(self, source, items, tract, year):
        # `tract` is an 11-digit FIPS: state (2) + county (3) + tract (6).
        parent = 'county:{} state:{} tract:{}'.format(tract[2:5], tract[0:2], tract[5:])
        lookup_dict = { 'for': 'block group:*', 'in': parent }
        return self.fetchData(source, items, lookup_dict, year)

    # Fetch data for all 2010 block groups in a county by fetching all of
    # the tracts within a county, and then looping through those tracts.
    def fetchBlockGroupsByCounty(self, source, items, county, year):
        geo_df_list = []
        lookup_dict = {
            'for': 'tract:*',
            'in': 'county:' + county[2:] + ' state:' + county[0:2]
        }
        tract_fips = [ r for r in self.fetchResults(source, ('NAME'), lookup_dict, year=year) if r['state'] != '72' ]
        for f in tract_fips:
            tract = f['state'] + f['county'] + f['tract']
            geo_df_list.append(self.fetchBlockGroupsByTract(source, items, tract, year))
        if len(geo_df_list) > 0:
            return pd.concat(geo_df_list)
        # No tracts found: return an empty frame rather than failing concat.
        return pd.DataFrame()

    def fetchAllBlockGroupData2000(self, county):
        logger.debug('starting fetch block group level data for 2000-2009')
        census_sf1_df = self.fetchBlockGroupsByCounty('sf1', CENSUS_00_SF1_VARS, county, 2000)
        census_sf3_df = self.fetchBlockGroupsByCounty('sf3', CENSUS_00_SF3_VARS, county, 2000)
        acs_df = self.fetchBlockGroupsByCounty('acs5', ACS_VARS, county, 2009)
        return postProcessData2000(census_sf1_df, census_sf3_df, acs_df, 'block-groups')

    def fetchAllBlockGroupData2010(self, county):
        logger.debug('starting fetch block group level data for 2010-current')
        census_df = self.fetchBlockGroupsByCounty('sf1', CENSUS_10_VARS, county, 2010)
        acs_12_df = self.fetchBlockGroupsByCounty('acs5', ACS_12_VARS, county, 2012)
        acs_df = self.fetchBlockGroupsByCounty('acs5', ACS_VARS, county, 2015)
        return postProcessData2010(census_df, acs_12_df, acs_df, 'block-groups')
|
ccrebolder/line-cookbook | test/integration/replace_or_add/controls/replace_or_add_missing_file.rb | <gh_stars>0
# Verifies the replace_or_add resource against files that were absent before
# the run: a missing file should be created containing exactly the added
# line, except in replace-only mode, where it must not be created at all.
control 'Replace or add to a missing file' do
  # Created file contains exactly the one added line.
  describe matches('/tmp/missingfile', /^add this line$/) do
    its('count') { should eq 1 }
  end
  describe file_ext('/tmp/missingfile') do
    its('size_lines') { should eq 1 }
  end
  # Same expectation when the added line also matches the replace pattern.
  describe matches('/tmp/missingfile_matches_pattern', /^add this line$/) do
    its('count') { should eq 1 }
  end
  describe file_ext('/tmp/missingfile_matches_pattern') do
    its('size_lines') { should eq 1 }
  end
  # Replace-only mode must not create a missing file.
  describe file('/tmp/missingfile_replace_only') do
    it { should_not exist }
  end
  # redo of resource did nothing (each status line ends in 'n' = not updated)
  describe file('/tmp/chef_resource_status') do
    its(:content) { should match(/missing_file redo.*n$/) }
    its(:content) { should match(/missing_file matches_pattern redo.*n$/) }
    its(:content) { should match(/missing_file replace_only.*n$/) }
  end
end
|
adamnok/linden | examples/hello-world/src/main/scala/example/component/HelloWorldComponent.scala | package example.component
import linden.flowers.Component
// Minimal linden component rendering a nested pair of divs with a greeting.
// NOTE(review): the unary `+` appears to emit the string as a text node in
// the Html DSL -- confirm against the linden flowers documentation.
class HelloWorldComponent() extends Component {
  override lazy val render = new Html {
    div {
      div {
        +"Hello, world!"
      }
    }
  }
}
|
airsjon/AirsCore | com.airsltd.core.parse/src/test/java/com/airsltd/core/AbstractAirsStatusMessageTest.java | <gh_stars>0
/**
* Copyright (c) 2012, <NAME>
* 432 NE Ravenna Blvd
* Seattle, WA 98115
*/
package com.airsltd.core;
import static org.junit.Assert.*;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import com.airsltd.core.parse.AbstractAirsStatusMessage;
/**
 * Unit tests for {@link AbstractAirsStatusMessage#parseString(String, Throwable)}.
 *
 * <p>Wire format exercised below (colon-separated):
 * {@code id:message:description:explanation:action:support}, where a literal
 * colon is written {@code %:} and a literal percent sign {@code %%}.
 *
 * @author Jon
 */
public class AbstractAirsStatusMessageTest {

    /**
     * @throws java.lang.Exception
     */
    @BeforeClass
    public static void setUpBeforeClass() throws Exception {
    }

    /**
     * @throws java.lang.Exception
     */
    @AfterClass
    public static void tearDownAfterClass() throws Exception {
    }

    /**
     * @throws java.lang.Exception
     */
    @Before
    public void setUp() throws Exception {
    }

    /**
     * @throws java.lang.Exception
     */
    @After
    public void tearDown() throws Exception {
    }

    /**
     * Test method for {@link com.airsltd.core.parse.AbstractAirsStatusMessage#parseString(java.lang.String, java.lang.Throwable)}.
     */
    @Test
    public final void testParseString() {
        // given - plain message with no field separators: the throwable
        // becomes the description and all remaining fields stay empty
        AbstractAirsStatusMessage message = AbstractAirsStatusMessage.parseString("An error", new RuntimeException("blarg"));
        // when
        // then
        assertEquals("0:An error:RuntimeException 'blarg':::",message.toString());
        assertEquals(0l,message.getId());
        assertEquals("",message.toAction());
        assertEquals("RuntimeException 'blarg'",message.toDescription());
        assertEquals("",message.toExplanation());
        assertEquals("An error",message.toMessage());
        assertFalse(message.isSupport());
        assertEquals("",message.toSupport());
        // given - fully-populated message using %: and %% escapes
        message = AbstractAirsStatusMessage.parseString("12:An error%: Runtime:Stipulated:Could fix:Been 100%% fixed:Look here", new RuntimeException("blarg"));
        // when
        // then
        assertEquals("12:An error%: Runtime:Stipulated:Could fix:Been 100%% fixed:Look here",message.toString());
        assertEquals(12l,message.getId());
        assertEquals("Been 100% fixed",message.toAction());
        assertEquals("Stipulated",message.toDescription());
        assertEquals("Could fix",message.toExplanation());
        assertEquals("An error: Runtime",message.toMessage());
        assertEquals("Look here",message.toSupport());
        assertTrue(message.isSupport());
        // given - null input falls back to a generic "System Error" message
        message = AbstractAirsStatusMessage.parseString(null, new RuntimeException("blarg"));
        // when
        // then
        assertEquals("0:System Error:RuntimeException 'blarg':::",message.toString());
        assertEquals(0l,message.getId());
        // given
        // when - null throwable with an explicit id/message/description
        message = AbstractAirsStatusMessage.parseString("1024:Registration Successful!:Thank you for registering with ATP. "
                + "You will recieve and email soon to complete the registration process. The email will include a html link "
                + "that you can click to finish your registration.:::", null);
        // then
        assertEquals(1024, message.getId());
        assertEquals("Registration Successful!", message.toMessage());
        assertEquals("Thank you for registering with ATP. "
                + "You will recieve and email soon to complete the registration process. The email will include a html link "
                + "that you can click to finish your registration.", message.toDescription());
    }

    /**
     * Verifies the human-readable one-line rendering: "[id] message - description".
     */
    @Test
    public final void testPrettyPrint() {
        // given
        AbstractAirsStatusMessage message = AbstractAirsStatusMessage.parseString("An error", new RuntimeException("blarg"));
        // when
        // then
        assertEquals("[0] An error - RuntimeException 'blarg'",message.niceString());
    }
}
|
YuriyPobezhymov/tsunami-security-scanner-plugins | google/portscan/nmap/src/main/java/com/google/tsunami/plugins/portscan/nmap/client/result/Host.java | /*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.tsunami.plugins.portscan.nmap.client.result;
import static com.google.common.collect.ImmutableList.toImmutableList;
import com.google.auto.value.AutoValue;
import com.google.common.collect.ImmutableList;
/**
 * Host element of nmap XML result.
 *
 * <p>Child elements are collected in document order into a single list and
 * exposed through typed, filtered views (e.g. {@link #ports()}), preserving
 * the order in which they appeared in the XML.
 */
@AutoValue
public abstract class Host {
  public abstract String startTime();
  public abstract String endTime();
  public abstract String comment();
  /** All child elements of this host, in document order, untyped. */
  abstract ImmutableList<Object> valueElements();

  /** Returns the child elements that are instances of {@code clazz}, in order. */
  private <T> ImmutableList<T> getElements(Class<T> clazz) {
    return valueElements().stream()
        .filter(clazz::isInstance)
        .map(clazz::cast)
        .collect(toImmutableList());
  }

  public ImmutableList<Status> statuses() {
    return getElements(Status.class);
  }

  public ImmutableList<Address> addresses() {
    return getElements(Address.class);
  }

  public ImmutableList<Hostnames> hostnames() {
    return getElements(Hostnames.class);
  }

  public ImmutableList<Smurf> smurfs() {
    return getElements(Smurf.class);
  }

  public ImmutableList<Ports> ports() {
    return getElements(Ports.class);
  }

  public ImmutableList<Os> oses() {
    return getElements(Os.class);
  }

  public ImmutableList<Distance> distances() {
    return getElements(Distance.class);
  }

  public ImmutableList<Uptime> uptimes() {
    return getElements(Uptime.class);
  }

  public ImmutableList<TcpSequence> tcpSequences() {
    return getElements(TcpSequence.class);
  }

  public ImmutableList<IpIdSequence> ipIdSequences() {
    return getElements(IpIdSequence.class);
  }

  public ImmutableList<TcpTsSequence> tcpTsSequences() {
    return getElements(TcpTsSequence.class);
  }

  public ImmutableList<HostScript> hostScripts() {
    return getElements(HostScript.class);
  }

  public ImmutableList<Trace> traces() {
    return getElements(Trace.class);
  }

  public ImmutableList<Times> times() {
    return getElements(Times.class);
  }

  public abstract Builder toBuilder();

  public static Builder builder() {
    return new AutoValue_Host.Builder();
  }

  /** Builder for {@link Host}. */
  @AutoValue.Builder
  public abstract static class Builder {
    public abstract Builder setStartTime(String value);
    public abstract Builder setEndTime(String value);
    public abstract Builder setComment(String value);
    abstract ImmutableList.Builder<Object> valueElementsBuilder();

    /** Appends one child element (Status, Address, Ports, ...) in document order. */
    public Builder addValueElement(Object valueElement) {
      valueElementsBuilder().add(valueElement);
      return this;
    }

    public abstract Host build();
  }
}
|
illusorycloud/i-go | tools/region/rpc/proto/region.pb.go | // Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.23.0
// protoc v3.14.0
// source: region.proto
package proto
import (
proto "github.com/golang/protobuf/proto"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// This is a compile-time assertion that a sufficiently up-to-date version
// of the legacy proto package is being used.
const _ = proto.ProtoPackageIsVersion4
type IP struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Ip string `protobuf:"bytes,1,opt,name=ip,proto3" json:"ip,omitempty"`
}
func (x *IP) Reset() {
*x = IP{}
if protoimpl.UnsafeEnabled {
mi := &file_region_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *IP) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*IP) ProtoMessage() {}
func (x *IP) ProtoReflect() protoreflect.Message {
mi := &file_region_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use IP.ProtoReflect.Descriptor instead.
func (*IP) Descriptor() ([]byte, []int) {
return file_region_proto_rawDescGZIP(), []int{0}
}
func (x *IP) GetIp() string {
if x != nil {
return x.Ip
}
return ""
}
type Region struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Region string `protobuf:"bytes,1,opt,name=region,proto3" json:"region,omitempty"`
}
func (x *Region) Reset() {
*x = Region{}
if protoimpl.UnsafeEnabled {
mi := &file_region_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Region) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Region) ProtoMessage() {}
func (x *Region) ProtoReflect() protoreflect.Message {
mi := &file_region_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Region.ProtoReflect.Descriptor instead.
func (*Region) Descriptor() ([]byte, []int) {
return file_region_proto_rawDescGZIP(), []int{1}
}
func (x *Region) GetRegion() string {
if x != nil {
return x.Region
}
return ""
}
type LatLong struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Latitude float64 `protobuf:"fixed64,1,opt,name=Latitude,proto3" json:"Latitude,omitempty"`
Longitude float64 `protobuf:"fixed64,2,opt,name=Longitude,proto3" json:"Longitude,omitempty"`
}
func (x *LatLong) Reset() {
*x = LatLong{}
if protoimpl.UnsafeEnabled {
mi := &file_region_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *LatLong) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*LatLong) ProtoMessage() {}
func (x *LatLong) ProtoReflect() protoreflect.Message {
mi := &file_region_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use LatLong.ProtoReflect.Descriptor instead.
func (*LatLong) Descriptor() ([]byte, []int) {
return file_region_proto_rawDescGZIP(), []int{2}
}
func (x *LatLong) GetLatitude() float64 {
if x != nil {
return x.Latitude
}
return 0
}
func (x *LatLong) GetLongitude() float64 {
if x != nil {
return x.Longitude
}
return 0
}
var File_region_proto protoreflect.FileDescriptor
var file_region_proto_rawDesc = []byte{
0x0a, 0x0c, 0x72, 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x05,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x14, 0x0a, 0x02, 0x49, 0x50, 0x12, 0x0e, 0x0a, 0x02, 0x69,
0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x70, 0x22, 0x20, 0x0a, 0x06, 0x52,
0x65, 0x67, 0x69, 0x6f, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x18,
0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x72, 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x22, 0x43, 0x0a,
0x07, 0x4c, 0x61, 0x74, 0x4c, 0x6f, 0x6e, 0x67, 0x12, 0x1a, 0x0a, 0x08, 0x4c, 0x61, 0x74, 0x69,
0x74, 0x75, 0x64, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x01, 0x52, 0x08, 0x4c, 0x61, 0x74, 0x69,
0x74, 0x75, 0x64, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x4c, 0x6f, 0x6e, 0x67, 0x69, 0x74, 0x75, 0x64,
0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x09, 0x4c, 0x6f, 0x6e, 0x67, 0x69, 0x74, 0x75,
0x64, 0x65, 0x32, 0x62, 0x0a, 0x0c, 0x52, 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x53, 0x65, 0x72, 0x76,
0x65, 0x72, 0x12, 0x27, 0x0a, 0x09, 0x49, 0x50, 0x32, 0x52, 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x12,
0x09, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x49, 0x50, 0x1a, 0x0d, 0x2e, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x2e, 0x52, 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x22, 0x00, 0x12, 0x29, 0x0a, 0x0a, 0x49,
0x50, 0x32, 0x4c, 0x61, 0x74, 0x4c, 0x6f, 0x6e, 0x67, 0x12, 0x09, 0x2e, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x2e, 0x49, 0x50, 0x1a, 0x0e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4c, 0x61, 0x74,
0x4c, 0x6f, 0x6e, 0x67, 0x22, 0x00, 0x42, 0x23, 0x5a, 0x21, 0x69, 0x2d, 0x67, 0x6f, 0x2f, 0x74,
0x6f, 0x6f, 0x6c, 0x73, 0x2f, 0x72, 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x2f, 0x72, 0x70, 0x63, 0x2f,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x3b, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x33,
}
var (
file_region_proto_rawDescOnce sync.Once
file_region_proto_rawDescData = file_region_proto_rawDesc
)
func file_region_proto_rawDescGZIP() []byte {
file_region_proto_rawDescOnce.Do(func() {
file_region_proto_rawDescData = protoimpl.X.CompressGZIP(file_region_proto_rawDescData)
})
return file_region_proto_rawDescData
}
var file_region_proto_msgTypes = make([]protoimpl.MessageInfo, 3)
var file_region_proto_goTypes = []interface{}{
(*IP)(nil), // 0: proto.IP
(*Region)(nil), // 1: proto.Region
(*LatLong)(nil), // 2: proto.LatLong
}
var file_region_proto_depIdxs = []int32{
0, // 0: proto.RegionServer.IP2Region:input_type -> proto.IP
0, // 1: proto.RegionServer.IP2LatLong:input_type -> proto.IP
1, // 2: proto.RegionServer.IP2Region:output_type -> proto.Region
2, // 3: proto.RegionServer.IP2LatLong:output_type -> proto.LatLong
2, // [2:4] is the sub-list for method output_type
0, // [0:2] is the sub-list for method input_type
0, // [0:0] is the sub-list for extension type_name
0, // [0:0] is the sub-list for extension extendee
0, // [0:0] is the sub-list for field type_name
}
func init() { file_region_proto_init() }
func file_region_proto_init() {
if File_region_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_region_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*IP); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_region_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Region); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_region_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*LatLong); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_region_proto_rawDesc,
NumEnums: 0,
NumMessages: 3,
NumExtensions: 0,
NumServices: 1,
},
GoTypes: file_region_proto_goTypes,
DependencyIndexes: file_region_proto_depIdxs,
MessageInfos: file_region_proto_msgTypes,
}.Build()
File_region_proto = out.File
file_region_proto_rawDesc = nil
file_region_proto_goTypes = nil
file_region_proto_depIdxs = nil
}
|
xuqplus/hi-leetcode | src/test/java/com/github/xuqplus/hi/leetcode/q0200/q0258/ATest.java | package com.github.xuqplus.hi.leetcode.q0200.q0258;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Test;
/**
* 各位相加
* easy
* https://leetcode-cn.com/problems/add-digits/
*/
@Slf4j
public class ATest {
    /** Smoke test: addDigits(11) should reduce to 2; result is logged, not asserted. */
    @Test
    void a() {
        Solution solution = new Solution();
        log.info("{}", solution.addDigits(11));
    }
}
class Solution {
    /**
     * Returns the repeated digit sum (digital root) of {@code num}.
     *
     * <p>Replaces the original nested digit-summing loops with the O(1)
     * closed form: for n &gt; 9 the digital root is {@code 1 + (n - 1) % 9}
     * (a number is congruent to its digit sum modulo 9). Inputs already in
     * range (num &lt;= 9, including 0 and negatives) are returned unchanged,
     * exactly as the loop version did.
     *
     * @param num the number to reduce
     * @return the single-digit result of repeatedly summing decimal digits
     */
    public int addDigits(int num) {
        return num > 9 ? 1 + (num - 1) % 9 : num;
    }
}
|
sgholamian/log-aware-clone-detection | NLPCCd/Camel/3657_2.java | //,temp,sample_2399.java,2,18,temp,sample_2400.java,2,17
//,3
public class xxx {
public void dummy_method(){
if (config.getProtocol().equals(MailUtils.PROTOCOL_IMAP) || config.getProtocol().equals(MailUtils.PROTOCOL_IMAPS)) {
if (copyTo != null) {
Folder destFolder = store.getFolder(copyTo);
if (!destFolder.exists()) {
destFolder.create(Folder.HOLDS_MESSAGES);
}
folder.copyMessages(new Message[]{mail}, destFolder);
}
}
if (delete) {
log.info("exchange processed so flagging message as deleted");
}
}
}; |
nylen/matterbridge | vendor/gitlab.com/golang-commonmark/markdown/helpers.go | <filename>vendor/gitlab.com/golang-commonmark/markdown/helpers.go
// Copyright 2015 The Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package markdown
// parseLinkLabel scans for the ']' that closes a link label whose '[' is at
// start. It returns the index of the closing bracket, or -1 when the label
// is unterminated or (with disableNested) contains a nested link.
// s.Pos is restored to its original value before returning.
func parseLinkLabel(s *StateInline, start int, disableNested bool) int {
	src := s.Src
	labelEnd := -1
	max := s.PosMax
	oldPos := s.Pos
	s.Pos = start + 1
	level := 1
	found := false
	for s.Pos < max {
		marker := src[s.Pos]
		if marker == ']' {
			level--
			if level == 0 {
				found = true
				break
			}
		}
		prevPos := s.Pos
		// Let the inline tokenizer skip escapes, code spans, etc. as units.
		s.Md.Inline.SkipToken(s)
		if marker == '[' {
			if prevPos == s.Pos-1 {
				// A plain '[' consumed as a single character: nest one level.
				level++
			} else if disableNested {
				s.Pos = oldPos
				return -1
			}
		}
	}
	if found {
		labelEnd = s.Pos
	}
	s.Pos = oldPos
	return labelEnd
}
// parseLinkDestination parses a link destination starting at pos: either an
// angle-bracketed form <...> or a bare form with balanced parentheses. It
// returns the unescaped URL, the number of lines consumed (always 0 here),
// the position just past the destination, and whether parsing succeeded.
func parseLinkDestination(s string, pos, max int) (url string, lines, endpos int, ok bool) {
	start := pos
	if pos < max && s[pos] == '<' {
		pos++
		for pos < max {
			b := s[pos]
			// Newlines and spaces are not allowed inside <...>.
			if b == '\n' || byteIsSpace(b) {
				return
			}
			if b == '>' {
				endpos = pos + 1
				url = unescapeAll(s[start+1 : pos])
				ok = true
				return
			}
			// A backslash escapes the following byte.
			if b == '\\' && pos+1 < max {
				pos += 2
				continue
			}
			pos++
		}
		// Unterminated '<...' form: fail.
		return
	}
	// Bare destination: stop at space/control bytes, tracking paren nesting.
	level := 0
	for pos < max {
		b := s[pos]
		if b == ' ' {
			break
		}
		// Control characters (and DEL) terminate the destination.
		if b < 0x20 || b == 0x7f {
			break
		}
		if b == '\\' && pos+1 < max {
			pos += 2
			continue
		}
		if b == '(' {
			level++
		}
		if b == ')' {
			if level == 0 {
				break
			}
			level--
		}
		pos++
	}
	// Empty destination: fail.
	if start == pos {
		return
	}
	// Unbalanced '(' means the destination is malformed.
	if level != 0 {
		return
	}
	url = unescapeAll(s[start:pos])
	endpos = pos
	ok = true
	return
}
// parseLinkTitle parses a link title delimited by "...", '...' or (...),
// starting at pos. It returns the unescaped title, the number of newlines
// the title spans, the position just past the closing delimiter, and
// whether a complete title was found.
func parseLinkTitle(s string, pos, max int) (title string, nlines, endpos int, ok bool) {
	lines := 0
	start := pos
	if pos >= max {
		return
	}
	marker := s[pos]
	if marker != '"' && marker != '\'' && marker != '(' {
		return
	}
	pos++
	// An opening '(' is closed by ')'.
	if marker == '(' {
		marker = ')'
	}
	for pos < max {
		switch s[pos] {
		case marker:
			endpos = pos + 1
			nlines = lines
			title = unescapeAll(s[start+1 : pos])
			ok = true
			return
		case '\n':
			lines++
		case '\\':
			// Skip the escaped byte; count it if it is itself a newline.
			if pos+1 < max {
				pos++
				if s[pos] == '\n' {
					lines++
				}
			}
		}
		pos++
	}
	// No closing delimiter before max: fail.
	return
}
|
cgewecke/meta-ci | targets/aragonOS/migrations/4_links.js | const ScriptHelpers = artifacts.require('ScriptHelpers')
const EVMScriptRegistryFactory = artifacts.require('EVMScriptRegistryFactory')
module.exports = async (deployer, network) => {
deployer.deploy(ScriptHelpers)
deployer.link(ScriptHelpers, EVMScriptRegistryFactory)
}
|
developerntuc/adyen-android | googlepay/src/main/java/com/adyen/checkout/googlepay/GooglePayOutputData.java | /*
* Copyright (c) 2019 <NAME>.
*
* This file is open source and available under the MIT license. See the LICENSE file for more info.
*
* Created by caiof on 4/7/2019.
*/
package com.adyen.checkout.googlepay;
import android.text.TextUtils;
import androidx.annotation.Nullable;
import com.adyen.checkout.components.base.OutputData;
import com.adyen.checkout.googlepay.util.GooglePayUtils;
import com.google.android.gms.wallet.PaymentData;
class GooglePayOutputData implements OutputData {

    private final PaymentData mPaymentData;

    GooglePayOutputData(PaymentData paymentData) {
        mPaymentData = paymentData;
    }

    /**
     * The output is valid when Google Pay returned payment data and a
     * non-empty token could be extracted from it.
     */
    @Override
    public boolean isValid() {
        if (mPaymentData == null) {
            return false;
        }
        final String token = GooglePayUtils.findToken(mPaymentData);
        return !TextUtils.isEmpty(token);
    }

    /** The raw {@link PaymentData} returned by Google Pay, if any. */
    @Nullable
    public PaymentData getPaymentData() {
        return mPaymentData;
    }
}
|
Ambal/mangos | src/game/Guild.cpp | /*
* This file is part of the CMaNGOS Project. See AUTHORS file for Copyright information
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "Database/DatabaseEnv.h"
#include "WorldPacket.h"
#include "WorldSession.h"
#include "Player.h"
#include "Opcodes.h"
#include "ObjectMgr.h"
#include "Guild.h"
#include "GuildMgr.h"
#include "Chat.h"
#include "SocialMgr.h"
#include "Util.h"
#include "Language.h"
#include "World.h"
#include "Calendar.h"
//// MemberSlot ////////////////////////////////////////////
// Cache roster display data (name, level, class, zone) from the live
// player object so the roster can be shown while the player is offline.
void MemberSlot::SetMemberStats(Player* player)
{
    Name = player->GetName();
    Level = player->getLevel();
    Class = player->getClass();
    // Fall back to the cached zone when the player is not fully in world yet.
    ZoneId = player->IsInWorld() ? player->GetZoneId() : player->GetCachedZoneId();
}
// Record the current time as the member's last logout; Roster() uses this
// to report how long an offline member has been away.
void MemberSlot::UpdateLogoutTime()
{
    LogoutTime = time(NULL);
}
// Set the member's public note, updating both memory and the DB row.
// The parameter is taken by value so it can be escaped in-place for SQL
// while the unescaped original is kept in memory.
void MemberSlot::SetPNOTE(std::string pnote)
{
    Pnote = pnote;
    // pnote now can be used for encoding to DB
    CharacterDatabase.escape_string(pnote);
    CharacterDatabase.PExecute("UPDATE guild_member SET pnote = '%s' WHERE guid = '%u'", pnote.c_str(), guid.GetCounter());
}
// Set the member's officer note, updating both memory and the DB row.
// By-value parameter is escaped in-place for SQL (see SetPNOTE).
void MemberSlot::SetOFFNOTE(std::string offnote)
{
    OFFnote = offnote;
    // offnote now can be used for encoding to DB
    CharacterDatabase.escape_string(offnote);
    CharacterDatabase.PExecute("UPDATE guild_member SET offnote = '%s' WHERE guid = '%u'", offnote.c_str(), guid.GetCounter());
}
// Change the member's rank in memory and in the DB, and push the new rank
// to the live Player object when the member is online.
void MemberSlot::ChangeRank(uint32 newRank)
{
    RankId = newRank;
    Player* player = sObjectMgr.GetPlayer(guid);
    // If player not online data in data field will be loaded from guild tabs no need to update it !!
    if (player)
        player->SetRank(newRank);
    CharacterDatabase.PExecute("UPDATE guild_member SET rank='%u' WHERE guid='%u'", newRank, guid.GetCounter());
}
//// Guild /////////////////////////////////////////////////
// Default-construct an empty guild. Real state is filled in either by
// Create() for a brand-new guild or by the Load*FromDB() methods.
Guild::Guild()
{
    m_Id = 0;
    m_Name = "";
    GINFO = MOTD = "";
    m_EmblemStyle = 0;
    m_EmblemColor = 0;
    m_BorderStyle = 0;
    m_BorderColor = 0;
    m_BackgroundColor = 0;
    m_accountsNumber = 0;
    m_CreatedDate = 0;
    m_GuildBankMoney = 0;
    // Next-record cursors for the event logs (money and one per bank tab).
    m_GuildEventLogNextGuid = 0;
    m_GuildBankEventLogNextGuid_Money = 0;
    for (uint8 i = 0; i < GUILD_BANK_MAX_TABS; ++i)
        m_GuildBankEventLogNextGuid_Item[i] = 0;
}
// Free the bank tab objects and any items still held in them.
Guild::~Guild()
{
    DeleteGuildBankItems();
}
// Create a brand-new guild led by 'leader' with name 'gname'.
// Returns false when the name is already taken, the leader has no session,
// or the leader could not be added as the first member.
bool Guild::Create(Player* leader, std::string gname)
{
    // Guild names must be unique.
    if (sGuildMgr.GetGuildByName(gname))
        return false;
    WorldSession* lSession = leader->GetSession();
    if (!lSession)
        return false;
    m_LeaderGuid = leader->GetObjectGuid();
    m_Name = gname;
    GINFO = "";
    MOTD = "No message set.";
    m_GuildBankMoney = 0;
    m_Id = sObjectMgr.GenerateGuildId();
    m_CreatedDate = time(0);
    DEBUG_LOG("GUILD: creating guild %s to leader: %s", gname.c_str(), m_LeaderGuid.GetString().c_str());
    // gname already assigned to Guild::name, use it to encode string for DB
    CharacterDatabase.escape_string(gname);
    // Escape copies for SQL; the in-memory originals stay unescaped.
    std::string dbGINFO = GINFO;
    std::string dbMOTD = MOTD;
    CharacterDatabase.escape_string(dbGINFO);
    CharacterDatabase.escape_string(dbMOTD);
    CharacterDatabase.BeginTransaction();
    // CharacterDatabase.PExecute("DELETE FROM guild WHERE guildid='%u'", Id); - MAX(guildid)+1 not exist
    CharacterDatabase.PExecute("DELETE FROM guild_member WHERE guildid='%u'", m_Id);
    CharacterDatabase.PExecute("INSERT INTO guild (guildid,name,leaderguid,info,motd,createdate,EmblemStyle,EmblemColor,BorderStyle,BorderColor,BackgroundColor,BankMoney) "
                               "VALUES('%u','%s','%u', '%s', '%s','" UI64FMTD "','%u','%u','%u','%u','%u','" UI64FMTD "')",
                               m_Id, gname.c_str(), m_LeaderGuid.GetCounter(), dbGINFO.c_str(), dbMOTD.c_str(), uint64(m_CreatedDate), m_EmblemStyle, m_EmblemColor, m_BorderStyle, m_BorderColor, m_BackgroundColor, m_GuildBankMoney);
    CharacterDatabase.CommitTransaction();
    // Ranks must exist before the leader can be assigned GR_GUILDMASTER.
    CreateDefaultGuildRanks(lSession->GetSessionDbLocaleIndex());
    return AddMember(m_LeaderGuid, (uint32)GR_GUILDMASTER);
}
// (Re)create the five default guild ranks, localized through locale_idx,
// and give the guildmaster rank unlimited daily money withdrawal.
// Any existing rank and bank-right rows for this guild are wiped first.
void Guild::CreateDefaultGuildRanks(int locale_idx)
{
    CharacterDatabase.PExecute("DELETE FROM guild_rank WHERE guildid='%u'", m_Id);
    CharacterDatabase.PExecute("DELETE FROM guild_bank_right WHERE guildid = '%u'", m_Id);
    // Rank 0 is the guildmaster; order here defines the rank ids.
    CreateRank(sObjectMgr.GetMangosString(LANG_GUILD_MASTER, locale_idx), GR_RIGHT_ALL);
    CreateRank(sObjectMgr.GetMangosString(LANG_GUILD_OFFICER, locale_idx), GR_RIGHT_ALL);
    CreateRank(sObjectMgr.GetMangosString(LANG_GUILD_VETERAN, locale_idx), GR_RIGHT_GCHATLISTEN | GR_RIGHT_GCHATSPEAK);
    CreateRank(sObjectMgr.GetMangosString(LANG_GUILD_MEMBER, locale_idx), GR_RIGHT_GCHATLISTEN | GR_RIGHT_GCHATSPEAK);
    CreateRank(sObjectMgr.GetMangosString(LANG_GUILD_INITIATE, locale_idx), GR_RIGHT_GCHATLISTEN | GR_RIGHT_GCHATSPEAK);
    SetBankMoneyPerDay((uint32)GR_GUILDMASTER, WITHDRAW_MONEY_UNLIMITED);
}
// Add the character identified by plGuid to the guild at rank plRank.
// Works for both online (data read from Player) and offline (data read
// from the DB) characters. Returns false when the character is already
// guilded, does not exist, or has corrupted character data.
bool Guild::AddMember(ObjectGuid plGuid, uint32 plRank)
{
    Player* pl = sObjectMgr.GetPlayer(plGuid);
    if (pl)
    {
        if (pl->GetGuildId() != 0)
            return false;
    }
    else
    {
        if (Player::GetGuildIdFromDB(plGuid) != 0) // player already in guild
            return false;
    }
    // remove all player signs from another petitions
    // this will be prevent attempt joining player to many guilds and corrupt guild data integrity
    Player::RemovePetitionsAndSigns(plGuid, 9);
    uint32 lowguid = plGuid.GetCounter();
    // fill player data
    MemberSlot newmember;
    newmember.guid = plGuid;
    if (pl)
    {
        newmember.accountId = pl->GetSession()->GetAccountId();
        newmember.Name = pl->GetName();
        newmember.Level = pl->getLevel();
        newmember.Class = pl->getClass();
        newmember.ZoneId = pl->GetZoneId();
    }
    else
    {
        // Offline character: pull roster data straight from the DB.
        //                                                    0    1     2     3    4
        QueryResult* result = CharacterDatabase.PQuery("SELECT name,level,class,zone,account FROM characters WHERE guid = '%u'", lowguid);
        if (!result)
            return false; // player doesn't exist
        Field* fields = result->Fetch();
        newmember.Name = fields[0].GetCppString();
        newmember.Level = fields[1].GetUInt8();
        newmember.Class = fields[2].GetUInt8();
        newmember.ZoneId = fields[3].GetUInt32();
        newmember.accountId = fields[4].GetInt32();
        delete result;
        // Reject characters whose stored level/class is out of range.
        if (newmember.Level < 1 || newmember.Level > STRONG_MAX_LEVEL ||
                !((1 << (newmember.Class - 1)) & CLASSMASK_ALL_PLAYABLE))
        {
            sLog.outError("%s has a broken data in field `characters` table, cannot add him to guild.", plGuid.GetString().c_str());
            return false;
        }
    }
    newmember.RankId = plRank;
    newmember.OFFnote = (std::string)"";
    newmember.Pnote = (std::string)"";
    newmember.LogoutTime = time(NULL);
    newmember.BankResetTimeMoney = 0; // this will force update at first query
    for (int i = 0; i < GUILD_BANK_MAX_TABS; ++i)
        newmember.BankResetTimeTab[i] = 0;
    members[lowguid] = newmember;
    // Escape copies of the notes for SQL; the slot keeps the originals.
    std::string dbPnote = newmember.Pnote;
    std::string dbOFFnote = newmember.OFFnote;
    CharacterDatabase.escape_string(dbPnote);
    CharacterDatabase.escape_string(dbOFFnote);
    CharacterDatabase.PExecute("INSERT INTO guild_member (guildid,guid,rank,pnote,offnote) VALUES ('%u', '%u', '%u','%s','%s')",
                               m_Id, lowguid, newmember.RankId, dbPnote.c_str(), dbOFFnote.c_str());
    // If player not in game data in data field will be loaded from guild tables, no need to update it!!
    if (pl)
    {
        pl->SetInGuild(m_Id);
        pl->SetRank(newmember.RankId);
        pl->SetGuildIdInvited(0);
    }
    // Membership changed: refresh the distinct-accounts count.
    UpdateAccountsNumber();
    return true;
}
// Set the guild message of the day, in memory and in the DB.
// By-value parameter is escaped in-place for SQL after being stored.
void Guild::SetMOTD(std::string motd)
{
    MOTD = motd;
    // motd now can be used for encoding to DB
    CharacterDatabase.escape_string(motd);
    CharacterDatabase.PExecute("UPDATE guild SET motd='%s' WHERE guildid='%u'", motd.c_str(), m_Id);
}
// Set the guild information text, in memory and in the DB.
// By-value parameter is escaped in-place for SQL after being stored.
void Guild::SetGINFO(std::string ginfo)
{
    GINFO = ginfo;
    // ginfo now can be used for encoding to DB
    CharacterDatabase.escape_string(ginfo);
    CharacterDatabase.PExecute("UPDATE guild SET info='%s' WHERE guildid='%u'", ginfo.c_str(), m_Id);
}
// Fill the guild's base fields from one row of the guild table.
// Expected column order: guildid, name, leaderguid, EmblemStyle,
// EmblemColor, BorderStyle, BorderColor, BackgroundColor, info, motd,
// createdate, BankMoney, purchased tab count.
// Returns false when the result set is null.
bool Guild::LoadGuildFromDB(QueryResult* guildDataResult)
{
    if (!guildDataResult)
        return false;
    Field* fields = guildDataResult->Fetch();
    m_Id = fields[0].GetUInt32();
    m_Name = fields[1].GetCppString();
    m_LeaderGuid = ObjectGuid(HIGHGUID_PLAYER, fields[2].GetUInt32());
    m_EmblemStyle = fields[3].GetUInt32();
    m_EmblemColor = fields[4].GetUInt32();
    m_BorderStyle = fields[5].GetUInt32();
    m_BorderColor = fields[6].GetUInt32();
    m_BackgroundColor = fields[7].GetUInt32();
    GINFO = fields[8].GetCppString();
    MOTD = fields[9].GetCppString();
    m_CreatedDate = time_t(fields[10].GetUInt64());
    m_GuildBankMoney = fields[11].GetUInt64();
    // Clamp the stored tab count to the hard maximum before allocating.
    uint32 purchasedTabs = fields[12].GetUInt32();
    if (purchasedTabs > GUILD_BANK_MAX_TABS)
        purchasedTabs = GUILD_BANK_MAX_TABS;
    m_TabListMap.resize(purchasedTabs);
    for (uint8 i = 0; i < purchasedTabs; ++i)
        m_TabListMap[i] = new GuildBankTab;
    return true;
}
// Repair guild structure after load: ensure there is exactly one valid
// guildmaster. Returns false when the guild ends up empty / leaderless and
// must be disbanded by the caller.
bool Guild::CheckGuildStructure()
{
    // Repair the structure of guild
    // If the guildmaster doesn't exist or isn't the member of guild
    // attempt to promote another member
    int32 GM_rights = GetRank(m_LeaderGuid);
    if (GM_rights == -1)
    {
        // Leader is not a member; DelMember promotes a replacement, and
        // returns true only when no members are left.
        if (DelMember(m_LeaderGuid))
            return false; // guild will disbanded and deleted in caller
    }
    else if (GM_rights != GR_GUILDMASTER)
        SetLeader(m_LeaderGuid);
    // Allow only 1 guildmaster, set other to officer
    for (MemberList::iterator itr = members.begin(); itr != members.end(); ++itr)
        if (itr->second.RankId == GR_GUILDMASTER && m_LeaderGuid != itr->second.guid)
            itr->second.ChangeRank(GR_OFFICER);
    return true;
}
// Load this guild's ranks from a shared, guildid-ordered query result.
// Consumes rows until a row for a higher guild id appears; repairs or
// recreates broken rank data. Always returns true.
bool Guild::LoadRanksFromDB(QueryResult* guildRanksResult)
{
    if (!guildRanksResult)
    {
        sLog.outError("Guild %u has broken `guild_rank` data, creating new...", m_Id);
        CreateDefaultGuildRanks(0);
        return true;
    }
    Field* fields;
    bool broken_ranks = false;
    // GUILD RANKS are sequence starting from 0 = GUILD_MASTER (ALL PRIVILEGES) to max 9 (lowest privileges)
    // the lower rank id is considered higher rank - so promotion does rank-- and demotion does rank++
    // between ranks in sequence cannot be gaps - so 0,1,2,4 cannot be
    // min ranks count is 5 and max is 10.
    do
    {
        fields = guildRanksResult->Fetch();
        // condition that would be true when all ranks in QueryResult will be processed and guild without ranks is being processed
        if (!fields)
            break;
        uint32 guildId = fields[0].GetUInt32();
        if (guildId < m_Id)
        {
            // there is in table guild_rank record which doesn't have guildid in guild table, report error
            sLog.outErrorDb("Guild %u does not exist but it has a record in guild_rank table, deleting it!", guildId);
            CharacterDatabase.PExecute("DELETE FROM guild_rank WHERE guildid = '%u'", guildId);
            continue;
        }
        if (guildId > m_Id) // we loaded all ranks for this guild already, break cycle
            break;
        uint32 rankID = fields[1].GetUInt32();
        std::string rankName = fields[2].GetCppString();
        uint32 rankRights = fields[3].GetUInt32();
        uint32 rankMoney = fields[4].GetUInt32();
        if (rankID != m_Ranks.size()) // guild_rank.ids are sequence 0,1,2,3..
            broken_ranks = true;
        // first rank is guildmaster, prevent loss leader rights
        if (m_Ranks.empty())
            rankRights |= GR_RIGHT_ALL;
        AddRank(rankName, rankRights, rankMoney);
    }
    while (guildRanksResult->NextRow());
    if (m_Ranks.size() < GUILD_RANKS_MIN_COUNT) // if too few ranks, renew them
    {
        m_Ranks.clear();
        sLog.outError("Guild %u has broken `guild_rank` data, creating new...", m_Id);
        CreateDefaultGuildRanks(0); // 0 is default locale_idx
        broken_ranks = false;
    }
    // guild_rank have wrong numbered ranks, repair
    if (broken_ranks)
    {
        sLog.outError("Guild %u has broken `guild_rank` data, repairing...", m_Id);
        CharacterDatabase.BeginTransaction();
        CharacterDatabase.PExecute("DELETE FROM guild_rank WHERE guildid='%u'", m_Id);
        // Rewrite the ranks with consecutive ids matching the in-memory list.
        for (size_t i = 0; i < m_Ranks.size(); ++i)
        {
            std::string name = m_Ranks[i].Name;
            uint32 rights = m_Ranks[i].Rights;
            CharacterDatabase.escape_string(name);
            CharacterDatabase.PExecute("INSERT INTO guild_rank (guildid,rid,rname,rights) VALUES ('%u', '%u', '%s', '%u')", m_Id, uint32(i), name.c_str(), rights);
        }
        CharacterDatabase.CommitTransaction();
    }
    return true;
}
// Load this guild's members from a shared, guildid-ordered query result.
// Consumes rows until a row for a higher guild id appears; removes members
// with corrupted character data. Returns false when no members were loaded
// (caller should disband the guild).
bool Guild::LoadMembersFromDB(QueryResult* guildMembersResult)
{
    if (!guildMembersResult)
        return false;
    do
    {
        Field* fields = guildMembersResult->Fetch();
        // this condition will be true when all rows in QueryResult are processed and new guild without members is going to be loaded - prevent crash
        if (!fields)
            break;
        uint32 guildId = fields[0].GetUInt32();
        if (guildId < m_Id)
        {
            // there is in table guild_member record which doesn't have guildid in guild table, report error
            sLog.outErrorDb("Guild %u does not exist but it has a record in guild_member table, deleting it!", guildId);
            CharacterDatabase.PExecute("DELETE FROM guild_member WHERE guildid = '%u'", guildId);
            continue;
        }
        if (guildId > m_Id)
            // we loaded all members for this guild already, break cycle
            break;
        MemberSlot newmember;
        uint32 lowguid = fields[1].GetUInt32();
        newmember.guid = ObjectGuid(HIGHGUID_PLAYER, lowguid);
        newmember.RankId = fields[2].GetUInt32();
        // don't allow member to have not existing rank!
        if (newmember.RankId >= m_Ranks.size())
            newmember.RankId = GetLowestRank();
        newmember.Pnote = fields[3].GetCppString();
        newmember.OFFnote = fields[4].GetCppString();
        newmember.BankResetTimeMoney = fields[5].GetUInt32();
        newmember.BankRemMoney = fields[6].GetUInt32();
        // Per-tab withdraw reset time and remaining slots, two columns per tab.
        for (int i = 0; i < GUILD_BANK_MAX_TABS; ++i)
        {
            newmember.BankResetTimeTab[i] = fields[7 + (2 * i)].GetUInt32();
            newmember.BankRemSlotsTab[i] = fields[8 + (2 * i)].GetUInt32();
        }
        newmember.Name = fields[19].GetCppString();
        newmember.Level = fields[20].GetUInt8();
        newmember.Class = fields[21].GetUInt8();
        newmember.ZoneId = fields[22].GetUInt32();
        newmember.LogoutTime = fields[23].GetUInt64();
        newmember.accountId = fields[24].GetInt32();
        // this code will remove not existing character guids from guild
        if (newmember.Level < 1 || newmember.Level > STRONG_MAX_LEVEL) // can be at broken `data` field
        {
            sLog.outError("%s has a broken data in field `characters`.`data`, deleting him from guild!", newmember.guid.GetString().c_str());
            CharacterDatabase.PExecute("DELETE FROM guild_member WHERE guid = '%u'", lowguid);
            continue;
        }
        if (!newmember.ZoneId)
        {
            sLog.outError("%s has broken zone-data", newmember.guid.GetString().c_str());
            // here it will also try the same, to get the zone from characters-table, but additional it tries to find
            // the zone through xy coords .. this is a bit redundant, but shouldn't be called often
            newmember.ZoneId = Player::GetZoneIdFromDB(newmember.guid);
        }
        if (!((1 << (newmember.Class - 1)) & CLASSMASK_ALL_PLAYABLE)) // can be at broken `class` field
        {
            sLog.outError("%s has a broken data in field `characters`.`class`, deleting him from guild!", newmember.guid.GetString().c_str());
            CharacterDatabase.PExecute("DELETE FROM guild_member WHERE guid = '%u'", lowguid);
            continue;
        }
        members[lowguid] = newmember;
    }
    while (guildMembersResult->NextRow());
    if (members.empty())
        return false;
    UpdateAccountsNumber();
    return true;
}
// Make the given member the guild leader: promote to GR_GUILDMASTER and
// persist the new leader guid. Silently does nothing for non-members.
void Guild::SetLeader(ObjectGuid guid)
{
    MemberSlot* slot = GetMemberSlot(guid);
    if (!slot)
        return;
    m_LeaderGuid = guid;
    slot->ChangeRank(GR_GUILDMASTER);
    CharacterDatabase.PExecute("UPDATE guild SET leaderguid='%u' WHERE guildid='%u'", guid.GetCounter(), m_Id);
}
/**
* Remove character from guild
*
* @param guid Character that removed from guild
* @param isDisbanding Flag set if function called from Guild::Disband, so not need update DB in per-member mode only or leader update
*
* @return true, if guild need to be disband and erase (no members or can't setup leader)
*/
bool Guild::DelMember(ObjectGuid guid, bool isDisbanding)
{
    uint32 lowguid = guid.GetCounter();
    // guild master can be deleted when loading guild and guid doesn't exist in characters table
    // or when he is removed from guild by gm command
    if (m_LeaderGuid == guid && !isDisbanding)
    {
        // Leader leaves: promote the highest-ranked remaining member
        // (lowest RankId) to guildmaster.
        MemberSlot* oldLeader = NULL;
        MemberSlot* best = NULL;
        ObjectGuid newLeaderGUID;
        for (Guild::MemberList::iterator i = members.begin(); i != members.end(); ++i)
        {
            if (i->first == lowguid)
            {
                oldLeader = &(i->second);
                continue;
            }
            if (!best || best->RankId > i->second.RankId)
            {
                best = &(i->second);
                newLeaderGUID = ObjectGuid(HIGHGUID_PLAYER, i->first);
            }
        }
        // No other member remains: signal the caller to disband.
        if (!best)
            return true;
        SetLeader(newLeaderGUID);
        // If player not online data in data field will be loaded from guild tabs no need to update it !!
        if (Player* newLeader = sObjectMgr.GetPlayer(newLeaderGUID))
            newLeader->SetRank(GR_GUILDMASTER);
        // when leader non-exist (at guild load with deleted leader only) not send broadcasts
        if (oldLeader)
        {
            BroadcastEvent(GE_LEADER_CHANGED, oldLeader->Name.c_str(), best->Name.c_str());
            BroadcastEvent(GE_LEFT, guid, oldLeader->Name.c_str());
        }
    }
    members.erase(lowguid);
    Player* player = sObjectMgr.GetPlayer(guid);
    // If player not online data in data field will be loaded from guild tabs no need to update it !!
    if (player)
    {
        player->SetInGuild(0);
        player->SetRank(0);
    }
    CharacterDatabase.PExecute("DELETE FROM guild_member WHERE guid = '%u'", lowguid);
    // During Disband() the accounts count is irrelevant; skip the recount.
    if (!isDisbanding)
        UpdateAccountsNumber();
    return members.empty();
}
// Send a guild-chat message from 'session' to every online member who may
// listen to guild chat and does not have the sender on ignore. Requires the
// sender to have guild-chat speak rights.
void Guild::BroadcastToGuild(WorldSession* session, const std::string& msg, uint32 language)
{
    if (session && session->GetPlayer() && HasRankRight(session->GetPlayer()->GetRank(), GR_RIGHT_GCHATSPEAK))
    {
        // Build the chat packet once; its content is receiver-independent.
        WorldPacket data;
        ChatHandler::FillMessageData(&data, session, CHAT_MSG_GUILD, language, msg.c_str());
        for (MemberList::const_iterator itr = members.begin(); itr != members.end(); ++itr)
        {
            Player* pl = ObjectAccessor::FindPlayer(ObjectGuid(HIGHGUID_PLAYER, itr->first));
            if (pl && pl->GetSession() && HasRankRight(pl->GetRank(), GR_RIGHT_GCHATLISTEN) && !pl->GetSocial()->HasIgnore(session->GetPlayer()->GetObjectGuid()))
                pl->GetSession()->SendPacket(&data);
        }
    }
}
// Send an officer-chat message from 'session' to every online member who
// may listen to officer chat and does not have the sender on ignore.
// Requires the sender to have officer-chat speak rights.
void Guild::BroadcastToOfficers(WorldSession* session, const std::string& msg, uint32 language)
{
    if (session && session->GetPlayer() && HasRankRight(session->GetPlayer()->GetRank(), GR_RIGHT_OFFCHATSPEAK))
    {
        // Build the chat packet once, outside the member loop, matching
        // BroadcastToGuild: the packet content does not depend on the
        // receiver, so rebuilding it per member was redundant work.
        WorldPacket data;
        ChatHandler::FillMessageData(&data, session, CHAT_MSG_OFFICER, language, msg.c_str());
        for (MemberList::const_iterator itr = members.begin(); itr != members.end(); ++itr)
        {
            Player* pl = ObjectAccessor::FindPlayer(ObjectGuid(HIGHGUID_PLAYER, itr->first));
            if (pl && pl->GetSession() && HasRankRight(pl->GetRank(), GR_RIGHT_OFFCHATLISTEN) && !pl->GetSocial()->HasIgnore(session->GetPlayer()->GetObjectGuid()))
                pl->GetSession()->SendPacket(&data);
        }
    }
}
// Send the given packet to every currently-online guild member.
void Guild::BroadcastPacket(WorldPacket* packet)
{
    for (MemberList::const_iterator itr = members.begin(); itr != members.end(); ++itr)
    {
        if (Player* member = ObjectAccessor::FindPlayer(ObjectGuid(HIGHGUID_PLAYER, itr->first)))
            member->GetSession()->SendPacket(packet);
    }
}
// Send the given packet to every currently-online member holding exactly
// the given rank.
void Guild::BroadcastPacketToRank(WorldPacket* packet, uint32 rankId)
{
    for (MemberList::const_iterator itr = members.begin(); itr != members.end(); ++itr)
    {
        if (itr->second.RankId != rankId)
            continue;
        if (Player* member = ObjectAccessor::FindPlayer(ObjectGuid(HIGHGUID_PLAYER, itr->first)))
            member->GetSession()->SendPacket(packet);
    }
}
// add new event to all already connected guild memebers
// Answer a calendar "invite my guild" request: send the requester the list
// of members matching the level range and minimum rank (excluding himself),
// capped at CALENDAR_MAX_INVITES.
void Guild::MassInviteToEvent(WorldSession* session, uint32 minLevel, uint32 maxLevel, uint32 minRank)
{
    uint32 count = 0;
    WorldPacket data(SMSG_CALENDAR_FILTER_GUILD);
    data << uint32(count); // count placeholder
    for (MemberList::const_iterator itr = members.begin(); itr != members.end(); ++itr)
    {
        // not sure if needed, maybe client checks it as well
        if (count >= CALENDAR_MAX_INVITES)
        {
            sCalendarMgr.SendCalendarCommandResult(session->GetPlayer(), CALENDAR_ERROR_INVITES_EXCEEDED);
            return;
        }
        MemberSlot const* member = &itr->second;
        uint32 level = Player::GetLevelFromDB(member->guid);
        // Lower RankId means higher rank, hence the <= comparison.
        if (member->guid != session->GetPlayer()->GetObjectGuid() && level >= minLevel && level <= maxLevel && member->RankId <= minRank)
        {
            data << member->guid.WriteAsPacked();
            data << uint8(level);
            ++count;
        }
    }
    // Overwrite the count placeholder at offset 0 with the real total.
    data.put<uint32>(0, count);
    session->SendPacket(&data);
}
// Append a new rank with the given name and rights (and zero daily bank
// money), creating empty bank-right rows for every purchased tab.
// Silently does nothing when the rank limit is reached.
void Guild::CreateRank(std::string name_, uint32 rights)
{
    if (m_Ranks.size() >= GUILD_RANKS_MAX_COUNT)
        return;
    // ranks are sequence 0,1,2,... where 0 means guildmaster
    uint32 new_rank_id = m_Ranks.size();
    AddRank(name_, rights, 0);
    // existing records in db should be deleted before calling this procedure and m_PurchasedTabs must be loaded already
    for (uint32 i = 0; i < uint32(GetPurchasedTabs()); ++i)
    {
        // create bank rights with 0
        CharacterDatabase.PExecute("INSERT INTO guild_bank_right (guildid,TabId,rid) VALUES ('%u','%u','%u')", m_Id, i, new_rank_id);
    }
    // name now can be used for encoding to DB
    CharacterDatabase.escape_string(name_);
    CharacterDatabase.PExecute("INSERT INTO guild_rank (guildid,rid,rname,rights) VALUES ('%u', '%u', '%s', '%u')", m_Id, new_rank_id, name_.c_str(), rights);
}
// Append a rank to the in-memory list only; DB persistence is handled by
// the callers (CreateRank / LoadRanksFromDB repair).
void Guild::AddRank(const std::string& name_, uint32 rights, uint32 money)
{
    m_Ranks.push_back(RankInfo(name_, rights, money));
}
// Delete the lowest (highest-numbered) rank, in memory and in the DB,
// together with its bank rights. No-op at the minimum rank count.
void Guild::DelRank()
{
    // client won't allow to have less than GUILD_RANKS_MIN_COUNT ranks in guild
    if (m_Ranks.size() <= GUILD_RANKS_MIN_COUNT)
        return;
    // delete lowest guild_rank
    uint32 rank = GetLowestRank();
    // >= also sweeps any stale rows above the current lowest rank id.
    CharacterDatabase.PExecute("DELETE FROM guild_rank WHERE rid>='%u' AND guildid='%u'", rank, m_Id);
    CharacterDatabase.PExecute("DELETE FROM guild_bank_right WHERE rid>='%u' AND guildid='%u'", rank, m_Id);
    m_Ranks.pop_back();
}
// Return the display name of the given rank, or a placeholder string for
// an out-of-range rank id.
std::string Guild::GetRankName(uint32 rankId)
{
    if (rankId < m_Ranks.size())
        return m_Ranks[rankId].Name;
    return "<unknown>";
}
// Return the rights mask of the given rank, or 0 (no rights) for an
// out-of-range rank id.
uint32 Guild::GetRankRights(uint32 rankId)
{
    if (rankId < m_Ranks.size())
        return m_Ranks[rankId].Rights;
    return 0;
}
// Rename an existing rank, in memory and in the DB. No-op for an invalid
// rank id. By-value name parameter is escaped in-place for SQL.
void Guild::SetRankName(uint32 rankId, std::string name_)
{
    if (rankId >= m_Ranks.size())
        return;
    m_Ranks[rankId].Name = name_;
    // name now can be used for encoding to DB
    CharacterDatabase.escape_string(name_);
    CharacterDatabase.PExecute("UPDATE guild_rank SET rname='%s' WHERE rid='%u' AND guildid='%u'", name_.c_str(), rankId, m_Id);
}
// Change an existing rank's rights mask, in memory and in the DB.
// No-op for an invalid rank id.
void Guild::SetRankRights(uint32 rankId, uint32 rights)
{
    if (rankId >= m_Ranks.size())
        return;
    m_Ranks[rankId].Rights = rights;
    CharacterDatabase.PExecute("UPDATE guild_rank SET rights='%u' WHERE rid='%u' AND guildid='%u'", rights, rankId, m_Id);
}
/**
* Disband guild including cleanup structures and DB
*
* Note: guild object need deleted after this in caller code.
*/
void Guild::Disband()
{
    BroadcastEvent(GE_DISBANDED);
    // Remove every member; isDisbanding=true skips leader re-election and
    // the per-removal account recount.
    while (!members.empty())
    {
        MemberList::const_iterator itr = members.begin();
        DelMember(ObjectGuid(HIGHGUID_PLAYER, itr->first), true);
    }
    CharacterDatabase.BeginTransaction();
    CharacterDatabase.PExecute("DELETE FROM guild WHERE guildid = '%u'", m_Id);
    CharacterDatabase.PExecute("DELETE FROM guild_rank WHERE guildid = '%u'", m_Id);
    CharacterDatabase.PExecute("DELETE FROM guild_bank_tab WHERE guildid = '%u'", m_Id);
    // Free bank tab used memory and delete items stored in them
    DeleteGuildBankItems(true);
    CharacterDatabase.PExecute("DELETE FROM guild_bank_item WHERE guildid = '%u'", m_Id);
    CharacterDatabase.PExecute("DELETE FROM guild_bank_right WHERE guildid = '%u'", m_Id);
    CharacterDatabase.PExecute("DELETE FROM guild_bank_eventlog WHERE guildid = '%u'", m_Id);
    CharacterDatabase.PExecute("DELETE FROM guild_eventlog WHERE guildid = '%u'", m_Id);
    CharacterDatabase.CommitTransaction();
    // The caller must delete this Guild object afterwards (see header note).
    sGuildMgr.RemoveGuild(m_Id);
}
// Build and send SMSG_GUILD_ROSTER: rank table plus one entry per member.
// When 'session' is given the packet goes only to that session, otherwise
// it is broadcast to the whole guild.
void Guild::Roster(WorldSession* session /*= NULL*/)
{
    // we can only guess size
    WorldPacket data(SMSG_GUILD_ROSTER, (4 + MOTD.length() + 1 + GINFO.length() + 1 + 4 + m_Ranks.size() * (4 + 4 + GUILD_BANK_MAX_TABS * (4 + 4)) + members.size() * 50));
    data << uint32(members.size());
    data << MOTD;
    data << GINFO;
    data << uint32(m_Ranks.size());
    for (RankList::const_iterator ritr = m_Ranks.begin(); ritr != m_Ranks.end(); ++ritr)
    {
        data << uint32(ritr->Rights);
        data << uint32(ritr->BankMoneyPerDay); // count of: withdraw gold(gold/day) Note: in game set gold, in packet set bronze.
        for (int i = 0; i < GUILD_BANK_MAX_TABS; ++i)
        {
            data << uint32(ritr->TabRight[i]); // for TAB_i rights: view tabs = 0x01, deposit items =0x02
            data << uint32(ritr->TabSlotPerDay[i]); // for TAB_i count of: withdraw items(stack/day)
        }
    }
    for (MemberList::const_iterator itr = members.begin(); itr != members.end(); ++itr)
    {
        // Online members (flag 1) use live data and carry no offline-time
        // float; offline members (flag 0) use the cached MemberSlot data.
        if (Player* pl = ObjectAccessor::FindPlayer(ObjectGuid(HIGHGUID_PLAYER, itr->first)))
        {
            data << pl->GetObjectGuid();
            data << uint8(1);
            data << pl->GetName();
            data << uint32(itr->second.RankId);
            data << uint8(pl->getLevel());
            data << uint8(pl->getClass());
            data << uint8(0); // new 2.4.0
            data << uint32(pl->GetZoneId());
            data << itr->second.Pnote;
            data << itr->second.OFFnote;
        }
        else
        {
            data << ObjectGuid(HIGHGUID_PLAYER, itr->first);
            data << uint8(0);
            data << itr->second.Name;
            data << uint32(itr->second.RankId);
            data << uint8(itr->second.Level);
            data << uint8(itr->second.Class);
            data << uint8(0); // new 2.4.0
            data << uint32(itr->second.ZoneId);
            // Days since last logout, as a float.
            data << float(float(time(NULL) - itr->second.LogoutTime) / DAY);
            data << itr->second.Pnote;
            data << itr->second.OFFnote;
        }
    }
    if (session)
        session->SendPacket(&data);
    else
        BroadcastPacket(&data);
    DEBUG_LOG("WORLD: Sent (SMSG_GUILD_ROSTER)");
}
// Answer a guild query: send SMSG_GUILD_QUERY_RESPONSE with the guild's
// name, the fixed-size list of rank names, and the emblem/tabard colors.
void Guild::Query(WorldSession* session)
{
    WorldPacket data(SMSG_GUILD_QUERY_RESPONSE, (8 * 32 + 200)); // we can only guess size
    data << uint32(m_Id);
    data << m_Name;
    for (size_t i = 0 ; i < GUILD_RANKS_MAX_COUNT; ++i) // show always 10 ranks
    {
        if (i < m_Ranks.size())
            data << m_Ranks[i].Name;
        else
            data << uint8(0); // null string
    }
    data << uint32(m_EmblemStyle);
    data << uint32(m_EmblemColor);
    data << uint32(m_BorderStyle);
    data << uint32(m_BorderColor);
    data << uint32(m_BackgroundColor);
    data << uint32(0); // probably real ranks count
    session->SendPacket(&data);
    DEBUG_LOG("WORLD: Sent (SMSG_GUILD_QUERY_RESPONSE)");
}
// Update the guild tabard (emblem/border styles and colors), in memory and
// in the DB, in one statement.
void Guild::SetEmblem(uint32 emblemStyle, uint32 emblemColor, uint32 borderStyle, uint32 borderColor, uint32 backgroundColor)
{
    m_EmblemStyle = emblemStyle;
    m_EmblemColor = emblemColor;
    m_BorderStyle = borderStyle;
    m_BorderColor = borderColor;
    m_BackgroundColor = backgroundColor;
    CharacterDatabase.PExecute("UPDATE guild SET EmblemStyle=%u, EmblemColor=%u, BorderStyle=%u, BorderColor=%u, BackgroundColor=%u WHERE guildid = %u", m_EmblemStyle, m_EmblemColor, m_BorderStyle, m_BorderColor, m_BackgroundColor, m_Id);
}
/**
* Return the number of accounts that are in the guild after possible update if required
* A player may have many characters in the guild, but with the same account
*/
// Number of distinct accounts represented in the guild (one account may
// own several member characters). The result is cached in m_accountsNumber;
// a cached value of 0 means "recompute".
uint32 Guild::GetAccountsNumber()
{
    // Cached value still valid?
    if (m_accountsNumber)
        return m_accountsNumber;
    // Recount: a set collapses duplicate account ids automatically.
    std::set<uint32> uniqueAccounts;
    for (MemberList::const_iterator itr = members.begin(); itr != members.end(); ++itr)
        uniqueAccounts.insert(itr->second.accountId);
    m_accountsNumber = uniqueAccounts.size();
    return m_accountsNumber;
}
// *************************************************
// Guild Eventlog part
// *************************************************
// Display guild eventlog
// Send the in-memory guild event log to one session
// (MSG_GUILD_EVENT_LOG_QUERY response).
void Guild::DisplayGuildEventLog(WorldSession* session)
{
    // Sending result
    WorldPacket data(MSG_GUILD_EVENT_LOG_QUERY, 0);
    // count, max count == 100
    data << uint8(m_GuildEventLog.size());
    for (GuildEventLog::const_iterator itr = m_GuildEventLog.begin(); itr != m_GuildEventLog.end(); ++itr)
    {
        // Event type
        data << uint8(itr->EventType);
        // Player 1
        data << ObjectGuid(HIGHGUID_PLAYER, itr->PlayerGuid1);
        // Player 2 not for left/join guild events
        if (itr->EventType != GUILD_EVENT_LOG_JOIN_GUILD && itr->EventType != GUILD_EVENT_LOG_LEAVE_GUILD)
            data << ObjectGuid(HIGHGUID_PLAYER, itr->PlayerGuid2);
        // New Rank - only for promote/demote guild events
        if (itr->EventType == GUILD_EVENT_LOG_PROMOTE_PLAYER || itr->EventType == GUILD_EVENT_LOG_DEMOTE_PLAYER)
            data << uint8(itr->NewRank);
        // Event timestamp
        data << uint32(time(NULL) - itr->TimeStamp);
    }
    session->SendPacket(&data);
    DEBUG_LOG("WORLD: Sent (MSG_GUILD_EVENT_LOG_QUERY)");
}
// Load guild eventlog from DB
// Load the newest GUILD_EVENTLOG_MAX_RECORDS event log entries from the DB.
// Rows arrive newest-first and are push_front'ed, so the in-memory list
// ends up oldest-first. The first (newest) row also seeds the next-LogGuid
// cursor used by LogGuildEvent().
void Guild::LoadGuildEventLogFromDB()
{
    //                                                     0        1          2            3            4        5
    QueryResult* result = CharacterDatabase.PQuery("SELECT LogGuid, EventType, PlayerGuid1, PlayerGuid2, NewRank, TimeStamp FROM guild_eventlog WHERE guildid=%u ORDER BY TimeStamp DESC,LogGuid DESC LIMIT %u", m_Id, GUILD_EVENTLOG_MAX_RECORDS);
    if (!result)
        return;
    bool isNextLogGuidSet = false;
    // uint32 configCount = sWorld.getConfig(CONFIG_UINT32_GUILD_EVENT_LOG_COUNT);
    // First event in list will be the oldest and the latest event is last event in list
    do
    {
        Field* fields = result->Fetch();
        if (!isNextLogGuidSet)
        {
            m_GuildEventLogNextGuid = fields[0].GetUInt32();
            isNextLogGuidSet = true;
        }
        // Fill entry
        GuildEventLogEntry NewEvent;
        NewEvent.EventType = fields[1].GetUInt8();
        NewEvent.PlayerGuid1 = fields[2].GetUInt32();
        NewEvent.PlayerGuid2 = fields[3].GetUInt32();
        NewEvent.NewRank = fields[4].GetUInt8();
        NewEvent.TimeStamp = fields[5].GetUInt64();
        // There can be a problem if more events have same TimeStamp the ORDER can be broken when fields[0].GetUInt32() == configCount, but
        // events with same timestamp can appear when there is lag, and we naively suppose that mangos isn't laggy
        // but if problem appears, player will see set of guild events that have same timestamp in bad order
        // Add entry to list
        m_GuildEventLog.push_front(NewEvent);
    }
    while (result->NextRow());
    delete result;
}
// Add entry to guild eventlog
// Append a new entry to the guild event log, keeping at most
// GUILD_EVENTLOG_MAX_RECORDS in memory and cycling LogGuid values in the
// DB so old rows are overwritten.
void Guild::LogGuildEvent(uint8 EventType, ObjectGuid playerGuid1, ObjectGuid playerGuid2, uint8 newRank)
{
    GuildEventLogEntry NewEvent;
    // Create event
    NewEvent.EventType = EventType;
    NewEvent.PlayerGuid1 = playerGuid1.GetCounter();
    NewEvent.PlayerGuid2 = playerGuid2.GetCounter();
    NewEvent.NewRank = newRank;
    NewEvent.TimeStamp = uint32(time(NULL));
    // Count new LogGuid
    m_GuildEventLogNextGuid = (m_GuildEventLogNextGuid + 1) % sWorld.getConfig(CONFIG_UINT32_GUILD_EVENT_LOG_COUNT);
    // Check max records limit
    if (m_GuildEventLog.size() >= GUILD_EVENTLOG_MAX_RECORDS)
        m_GuildEventLog.pop_front();
    // Add event to list
    m_GuildEventLog.push_back(NewEvent);
    // Save event to DB: drop any row reusing this LogGuid, then insert.
    CharacterDatabase.PExecute("DELETE FROM guild_eventlog WHERE guildid='%u' AND LogGuid='%u'", m_Id, m_GuildEventLogNextGuid);
    CharacterDatabase.PExecute("INSERT INTO guild_eventlog (guildid, LogGuid, EventType, PlayerGuid1, PlayerGuid2, NewRank, TimeStamp) VALUES ('%u','%u','%u','%u','%u','%u','" UI64FMTD "')",
                               m_Id, m_GuildEventLogNextGuid, uint32(NewEvent.EventType), NewEvent.PlayerGuid1, NewEvent.PlayerGuid2, uint32(NewEvent.NewRank), NewEvent.TimeStamp);
}
// *************************************************
// Guild Bank part
// *************************************************
// Bank content related
void Guild::DisplayGuildBankContent(WorldSession* session, uint8 TabId)
{
GuildBankTab const* tab = m_TabListMap[TabId];
if (!IsMemberHaveRights(session->GetPlayer()->GetGUIDLow(), TabId, GUILD_BANK_RIGHT_VIEW_TAB))
return;
WorldPacket data(SMSG_GUILD_BANK_LIST, 1200);
data << uint64(GetGuildBankMoney());
data << uint8(TabId);
// remaining slots for today
data << uint32(GetMemberSlotWithdrawRem(session->GetPlayer()->GetGUIDLow(), TabId));
data << uint8(0); // Tell client that there's no tab info in this packet
data << uint8(GUILD_BANK_MAX_SLOTS);
for (int i = 0; i < GUILD_BANK_MAX_SLOTS; ++i)
AppendDisplayGuildBankSlot(data, tab, i);
session->SendPacket(&data);
DEBUG_LOG("WORLD: Sent (SMSG_GUILD_BANK_LIST)");
}
// Broadcasts a money-only SMSG_GUILD_BANK_LIST (no tab info, no items) to the
// whole guild after the bank money amount changed.
void Guild::DisplayGuildBankMoneyUpdate(WorldSession* session)
{
    WorldPacket data(SMSG_GUILD_BANK_LIST, 8 + 1 + 4 + 1 + 1);
    data << uint64(GetGuildBankMoney());
    data << uint8(0);                                   // TabId, default 0
    // NOTE(review): the withdraw remainder below is computed for the triggering
    // session's player only, yet the packet is broadcast to every member —
    // confirm this is intended (each receiver sees the sender's value).
    data << uint32(GetMemberSlotWithdrawRem(session->GetPlayer()->GetGUIDLow(), 0));
    data << uint8(0);                                   // Tell that there's no tab info in this packet
    data << uint8(0);                                   // not send items
    BroadcastPacket(&data);
    DEBUG_LOG("WORLD: Sent (SMSG_GUILD_BANK_LIST)");
}
// Sends an update for one or two changed slots of tab TabId to every online
// member with view rights on that tab. slot2 == -1 means single-slot update.
void Guild::DisplayGuildBankContentUpdate(uint8 TabId, int32 slot1, int32 slot2)
{
    // guard before indexing m_TabListMap (consistent with GetItem())
    if (TabId >= GetPurchasedTabs())
        return;
    GuildBankTab const* tab = m_TabListMap[TabId];
    WorldPacket data(SMSG_GUILD_BANK_LIST, 1200);
    data << uint64(GetGuildBankMoney());
    data << uint8(TabId);
    size_t rempos = data.wpos();
    data << uint32(0);                                  // item withdraw amount, patched per receiver below
    data << uint8(0);                                   // Tell client that there's no tab info in this packet
    if (slot2 == -1)                                    // single item in slot1
    {
        data << uint8(1);                               // item count
        AppendDisplayGuildBankSlot(data, tab, slot1);
    }
    else                                                // 2 items (in slot1 and slot2)
    {
        data << uint8(2);                               // item count
        if (slot1 > slot2)
            std::swap(slot1, slot2);
        AppendDisplayGuildBankSlot(data, tab, slot1);
        AppendDisplayGuildBankSlot(data, tab, slot2);
    }
    for (MemberList::const_iterator itr = members.begin(); itr != members.end(); ++itr)
    {
        Player* player = ObjectAccessor::FindPlayer(ObjectGuid(HIGHGUID_PLAYER, itr->first));
        if (!player)
            continue;
        if (!IsMemberHaveRights(itr->first, TabId, GUILD_BANK_RIGHT_VIEW_TAB))
            continue;
        // per-receiver remaining withdraw count, written into the shared packet
        data.put<uint32>(rempos, uint32(GetMemberSlotWithdrawRem(player->GetGUIDLow(), TabId)));
        player->GetSession()->SendPacket(&data);
    }
    DEBUG_LOG("WORLD: Sent (SMSG_GUILD_BANK_LIST)");
}
// Sends an update for an arbitrary set of changed slots of tab TabId to every
// online member with view rights on that tab.
void Guild::DisplayGuildBankContentUpdate(uint8 TabId, GuildItemPosCountVec const& slots)
{
    // guard before indexing m_TabListMap (consistent with GetItem())
    if (TabId >= GetPurchasedTabs())
        return;
    GuildBankTab const* tab = m_TabListMap[TabId];
    WorldPacket data(SMSG_GUILD_BANK_LIST, 1200);
    data << uint64(GetGuildBankMoney());
    data << uint8(TabId);
    size_t rempos = data.wpos();
    data << uint32(0);                                  // item withdraw amount, patched per receiver below
    data << uint8(0);                                   // Tell client that there's no tab info in this packet
    data << uint8(slots.size());                        // updates count
    for (GuildItemPosCountVec::const_iterator itr = slots.begin(); itr != slots.end(); ++itr)
        AppendDisplayGuildBankSlot(data, tab, itr->Slot);
    for (MemberList::const_iterator itr = members.begin(); itr != members.end(); ++itr)
    {
        Player* player = ObjectAccessor::FindPlayer(ObjectGuid(HIGHGUID_PLAYER, itr->first));
        if (!player)
            continue;
        if (!IsMemberHaveRights(itr->first, TabId, GUILD_BANK_RIGHT_VIEW_TAB))
            continue;
        // per-receiver remaining withdraw count, written into the shared packet
        data.put<uint32>(rempos, uint32(GetMemberSlotWithdrawRem(player->GetGUIDLow(), TabId)));
        player->GetSession()->SendPacket(&data);
    }
    DEBUG_LOG("WORLD: Sent (SMSG_GUILD_BANK_LIST)");
}
// Returns the item stored at (TabId, SlotId), or NULL when either index is
// out of range. An in-range empty slot also yields NULL (slot holds NULL).
Item* Guild::GetItem(uint8 TabId, uint8 SlotId)
{
    bool tabValid  = TabId < GetPurchasedTabs();
    bool slotValid = SlotId < GUILD_BANK_MAX_SLOTS;
    if (tabValid && slotValid)
        return m_TabListMap[TabId]->Slots[SlotId];
    return NULL;
}
// *************************************************
// Tab related
// Sends the tab list (name + icon per purchased tab) to one session.
// Always sent as TabId 0 with the "includes tab info" flag and no slot content.
void Guild::DisplayGuildBankTabsInfo(WorldSession* session)
{
    WorldPacket data(SMSG_GUILD_BANK_LIST, 500);
    data << uint64(GetGuildBankMoney());
    data << uint8(0);                                   // TabInfo packet must be for TabId 0
    data << uint32(GetMemberSlotWithdrawRem(session->GetPlayer()->GetGUIDLow(), 0));
    data << uint8(1);                                   // Tell client that this packet includes tab info
    data << uint8(GetPurchasedTabs());                  // here is the number of tabs
    for (uint8 i = 0; i < GetPurchasedTabs(); ++i)
    {
        data << m_TabListMap[i]->Name.c_str();
        data << m_TabListMap[i]->Icon.c_str();
    }
    data << uint8(0);                                   // Do not send tab content
    session->SendPacket(&data);
    DEBUG_LOG("WORLD: Sent (SMSG_GUILD_BANK_LIST)");
}
// Appends one new (empty) bank tab, up to GUILD_BANK_MAX_TABS, and creates
// its guild_bank_tab row.
void Guild::CreateNewBankTab()
{
    if (GetPurchasedTabs() >= GUILD_BANK_MAX_TABS)
        return;
    uint32 tabId = GetPurchasedTabs();                  // next free id
    m_TabListMap.push_back(new GuildBankTab);
    // delete-then-insert keeps the write idempotent if a stale row exists
    CharacterDatabase.BeginTransaction();
    CharacterDatabase.PExecute("DELETE FROM guild_bank_tab WHERE guildid='%u' AND TabId='%u'", m_Id, tabId);
    CharacterDatabase.PExecute("INSERT INTO guild_bank_tab (guildid,TabId) VALUES ('%u','%u')", m_Id, tabId);
    CharacterDatabase.CommitTransaction();
}
// Updates a tab's name and icon (memory + DB). No-op when nothing changed.
void Guild::SetGuildBankTabInfo(uint8 TabId, std::string Name, std::string Icon)
{
    // bounds/null guard, consistent with SetGuildBankTabText(); the original
    // indexed m_TabListMap with an unchecked TabId
    if (TabId >= GetPurchasedTabs() || !m_TabListMap[TabId])
        return;
    if (m_TabListMap[TabId]->Name == Name && m_TabListMap[TabId]->Icon == Icon)
        return;
    m_TabListMap[TabId]->Name = Name;
    m_TabListMap[TabId]->Icon = Icon;
    // escape only the DB copies; in-memory values keep the raw strings
    CharacterDatabase.escape_string(Name);
    CharacterDatabase.escape_string(Icon);
    CharacterDatabase.PExecute("UPDATE guild_bank_tab SET TabName='%s',TabIcon='%s' WHERE guildid='%u' AND TabId='%u'", Name.c_str(), Icon.c_str(), m_Id, uint32(TabId));
}
// Returns the right mask of rankId on tab TabId; 0 for unknown rank or tab.
uint32 Guild::GetBankRights(uint32 rankId, uint8 TabId) const
{
    bool knownRank = rankId < m_Ranks.size();
    bool knownTab  = TabId < GUILD_BANK_MAX_TABS;
    return (knownRank && knownTab) ? m_Ranks[rankId].TabRight[TabId] : 0;
}
// *************************************************
// Guild bank loading related
// This load should be called on startup only
// Loads all purchased bank tabs (name/icon/text) and then their item content.
// This load should be called on startup only.
void Guild::LoadGuildBankFromDB()
{
    //                                                     0      1        2        3
    QueryResult* result = CharacterDatabase.PQuery("SELECT TabId, TabName, TabIcon, TabText FROM guild_bank_tab WHERE guildid='%u' ORDER BY TabId", m_Id);
    if (!result)
    {
        m_TabListMap.clear();
        return;
    }
    do
    {
        Field* fields = result->Fetch();
        uint8 tabId = fields[0].GetUInt8();
        // skip rows for tabs this guild never purchased
        if (tabId >= GetPurchasedTabs())
        {
            sLog.outError("Table `guild_bank_tab` have not purchased tab %u for guild %u, skipped", tabId, m_Id);
            continue;
        }
        // NOTE(review): assumes m_TabListMap is already sized >= GetPurchasedTabs();
        // any GuildBankTab previously stored at this index is overwritten without
        // being freed — confirm no leak on reload.
        GuildBankTab* NewTab = new GuildBankTab;
        NewTab->Name = fields[1].GetCppString();
        NewTab->Icon = fields[2].GetCppString();
        NewTab->Text = fields[3].GetCppString();
        m_TabListMap[tabId] = NewTab;
    }
    while (result->NextRow());
    delete result;
    // data needs to be at first place for Item::LoadFromDB
    //                                        0     1     2      3       4          5
    result = CharacterDatabase.PQuery("SELECT data, text, TabId, SlotId, item_guid, item_entry FROM guild_bank_item JOIN item_instance ON item_guid = guid WHERE guildid='%u' ORDER BY TabId", m_Id);
    if (!result)
        return;
    do
    {
        Field* fields = result->Fetch();
        uint8 TabId = fields[2].GetUInt8();
        uint8 SlotId = fields[3].GetUInt8();
        uint32 ItemGuid = fields[4].GetUInt32();
        uint32 ItemEntry = fields[5].GetUInt32();
        // validate indices before touching m_TabListMap / Slots
        if (TabId >= GetPurchasedTabs())
        {
            sLog.outError("Guild::LoadGuildBankFromDB: Invalid tab for item (GUID: %u id: #%u) in guild bank, skipped.", ItemGuid, ItemEntry);
            continue;
        }
        if (SlotId >= GUILD_BANK_MAX_SLOTS)
        {
            sLog.outError("Guild::LoadGuildBankFromDB: Invalid slot for item (GUID: %u id: #%u) in guild bank, skipped.", ItemGuid, ItemEntry);
            continue;
        }
        ItemPrototype const* proto = ObjectMgr::GetItemPrototype(ItemEntry);
        if (!proto)
        {
            sLog.outError("Guild::LoadGuildBankFromDB: Unknown item (GUID: %u id: #%u) in guild bank, skipped.", ItemGuid, ItemEntry);
            continue;
        }
        Item* pItem = NewItemOrBag(proto);
        // orphaned guild_bank_item rows (no item_instance data) are purged here
        if (!pItem->LoadFromDB(ItemGuid, fields))
        {
            CharacterDatabase.PExecute("DELETE FROM guild_bank_item WHERE guildid='%u' AND TabId='%u' AND SlotId='%u'", m_Id, uint32(TabId), uint32(SlotId));
            sLog.outError("Item GUID %u not found in item_instance, deleting from Guild Bank!", ItemGuid);
            delete pItem;
            continue;
        }
        pItem->AddToWorld();
        m_TabListMap[TabId]->Slots[SlotId] = pItem;
    }
    while (result->NextRow());
    delete result;
}
// *************************************************
// Money deposit/withdraw related
// Tells one session how much guild bank money the member may still withdraw today.
void Guild::SendMoneyInfo(WorldSession* session, uint32 LowGuid)
{
    uint32 remaining = GetMemberMoneyWithdrawRem(LowGuid);
    WorldPacket data(MSG_GUILD_BANK_MONEY_WITHDRAWN, 4);
    data << uint32(remaining);
    session->SendPacket(&data);
    DEBUG_LOG("WORLD: Sent MSG_GUILD_BANK_MONEY_WITHDRAWN");
}
// Withdraws `amount` copper from the guild bank on behalf of member LowGuid.
// Returns false (and changes nothing) when the member's daily remainder or the
// bank balance is insufficient, or the member record is missing.
bool Guild::MemberMoneyWithdraw(uint32 amount, uint32 LowGuid)
{
    uint32 MoneyWithDrawRight = GetMemberMoneyWithdrawRem(LowGuid);
    if (MoneyWithDrawRight < amount || GetGuildBankMoney() < amount)
        return false;
    if (MoneyWithDrawRight < WITHDRAW_MONEY_UNLIMITED)
    {
        // find the member BEFORE deducting bank money: the original deducted
        // (and persisted) the money first and then returned false on a missing
        // member, losing the amount from the bank with no withdrawal made
        MemberList::iterator itr = members.find(LowGuid);
        if (itr == members.end())
            return false;
        SetBankMoney(GetGuildBankMoney() - amount);
        itr->second.BankRemMoney -= amount;
        CharacterDatabase.PExecute("UPDATE guild_member SET BankRemMoney='%u' WHERE guildid='%u' AND guid='%u'",
            itr->second.BankRemMoney, m_Id, LowGuid);
    }
    else
        // unlimited right (guild master): no per-member remainder to update
        SetBankMoney(GetGuildBankMoney() - amount);
    return true;
}
// Sets the guild bank balance in memory and DB, clamping negatives to zero.
void Guild::SetBankMoney(int64 money)
{
    if (money < 0)                                      // I don't know how this happens, it does!!
        money = 0;                                      // clamp rather than store a huge unsigned value
    m_GuildBankMoney = money;
    CharacterDatabase.PExecute("UPDATE guild SET BankMoney='" UI64FMTD "' WHERE guildid='%u'", money, m_Id);
}
// *************************************************
// Item per day and money per day related
// Consumes one of member LowGuid's daily withdraw-slot uses on tab TabId.
// Returns false when no uses remain or the member record is missing.
bool Guild::MemberItemWithdraw(uint8 TabId, uint32 LowGuid)
{
    uint32 SlotsWithDrawRight = GetMemberSlotWithdrawRem(LowGuid, TabId);
    if (SlotsWithDrawRight == 0)
        return false;
    // unlimited right (guild master) is never decremented
    if (SlotsWithDrawRight < WITHDRAW_SLOT_UNLIMITED)
    {
        MemberList::iterator itr = members.find(LowGuid);
        if (itr == members.end())
            return false;
        --itr->second.BankRemSlotsTab[TabId];
        // column name is built per-tab: BankRemSlotsTab0 .. BankRemSlotsTab<N>
        CharacterDatabase.PExecute("UPDATE guild_member SET BankRemSlotsTab%u='%u' WHERE guildid='%u' AND guid='%u'",
            uint32(TabId), itr->second.BankRemSlotsTab[TabId], m_Id, LowGuid);
    }
    return true;
}
// True when member LowGuid holds ALL bits of `rights` on tab TabId.
// The guild master bypasses per-tab rights entirely.
bool Guild::IsMemberHaveRights(uint32 LowGuid, uint8 TabId, uint32 rights) const
{
    MemberList::const_iterator member = members.find(LowGuid);
    if (member == members.end())
        return false;
    if (member->second.RankId == GR_GUILDMASTER)
        return true;
    uint32 granted = GetBankRights(member->second.RankId, TabId);
    return (granted & rights) == rights;
}
// Returns member LowGuid's remaining withdraw-slot uses on tab TabId for the
// current 24h window, refreshing the per-day allowance when the window expired.
uint32 Guild::GetMemberSlotWithdrawRem(uint32 LowGuid, uint8 TabId)
{
    MemberList::iterator itr = members.find(LowGuid);
    if (itr == members.end())
        return 0;
    if (itr->second.RankId == GR_GUILDMASTER)
        return WITHDRAW_SLOT_UNLIMITED;
    // no view right on the tab -> nothing can be withdrawn from it
    if ((GetBankRights(itr->second.RankId, TabId) & GUILD_BANK_RIGHT_VIEW_TAB) != GUILD_BANK_RIGHT_VIEW_TAB)
        return 0;
    uint32 curTime = uint32(time(NULL) / MINUTE);       // reset timestamps are stored in minutes
    // NOTE(review): uses `>=` while GetMemberMoneyWithdrawRem uses a strict `>`
    // for the same 24h window — confirm whether the off-by-one is intentional.
    if (curTime - itr->second.BankResetTimeTab[TabId] >= 24 * HOUR / MINUTE)
    {
        itr->second.BankResetTimeTab[TabId] = curTime;
        itr->second.BankRemSlotsTab[TabId] = GetBankSlotPerDay(itr->second.RankId, TabId);
        CharacterDatabase.PExecute("UPDATE guild_member SET BankResetTimeTab%u='%u', BankRemSlotsTab%u='%u' WHERE guildid='%u' AND guid='%u'",
            uint32(TabId), itr->second.BankResetTimeTab[TabId], uint32(TabId), itr->second.BankRemSlotsTab[TabId], m_Id, LowGuid);
    }
    return itr->second.BankRemSlotsTab[TabId];
}
// Returns member LowGuid's remaining daily money-withdraw amount, refreshing
// the per-day allowance when the 24h window expired.
uint32 Guild::GetMemberMoneyWithdrawRem(uint32 LowGuid)
{
    MemberList::iterator itr = members.find(LowGuid);
    if (itr == members.end())
        return 0;
    if (itr->second.RankId == GR_GUILDMASTER)
        return WITHDRAW_MONEY_UNLIMITED;
    uint32 curTime = uint32(time(NULL) / MINUTE);       // minutes
    // 24 hours since the last reset
    if (curTime > itr->second.BankResetTimeMoney + 24 * HOUR / MINUTE)
    {
        itr->second.BankResetTimeMoney = curTime;
        itr->second.BankRemMoney = GetBankMoneyPerDay(itr->second.RankId);
        CharacterDatabase.PExecute("UPDATE guild_member SET BankResetTimeMoney='%u', BankRemMoney='%u' WHERE guildid='%u' AND guid='%u'",
            itr->second.BankResetTimeMoney, itr->second.BankRemMoney, m_Id, LowGuid);
    }
    return itr->second.BankRemMoney;
}
// Sets the daily money-withdraw cap for a rank and forces an allowance refresh
// for every member of that rank (reset time 0 => next query re-grants).
void Guild::SetBankMoneyPerDay(uint32 rankId, uint32 money)
{
    if (rankId >= m_Ranks.size())
        return;
    // guild master cap is always unlimited regardless of the requested value
    if (rankId == GR_GUILDMASTER)
        money = WITHDRAW_MONEY_UNLIMITED;
    m_Ranks[rankId].BankMoneyPerDay = money;
    for (MemberList::iterator itr = members.begin(); itr != members.end(); ++itr)
        if (itr->second.RankId == rankId)
            itr->second.BankResetTimeMoney = 0;
    CharacterDatabase.PExecute("UPDATE guild_rank SET BankMoneyPerDay='%u' WHERE rid='%u' AND guildid='%u'", money, rankId, m_Id);
    CharacterDatabase.PExecute("UPDATE guild_member SET BankResetTimeMoney='0' WHERE guildid='%u' AND rank='%u'", m_Id, rankId);
}
// Sets a rank's right mask and daily slot count on one tab.
// db == false is used during loading (memory only); db == true also persists
// and forces a per-tab allowance refresh for all members of that rank.
void Guild::SetBankRightsAndSlots(uint32 rankId, uint8 TabId, uint32 right, uint32 nbSlots, bool db)
{
    if (rankId >= m_Ranks.size() || TabId >= GetPurchasedTabs())
    {
        // TODO remove next line, It is there just to repair existing bug in deleting guild rank
        CharacterDatabase.PExecute("DELETE FROM guild_bank_right WHERE guildid='%u' AND rid='%u' AND TabId='%u'", m_Id, rankId, TabId);
        return;
    }
    // guild master always gets full rights and unlimited slots
    if (rankId == GR_GUILDMASTER)
    {
        nbSlots = WITHDRAW_SLOT_UNLIMITED;
        right = GUILD_BANK_RIGHT_FULL;
    }
    m_Ranks[rankId].TabSlotPerDay[TabId] = nbSlots;
    m_Ranks[rankId].TabRight[TabId] = right;
    if (db)
    {
        // reset every tab's timer for affected members so new limits apply now
        for (MemberList::iterator itr = members.begin(); itr != members.end(); ++itr)
            if (itr->second.RankId == rankId)
                for (int i = 0; i < GUILD_BANK_MAX_TABS; ++i)
                    itr->second.BankResetTimeTab[i] = 0;
        CharacterDatabase.PExecute("DELETE FROM guild_bank_right WHERE guildid='%u' AND TabId='%u' AND rid='%u'", m_Id, uint32(TabId), rankId);
        CharacterDatabase.PExecute("INSERT INTO guild_bank_right (guildid,TabId,rid,gbright,SlotPerDay) VALUES "
            "('%u','%u','%u','%u','%u')", m_Id, uint32(TabId), rankId, m_Ranks[rankId].TabRight[TabId], m_Ranks[rankId].TabSlotPerDay[TabId]);
        CharacterDatabase.PExecute("UPDATE guild_member SET BankResetTimeTab%u='0' WHERE guildid='%u' AND rank='%u'", uint32(TabId), m_Id, rankId);
    }
}
// Daily money-withdraw cap for a rank; 0 for unknown ranks,
// unlimited for the guild master.
uint32 Guild::GetBankMoneyPerDay(uint32 rankId)
{
    if (rankId >= m_Ranks.size())
        return 0;
    return (rankId == GR_GUILDMASTER) ? WITHDRAW_MONEY_UNLIMITED : m_Ranks[rankId].BankMoneyPerDay;
}
// Daily slot-withdraw cap for a rank on a tab; 0 for unknown rank/tab,
// unlimited for the guild master.
uint32 Guild::GetBankSlotPerDay(uint32 rankId, uint8 TabId)
{
    if (rankId >= m_Ranks.size() || TabId >= GUILD_BANK_MAX_TABS)
        return 0;
    return (rankId == GR_GUILDMASTER) ? WITHDRAW_SLOT_UNLIMITED : m_Ranks[rankId].TabSlotPerDay[TabId];
}
// *************************************************
// Rights per day related
// Consumes this guild's rows from a shared guild_bank_right result set.
// NOTE(review): the <, >, == comparisons assume the result is ordered by
// guildid and that guilds are loaded in ascending id order, with each guild
// consuming its own rows and stopping at the next guild's — confirm caller.
bool Guild::LoadBankRightsFromDB(QueryResult* guildBankTabRightsResult)
{
    if (!guildBankTabRightsResult)
        return true;
    do
    {
        Field* fields = guildBankTabRightsResult->Fetch();
        // prevent crash when all rights in result are already processed
        if (!fields)
            break;
        uint32 guildId = fields[0].GetUInt32();
        if (guildId < m_Id)
        {
            // there is in table guild_bank_right record which doesn't have guildid in guild table, report error
            sLog.outErrorDb("Guild %u does not exist but it has a record in guild_bank_right table, deleting it!", guildId);
            CharacterDatabase.PExecute("DELETE FROM guild_bank_right WHERE guildid = '%u'", guildId);
            continue;
        }
        if (guildId > m_Id)
            // we loaded all ranks for this guild bank already, break cycle
            break;
        uint8 TabId = fields[1].GetUInt8();
        uint32 rankId = fields[2].GetUInt32();
        uint16 right = fields[3].GetUInt16();
        uint16 SlotPerDay = fields[4].GetUInt16();
        // db=false: memory only, no write-back during load
        SetBankRightsAndSlots(rankId, TabId, right, SlotPerDay, false);
    }
    while (guildBankTabRightsResult->NextRow());
    return true;
}
// *************************************************
// Bank log related
// Loads the newest GUILD_BANK_MAX_LOGS bank events per item tab, then the
// money log (stored under TabId = GUILD_BANK_MONEY_LOGS_TAB). Rows arrive
// newest-first; push_front rebuilds each list oldest-first.
void Guild::LoadGuildBankEventLogFromDB()
{
    // Money log is in TabId = GUILD_BANK_MONEY_LOGS_TAB
    // uint32 configCount = sWorld.getConfig(CONFIG_UINT32_GUILD_BANK_EVENT_LOG_COUNT);
    // cycle through all purchased guild bank item tabs
    for (uint32 tabId = 0; tabId < uint32(GetPurchasedTabs()); ++tabId)
    {
        //                                                     0        1          2           3            4               5          6
        QueryResult* result = CharacterDatabase.PQuery("SELECT LogGuid, EventType, PlayerGuid, ItemOrMoney, ItemStackCount, DestTabId, TimeStamp FROM guild_bank_eventlog WHERE guildid='%u' AND TabId='%u' ORDER BY TimeStamp DESC,LogGuid DESC LIMIT %u", m_Id, tabId, GUILD_BANK_MAX_LOGS);
        if (!result)
            continue;
        bool isNextLogGuidSet = false;
        do
        {
            Field* fields = result->Fetch();
            GuildBankEventLogEntry NewEvent;
            NewEvent.EventType = fields[1].GetUInt8();
            NewEvent.PlayerGuid = fields[2].GetUInt32();
            NewEvent.ItemOrMoney = fields[3].GetUInt32();
            NewEvent.ItemStackCount = fields[4].GetUInt8();
            NewEvent.DestTabId = fields[5].GetUInt8();
            NewEvent.TimeStamp = fields[6].GetUInt64();
            // if newEvent is moneyEvent, move it to moneyEventTab in DB and report error
            if (NewEvent.isMoneyEvent())
            {
                uint32 logGuid = fields[0].GetUInt32();
                CharacterDatabase.PExecute("UPDATE guild_bank_eventlog SET TabId='%u' WHERE guildid='%u' AND TabId='%u' AND LogGuid='%u'", GUILD_BANK_MONEY_LOGS_TAB, m_Id, tabId, logGuid);
                sLog.outError("GuildBankEventLog ERROR: MoneyEvent LogGuid %u for Guild %u had incorrectly set its TabId to %u, correcting it to %u TabId", logGuid, m_Id, tabId, GUILD_BANK_MONEY_LOGS_TAB);
                continue;
            }
            else
                // add event to list
                // events are ordered from oldest (in beginning) to latest (in the end)
                m_GuildBankEventLog_Item[tabId].push_front(NewEvent);
            // first (newest) row carries the highest LogGuid -> next-guid seed
            if (!isNextLogGuidSet)
            {
                m_GuildBankEventLogNextGuid_Item[tabId] = fields[0].GetUInt32();
                // we don't have to do m_GuildBankEventLogNextGuid_Item[tabId] %= configCount; - it will be done when creating new record
                isNextLogGuidSet = true;
            }
        }
        while (result->NextRow());
        delete result;
    }
    // special handle for guild bank money log
    //                                                     0        1          2           3            4               5          6
    QueryResult* result = CharacterDatabase.PQuery("SELECT LogGuid, EventType, PlayerGuid, ItemOrMoney, ItemStackCount, DestTabId, TimeStamp FROM guild_bank_eventlog WHERE guildid='%u' AND TabId='%u' ORDER BY TimeStamp DESC,LogGuid DESC LIMIT %u", m_Id, GUILD_BANK_MONEY_LOGS_TAB, GUILD_BANK_MAX_LOGS);
    if (!result)
        return;
    bool isNextMoneyLogGuidSet = false;
    do
    {
        Field* fields = result->Fetch();
        if (!isNextMoneyLogGuidSet)
        {
            m_GuildBankEventLogNextGuid_Money = fields[0].GetUInt32();
            // we don't have to do m_GuildBankEventLogNextGuid_Money %= configCount; - it will be done when creating new record
            isNextMoneyLogGuidSet = true;
        }
        GuildBankEventLogEntry NewEvent;
        NewEvent.EventType = fields[1].GetUInt8();
        NewEvent.PlayerGuid = fields[2].GetUInt32();
        NewEvent.ItemOrMoney = fields[3].GetUInt32();
        NewEvent.ItemStackCount = fields[4].GetUInt8();
        NewEvent.DestTabId = fields[5].GetUInt8();
        NewEvent.TimeStamp = fields[6].GetUInt64();
        // if newEvent is not moneyEvent, then report error (row kept in DB, just not listed)
        if (!NewEvent.isMoneyEvent())
            sLog.outError("GuildBankEventLog ERROR: MoneyEvent LogGuid %u for Guild %u is not MoneyEvent - ignoring...", fields[0].GetUInt32(), m_Id);
        else
            // add event to list
            // events are ordered from oldest (in beginning) to latest (in the end)
            m_GuildBankEventLog_Money.push_front(NewEvent);
    }
    while (result->NextRow());
    delete result;
}
void Guild::DisplayGuildBankLogs(WorldSession* session, uint8 TabId)
{
if (TabId > GUILD_BANK_MAX_TABS)
return;
if (TabId == GUILD_BANK_MAX_TABS)
{
// Here we display money logs
WorldPacket data(MSG_GUILD_BANK_LOG_QUERY, m_GuildBankEventLog_Money.size() * (4 * 4 + 1) + 1 + 1);
data << uint8(TabId); // Here GUILD_BANK_MAX_TABS
data << uint8(m_GuildBankEventLog_Money.size()); // number of log entries
for (GuildBankEventLog::const_iterator itr = m_GuildBankEventLog_Money.begin(); itr != m_GuildBankEventLog_Money.end(); ++itr)
{
data << uint8(itr->EventType);
data << ObjectGuid(HIGHGUID_PLAYER, itr->PlayerGuid);
if (itr->EventType == GUILD_BANK_LOG_DEPOSIT_MONEY ||
itr->EventType == GUILD_BANK_LOG_WITHDRAW_MONEY ||
itr->EventType == GUILD_BANK_LOG_REPAIR_MONEY ||
itr->EventType == GUILD_BANK_LOG_UNK1 ||
itr->EventType == GUILD_BANK_LOG_UNK2)
{
data << uint32(itr->ItemOrMoney);
}
else
{
data << uint32(itr->ItemOrMoney);
data << uint32(itr->ItemStackCount);
if (itr->EventType == GUILD_BANK_LOG_MOVE_ITEM || itr->EventType == GUILD_BANK_LOG_MOVE_ITEM2)
data << uint8(itr->DestTabId); // moved tab
}
data << uint32(time(NULL) - itr->TimeStamp);
}
session->SendPacket(&data);
}
else
{
// here we display current tab logs
WorldPacket data(MSG_GUILD_BANK_LOG_QUERY, m_GuildBankEventLog_Item[TabId].size() * (4 * 4 + 1 + 1) + 1 + 1);
data << uint8(TabId); // Here a real Tab Id
// number of log entries
data << uint8(m_GuildBankEventLog_Item[TabId].size());
for (GuildBankEventLog::const_iterator itr = m_GuildBankEventLog_Item[TabId].begin(); itr != m_GuildBankEventLog_Item[TabId].end(); ++itr)
{
data << uint8(itr->EventType);
data << ObjectGuid(HIGHGUID_PLAYER, itr->PlayerGuid);
if (itr->EventType == GUILD_BANK_LOG_DEPOSIT_MONEY ||
itr->EventType == GUILD_BANK_LOG_WITHDRAW_MONEY ||
itr->EventType == GUILD_BANK_LOG_REPAIR_MONEY ||
itr->EventType == GUILD_BANK_LOG_UNK1 ||
itr->EventType == GUILD_BANK_LOG_UNK2)
{
data << uint32(itr->ItemOrMoney);
}
else
{
data << uint32(itr->ItemOrMoney);
data << uint32(itr->ItemStackCount);
if (itr->EventType == GUILD_BANK_LOG_MOVE_ITEM || itr->EventType == GUILD_BANK_LOG_MOVE_ITEM2)
data << uint8(itr->DestTabId); // moved tab
}
data << uint32(time(NULL) - itr->TimeStamp);
}
session->SendPacket(&data);
}
DEBUG_LOG("WORLD: Sent (MSG_GUILD_BANK_LOG_QUERY)");
}
// Appends a bank event to the appropriate log (money events always go to the
// money log regardless of TabId) and persists it with a rotating LogGuid.
void Guild::LogBankEvent(uint8 EventType, uint8 TabId, uint32 PlayerGuidLow, uint32 ItemOrMoney, uint8 ItemStackCount, uint8 DestTabId)
{
    // create Event
    GuildBankEventLogEntry NewEvent;
    NewEvent.EventType = EventType;
    NewEvent.PlayerGuid = PlayerGuidLow;
    NewEvent.ItemOrMoney = ItemOrMoney;
    NewEvent.ItemStackCount = ItemStackCount;
    NewEvent.DestTabId = DestTabId;
    NewEvent.TimeStamp = uint32(time(NULL));
    // add new event to the end of event list
    uint32 currentTabId = TabId;
    uint32 currentLogGuid = 0;
    if (NewEvent.isMoneyEvent())
    {
        // rotating LogGuid wraps at the configured count so old DB rows get reused
        m_GuildBankEventLogNextGuid_Money = (m_GuildBankEventLogNextGuid_Money + 1) % sWorld.getConfig(CONFIG_UINT32_GUILD_BANK_EVENT_LOG_COUNT);
        currentLogGuid = m_GuildBankEventLogNextGuid_Money;
        // money events are stored under the dedicated money-log tab id
        currentTabId = GUILD_BANK_MONEY_LOGS_TAB;
        if (m_GuildBankEventLog_Money.size() >= GUILD_BANK_MAX_LOGS)
            m_GuildBankEventLog_Money.pop_front();
        m_GuildBankEventLog_Money.push_back(NewEvent);
    }
    else
    {
        m_GuildBankEventLogNextGuid_Item[TabId] = ((m_GuildBankEventLogNextGuid_Item[TabId]) + 1) % sWorld.getConfig(CONFIG_UINT32_GUILD_BANK_EVENT_LOG_COUNT);
        currentLogGuid = m_GuildBankEventLogNextGuid_Item[TabId];
        if (m_GuildBankEventLog_Item[TabId].size() >= GUILD_BANK_MAX_LOGS)
            m_GuildBankEventLog_Item[TabId].pop_front();
        m_GuildBankEventLog_Item[TabId].push_back(NewEvent);
    }
    // save event to database: overwrite the row holding this rotating LogGuid
    CharacterDatabase.PExecute("DELETE FROM guild_bank_eventlog WHERE guildid='%u' AND LogGuid='%u' AND TabId='%u'", m_Id, currentLogGuid, currentTabId);
    CharacterDatabase.PExecute("INSERT INTO guild_bank_eventlog (guildid,LogGuid,TabId,EventType,PlayerGuid,ItemOrMoney,ItemStackCount,DestTabId,TimeStamp) VALUES ('%u','%u','%u','%u','%u','%u','%u','%u','" UI64FMTD "')",
        m_Id, currentLogGuid, currentTabId, uint32(NewEvent.EventType), NewEvent.PlayerGuid, NewEvent.ItemOrMoney, uint32(NewEvent.ItemStackCount), uint32(NewEvent.DestTabId), NewEvent.TimeStamp);
}
// Writes (replaces) the guild_bank_item row mapping a bank slot to an item guid.
// Always returns true.
bool Guild::AddGBankItemToDB(uint32 GuildId, uint32 BankTab , uint32 BankTabSlot , uint32 GUIDLow, uint32 Entry)
{
    // NOTE(review): missing space in "'%u'AND" — MySQL tokenizes this correctly
    // ('5'AND), but add the space if this string is ever touched.
    CharacterDatabase.PExecute("DELETE FROM guild_bank_item WHERE guildid = '%u' AND TabId = '%u'AND SlotId = '%u'", GuildId, BankTab, BankTabSlot);
    CharacterDatabase.PExecute("INSERT INTO guild_bank_item (guildid,TabId,SlotId,item_guid,item_entry) "
        "VALUES ('%u', '%u', '%u', '%u', '%u')", GuildId, BankTab, BankTabSlot, GUIDLow, Entry);
    return true;
}
// Serializes one bank slot (slot index + item data, or entry 0 when empty)
// into an SMSG_GUILD_BANK_LIST packet. `tab` must be a valid tab pointer and
// `slot` a valid slot index; callers validate both.
void Guild::AppendDisplayGuildBankSlot(WorldPacket& data, GuildBankTab const* tab, int slot)
{
    Item* pItem = tab->Slots[slot];
    uint32 entry = pItem ? pItem->GetEntry() : 0;
    data << uint8(slot);
    data << uint32(entry);                              // 0 marks an empty slot; client skips the rest
    if (entry)
    {
        data << uint32(0);                              // 3.3.0 (0x8000, 0x8020)
        data << uint32(pItem->GetItemRandomPropertyId()); // random item property id + 8
        if (pItem->GetItemRandomPropertyId())
            data << uint32(pItem->GetItemSuffixFactor()); // SuffixFactor + 4
        data << uint32(pItem->GetCount());              // +12 ITEM_FIELD_STACK_COUNT
        data << uint32(0);                              // +16 Unknown value
        data << uint8(0);                               // +20
        // enchantment count is written once known; position is remembered and
        // back-patched after the loop below
        uint8 enchCount = 0;
        size_t enchCountPos = data.wpos();
        data << uint8(enchCount);                       // number of enchantments
        for (uint32 i = PERM_ENCHANTMENT_SLOT; i < MAX_ENCHANTMENT_SLOT; ++i)
        {
            if (uint32 enchId = pItem->GetEnchantmentId(EnchantmentSlot(i)))
            {
                data << uint8(i);
                data << uint32(enchId);
                ++enchCount;
            }
        }
        data.put<uint8>(enchCountPos, enchCount);
    }
}
// Distributes pItem over the destination positions in `dest`: every position
// except the last receives a clone of the needed count; the final position
// consumes pItem itself. Returns the item stored at the last position.
Item* Guild::StoreItem(uint8 tabId, GuildItemPosCountVec const& dest, Item* pItem)
{
    if (!pItem)
        return NULL;
    Item* lastItem = pItem;
    GuildItemPosCountVec::const_iterator cur = dest.begin();
    while (cur != dest.end())
    {
        GuildItemPosCountVec::const_iterator next = cur;
        ++next;
        // only the final placement hands over pItem; earlier ones clone it
        bool isLast = (next == dest.end());
        lastItem = _StoreItem(tabId, cur->Slot, pItem, cur->Count, !isLast);
        cur = next;
    }
    return lastItem;
}
// Returns the stored item (if merged into an existing stack it can differ from pItem; pItem may be deleted in that case).
// Places `count` units of pItem into (tab, slot).
// Empty slot: stores pItem itself (clone == false, count adjusted) or a clone
// of it (clone == true) and persists the new mapping.
// Occupied slot: merges into the existing stack; when clone == false the
// source item is destroyed (removed from world/DB and deleted).
// Returns the item now occupying the slot, or NULL on clone failure.
Item* Guild::_StoreItem(uint8 tab, uint8 slot, Item* pItem, uint32 count, bool clone)
{
    if (!pItem)
        return NULL;
    DEBUG_LOG("GUILD STORAGE: StoreItem tab = %u, slot = %u, item = %u, count = %u", tab, slot, pItem->GetEntry(), count);
    Item* pItem2 = m_TabListMap[tab]->Slots[slot];
    if (!pItem2)
    {
        if (clone)
            pItem = pItem->CloneItem(count);
        else
            pItem->SetCount(count);
        if (!pItem)                                     // CloneItem may fail
            return NULL;
        m_TabListMap[tab]->Slots[slot] = pItem;
        // bank items belong to no container and no player
        pItem->SetGuidValue(ITEM_FIELD_CONTAINED, ObjectGuid());
        pItem->SetGuidValue(ITEM_FIELD_OWNER, ObjectGuid());
        AddGBankItemToDB(GetId(), tab, slot, pItem->GetGUIDLow(), pItem->GetEntry());
        pItem->FSetState(ITEM_NEW);
        pItem->SaveToDB();                              // not in inventory and can be save standalone
        return pItem;
    }
    else
    {
        pItem2->SetCount(pItem2->GetCount() + count);
        pItem2->FSetState(ITEM_CHANGED);
        pItem2->SaveToDB();                             // not in inventory and can be save standalone
        if (!clone)
        {
            // source fully merged away: destroy the original item object
            pItem->RemoveFromWorld();
            pItem->DeleteFromDB();
            delete pItem;
        }
        return pItem2;
    }
}
// Clears the slot in memory and removes the matching guild_bank_item row.
// Does not touch the Item object itself; the caller owns its lifetime.
void Guild::RemoveItem(uint8 tab, uint8 slot)
{
    m_TabListMap[tab]->Slots[slot] = NULL;
    CharacterDatabase.PExecute("DELETE FROM guild_bank_item WHERE guildid='%u' AND TabId='%u' AND SlotId='%u'",
        GetId(), uint32(tab), uint32(slot));
}
// Checks how much of pSrcItem can go into exactly (tab, slot), appending the
// usable amount to `dest` and reducing `count` by it. Returns EQUIP_ERR_OK or
// a stacking error for an incompatible/full occupied slot.
InventoryResult Guild::_CanStoreItem_InSpecificSlot(uint8 tab, uint8 slot, GuildItemPosCountVec& dest, uint32& count, bool swap, Item* pSrcItem) const
{
    Item* pItem2 = m_TabListMap[tab]->Slots[slot];
    // ignore move item (this slot will be empty at move)
    if (pItem2 == pSrcItem)
        pItem2 = NULL;
    uint32 need_space;
    // empty specific slot (or swap: occupant leaves) - whole stack fits
    if (!pItem2 || swap)
    {
        need_space = pSrcItem->GetMaxStackCount();
    }
    // non empty slot, check item type
    else
    {
        // only identical items can stack
        if (pItem2->GetEntry() != pSrcItem->GetEntry())
            return EQUIP_ERR_ITEM_CANT_STACK;
        // stack already full
        if (pItem2->GetCount() >= pSrcItem->GetMaxStackCount())
            return EQUIP_ERR_ITEM_CANT_STACK;
        need_space = pSrcItem->GetMaxStackCount() - pItem2->GetCount();
    }
    if (need_space > count)
        need_space = count;
    GuildItemPosCount newPosition = GuildItemPosCount(slot, need_space);
    // don't register the same slot twice across check passes
    if (!newPosition.isContainedIn(dest))
    {
        dest.push_back(newPosition);
        count -= need_space;
    }
    return EQUIP_ERR_OK;
}
// Scans every slot of a tab (except skip_slot) for room for pSrcItem.
// merge == true considers only occupied, stackable-compatible slots;
// merge == false considers only empty slots. Usable amounts are appended to
// `dest` and subtracted from `count`; returns EQUIP_ERR_OK in all cases.
InventoryResult Guild::_CanStoreItem_InTab(uint8 tab, GuildItemPosCountVec& dest, uint32& count, bool merge, Item* pSrcItem, uint8 skip_slot) const
{
    for (uint32 j = 0; j < GUILD_BANK_MAX_SLOTS; ++j)
    {
        // skip specific slot already processed in first called _CanStoreItem_InSpecificSlot
        if (j == skip_slot)
            continue;
        Item* slotItem = m_TabListMap[tab]->Slots[j];
        // the item being moved leaves its slot, so treat that slot as empty
        if (slotItem == pSrcItem)
            slotItem = NULL;
        // merge pass wants occupied slots only, placement pass wants empty ones
        if ((slotItem != NULL) != merge)
            continue;
        uint32 freeSpace;
        if (slotItem)
        {
            // only identical items with remaining stack room can merge
            if (slotItem->GetEntry() != pSrcItem->GetEntry() || slotItem->GetCount() >= pSrcItem->GetMaxStackCount())
                continue;
            freeSpace = pSrcItem->GetMaxStackCount() - slotItem->GetCount();
        }
        else
            freeSpace = pSrcItem->GetMaxStackCount();
        if (freeSpace > count)
            freeSpace = count;
        GuildItemPosCount position = GuildItemPosCount(j, freeSpace);
        // don't register the same slot twice across check passes
        if (!position.isContainedIn(dest))
        {
            dest.push_back(position);
            count -= freeSpace;
            if (count == 0)
                return EQUIP_ERR_OK;
        }
    }
    return EQUIP_ERR_OK;
}
// Computes where `count` units of pItem can be stored in `tab`, filling `dest`
// with (slot, amount) pairs. Tries the requested slot first, then merges into
// existing stacks, then empty slots. Returns EQUIP_ERR_OK when everything
// fits, otherwise the blocking error (EQUIP_ERR_BANK_FULL if space ran out).
InventoryResult Guild::CanStoreItem(uint8 tab, uint8 slot, GuildItemPosCountVec& dest, uint32 count, Item* pItem, bool swap) const
{
    DEBUG_LOG("GUILD STORAGE: CanStoreItem tab = %u, slot = %u, item = %u, count = %u", tab, slot, pItem->GetEntry(), count);
    if (count > pItem->GetCount())
        return EQUIP_ERR_COULDNT_SPLIT_ITEMS;
    // soulbound items can never enter the guild bank
    if (pItem->IsSoulBound())
        return EQUIP_ERR_CANT_DROP_SOULBOUND;
    // in specific slot
    if (slot != NULL_SLOT)
    {
        InventoryResult res = _CanStoreItem_InSpecificSlot(tab, slot, dest, count, swap, pItem);
        if (res != EQUIP_ERR_OK)
            return res;
        if (count == 0)
            return EQUIP_ERR_OK;
    }
    // not specific slot or have space for partly store only in specific slot
    // search stack in tab for merge to
    if (pItem->GetMaxStackCount() > 1)
    {
        InventoryResult res = _CanStoreItem_InTab(tab, dest, count, true, pItem, slot);
        if (res != EQUIP_ERR_OK)
            return res;
        if (count == 0)
            return EQUIP_ERR_OK;
    }
    // search free slot in bag for place to
    InventoryResult res = _CanStoreItem_InTab(tab, dest, count, false, pItem, slot);
    if (res != EQUIP_ERR_OK)
        return res;
    if (count == 0)
        return EQUIP_ERR_OK;
    return EQUIP_ERR_BANK_FULL;
}
// Updates a tab's info text (memory + DB) and broadcasts the change.
// No-op for invalid tabs or an unchanged text.
void Guild::SetGuildBankTabText(uint8 TabId, std::string text)
{
    if (TabId >= GetPurchasedTabs())
        return;
    if (!m_TabListMap[TabId])
        return;
    if (m_TabListMap[TabId]->Text == text)
        return;
    utf8truncate(text, 500);                            // DB and client size limitation
    m_TabListMap[TabId]->Text = text;
    // escape only the DB copy; memory keeps the raw (truncated) text
    CharacterDatabase.escape_string(text);
    CharacterDatabase.PExecute("UPDATE guild_bank_tab SET TabText='%s' WHERE guildid='%u' AND TabId='%u'", text.c_str(), m_Id, uint32(TabId));
    // announce to all members (NULL session => broadcast)
    SendGuildBankTabText(NULL, TabId);
}
// Sends a tab's info text to one session, or broadcasts it to the whole guild
// when session is NULL.
void Guild::SendGuildBankTabText(WorldSession* session, uint8 TabId)
{
    // TabId can arrive from a client text query; guard before indexing
    // m_TabListMap (consistent with SetGuildBankTabText)
    if (TabId >= GetPurchasedTabs())
        return;
    GuildBankTab const* tab = m_TabListMap[TabId];
    if (!tab)
        return;
    WorldPacket data(MSG_QUERY_GUILD_BANK_TEXT, 1 + tab->Text.size() + 1);
    data << uint8(TabId);
    data << tab->Text;
    if (session)
        session->SendPacket(&data);
    else
        BroadcastPacket(&data);
}
// Moves items between two bank slots (possibly across tabs): split, merge, or
// full swap. Validates rights/limits, performs DB-transactional updates and
// refreshes the affected tab(s) for all viewers.
// SplitedAmount: 0 or the full stack means "move whole stack".
void Guild::SwapItems(Player* pl, uint8 BankTab, uint8 BankTabSlot, uint8 BankTabDst, uint8 BankTabSlotDst, uint32 SplitedAmount)
{
    // empty operation
    if (BankTab == BankTabDst && BankTabSlot == BankTabSlotDst)
        return;
    Item* pItemSrc = GetItem(BankTab, BankTabSlot);
    if (!pItemSrc)                                      // may prevent crash
        return;
    if (SplitedAmount > pItemSrc->GetCount())
        return;                                         // cheating?
    else if (SplitedAmount == pItemSrc->GetCount())
        SplitedAmount = 0;                              // no split
    Item* pItemDst = GetItem(BankTabDst, BankTabSlotDst);
    if (BankTab != BankTabDst)
    {
        // check dest pos rights (if different tabs)
        if (!IsMemberHaveRights(pl->GetGUIDLow(), BankTabDst, GUILD_BANK_RIGHT_DEPOSIT_ITEM))
            return;
        // check source pos rights (if different tabs)
        // NOTE(review): remRight is unsigned, so `<= 0` is effectively `== 0`
        uint32 remRight = GetMemberSlotWithdrawRem(pl->GetGUIDLow(), BankTab);
        if (remRight <= 0)
            return;
    }
    if (SplitedAmount)
    {
        // Bank -> Bank item split (in empty or non empty slot
        GuildItemPosCountVec dest;
        InventoryResult msg = CanStoreItem(BankTabDst, BankTabSlotDst, dest, SplitedAmount, pItemSrc, false);
        if (msg != EQUIP_ERR_OK)
        {
            pl->SendEquipError(msg, pItemSrc, NULL);
            return;
        }
        Item* pNewItem = pItemSrc->CloneItem(SplitedAmount);
        if (!pNewItem)
        {
            pl->SendEquipError(EQUIP_ERR_ITEM_NOT_FOUND, pItemSrc, NULL);
            return;
        }
        CharacterDatabase.BeginTransaction();
        // cross-tab moves are logged; same-tab rearranges are not
        if (BankTab != BankTabDst)
            LogBankEvent(GUILD_BANK_LOG_MOVE_ITEM, BankTab, pl->GetGUIDLow(), pItemSrc->GetEntry(), SplitedAmount, BankTabDst);
        pl->ItemRemovedQuestCheck(pItemSrc->GetEntry(), SplitedAmount);
        pItemSrc->SetCount(pItemSrc->GetCount() - SplitedAmount);
        pItemSrc->FSetState(ITEM_CHANGED);
        pItemSrc->SaveToDB();                           // not in inventory and can be save standalone
        StoreItem(BankTabDst, dest, pNewItem);
        CharacterDatabase.CommitTransaction();
    }
    else                                                // non split
    {
        // first try a plain move/merge into the destination
        GuildItemPosCountVec gDest;
        InventoryResult msg = CanStoreItem(BankTabDst, BankTabSlotDst, gDest, pItemSrc->GetCount(), pItemSrc, false);
        if (msg == EQUIP_ERR_OK)                        // merge to
        {
            CharacterDatabase.BeginTransaction();
            if (BankTab != BankTabDst)
                LogBankEvent(GUILD_BANK_LOG_MOVE_ITEM, BankTab, pl->GetGUIDLow(), pItemSrc->GetEntry(), pItemSrc->GetCount(), BankTabDst);
            RemoveItem(BankTab, BankTabSlot);
            StoreItem(BankTabDst, gDest, pItemSrc);
            CharacterDatabase.CommitTransaction();
        }
        else                                            // swap
        {
            // destination is occupied by an incompatible item: check both
            // directions with swap semantics (occupant treated as leaving)
            gDest.clear();
            msg = CanStoreItem(BankTabDst, BankTabSlotDst, gDest, pItemSrc->GetCount(), pItemSrc, true);
            if (msg != EQUIP_ERR_OK)
            {
                pl->SendEquipError(msg, pItemSrc, NULL);
                return;
            }
            GuildItemPosCountVec gSrc;
            msg = CanStoreItem(BankTab, BankTabSlot, gSrc, pItemDst->GetCount(), pItemDst, true);
            if (msg != EQUIP_ERR_OK)
            {
                pl->SendEquipError(msg, pItemDst, NULL);
                return;
            }
            if (BankTab != BankTabDst)
            {
                // check source pos rights (item swapped to src)
                if (!IsMemberHaveRights(pl->GetGUIDLow(), BankTab, GUILD_BANK_RIGHT_DEPOSIT_ITEM))
                    return;
                // check dest pos rights (item swapped to src)
                uint32 remRightDst = GetMemberSlotWithdrawRem(pl->GetGUIDLow(), BankTabDst);
                if (remRightDst <= 0)
                    return;
            }
            CharacterDatabase.BeginTransaction();
            if (BankTab != BankTabDst)
            {
                LogBankEvent(GUILD_BANK_LOG_MOVE_ITEM, BankTab, pl->GetGUIDLow(), pItemSrc->GetEntry(), pItemSrc->GetCount(), BankTabDst);
                LogBankEvent(GUILD_BANK_LOG_MOVE_ITEM, BankTabDst, pl->GetGUIDLow(), pItemDst->GetEntry(), pItemDst->GetCount(), BankTab);
            }
            RemoveItem(BankTab, BankTabSlot);
            RemoveItem(BankTabDst, BankTabSlotDst);
            StoreItem(BankTab, gSrc, pItemDst);
            StoreItem(BankTabDst, gDest, pItemSrc);
            CharacterDatabase.CommitTransaction();
        }
    }
    // refresh viewers: same-tab move updates both slots in one packet
    DisplayGuildBankContentUpdate(BankTab, BankTabSlot, BankTab == BankTabDst ? BankTabSlotDst : -1);
    if (BankTab != BankTabDst)
        DisplayGuildBankContentUpdate(BankTabDst, BankTabSlotDst);
}
// Moves an item from a guild bank slot into the player's inventory.
// Three cases are handled:
//   1. SplitedAmount > 0: clone part of the bank stack into the inventory.
//   2. SplitedAmount == 0 and the destination can absorb the stack: full move/merge.
//   3. Otherwise: swap the bank item with the (possibly NULL) inventory item.
// Each case groups its DB writes inside one CharacterDatabase transaction so
// the bank tab and character inventory cannot diverge mid-operation.
// Withdraw-slot rights are checked after storability so the user gets the
// storage error first; deposit rights are checked only in the swap case,
// where an item also travels into the bank.
void Guild::MoveFromBankToChar(Player* pl, uint8 BankTab, uint8 BankTabSlot, uint8 PlayerBag, uint8 PlayerSlot, uint32 SplitedAmount)
{
Item* pItemBank = GetItem(BankTab, BankTabSlot);
Item* pItemChar = pl->GetItemByPos(PlayerBag, PlayerSlot);
if (!pItemBank) // Problem to get bank item
return;
// Normalize the split request: asking for more than exists is rejected,
// asking for the whole stack degenerates into a plain move.
if (SplitedAmount > pItemBank->GetCount())
return; // cheating?
else if (SplitedAmount == pItemBank->GetCount())
SplitedAmount = 0; // no split
if (SplitedAmount)
{
// Bank -> Char split to slot (partial move)
Item* pNewItem = pItemBank->CloneItem(SplitedAmount);
if (!pNewItem)
{
pl->SendEquipError(EQUIP_ERR_ITEM_NOT_FOUND, pItemBank, NULL);
return;
}
ItemPosCountVec dest;
InventoryResult msg = pl->CanStoreItem(PlayerBag, PlayerSlot, dest, pNewItem, false);
if (msg != EQUIP_ERR_OK)
{
// The clone was never stored anywhere, so it must be freed on every
// early-return path below.
pl->SendEquipError(msg, pNewItem, NULL);
delete pNewItem;
return;
}
// check source pos rights (item moved to inventory)
uint32 remRight = GetMemberSlotWithdrawRem(pl->GetGUIDLow(), BankTab);
if (remRight <= 0)
{
delete pNewItem;
return;
}
CharacterDatabase.BeginTransaction();
LogBankEvent(GUILD_BANK_LOG_WITHDRAW_ITEM, BankTab, pl->GetGUIDLow(), pItemBank->GetEntry(), SplitedAmount);
// Shrink the bank stack by the withdrawn amount and persist it directly.
pItemBank->SetCount(pItemBank->GetCount() - SplitedAmount);
pItemBank->FSetState(ITEM_CHANGED);
pItemBank->SaveToDB(); // not in inventory and can be save standalone
pl->MoveItemToInventory(dest, pNewItem, true);
pl->SaveInventoryAndGoldToDB();
MemberItemWithdraw(BankTab, pl->GetGUIDLow());
CharacterDatabase.CommitTransaction();
}
else // Bank -> Char swap with slot (move)
{
ItemPosCountVec dest;
InventoryResult msg = pl->CanStoreItem(PlayerBag, PlayerSlot, dest, pItemBank, false);
if (msg == EQUIP_ERR_OK) // merge case
{
// check source pos rights (item moved to inventory)
uint32 remRight = GetMemberSlotWithdrawRem(pl->GetGUIDLow(), BankTab);
if (remRight <= 0)
return;
CharacterDatabase.BeginTransaction();
LogBankEvent(GUILD_BANK_LOG_WITHDRAW_ITEM, BankTab, pl->GetGUIDLow(), pItemBank->GetEntry(), pItemBank->GetCount());
RemoveItem(BankTab, BankTabSlot);
pl->MoveItemToInventory(dest, pItemBank, true);
pl->SaveInventoryAndGoldToDB();
MemberItemWithdraw(BankTab, pl->GetGUIDLow());
CharacterDatabase.CommitTransaction();
}
else // Bank <-> Char swap items
{
// An item will be deposited into the bank, so deposit rights are needed.
// check source pos rights (item swapped to bank)
if (!IsMemberHaveRights(pl->GetGUIDLow(), BankTab, GUILD_BANK_RIGHT_DEPOSIT_ITEM))
return;
if (pItemChar)
{
if (!pItemChar->CanBeTraded())
{
pl->SendEquipError(EQUIP_ERR_ITEMS_CANT_BE_SWAPPED, pItemChar, NULL);
return;
}
}
// Validate both directions of the swap before mutating anything.
ItemPosCountVec iDest;
msg = pl->CanStoreItem(PlayerBag, PlayerSlot, iDest, pItemBank, true);
if (msg != EQUIP_ERR_OK)
{
pl->SendEquipError(msg, pItemBank, NULL);
return;
}
GuildItemPosCountVec gDest;
if (pItemChar)
{
msg = CanStoreItem(BankTab, BankTabSlot, gDest, pItemChar->GetCount(), pItemChar, true);
if (msg != EQUIP_ERR_OK)
{
pl->SendEquipError(msg, pItemChar, NULL);
return;
}
}
// check source pos rights (item moved to inventory)
uint32 remRight = GetMemberSlotWithdrawRem(pl->GetGUIDLow(), BankTab);
if (remRight <= 0)
return;
if (pItemChar)
{
// logging item move to bank
if (pl->GetSession()->GetSecurity() > SEC_PLAYER && sWorld.getConfig(CONFIG_BOOL_GM_LOG_TRADE))
{
sLog.outCommand(pl->GetSession()->GetAccountId(), "GM %s (Account: %u) deposit item: %s (Entry: %d Count: %u) to guild bank (Guild ID: %u )",
pl->GetName(), pl->GetSession()->GetAccountId(),
pItemChar->GetProto()->Name1, pItemChar->GetEntry(), pItemChar->GetCount(),
m_Id);
}
}
// Commit point: both sides validated, perform the swap atomically.
CharacterDatabase.BeginTransaction();
LogBankEvent(GUILD_BANK_LOG_WITHDRAW_ITEM, BankTab, pl->GetGUIDLow(), pItemBank->GetEntry(), pItemBank->GetCount());
if (pItemChar)
LogBankEvent(GUILD_BANK_LOG_DEPOSIT_ITEM, BankTab, pl->GetGUIDLow(), pItemChar->GetEntry(), pItemChar->GetCount());
RemoveItem(BankTab, BankTabSlot);
if (pItemChar)
{
pl->MoveItemFromInventory(PlayerBag, PlayerSlot, true);
pItemChar->DeleteFromInventoryDB();
}
if (pItemChar)
StoreItem(BankTab, gDest, pItemChar);
pl->MoveItemToInventory(iDest, pItemBank, true);
pl->SaveInventoryAndGoldToDB();
MemberItemWithdraw(BankTab, pl->GetGUIDLow());
CharacterDatabase.CommitTransaction();
}
}
// Refresh the client's view of the affected bank slot.
DisplayGuildBankContentUpdate(BankTab, BankTabSlot);
}
// Moves an item from the player's inventory into a guild bank slot.
// Mirrors MoveFromBankToChar: a split deposits part of the stack, a plain
// move merges/stores the full stack, and otherwise the inventory item is
// swapped with the (possibly NULL) bank item. Deposit rights are checked up
// front; withdraw rights only when an existing bank item travels back out.
// GM deposits are audit-logged when CONFIG_BOOL_GM_LOG_TRADE is enabled.
void Guild::MoveFromCharToBank(Player* pl, uint8 PlayerBag, uint8 PlayerSlot, uint8 BankTab, uint8 BankTabSlot, uint32 SplitedAmount)
{
Item* pItemBank = GetItem(BankTab, BankTabSlot);
Item* pItemChar = pl->GetItemByPos(PlayerBag, PlayerSlot);
if (!pItemChar) // Problem to get item from player
return;
if (!pItemChar->CanBeTraded())
{
pl->SendEquipError(EQUIP_ERR_ITEMS_CANT_BE_SWAPPED, pItemChar, NULL);
return;
}
// check source pos rights (item moved to bank)
if (!IsMemberHaveRights(pl->GetGUIDLow(), BankTab, GUILD_BANK_RIGHT_DEPOSIT_ITEM))
return;
// Normalize the split request (full-stack split degenerates into a move).
if (SplitedAmount > pItemChar->GetCount())
return; // cheating?
else if (SplitedAmount == pItemChar->GetCount())
SplitedAmount = 0; // no split
if (SplitedAmount)
{
// Char -> Bank split to empty or non-empty slot (partly move)
GuildItemPosCountVec dest;
InventoryResult msg = CanStoreItem(BankTab, BankTabSlot, dest, SplitedAmount, pItemChar, false);
if (msg != EQUIP_ERR_OK)
{
pl->SendEquipError(msg, pItemChar, NULL);
return;
}
Item* pNewItem = pItemChar->CloneItem(SplitedAmount);
if (!pNewItem)
{
pl->SendEquipError(EQUIP_ERR_ITEM_NOT_FOUND, pItemChar, NULL);
return;
}
// logging item move to bank (before items merge)
if (pl->GetSession()->GetSecurity() > SEC_PLAYER && sWorld.getConfig(CONFIG_BOOL_GM_LOG_TRADE))
{
sLog.outCommand(pl->GetSession()->GetAccountId(), "GM %s (Account: %u) deposit item: %s (Entry: %d Count: %u) to guild bank (Guild ID: %u )",
pl->GetName(), pl->GetSession()->GetAccountId(),
pItemChar->GetProto()->Name1, pItemChar->GetEntry(), SplitedAmount, m_Id);
}
CharacterDatabase.BeginTransaction();
LogBankEvent(GUILD_BANK_LOG_DEPOSIT_ITEM, BankTab, pl->GetGUIDLow(), pItemChar->GetEntry(), SplitedAmount);
// Shrink the character stack, persist the inventory, then store the clone.
pl->ItemRemovedQuestCheck(pItemChar->GetEntry(), SplitedAmount);
pItemChar->SetCount(pItemChar->GetCount() - SplitedAmount);
pItemChar->SetState(ITEM_CHANGED);
pl->SaveInventoryAndGoldToDB();
StoreItem(BankTab, dest, pNewItem);
CharacterDatabase.CommitTransaction();
DisplayGuildBankContentUpdate(BankTab, dest);
}
else // Char -> Bank swap with empty or non-empty (move)
{
GuildItemPosCountVec dest;
InventoryResult msg = CanStoreItem(BankTab, BankTabSlot, dest, pItemChar->GetCount(), pItemChar, false);
if (msg == EQUIP_ERR_OK) // merge
{
// logging item move to bank
if (pl->GetSession()->GetSecurity() > SEC_PLAYER && sWorld.getConfig(CONFIG_BOOL_GM_LOG_TRADE))
{
sLog.outCommand(pl->GetSession()->GetAccountId(), "GM %s (Account: %u) deposit item: %s (Entry: %d Count: %u) to guild bank (Guild ID: %u )",
pl->GetName(), pl->GetSession()->GetAccountId(),
pItemChar->GetProto()->Name1, pItemChar->GetEntry(), pItemChar->GetCount(),
m_Id);
}
CharacterDatabase.BeginTransaction();
LogBankEvent(GUILD_BANK_LOG_DEPOSIT_ITEM, BankTab, pl->GetGUIDLow(), pItemChar->GetEntry(), pItemChar->GetCount());
pl->MoveItemFromInventory(PlayerBag, PlayerSlot, true);
pItemChar->DeleteFromInventoryDB();
StoreItem(BankTab, dest, pItemChar);
pl->SaveInventoryAndGoldToDB();
CharacterDatabase.CommitTransaction();
DisplayGuildBankContentUpdate(BankTab, dest);
}
else // Char <-> Bank swap items (posible NULL bank item)
{
// Validate both directions before mutating anything.
ItemPosCountVec iDest;
if (pItemBank)
{
msg = pl->CanStoreItem(PlayerBag, PlayerSlot, iDest, pItemBank, true);
if (msg != EQUIP_ERR_OK)
{
pl->SendEquipError(msg, pItemBank, NULL);
return;
}
}
GuildItemPosCountVec gDest;
msg = CanStoreItem(BankTab, BankTabSlot, gDest, pItemChar->GetCount(), pItemChar, true);
if (msg != EQUIP_ERR_OK)
{
pl->SendEquipError(msg, pItemChar, NULL);
return;
}
if (pItemBank)
{
// check bank pos rights (item swapped with inventory)
uint32 remRight = GetMemberSlotWithdrawRem(pl->GetGUIDLow(), BankTab);
if (remRight <= 0)
return;
}
// logging item move to bank
if (pl->GetSession()->GetSecurity() > SEC_PLAYER && sWorld.getConfig(CONFIG_BOOL_GM_LOG_TRADE))
{
sLog.outCommand(pl->GetSession()->GetAccountId(), "GM %s (Account: %u) deposit item: %s (Entry: %d Count: %u) to guild bank (Guild ID: %u )",
pl->GetName(), pl->GetSession()->GetAccountId(),
pItemChar->GetProto()->Name1, pItemChar->GetEntry(), pItemChar->GetCount(),
m_Id);
}
// Commit point: perform the swap atomically.
CharacterDatabase.BeginTransaction();
if (pItemBank)
LogBankEvent(GUILD_BANK_LOG_WITHDRAW_ITEM, BankTab, pl->GetGUIDLow(), pItemBank->GetEntry(), pItemBank->GetCount());
LogBankEvent(GUILD_BANK_LOG_DEPOSIT_ITEM, BankTab, pl->GetGUIDLow(), pItemChar->GetEntry(), pItemChar->GetCount());
pl->MoveItemFromInventory(PlayerBag, PlayerSlot, true);
pItemChar->DeleteFromInventoryDB();
if (pItemBank)
RemoveItem(BankTab, BankTabSlot);
StoreItem(BankTab, gDest, pItemChar);
if (pItemBank)
pl->MoveItemToInventory(iDest, pItemBank, true);
pl->SaveInventoryAndGoldToDB();
if (pItemBank)
MemberItemWithdraw(BankTab, pl->GetGUIDLow());
CharacterDatabase.CommitTransaction();
DisplayGuildBankContentUpdate(BankTab, gDest);
}
}
}
// Broadcasts an SMSG_GUILD_EVENT packet to all online guild members.
// The strings must be supplied left-to-right: str2 is ignored unless str1 is
// set, and str3 unless str2 is set (strCount counts the leading non-NULL run).
// The guid is appended only when non-empty.
void Guild::BroadcastEvent(GuildEvents event, ObjectGuid guid, char const* str1 /*=NULL*/, char const* str2 /*=NULL*/, char const* str3 /*=NULL*/)
{
uint8 strCount = !str1 ? 0 : (!str2 ? 1 : (!str3 ? 2 : 3));
// NOTE(review): the size passed here looks like an initial reservation that
// undercounts string payload (1 byte per string) — presumably WorldPacket
// grows as needed; confirm before relying on the constant.
WorldPacket data(SMSG_GUILD_EVENT, 1 + 1 + 1 * strCount + (!guid ? 0 : 8));
data << uint8(event);
data << uint8(strCount);
if (str3)
{
data << str1;
data << str2;
data << str3;
}
else if (str2)
{
data << str1;
data << str2;
}
else if (str1)
data << str1;
if (guid)
data << ObjectGuid(guid);
BroadcastPacket(&data);
DEBUG_LOG("WORLD: Sent SMSG_GUILD_EVENT");
}
// Destroys every item in every guild bank tab and frees the tab objects.
// When alsoInDB is true the items are additionally removed from the database
// (used for permanent guild deletion, not just in-memory teardown).
void Guild::DeleteGuildBankItems(bool alsoInDB /*= false*/)
{
for (size_t i = 0; i < m_TabListMap.size(); ++i)
{
for (uint8 j = 0; j < GUILD_BANK_MAX_SLOTS; ++j)
{
if (Item* pItem = m_TabListMap[i]->Slots[j])
{
// Detach from the world before deletion so no live references remain.
pItem->RemoveFromWorld();
if (alsoInDB)
pItem->DeleteFromDB();
delete pItem;
}
}
delete m_TabListMap[i];
}
m_TabListMap.clear();
}
// Returns true when an entry with this Slot is already present in vec.
// Only the Slot field participates in the comparison; counts are ignored.
bool GuildItemPosCount::isContainedIn(GuildItemPosCountVec const& vec) const
{
    for (GuildItemPosCountVec::const_iterator it = vec.begin(); it != vec.end(); ++it)
    {
        if (it->Slot == Slot)
            return true;
    }
    return false;
}
|
July1921/go-zero | core/iox/read.go | package iox
import (
"bufio"
"bytes"
"io"
"io/ioutil"
"os"
"strings"
)
type (
// textReadOptions is the internal option set shared by the text-reading
// helpers; it is populated by applying TextReadOption functions.
textReadOptions struct {
keepSpace bool // keep leading/trailing whitespace on each line
withoutBlanks bool // drop empty lines (after any trimming)
omitPrefix string // drop lines starting with this prefix, when non-empty
}
// TextReadOption defines the method to customize the text reading functions.
TextReadOption func(*textReadOptions)
)
// DupReadCloser returns two io.ReadCloser that read from the first will be written to the second.
// The first returned reader needs to be read first, because the content
// read from it will be written to the underlying buffer of the second reader.
func DupReadCloser(reader io.ReadCloser) (io.ReadCloser, io.ReadCloser) {
var buf bytes.Buffer
tee := io.TeeReader(reader, &buf)
return ioutil.NopCloser(tee), ioutil.NopCloser(&buf)
}
// KeepSpace customizes the reading functions to keep leading and tailing spaces.
func KeepSpace() TextReadOption {
	enable := func(opts *textReadOptions) {
		opts.keepSpace = true
	}
	return enable
}
// ReadBytes reads exactly the bytes with the length of len(buf)
func ReadBytes(reader io.Reader, buf []byte) error {
var got int
for got < len(buf) {
n, err := reader.Read(buf[got:])
if err != nil {
return err
}
got += n
}
return nil
}
// ReadText reads content from the given file with leading and tailing spaces trimmed.
func ReadText(filename string) (string, error) {
content, err := ioutil.ReadFile(filename)
if err != nil {
return "", err
}
return strings.TrimSpace(string(content)), nil
}
// ReadTextLines reads the named file line by line, applying the given
// options: trimming whitespace (unless KeepSpace), skipping blank lines
// (WithoutBlank), and skipping lines with a configured prefix
// (OmitWithPrefix). It returns the surviving lines in file order.
func ReadTextLines(filename string, opts ...TextReadOption) ([]string, error) {
	var options textReadOptions
	for _, apply := range opts {
		apply(&options)
	}

	file, err := os.Open(filename)
	if err != nil {
		return nil, err
	}
	defer file.Close()

	var lines []string
	scanner := bufio.NewScanner(file)
	for scanner.Scan() {
		line := scanner.Text()
		if !options.keepSpace {
			line = strings.TrimSpace(line)
		}
		switch {
		case options.withoutBlanks && len(line) == 0:
			// blank line filtered out
		case len(options.omitPrefix) > 0 && strings.HasPrefix(line, options.omitPrefix):
			// prefixed line filtered out
		default:
			lines = append(lines, line)
		}
	}

	return lines, scanner.Err()
}
// WithoutBlank customizes the reading functions to ignore blank lines.
func WithoutBlank() TextReadOption {
	enable := func(opts *textReadOptions) {
		opts.withoutBlanks = true
	}
	return enable
}
// OmitWithPrefix customizes the reading functions to ignore the lines with given leading prefix.
func OmitWithPrefix(prefix string) TextReadOption {
	setPrefix := func(opts *textReadOptions) {
		opts.omitPrefix = prefix
	}
	return setPrefix
}
|
vitaliyRusinov/Social | Source/CoreLayer/TransitionModerator/UIViewController+ViperTransitionModeratorProtocol.h | //
// UIViewController+ViperTransitionModeratorProtocol.h
// Social
//
// Created by <NAME> on 4/12/16.
// Copyright © 2016 OCP. All rights reserved.
//
#import <UIKit/UIKit.h>
// Category hooking UIViewController into the VIPER transition-moderator
// machinery. No members are declared here — presumably the conforming
// methods are supplied in the implementation file; confirm against the .m.
@interface UIViewController (ViperTransitionModeratorProtocol)
@end
|
toeb/sine | src/userinterface.qt.controls/ValueWidget.cpp | #include "ValueWidget.h"
using namespace nspace;
void ValueWidget::propertyChanging(IModifiableValue * , ValueHolder){
if(oldvalue){
}
if(newvalue){
_ValueHolder=newvalue;
updateWidgetValue();
}
onValueHolderChanging(oldvalue,newvalue);
}
// Constructs the widget with no value holder attached yet; binding happens
// later through the data-context / setValueHolder path.
ValueWidget::ValueWidget(QWidget * parent):DynamicWidget(parent),_ValueHolder(0){}
// Rebinds the widget when the surrounding data context switches objects:
// the new object is down-cast to its modifiable-value interface and set as
// the holder. NOTE(review): a null newvalue leaves the previous holder
// bound — confirm whether clearing was intended in that case.
void ValueWidget::onDataContextChanging(Object * oldvalue, Object * newvalue){
if(newvalue){
setValueHolder(dynamic_cast<IModifiableValue*>(newvalue));
}
}
fakeNetflix/twitter-repo-whiskey | src/test/java/com/twitter/whiskey/nio/EchoServer.java | <reponame>fakeNetflix/twitter-repo-whiskey
/*
* Copyright (c) 2015 Twitter, Inc. All rights reserved.
* Licensed under the Apache License v2.0
* http://www.apache.org/licenses/LICENSE-2.0
*/
package com.twitter.whiskey.nio;
import com.twitter.whiskey.util.Platform;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.ServerSocket;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.RejectedExecutionException;
/**
* @author <NAME>
*/
/**
 * Minimal blocking TCP server used by tests: a fixed thread pool accepts
 * connections and runs every registered {@link Task} against each accepted
 * socket. Not a production server — error handling is assertion-based.
 */
class EchoServer {
// Size of the worker pool shared by the accept loop and per-connection tasks.
private final static int NUM_THREADS = 5;
private final int port;
private final ExecutorService executor;
private ServerSocket serverSocket;
// Access is guarded by synchronized (tasks) in addTask and the accept path.
private List<Task> tasks = new ArrayList<>();
EchoServer(int port) throws IOException {
this.port = port;
this.executor = Executors.newFixedThreadPool(NUM_THREADS);
}
// Registers a task to run against every subsequently accepted connection.
void addTask(Task task) {
synchronized (tasks) {
tasks.add(task);
}
}
// Factory for the listening socket — presumably an override seam so tests
// can substitute a custom (e.g. TLS) server socket; confirm with callers.
ServerSocket createServerSocket(int port) throws Exception {
return new ServerSocket(port);
}
// Opens the listening socket and schedules the accept loop on the pool.
void start() throws Exception {
serverSocket = createServerSocket(port);
executor.execute(new AcceptTask(serverSocket, executor));
}
// Interrupts workers and closes the listening socket; in-flight accepts
// will fail with an IOException that AcceptTask logs and swallows.
void stop() throws IOException {
executor.shutdownNow();
serverSocket.close();
}
// Accepts a single connection and hands it to every registered task.
// NOTE(review): run() accepts exactly one socket and does not reschedule
// itself — appears to support only one connection per start(); confirm.
private final class AcceptTask implements Runnable {
private final ServerSocket serverSocket;
private final Executor executor;
private AcceptTask(ServerSocket serverSocket, Executor executor) {
this.serverSocket = serverSocket;
this.executor = executor;
}
@Override
public void run() {
try {
final java.net.Socket socket = serverSocket.accept();
executor.execute(new Runnable() {
@Override
public void run() {
synchronized (tasks) {
try {
for (EchoServer.Task task : tasks) {
task.execute(serverSocket, socket);
}
} catch (IOException ioe) {
throw new AssertionError(ioe);
}
}
}
});
} catch (IOException | RejectedExecutionException ioe) {
// Expected during shutdown: stop() closes the socket / pool.
Platform.LOGGER.debug("IOE: " + ioe);
}
}
}
// A unit of per-connection server behavior.
interface Task {
public void execute(ServerSocket serverSocket, java.net.Socket socket) throws IOException;
}
// Copies every received byte back to the sender.
// NOTE(review): once the peer closes (read returns -1) the outer
// while (true) re-enters read() immediately — this looks like a busy
// loop after disconnect; confirm whether intentional for the tests.
final static class EchoTask implements Task {
@Override
public void execute(ServerSocket serverSocket, java.net.Socket socket) throws IOException {
InputStream in = socket.getInputStream();
OutputStream out = socket.getOutputStream();
byte[] buf = new byte[4096];
while (true) {
try {
int nread;
while ((nread = in.read(buf)) != -1) {
out.write(buf, 0, nread);
}
} catch (IOException ioe) {
throw new AssertionError(ioe);
}
}
}
}
}
|
mkinsner/llvm | libcxx/test/libcxx/depr/exception.unexpected/unexpected_disabled_cpp17.fail.cpp | //===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
// UNSUPPORTED: c++03, c++11, c++14
// test unexpected
#include <exception>
// Dummy handler; exists only so std::set_unexpected(f) below names a function.
void f() {}

// Negative compile test: each statement must FAIL to compile, because the
// unexpected-handler machinery was removed in C++17. The expected-error
// annotations pin the exact diagnostics and must not be altered.
int main(int, char**) {
using T = std::unexpected_handler; // expected-error {{no type named 'unexpected_handler' in namespace 'std'}}
std::unexpected(); // expected-error {{no member named 'unexpected' in namespace 'std'}}
std::get_unexpected(); // expected-error {{no member named 'get_unexpected' in namespace 'std'}}
std::set_unexpected(f); // expected-error {{no type named 'set_unexpected' in namespace 'std'}}
return 0;
}
|
IHTSDO/snow-owl | core/com.b2international.snowowl.retrofit/src/com/b2international/snowowl/retrofit/PromiseCallAdapterFactory.java | /*
* Copyright 2011-2017 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.retrofit;
import java.lang.annotation.Annotation;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Arrays;
import java.util.Optional;
import com.b2international.snowowl.core.events.util.Promise;
import com.fasterxml.jackson.databind.ObjectMapper;
import okhttp3.ResponseBody;
import retrofit2.CallAdapter;
import retrofit2.Retrofit;
/**
* @since 5.10.13
*/
public final class PromiseCallAdapterFactory extends CallAdapter.Factory {
private final ObjectMapper mapper;
private final Class<? extends Error> errorType;
public PromiseCallAdapterFactory(final ObjectMapper mapper, final Class<? extends Error> errorType) {
this.mapper = mapper;
this.errorType = errorType;
}
@Override
public CallAdapter<?, ?> get(Type returnType, Annotation[] annotations, Retrofit retrofit) {
if (getRawType(returnType) != Promise.class) {
return null;
}
if (!(returnType instanceof ParameterizedType)) {
throw new IllegalStateException("Promise must have generic type (e.g., Promise<ResponseBody>)");
}
Optional<Annotation> headerAnnotation = Arrays.asList(annotations).stream().filter(annotation -> annotation instanceof ExtractHeaderProperty)
.findFirst();
if (headerAnnotation.isPresent()) {
String property = ((ExtractHeaderProperty) headerAnnotation.get()).value();
return new PromiseCallAdapter<ResponseBody, String>(ResponseBody.class, mapper, errorType, property);
}
Type responseType = getParameterUpperBound(0, (ParameterizedType) returnType);
return new PromiseCallAdapter<>(responseType, mapper, errorType);
}
} |
imavroukakis/opentelemetry-java-instrumentation | instrumentation/kubernetes-client-7.0/src/main/java/io/opentelemetry/javaagent/instrumentation/kubernetesclient/KubernetesVerb.java | /*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.javaagent.instrumentation.kubernetesclient;
/**
 * Kubernetes API verbs as reported by the client instrumentation, derived
 * from the HTTP method plus two request-shape hints: whether the path names
 * a specific resource and whether a watch parameter is present.
 */
public enum KubernetesVerb {
  GET("get"),
  LIST("list"),
  CREATE("create"),
  UPDATE("update"),
  DELETE("delete"),
  PATCH("patch"),
  WATCH("watch"),
  DELETE_COLLECTION("deleteCollection");

  private final String value;

  KubernetesVerb(String value) {
    this.value = value;
  }

  /**
   * Resolves the Kubernetes verb for a request.
   *
   * @param httpVerb the HTTP method, e.g. {@code "GET"}
   * @param hasNamePathParam whether the request path targets a named resource
   * @param hasWatchParam whether the request carries a watch parameter
   * @throws IllegalArgumentException for any unrecognized HTTP method
   */
  public static KubernetesVerb of(
      String httpVerb, boolean hasNamePathParam, boolean hasWatchParam) {
    // A watch parameter always wins, regardless of the HTTP method.
    if (hasWatchParam) {
      return WATCH;
    }
    switch (httpVerb) {
      case "GET":
        // Collection reads are "list"; named-resource reads are "get".
        return hasNamePathParam ? GET : LIST;
      case "POST":
        return CREATE;
      case "PUT":
        return UPDATE;
      case "PATCH":
        return PATCH;
      case "DELETE":
        return hasNamePathParam ? DELETE : DELETE_COLLECTION;
      default:
        throw new IllegalArgumentException("invalid HTTP verb for kubernetes client");
    }
  }

  /** Returns the lower-camel verb string used in reported attributes. */
  public String value() {
    return value;
  }
}
|
wjrlabs/ServerSpringBootNetty | src/test/java/br/com/wjrlabs/test/business/echo/CommandEchoTest.java | <filename>src/test/java/br/com/wjrlabs/test/business/echo/CommandEchoTest.java<gh_stars>0
package br.com.wjrlabs.test.business.echo;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.AutowireCapableBeanFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import br.com.wjrlabs.business.CommandFactory;
import br.com.wjrlabs.business.protocol.echo.CommandEcho;
import br.com.wjrlabs.codecs.MessageDecoder;
import br.com.wjrlabs.codecs.MessageEncoder;
import br.com.wjrlabs.messages.MessageFactory;
import br.com.wjrlabs.server.MessageHandler;
import br.com.wjrlabs.test.buffer.EchoMessageBuffer;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufUtil;
import io.netty.buffer.Unpooled;
import io.netty.channel.embedded.EmbeddedChannel;
@ExtendWith(SpringExtension.class)
@TestPropertySource(locations = "classpath:application-dev.properties")
@ContextConfiguration(classes = {
CommandEcho.class
})
/**
 * End-to-end test of the echo protocol through a Netty EmbeddedChannel:
 * a decoded echo request must produce an outbound buffer whose bytes are
 * identical to the input fixture.
 */
public class CommandEchoTest {
@Autowired
private AutowireCapableBeanFactory beanFactory;
@Autowired
private ApplicationContext context;
// Built inside test() from the injected Spring artifacts.
private MessageFactory messageFactory;
private CommandFactory commandFactory;
@Test
public void test() {
messageFactory=new MessageFactory(beanFactory);
commandFactory=new CommandFactory(context);
// NOTE(review): the local names are swapped relative to their classes
// (MessageDecoder is assigned to `encoder` and MessageEncoder to
// `decoder`); behavior is unaffected but renaming would aid readability.
MessageDecoder encoder = new MessageDecoder(this.messageFactory);
MessageEncoder decoder=new MessageEncoder();
EmbeddedChannel channel = new EmbeddedChannel(encoder,decoder,new MessageHandler(commandFactory));
// Expected FALSE: presumably the handler consumes the decoded message so
// nothing remains in the inbound queue — confirm against MessageHandler.
assertThat(channel.writeInbound(Unpooled.copiedBuffer(EchoMessageBuffer.getBuffer())), is(equalTo(Boolean.FALSE)));
ByteBuf buff = channel.flushOutbound().readOutbound();
// The echoed bytes must match the original fixture exactly.
assertThat(ByteBufUtil.getBytes(buff), is(equalTo(EchoMessageBuffer.getBuffer())));
}
}
|
Yannic/chromium | fuchsia/engine/browser/fuchsia_media_resource_provider_impl.cc | <reponame>Yannic/chromium
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "fuchsia/engine/browser/fuchsia_media_resource_provider_impl.h"
#include <lib/fidl/cpp/interface_handle.h>
#include <lib/sys/cpp/component_context.h>
#include "base/bind.h"
#include "base/command_line.h"
#include "base/fuchsia/process_context.h"
#include "content/public/browser/browser_context.h"
#include "content/public/browser/document_service.h"
#include "content/public/browser/permission_controller.h"
#include "content/public/browser/render_frame_host.h"
#include "fuchsia/engine/browser/frame_impl.h"
#include "media/base/media_switches.h"
// Binds a new provider instance to |receiver| for the given frame.
void FuchsiaMediaResourceProviderImpl::Bind(
content::RenderFrameHost* frame_host,
mojo::PendingReceiver<media::mojom::FuchsiaMediaResourceProvider>
receiver) {
// The object will delete itself when connection to the frame is broken.
new FuchsiaMediaResourceProviderImpl(frame_host, std::move(receiver));
}
// Ties the provider's lifetime to the document via DocumentService; the base
// class manages destruction (see the self-delete note in Bind()).
FuchsiaMediaResourceProviderImpl::FuchsiaMediaResourceProviderImpl(
content::RenderFrameHost* render_frame_host,
mojo::PendingReceiver<media::mojom::FuchsiaMediaResourceProvider> receiver)
: DocumentService(render_frame_host, std::move(receiver)) {}
FuchsiaMediaResourceProviderImpl::~FuchsiaMediaResourceProviderImpl() = default;
// Connects |request| to a platform AudioConsumer bound to this frame's media
// session id, unless audio output is disabled via command-line switch (in
// which case |request| is dropped and only a warning is logged).
void FuchsiaMediaResourceProviderImpl::CreateAudioConsumer(
fidl::InterfaceRequest<fuchsia::media::AudioConsumer> request) {
if (base::CommandLine::ForCurrentProcess()->HasSwitch(
switches::kDisableAudioOutput)) {
LOG(WARNING)
<< "Could not create AudioConsumer because audio output feature flag "
"was not enabled.";
return;
}
auto factory = base::ComponentContextForProcess()
->svc()
->Connect<fuchsia::media::SessionAudioConsumerFactory>();
factory->CreateAudioConsumer(
FrameImpl::FromRenderFrameHost(render_frame_host())->media_session_id(),
std::move(request));
}
// Connects |request| to a platform AudioCapturer, but only when audio input
// is enabled and the requesting origin has been granted AUDIO_CAPTURE
// permission. On any refusal |request| is dropped after logging.
void FuchsiaMediaResourceProviderImpl::CreateAudioCapturer(
fidl::InterfaceRequest<fuchsia::media::AudioCapturer> request) {
if (base::CommandLine::ForCurrentProcess()->HasSwitch(
switches::kDisableAudioInput)) {
LOG(WARNING)
<< "Could not create AudioCapturer because audio input feature flag "
"was not enabled.";
return;
}
// Permission is re-checked here so a renderer cannot bypass the browser-side
// grant by issuing the mojo call directly.
if (render_frame_host()
->GetBrowserContext()
->GetPermissionController()
->GetPermissionStatusForFrame(
content::PermissionType::AUDIO_CAPTURE, render_frame_host(),
origin().GetURL()) != blink::mojom::PermissionStatus::GRANTED) {
DLOG(WARNING)
<< "Received CreateAudioCapturer request from an origin that doesn't "
"have AUDIO_CAPTURE permission.";
return;
}
auto factory = base::ComponentContextForProcess()
->svc()
->Connect<fuchsia::media::Audio>();
factory->CreateAudioCapturer(std::move(request), /*loopback=*/false);
}
|
mikiec84/winsparkle | 3rdparty/wxWidgets/src/msw/taskbar.cpp | /////////////////////////////////////////////////////////////////////////
// File: src/msw/taskbar.cpp
// Purpose: Implements wxTaskBarIcon class for manipulating icons on
// the Windows task bar.
// Author: <NAME>
// Modified by: <NAME>
// Created: 24/3/98
// RCS-ID: $Id: taskbar.cpp 61508 2009-07-23 20:30:22Z VZ $
// Copyright: (c)
// Licence: wxWindows licence
/////////////////////////////////////////////////////////////////////////
// For compilers that support precompilation, includes "wx.h".
#include "wx/wxprec.h"
#ifdef __BORLANDC__
#pragma hdrstop
#endif
#if wxUSE_TASKBARICON
#ifndef WX_PRECOMP
#include "wx/window.h"
#include "wx/frame.h"
#include "wx/utils.h"
#include "wx/menu.h"
#endif
#include "wx/msw/wrapshl.h"
#include <string.h>
#include "wx/taskbar.h"
#include "wx/dynlib.h"
#ifndef NIN_BALLOONTIMEOUT
#define NIN_BALLOONTIMEOUT 0x0404
#define NIN_BALLOONUSERCLICK 0x0405
#endif
#ifndef NIM_SETVERSION
#define NIM_SETVERSION 0x00000004
#endif
#ifndef NIF_INFO
#define NIF_INFO 0x00000010
#endif
// initialized on demand
static UINT gs_msgTaskbar = 0;
static UINT gs_msgRestartTaskbar = 0;
IMPLEMENT_DYNAMIC_CLASS(wxTaskBarIcon, wxEvtHandler)
// ============================================================================
// implementation
// ============================================================================
// Wrapper around Shell_NotifyIcon(): the symbol is resolved dynamically on
// first use because Win95's shell32.dll does not export it, allowing programs
// using wxTaskBarIcon to start on that OS. When the symbol is unavailable the
// wrapper returns FALSE instead of failing at load time.
static BOOL wxShellNotifyIcon(DWORD dwMessage, NOTIFYICONDATA *pData)
{
#if wxUSE_DYNLIB_CLASS
typedef BOOL (WINAPI *Shell_NotifyIcon_t)(DWORD, NOTIFYICONDATA *);
static Shell_NotifyIcon_t s_pfnShell_NotifyIcon = NULL;
static bool s_initialized = false;
if ( !s_initialized )
{
// One-shot lazy resolution; failures are deliberately silent (wxLogNull).
s_initialized = true;
wxLogNull noLog;
wxDynamicLibrary dllShell("shell32.dll");
if ( dllShell.IsLoaded() )
{
wxDL_INIT_FUNC_AW(s_pfn, Shell_NotifyIcon, dllShell);
}
// NB: it's ok to destroy dllShell here, we link to shell32.dll
// implicitly so it won't be unloaded
}
return s_pfnShell_NotifyIcon ? (*s_pfnShell_NotifyIcon)(dwMessage, pData)
: FALSE;
#else // !wxUSE_DYNLIB_CLASS
return Shell_NotifyIcon(dwMessage, pData);
#endif // wxUSE_DYNLIB_CLASS/!wxUSE_DYNLIB_CLASS
}
// ----------------------------------------------------------------------------
// wxTaskBarIconWindow: helper window
// ----------------------------------------------------------------------------
// NB: this class serves two purposes:
// 1. win32 needs a HWND associated with taskbar icon, this provides it
// 2. we need wxTopLevelWindow so that the app doesn't exit when
// last frame is closed but there still is a taskbar icon
// Hidden helper frame owned by wxTaskBarIcon. It supplies the HWND the shell
// notification API requires and, being a wxTopLevelWindow, keeps the app
// alive while only the taskbar icon remains. Taskbar messages are forwarded
// to the owning icon; everything else goes to the normal wxFrame handler.
class wxTaskBarIconWindow : public wxFrame
{
public:
wxTaskBarIconWindow(wxTaskBarIcon *icon)
: wxFrame(NULL, wxID_ANY, wxEmptyString, wxDefaultPosition, wxDefaultSize, 0),
m_icon(icon)
{
}
WXLRESULT MSWWindowProc(WXUINT msg,
WXWPARAM wParam, WXLPARAM lParam)
{
// Route both the icon callback message and the taskbar-restart
// broadcast to the icon; fall back to wxFrame for everything else.
if (msg == gs_msgRestartTaskbar || msg == gs_msgTaskbar)
{
return m_icon->WindowProc(msg, wParam, lParam);
}
else
{
return wxFrame::MSWWindowProc(msg, wParam, lParam);
}
}
private:
wxTaskBarIcon *m_icon;
};
// ----------------------------------------------------------------------------
// NotifyIconData: wrapper around NOTIFYICONDATA
// ----------------------------------------------------------------------------
// Zero-initialized NOTIFYICONDATA pre-filled with the fields every call
// shares: the target window, the callback message, and a fixed icon id.
struct NotifyIconData : public NOTIFYICONDATA
{
NotifyIconData(WXHWND hwnd)
{
memset(this, 0, sizeof(NOTIFYICONDATA));
cbSize = sizeof(NOTIFYICONDATA);
hWnd = (HWND) hwnd;
uCallbackMessage = gs_msgTaskbar;
uFlags = NIF_MESSAGE;
// we use the same id for all taskbar icons as we don't need it to
// distinguish between them
uID = 99;
}
};
// ----------------------------------------------------------------------------
// wxTaskBarIcon
// ----------------------------------------------------------------------------
// Registers the taskbar window messages up front; the helper window itself
// is created lazily in SetIcon() (see the note there).
wxTaskBarIcon::wxTaskBarIcon()
{
m_win = NULL;
m_iconAdded = false;
RegisterWindowMessages();
}
// Removes the shell icon if still shown and destroys the helper window
// synchronously (see comment below for why Destroy() is not used).
wxTaskBarIcon::~wxTaskBarIcon()
{
if ( m_iconAdded )
RemoveIcon();
if ( m_win )
{
// we must use delete and not Destroy() here because the latter will
// only schedule the window to be deleted during the next idle event
// processing but we may not get any idle events if there are no other
// windows left in the program
delete m_win;
}
}
// Operations
// Shows or updates the taskbar icon and its tooltip. Uses NIM_ADD on first
// call and NIM_MODIFY afterwards; returns false if the shell call fails.
bool wxTaskBarIcon::SetIcon(const wxIcon& icon, const wxString& tooltip)
{
// NB: we have to create the window lazily because of backward compatibility,
// old applications may create a wxTaskBarIcon instance before wxApp
// is initialized (as samples/taskbar used to do)
if (!m_win)
{
m_win = new wxTaskBarIconWindow(this);
}
m_icon = icon;
m_strTooltip = tooltip;
NotifyIconData notifyData(GetHwndOf(m_win));
if (icon.Ok())
{
notifyData.uFlags |= NIF_ICON;
notifyData.hIcon = GetHiconOf(icon);
}
// set NIF_TIP even for an empty tooltip: otherwise it would be impossible
// to remove an existing tooltip using this function
notifyData.uFlags |= NIF_TIP;
if ( !tooltip.empty() )
{
wxStrlcpy(notifyData.szTip, tooltip.wx_str(), WXSIZEOF(notifyData.szTip));
}
bool ok = wxShellNotifyIcon(m_iconAdded ? NIM_MODIFY
: NIM_ADD, &notifyData) != 0;
if ( !ok )
{
wxLogLastError(wxT("wxShellNotifyIcon(NIM_MODIFY/ADD)"));
}
// Only flip the flag on a successful NIM_ADD so a failed add is retried.
if ( !m_iconAdded && ok )
m_iconAdded = true;
return ok;
}
#if wxUSE_TASKBARICON_BALLOONS

// Show a balloon notification attached to the taskbar icon. The icon must
// already have been added with SetIcon().
//
//  title/text: balloon caption and body
//  msec:       timeout in milliseconds (the system clamps it to its own range)
//  flags:      0 or one of wxICON_INFORMATION/wxICON_WARNING/wxICON_ERROR
//
// Returns true if the shell accepted the balloon request.
bool
wxTaskBarIcon::ShowBalloon(const wxString& title,
                           const wxString& text,
                           unsigned msec,
                           int flags)
{
    wxCHECK_MSG( m_iconAdded, false,
                 wxT("can't be used before the icon is created") );

    const HWND hwnd = GetHwndOf(m_win);

    // we need to enable version 5.0 behaviour to receive notifications about
    // the balloon disappearance
    NotifyIconData notifyData(hwnd);
    notifyData.uFlags = 0;
    notifyData.uVersion = 3 /* NOTIFYICON_VERSION for Windows XP */;

    if ( !wxShellNotifyIcon(NIM_SETVERSION, &notifyData) )
    {
        wxLogLastError(wxT("wxShellNotifyIcon(NIM_SETVERSION)"));
    }

    // do show the balloon now
    notifyData = NotifyIconData(hwnd);
    notifyData.uFlags |= NIF_INFO;
    notifyData.uTimeout = msec;
    wxStrlcpy(notifyData.szInfo, text.wx_str(), WXSIZEOF(notifyData.szInfo));
    wxStrlcpy(notifyData.szInfoTitle, title.wx_str(),
              WXSIZEOF(notifyData.szInfoTitle));

    // Map the wx icon flag (if any) onto the corresponding NIIF_* balloon icon.
    if ( flags & wxICON_INFORMATION )
        notifyData.dwInfoFlags |= NIIF_INFO;
    else if ( flags & wxICON_WARNING )
        notifyData.dwInfoFlags |= NIIF_WARNING;
    else if ( flags & wxICON_ERROR )
        notifyData.dwInfoFlags |= NIIF_ERROR;

    bool ok = wxShellNotifyIcon(NIM_MODIFY, &notifyData) != 0;
    if ( !ok )
    {
        wxLogLastError(wxT("wxShellNotifyIcon(NIM_MODIFY)"));
    }

    return ok;
}

#endif // wxUSE_TASKBARICON_BALLOONS
bool wxTaskBarIcon::RemoveIcon()
{
if (!m_iconAdded)
return false;
m_iconAdded = false;
NotifyIconData notifyData(GetHwndOf(m_win));
bool ok = wxShellNotifyIcon(NIM_DELETE, ¬ifyData) != 0;
if ( !ok )
{
wxLogLastError(wxT("wxShellNotifyIcon(NIM_DELETE)"));
}
return ok;
}
#if wxUSE_MENUS

// Show the given popup menu at the current mouse position. Reentrant calls
// (while a menu is already shown) return false immediately.
bool wxTaskBarIcon::PopupMenu(wxMenu *menu)
{
    wxASSERT_MSG( m_win != NULL, wxT("taskbar icon not initialized") );

    // Guard against reentrancy while the (modal) menu is up.
    static bool s_inPopup = false;
    if (s_inPopup)
        return false;
    s_inPopup = true;

    // Move the hidden helper window under the mouse so the menu pops up there.
    int x, y;
    wxGetMousePosition(&x, &y);
    m_win->Move(x, y);

    // Let this object handle the menu events first.
    m_win->PushEventHandler(this);

    menu->UpdateUI();

    // the SetForegroundWindow() and PostMessage() calls are needed to work
    // around Win32 bug with the popup menus shown for the notifications as
    // documented at http://support.microsoft.com/kb/q135788/
    ::SetForegroundWindow(GetHwndOf(m_win));

    bool rval = m_win->PopupMenu(menu, 0, 0);

    ::PostMessage(GetHwndOf(m_win), WM_NULL, 0, 0L);

    m_win->PopEventHandler(false);

    s_inPopup = false;

    return rval;
}

#endif // wxUSE_MENUS
// One-time registration of the window messages used by all taskbar icons.
void wxTaskBarIcon::RegisterWindowMessages()
{
    static bool s_registered = false;
    if ( s_registered )
        return;

    // Taskbar restart msg will be sent to us if the icon needs to be redrawn
    gs_msgRestartTaskbar = RegisterWindowMessage(wxT("TaskbarCreated"));

    // Also register the taskbar message here
    gs_msgTaskbar = ::RegisterWindowMessage(wxT("wxTaskBarIconMessage"));

    s_registered = true;
}
// ----------------------------------------------------------------------------
// wxTaskBarIcon window proc
// ----------------------------------------------------------------------------
// Handle the two messages registered in RegisterWindowMessages():
// gs_msgRestartTaskbar (taskbar was recreated, re-add our icon) and
// gs_msgTaskbar (mouse/balloon notification for our icon, encoded in lParam).
long wxTaskBarIcon::WindowProc(unsigned int msg,
                               unsigned int WXUNUSED(wParam),
                               long lParam)
{
    if ( msg == gs_msgRestartTaskbar )   // does the icon need to be redrawn?
    {
        // Force SetIcon() to use NIM_ADD again.
        m_iconAdded = false;
        SetIcon(m_icon, m_strTooltip);
        return 0;
    }

    // this function should only be called for gs_msg(Restart)Taskbar messages
    wxASSERT( msg == gs_msgTaskbar );

    // Translate the Win32 notification into the corresponding wx event type;
    // 0 means "no event" for notifications we don't care about.
    wxEventType eventType = 0;
    switch ( lParam )
    {
        case WM_LBUTTONDOWN:
            eventType = wxEVT_TASKBAR_LEFT_DOWN;
            break;

        case WM_LBUTTONUP:
            eventType = wxEVT_TASKBAR_LEFT_UP;
            break;

        case WM_RBUTTONDOWN:
            eventType = wxEVT_TASKBAR_RIGHT_DOWN;
            break;

        case WM_RBUTTONUP:
            eventType = wxEVT_TASKBAR_RIGHT_UP;
            break;

        case WM_LBUTTONDBLCLK:
            eventType = wxEVT_TASKBAR_LEFT_DCLICK;
            break;

        case WM_RBUTTONDBLCLK:
            eventType = wxEVT_TASKBAR_RIGHT_DCLICK;
            break;

        case WM_MOUSEMOVE:
            eventType = wxEVT_TASKBAR_MOVE;
            break;

        case NIN_BALLOONTIMEOUT:
            eventType = wxEVT_TASKBAR_BALLOON_TIMEOUT;
            break;

        case NIN_BALLOONUSERCLICK:
            eventType = wxEVT_TASKBAR_BALLOON_CLICK;
            break;
    }

    if ( eventType )
    {
        wxTaskBarIconEvent event(eventType, this);
        ProcessEvent(event);
    }

    return 0;
}
#endif // wxUSE_TASKBARICON
|
webfolderio/doma | doma-core/src/main/java/org/seasar/doma/internal/jdbc/sql/OptionalDomainResultListParameter.java | package org.seasar.doma.internal.jdbc.sql;
import java.util.Optional;
import org.seasar.doma.jdbc.domain.DomainType;
/**
 * Result list parameter for queries that return a list of optional domain
 * values; each row is materialized through the domain type's optional scalar.
 *
 * @param <BASIC> the basic (JDBC-level) type
 * @param <DOMAIN> the domain (application-level) type
 */
public class OptionalDomainResultListParameter<BASIC, DOMAIN>
    extends ScalarResultListParameter<BASIC, Optional<DOMAIN>> {

  /**
   * @param domainType the domain type metadata used to create the per-row scalar
   */
  public OptionalDomainResultListParameter(DomainType<BASIC, DOMAIN> domainType) {
    // Method reference instead of the equivalent lambda
    // () -> domainType.createOptionalScalar().
    super(domainType::createOptionalScalar);
  }
}
|
dailydrip/exchequer-server | app/admin/coupon.rb | <filename>app/admin/coupon.rb
# ActiveAdmin resource for Coupon: index listing, form, and a custom
# controller that converts the user-entered percentage (e.g. 25) into the
# stored decimal fraction (0.25) on create/update.
ActiveAdmin.register Coupon do
  index do
    column :offer
    column :name
    column :code
    # Stored as a fraction (0..1); displayed as a percentage.
    column :percent_off do |p|
      "#{p.percent_off * 100} %" if p.percent_off
    end
    column :amount_off
    actions
  end

  form do |f|
    f.semantic_errors
    f.inputs do
      f.input :offer,
              as: :searchable_select,
              collection: Offer.all,
              member_label: proc { |offer| offer.name.to_s }
      f.input :name
      f.input :code
      f.inputs 'Discount amount - pick only one' do
        # Pre-fill the field with the human-readable percentage (fraction * 100).
        f.input :percent_off, label: 'Percent Off (in %)', input_html: { value: (f.object.percent_off.present? ? f.object.percent_off : 0) * 100 }
      end
      f.input :amount_off
    end
    f.actions
  end

  controller do
    # Create with percent_off converted from percentage to fraction.
    def create
      percent = percent_in_decimal
      coupon = Coupon.create(permitted_params[:coupon].merge!(percent_off: percent))
      if coupon.errors.present?
        redirect_to collection_path, notice: coupon.errors.full_messages
      else
        redirect_to collection_path
      end
    end

    # Update with the same percentage-to-fraction conversion.
    def update
      percent = percent_in_decimal
      if resource.update(permitted_params[:coupon].merge!(percent_off: percent))
        redirect_to collection_path
      else
        redirect_to collection_path, notice: resource.errors.full_messages
      end
    end

    # Convert the submitted percentage to a decimal fraction, or nil when the
    # field is empty/zero (meaning amount_off is used instead).
    def percent_in_decimal
      return nil if permitted_params[:coupon][:percent_off].to_f.zero?

      percent = permitted_params[:coupon][:percent_off].to_f
      # NOTE(review): `percent` is always a Float here, so the `if percent`
      # guard can never fail — candidate for cleanup.
      percent /= 100 if percent
      percent
    end
  end

  permit_params do
    %i[offer_id name percent_off amount_off code]
  end
end
|
zealoussnow/chromium | tools/perf/cli_tools/soundwave/studies/health_study.py | # Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from core.services import dashboard_service
from cli_tools.soundwave.tables import timeseries
# Cloud Storage destination for the exported study dataset.
CLOUD_PATH = 'gs://chrome-health-tvdata/datasets/health_study.csv'

# Timeseries templates for the system health benchmarks; the '{browser}'
# placeholder in the memory measurement is filled in per bot.
SYSTEM_HEALTH = [
    {
        'test_suite': 'system_health.memory_mobile',
        'measurement': ('memory:{browser}:all_processes:reported_by_os:'
                        'private_footprint_size'),
    },
    {
        'test_suite': 'system_health.common_mobile',
        'measurement': 'cpu_time_percentage'
    }
]

# Startup timeseries, keyed by browser ('chrome' or 'webview'); the two
# browsers use different suites and measurements.
STARTUP_BY_BROWSER = {
    'chrome': {
        'test_suite': 'startup.mobile',
        'measurement': 'first_contentful_paint_time',
        'test_case': 'intent_coldish_bbc'
    },
    'webview': {
        'test_suite': 'system_health.webview_startup',
        'measurement': 'webview_startup_wall_time_avg',
        'test_case': 'load:chrome:blank'
    }
}

# Single APK-size timeseries; pinned to the android builder bot.
APK_SIZE = {
    'test_suite': 'resource_sizes:Monochrome.minimal.apks',
    'measurement': 'Specifics:normalized apk size',
    'bot': 'ChromiumPerf:android-builder-perf',
}
def IterSystemHealthBots():
  """Yield the dashboard bot names that run the system health benchmarks."""
  yield from (
      'ChromiumPerf:android-go-perf',
      'ChromiumPerf:android-go_webview-perf',
      'ChromiumPerf:android-pixel2-perf',
      'ChromiumPerf:android-pixel2_webview-perf',
  )
def GetBrowserFromBot(bot):
  """Return 'webview' for WebView bots, 'chrome' for all other bot names."""
  if 'webview' in bot:
    return 'webview'
  return 'chrome'
def GetHealthCheckStories():
  """Return the story (test case) names tagged 'health_check'.

  Queries the perf dashboard's description of system_health.common_mobile.
  """
  suite_description = dashboard_service.Describe('system_health.common_mobile')
  return suite_description['caseTags']['health_check']
def IterTestPaths():
  """Yield timeseries.Key objects for every timeseries in the health study.

  Covers, for each system health bot: one startup series (chrome or webview
  flavor), and the memory/CPU series for every 'health_check' story; plus a
  single bot-independent APK size series at the end.
  """
  test_cases = GetHealthCheckStories()
  for bot in IterSystemHealthBots():
    browser = GetBrowserFromBot(bot)
    # Startup.
    yield timeseries.Key.FromDict(STARTUP_BY_BROWSER[browser], bot=bot)
    # Memory.
    if bot == 'ChromiumPerf:android-pixel2_webview-perf':
      # The pixel2 webview bot incorrectly reports memory as if coming from
      # chrome. TODO(crbug.com/972620): Remove this when bug is fixed.
      browser = 'chrome'
    for series in SYSTEM_HEALTH:
      # Fill in the per-browser placeholder in the measurement template.
      measurement = series['measurement'].format(browser=browser)
      for test_case in test_cases:
        yield timeseries.Key.FromDict(
            series, bot=bot, measurement=measurement, test_case=test_case)
  # APK size.
  yield timeseries.Key.FromDict(APK_SIZE)
|
CoderSong2015/Apache-Trafodion | dcs/src/main/java/org/trafodion/dcs/servermt/serverSql/TrafStatement.java | <reponame>CoderSong2015/Apache-Trafodion<filename>dcs/src/main/java/org/trafodion/dcs/servermt/serverSql/TrafStatement.java<gh_stars>100-1000
/**
* @@@ START COPYRIGHT @@@
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* @@@ END COPYRIGHT @@@
*/
package org.trafodion.dcs.servermt.serverSql;
import java.sql.*;
import java.net.*;
import java.io.*;
import java.nio.*;
import java.nio.channels.*;
import java.nio.channels.spi.*;
import java.util.*;
import java.math.BigDecimal;
import java.util.Calendar;
import java.util.concurrent.*;
//import java.util.HashMap;
//import java.util.Map;
import org.trafodion.dcs.Constants;
import org.trafodion.dcs.servermt.ServerConstants;
import org.trafodion.dcs.util.*;
import org.trafodion.dcs.servermt.serverDriverInputOutput.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
 * Holds the server-side state of one SQL statement: the underlying JDBC
 * {@link Statement}/{@link PreparedStatement}/{@link CallableStatement},
 * its parameter/output descriptors, and the result sets it has produced.
 *
 * <p>Result sets are kept in a map keyed by a 1-based position; {@code curKey}
 * tracks the "current" result set cursor.
 */
public class TrafStatement {
    private static final Log LOG = LogFactory.getLog(TrafStatement.class);

    // Identification of the owning worker and the statement itself.
    private String serverWorkerName = "";
    private String stmtLabel = "";
    private int stmtHandle = 0;

    // The JDBC statement; either a Statement, PreparedStatement or
    // CallableStatement depending on how setStatement() was called.
    private Object stmt = null;
    //    private Statement stmt = null;
    //    private PreparedStatement pstmt = null;

    // Input parameter metadata.
    private int paramCount = 0;
    private long paramLength = 0;
    private Descriptor2List paramDescList = null;

    // Statement classification flags derived from the SQL statement type.
    private boolean isResultSet = false;
    private boolean isSpj = false;
    private int sqlStmtType = ServerConstants.TYPE_UNKNOWN;

    // result sets
    private int resultSetCount;
    private Integer curKey;                 // 1-based key of the current result set; 0 = none
    private Descriptor2List outDescList;
    private ConcurrentHashMap<Integer, TrafResultSet> resultSetList = new ConcurrentHashMap<Integer, TrafResultSet>();
    // NOTE(review): `random` appears unused within this class — confirm before removing.
    private Random random = new Random();

    /**
     * Creates the statement state and immediately prepares/creates the JDBC
     * statement via {@link #setStatement(Connection, String, int)}.
     *
     * @throws SQLException if preparing/creating the JDBC statement fails
     */
    public TrafStatement(String serverWorkerName, String stmtLabel, Connection conn, String sqlString, int sqlStmtType) throws SQLException {
        init();
        this.stmtLabel = stmtLabel;
        // The object's identity hash doubles as the client-visible handle.
        stmtHandle = this.hashCode();
        this.serverWorkerName = serverWorkerName;
        if(LOG.isDebugEnabled())
            LOG.debug(serverWorkerName + ". constructor TrafStatement[" + stmtLabel + "/" + stmtHandle + "]");
        setStatement(conn, sqlString, sqlStmtType);
    }

    void init(){
        reset();
    }

    // Return all fields to their pristine state (does NOT close JDBC objects).
    void reset(){
        stmt = null;
        paramDescList = null;
        paramCount = 0;
        paramLength = 0;
        resultSetCount = 0;
        curKey = 0;
        outDescList = null;
        isResultSet = false;
        isSpj = false;
        sqlStmtType = ServerConstants.TYPE_UNKNOWN;
    }

    /**
     * Closes the underlying JDBC statement (if any) and all result sets,
     * then resets this object's state. SQLExceptions during close are
     * deliberately swallowed (best-effort cleanup).
     */
    public void closeTStatement() {
        try {
            if (stmt != null){
                if (stmt instanceof Statement){
                    Statement st = (Statement)stmt;
                    if (st.isClosed() == false){
                        st.close();
                        if(LOG.isDebugEnabled())
                            LOG.debug(serverWorkerName + ". T2 st.close(" + stmtLabel + ")");
                    }
                }
                else if (stmt instanceof PreparedStatement){
                    PreparedStatement pst = (PreparedStatement)stmt;
                    if (pst.isClosed() == false){
                        pst.close();
                        if(LOG.isDebugEnabled())
                            LOG.debug(serverWorkerName + ". T2 pst.close(" + stmtLabel + ")");
                    }
                }
            }
        } catch (SQLException sql){}  // best-effort close; nothing useful to do on failure
        closeAllTResultSets();
        reset();
    }

    /** Closes and removes only the current result set (keyed by curKey). */
    public void closeTResultSet(){
        if(LOG.isDebugEnabled())
            LOG.debug(serverWorkerName + ". TrafStatement closeTResultSet (" + stmtLabel + ")");
        try {
            if (curKey != 0){
                resultSetList.get(curKey).closeTResultSet();
                resultSetList.remove(curKey);
            }
        } catch (Exception e){}  // best-effort; the result set may already be gone
    }

    /** Closes every tracked result set and clears the bookkeeping. */
    public void closeAllTResultSets() {
        if (LOG.isDebugEnabled())
            LOG.debug(serverWorkerName + ". closeAllTResultSets resultSetCount : " + resultSetCount);
        if (resultSetCount != 0){
            Integer key;
            Iterator<Integer> keySetIterator = resultSetList.keySet().iterator();
            while (keySetIterator.hasNext()) {
                key = keySetIterator.next();
                resultSetList.get(key).closeTResultSet();
            }
            resultSetList.clear();
        }
        resultSetCount = 0;
        curKey = 0;
    }

    /**
     * Advances the cursor to the next result set, if one exists.
     *
     * @return true when curKey was advanced, false when there is no next set
     */
    public boolean getNextTResultSet(){
        if (LOG.isDebugEnabled())
            LOG.debug(serverWorkerName + ". getNextTResultSet key :" + (curKey + 1) );
        Integer key = curKey + 1;
        if (key <= resultSetCount){
            if (resultSetList.containsKey(key)){
                if (LOG.isDebugEnabled())
                    LOG.debug(serverWorkerName + ". getNextTResultSet returns true ");
                ++curKey;
                return true;
            }
        }
        if (LOG.isDebugEnabled())
            LOG.debug(serverWorkerName + ". getNextTResultSet returns false ");
        return false;
    }

    /** Rewinds the result set cursor to the first result set. */
    public void setFirstTResultSet(){
        if (LOG.isDebugEnabled())
            LOG.debug(serverWorkerName + ". setFirstTResultSet");
        curKey = 1;
    }

    /**
     * Adds a result set under the next free key (max existing key + 1) and
     * resets the cursor to the first result set.
     */
    public void addTResultSet(TrafResultSet trs){
        Integer maxKey = 0;
        Integer key = 0;
        Iterator<Integer> keySetIterator = resultSetList.keySet().iterator();
        while (keySetIterator.hasNext()) {
            key = keySetIterator.next();
            if (key > maxKey) maxKey = key;
        }
        key = maxKey + 1;
        resultSetList.put(key, trs);
        resultSetCount++;
        curKey = 1;
        if (LOG.isDebugEnabled())
            LOG.debug(serverWorkerName + ". addTResultSet key :" + key);
    }

    //=====================================================
    // Simple mutators for the descriptor/flag state.

    public void setOutDescList(Descriptor2List outDescList){
        this.outDescList = outDescList;
    }
    public void setParamCount(int paramCount){
        this.paramCount = paramCount;
    }
    public void setParamDescList(Descriptor2List paramDescList){
        this.paramDescList = paramDescList;
    }
    public void setParamLength(long paramLength){
        this.paramLength = paramLength;
    }
    public void setIsResultSet(boolean isResultSet){
        this.isResultSet = isResultSet;
    }
    public void setIsSpj(boolean isSpj){
        this.isSpj = isSpj;
    }

    /**
     * (Re)creates the JDBC statement for the given SQL text.
     *
     * <p>Closes any previous statement first, classifies the statement type
     * (result-set-producing vs not, SPJ call vs not) and then prepares a
     * CallableStatement (SPJ), PreparedStatement (SQL given) or plain
     * Statement (no SQL given).
     *
     * @throws SQLException if the JDBC prepare/create fails
     */
    public void setStatement(Connection conn, String sqlString, int sqlStmtType) throws SQLException{
        if(LOG.isDebugEnabled())
            LOG.debug(serverWorkerName + ". TrafStatement.setStatement [" + stmtLabel + "]");
        closeTStatement();
        this.sqlStmtType = sqlStmtType;
        switch (sqlStmtType){
            case ServerConstants.TYPE_SELECT:
            case ServerConstants.TYPE_EXPLAIN:
            case ServerConstants.TYPE_CATOLOG:
                isResultSet = true;
                break;
            case ServerConstants.TYPE_CALL:
                isSpj = true;
                // deliberate fall-through: a CALL is otherwise treated like the
                // non-result-set statement types below
            case ServerConstants.TYPE_UPDATE:
            case ServerConstants.TYPE_DELETE:
            case ServerConstants.TYPE_INSERT:
            case ServerConstants.TYPE_INSERT_PARAM:
            case ServerConstants.TYPE_CREATE:
            case ServerConstants.TYPE_GRANT:
            case ServerConstants.TYPE_DROP:
            case ServerConstants.TYPE_CONTROL:
                isResultSet = false;
            default:
        }
        if (sqlString != null){
            if (isSpj == true){
                stmt = conn.prepareCall(sqlString);
                if(LOG.isDebugEnabled())
                    LOG.debug(serverWorkerName + ". T2 conn.prepareCall [" + stmtLabel + "] sqlString :" + sqlString);
            }
            else {
                stmt = conn.prepareStatement(sqlString);
                if(LOG.isDebugEnabled())
                    LOG.debug(serverWorkerName + ". T2 conn.prepareStatement [" + stmtLabel + "] sqlString :" + sqlString);
            }
        }
        else {
            this.stmt = conn.createStatement();
            if(LOG.isDebugEnabled())
                LOG.debug(serverWorkerName + ". T2 conn.createStatement [" + stmtLabel + "]");
        }
    }

    public void setSqlStmtType(int sqlStmtType){
        this.sqlStmtType = sqlStmtType;
    }

    //================================================
    // Simple accessors.

    public Object getStatement(){
        return stmt;
    }
    public Descriptor2List getOutDescList(){
        return outDescList;
    }
    public Descriptor2List getParamDescList(){
        return paramDescList;
    }
    public int getParamCount(){
        return paramCount;
    }
    public long getParamLength(){
        return paramLength;
    }
    public boolean getIsResultSet(){
        return isResultSet;
    }
    public boolean getIsSpj(){
        return isSpj;
    }
    /** @return the result set at the current cursor position (curKey) */
    public TrafResultSet getTrafResultSet(){
        return resultSetList.get(curKey);
    }
    public int getSqlStmtType(){
        return sqlStmtType;
    }
    public Integer getStmtHandle(){
        return stmtHandle;
    }
}
|
xloem/DIY-LAPTOP | SOFTWARE/A64-TERES/linux-a64/arch/metag/kernel/kick.c | <reponame>xloem/DIY-LAPTOP
/*
* Copyright (C) 2009 Imagination Technologies
*
* This file is subject to the terms and conditions of the GNU General Public
* License. See the file COPYING in the main directory of this archive
* for more details.
*
* The Meta KICK interrupt mechanism is generally a useful feature, so
* we provide an interface for registering multiple interrupt
* handlers. All the registered interrupt handlers are "chained". When
* a KICK interrupt is received the first function in the list is
* called. If that interrupt handler cannot handle the KICK the next
* one is called, then the next until someone handles it (or we run
* out of functions). As soon as one function handles the interrupt no
* other handlers are called.
*
* The only downside of chaining interrupt handlers is that each
* handler must be able to detect whether the KICK was intended for it
* or not. For example, when the IPI handler runs and it sees that
* there are no IPI messages it must not signal that the KICK was
* handled, thereby giving the other handlers a chance to run.
*
* The reason that we provide our own interface for calling KICK
* handlers instead of using the generic kernel infrastructure is that
* the KICK handlers require access to a CPU's pTBI structure. So we
* pass it as an argument.
*/
#include <linux/export.h>
#include <linux/kernel.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <asm/traps.h>
/*
* All accesses/manipulations of kick_handlers_list should be
* performed while holding kick_handlers_lock.
*/
static DEFINE_SPINLOCK(kick_handlers_lock);
static LIST_HEAD(kick_handlers_list);
/*
 * Append a KICK handler to the end of the chain. Handlers are invoked in
 * registration order by kick_handler() until one claims the interrupt.
 */
void kick_register_func(struct kick_irq_handler *kh)
{
	unsigned long flags;

	/* IRQ-safe: the list is also walked from kick_handler() (interrupt path). */
	spin_lock_irqsave(&kick_handlers_lock, flags);
	list_add_tail(&kh->list, &kick_handlers_list);
	spin_unlock_irqrestore(&kick_handlers_lock, flags);
}
EXPORT_SYMBOL(kick_register_func);
/*
 * Remove a previously registered KICK handler from the chain.
 */
void kick_unregister_func(struct kick_irq_handler *kh)
{
	unsigned long flags;

	spin_lock_irqsave(&kick_handlers_lock, flags);
	list_del(&kh->list);
	spin_unlock_irqrestore(&kick_handlers_lock, flags);
}
EXPORT_SYMBOL(kick_unregister_func);
/*
 * Top-level KICK interrupt handler: offers the interrupt to each registered
 * handler in registration order, stopping at the first one that reports it
 * as handled (see the chaining description at the top of this file).
 */
TBIRES
kick_handler(TBIRES State, int SigNum, int Triggers, int Inst, PTBI pTBI)
{
	struct kick_irq_handler *kh;
	struct list_head *lh;
	int handled = 0;
	TBIRES ret;

	head_end(State, ~INTS_OFF_MASK);

	/* If we interrupted user code handle any critical sections. */
	if (State.Sig.SaveMask & TBICTX_PRIV_BIT)
		restart_critical_section(State);

	trace_hardirqs_off();

	/*
	 * There is no need to disable interrupts here because we
	 * can't nest KICK interrupts in a KICK interrupt handler.
	 */
	spin_lock(&kick_handlers_lock);

	list_for_each(lh, &kick_handlers_list) {
		kh = list_entry(lh, struct kick_irq_handler, list);
		/* Each handler sets *handled if the KICK was meant for it. */
		ret = kh->func(State, SigNum, Triggers, Inst, pTBI, &handled);
		if (handled)
			break;
	}

	spin_unlock(&kick_handlers_lock);

	/*
	 * A KICK should always find a willing handler.
	 * NOTE(review): if the handler list is empty, `ret` is used
	 * uninitialized below — confirm that this can never happen in practice.
	 */
	WARN_ON(!handled);

	return tail_end(ret);
}
|
Over-Run/SquidCraft | src/main/java/io/github/overrun/squidcraft/config/Configs.java | package io.github.overrun.squidcraft.config;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import io.github.overrun.squidcraft.config.CompressorRecipe.Entry;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;

import static io.github.overrun.squidcraft.SquidCraft.logger;
import static io.github.overrun.squidcraft.item.Items.COMPRESSED_SQUID_BLOCK;
import static io.github.overrun.squidcraft.item.Items.SQUID_BLOCK;
import static net.minecraft.block.Blocks.*;
/**
* @author squid233
* @since 2020/12/27
*/
public final class Configs {
public static final File CFG_FILE = new File("config/squidcraft/config.json");
private static final Gson GSON = new GsonBuilder()
.setPrettyPrinting()
.disableHtmlEscaping()
.registerTypeAdapter(Configurator.class, new Configurator.Serializer())
.create();
private static Configurator configurator =
new Configurator(new CobblestoneFarmRoll[0], new CompressorRecipe[0]);
public static final boolean FAILED = false;
public static final boolean SUCCESS = true;
public static boolean init() {
logger.info("Loading configs!");
File parent = CFG_FILE.getParentFile();
if (!parent.exists()) {
parent.mkdirs();
configurator = new Configurator(
new CobblestoneFarmRoll[]{
new CobblestoneFarmRoll(OBSIDIAN, 1),
new CobblestoneFarmRoll(ANCIENT_DEBRIS, 2),
new CobblestoneFarmRoll(DIAMOND_ORE, 3),
new CobblestoneFarmRoll(EMERALD_ORE, 4),
new CobblestoneFarmRoll(NETHER_QUARTZ_ORE, 5),
new CobblestoneFarmRoll(NETHER_GOLD_ORE, 6),
new CobblestoneFarmRoll(GOLD_ORE, 7),
new CobblestoneFarmRoll(REDSTONE_ORE, 8),
new CobblestoneFarmRoll(LAPIS_ORE, 9),
new CobblestoneFarmRoll(IRON_ORE, 10),
new CobblestoneFarmRoll(COAL_ORE, 11),
new CobblestoneFarmRoll(STONE, 50),
new CobblestoneFarmRoll(COBBLESTONE, 100)
},
new CompressorRecipe[]{
new CompressorRecipe(
new Entry(SQUID_BLOCK, 9),
new Entry(COMPRESSED_SQUID_BLOCK)
)
}
);
store();
} else {
try (Reader r = new FileReader(CFG_FILE)) {
configurator = GSON.fromJson(r, Configurator.class);
} catch (IOException e) {
logger.error("Can't read configs!");
logger.catching(e);
return FAILED;
}
}
logger.info("Loaded all configs");
return SUCCESS;
}
public static void store() {
try (Writer w = new FileWriter(CFG_FILE)) {
w.write(GSON.toJson(configurator));
} catch (IOException e) {
logger.error("Can't write configs to local!");
logger.catching(e);
}
}
public static Configurator getConfigurator() {
return configurator;
}
}
|
SubscribeIT/ngDesk | ngDesk-Module-Service/src/main/java/com/ngdesk/module/task/dao/TaskAPI.java | <gh_stars>1-10
package com.ngdesk.module.task.dao;
import java.util.Date;
import java.util.Optional;
import javax.validation.Valid;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;
import com.ngdesk.commons.exceptions.ForbiddenException;
import com.ngdesk.commons.exceptions.NotFoundException;
import com.ngdesk.commons.managers.AuthManager;
import com.ngdesk.module.role.dao.RoleService;
import com.ngdesk.repositories.task.TaskRepository;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
/**
 * REST endpoints for managing scheduled module tasks.
 *
 * <p>Every endpoint requires the authenticated caller to be a system
 * administrator; otherwise a {@link ForbiddenException} is thrown.
 */
@RestController
public class TaskAPI {

    @Autowired
    private TaskRepository taskRepository;

    @Autowired
    private AuthManager auth;

    @Autowired
    private RoleService roleService;

    /**
     * Creates a task, stamping the audit fields (company, creator, dates)
     * from the authenticated user.
     */
    @PostMapping("/module/{moduleId}/task")
    @Operation(summary = "Post Task", description = "Post a single Task")
    public Task addTask(@Valid @RequestBody Task task, @PathVariable String moduleId) {
        if (!roleService.isSystemAdmin()) {
            throw new ForbiddenException("FORBIDDEN");
        }
        task.setDateCreated(new Date());
        task.setCompanyId(auth.getUserDetails().getCompanyId());
        task.setCreatedBy(auth.getUserDetails().getUserId());
        task.setDateUpdated(new Date());
        task.setLastUpdatedBy(auth.getUserDetails().getUserId());
        return taskRepository.save(task, "tasks");
    }

    /**
     * Updates an existing task, preserving its immutable audit fields
     * (creation date/author, last execution) and refreshing the update stamps.
     *
     * @throws NotFoundException when no task with the given id exists
     */
    @PutMapping("/module/{moduleId}/task")
    @Operation(summary = "Put Task", description = "Update a Task")
    public Task updateTask(@Valid @RequestBody Task task, @PathVariable String moduleId) {
        if (!roleService.isSystemAdmin()) {
            throw new ForbiddenException("FORBIDDEN");
        }
        Optional<Task> optionalExistingTask = taskRepository.findById(task.getTaskId(), "tasks");
        if (optionalExistingTask.isEmpty()) {
            String vars[] = { "TASK" };
            throw new NotFoundException("DAO_NOT_FOUND", vars);
        }
        Task existingTask = optionalExistingTask.get();
        // Carry over fields the client is not allowed to change.
        task.setDateCreated(existingTask.getDateCreated());
        task.setCreatedBy(existingTask.getCreatedBy());
        task.setLastExecuted(existingTask.getLastExecuted());
        task.setDateUpdated(new Date());
        task.setLastUpdatedBy(auth.getUserDetails().getUserId());
        task.setCompanyId(auth.getUserDetails().getCompanyId());
        return taskRepository.save(task, "tasks");
    }

    /**
     * Deletes a task after verifying that both the module and the task exist.
     *
     * @throws NotFoundException when the module or the task does not exist
     */
    @DeleteMapping("/module/{moduleId}/task/{taskId}")
    @Operation(summary = "Deletes a Task", description = "Deletes a Task")
    public void deleteTask(@Parameter(description = "Task ID", required = true) @PathVariable("taskId") String taskId,
            @Parameter(description = "Module ID", required = true) @PathVariable("moduleId") String moduleId) {
        if (!roleService.isSystemAdmin()) {
            throw new ForbiddenException("FORBIDDEN");
        }
        String companyId = auth.getUserDetails().getCompanyId();
        if (taskRepository.findById(moduleId, "modules_" + companyId).isEmpty()) {
            String vars[] = { "MODULE" };
            throw new NotFoundException("DAO_NOT_FOUND", vars);
        }
        Optional<Task> existingTask = taskRepository.findById(taskId, "tasks");
        if (existingTask.isEmpty()) {
            String vars[] = { "TASK" };
            throw new NotFoundException("DAO_NOT_FOUND", vars);
        }
        taskRepository.deleteById(taskId, "tasks");
    }
}
|
adamnok/linden | componens/linden-core/src/main/scala/linden/store/optimizing/AppendableOptimizing.scala | /*
* MIT LICENCE
*
* Copyright (c) 2021 <NAME> [<EMAIL>]
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package linden.store.optimizing
import linden.store.ReadStore
import linden.store.flow.OptimizingStore
import linden.store.optimizing.AppendableOptimizing.Action.*
import linden.store.optimizing.AppendableOptimizing.{Action, AppendableBy, AppendableStore}
import linden.util.dsl.*
object AppendableOptimizing {
  /** Current sequence value paired with the incremental actions that produced it. */
  type AppendableValue[V] = (Seq[V], Seq[Action[V]])
  type AppendableStore[V] = ReadStore[AppendableValue[V]]

  /**
   * Describes how to turn a value of type T into a sequence of keyed items:
   * `sequalize` flattens T to items, `createKey` derives the identity key used
   * to diff consecutive sequences.
   */
  trait AppendableBy[T] {
    type SequalizeItem
    type Key

    def sequalize(it: T): Seq[SequalizeItem]

    def createKey(it: SequalizeItem): Key
  }

  /** Convenience AppendableBy for plain sequences, keyed by the given function. */
  abstract class SeqAppendableBy[V, K](by: V => K) extends AppendableBy[Seq[V]] {
    override type SequalizeItem = V
    override type Key = K

    override def sequalize(it: Seq[V]): Seq[SequalizeItem] = it

    override def createKey(it: SequalizeItem): K = by(it)
  }

  /** Incremental update instruction emitted when the source sequence changes. */
  trait Action[+T]

  object Action {
    // Remove every element.
    case object Clear extends Action[Nothing]
    // Prepend the given elements.
    case class ToTop[+T](values: Seq[T]) extends Action[T]
    // Append the given elements.
    case class ToBottom[+T](values: Seq[T]) extends Action[T]
  }
}
/**
 * Mixin that turns an AbstractOptimizing source into a store of incremental
 * sequence updates: instead of re-emitting the whole sequence on every change,
 * it emits Clear/ToTop/ToBottom actions describing the delta relative to the
 * previous value. Only additions at the head or tail are detected; see the
 * diffing logic below.
 */
trait AppendableOptimizing[T] {
  abstractOptimizing: AbstractOptimizing[T] =>

  def appendable()(using by: AppendableBy[T]): OptimizingStore[Seq[by.SequalizeItem], Seq[Action[by.SequalizeItem]]] = {
    val optimizing = appendable[by.SequalizeItem, by.Key](
      sequalize = { it => by.sequalize(it) },
      by = { it => by.createKey(it) }
    )
    new OptimizingStore(optimizing)
  }

  private def appendable[G, H](sequalize: T => Seq[G], by: G => H): AppendableStore[G] = {
    // Wraps an element with its key; equality is defined on the key alone so
    // that the diff below compares identities, not full values.
    case class Item(key: H, value: G):
      override def equals(obj: Any): Boolean =
        obj.asInstanceOf[Matchable] match
          case Item(o_key, _) => o_key == key
          case _ => false

    source
      .map(it => sequalize(it).map(it => Item(by(it), it)))
      .zipWithLastValue
      .map { (previousSeq, newSeq) =>
        val actions: Seq[Action[G]] = (previousSeq, newSeq) match {
          // First emission: no delta to report.
          case (None, _) => Seq()
          // Previously empty: everything is appended at the bottom.
          case (Some(Seq()), seq) => Seq(ToBottom(seq.map(_.value)))
          // Now empty: drop everything.
          case (Some(_), Seq()) => Seq(Clear)
          // General case: elements before the previous head become ToTop,
          // elements after the previous last become ToBottom.
          case (Some(last), seq) =>
            val lastHead = last.head
            val lastLast = last.last
            val tops = seq.takeWhile(_ != lastHead)
              .map(_.value)
              .let(it => Option.when(it.nonEmpty)(it))
              .map(ToTop(_))
              .toSeq
            val bottoms = seq.takeLastWhile(_ != lastLast)
              .map(_.value)
              .let(it => Option.when(it.nonEmpty)(it))
              .map(ToBottom(_))
              .toSeq
            tops ++ bottoms
        }
        newSeq.map(_.value) -> actions
      }
  }
}
|
navikt/skjemabygging-formio | packages/fyllut/server/commit_version.test.js | <filename>packages/fyllut/server/commit_version.test.js
import { gitVersionFromIndexHtml } from "./commit_version";
import fs from "fs";
import child_process from "child_process";
import { buildDirectoryIndexHtml } from "./context";
// Produce a minimal index.html string whose <meta name="git-version"> tag
// carries the given version, mirroring what the production build emits.
const htmlDocWithVersion = (version) => `
<!doctype html>
<html lang="nb-NO">
  <head>
    <meta charset="utf-8"/>
    <meta name="git-version" content="${version}"/>
  </head>
  <body>
    Brødteksten kom hit gitt
  </body>
</html>
`;
// The version embedded by htmlDocWithVersion() should round-trip through the
// parser unchanged.
it("parses built index.html and extracts the git commit", () => {
  const htmlString = htmlDocWithVersion("flesk");
  const version = gitVersionFromIndexHtml(htmlString);
  expect(version).toEqual("flesk");
});
// Resolve the current git version string (commit hash, possibly with a
// "-dirty" suffix) as reported by `git describe`.
//
// Rejects when the command fails; the previous implementation ignored the
// error callback argument and silently resolved an empty string.
async function getGitVersion() {
  return new Promise((resolve, reject) => {
    child_process.exec(
      "git describe --always --match 'NOT A TAG' --abbrev=0 --dirty",
      (error, stdout) => {
        if (error) {
          reject(error);
          return;
        }
        resolve(stdout.trim());
      }
    );
  });
}
// Integration check: the built index.html on disk embeds the same git version
// as the working tree. Skipped by default because it requires a fresh build.
it.skip("finds index.html from the build folder and uses that", async () => {
  if (!process.env.CI) {
    // this test will very often fail when developing as it depends on the run build
    // being up to date
    return;
  }
  const currentGitVersion = await getGitVersion();
  const html = fs.readFileSync(buildDirectoryIndexHtml);
  const version = gitVersionFromIndexHtml(html);
  expect(currentGitVersion).toEqual(version);
});
|
mahaplatform/mahaplatform.com | src/apps/finance/admin/views/invoices/show.js | import { Page } from '@admin'
import Payments from './payments'
import Payment from './payment'
import Details from './details'
import Clone from './clone'
import Send from './send'
import Edit from './edit'
import Void from './void'
import React from 'react'
const getTabs = ({ audits, invoice, payments }) => {
const items = [
{ label: 'Details', component: <Details audits={ audits } invoice={ invoice } payments={ payments } /> }
]
if(payments.length > 0) {
items.push({ label: 'Payments', component: <Payments invoice={ invoice } payments={ payments } /> })
}
return { items }
}
// Build the action menu for an invoice. Voided invoices get no actions at
// all; wording switches from 'Invoice' to 'Receipt' once the invoice is paid.
const getTasks = ({ invoice }, { admin }) => {
  if(invoice.status === 'voided') return null
  const entity = invoice.status === 'paid' ? 'Receipt' : 'Invoice'
  const items = []
  // Editing and voiding are only possible before any payment was recorded.
  if(invoice.status !== 'paid' && invoice.payments.length === 0) {
    items.push({ label: 'Edit Invoice', modal: <Edit invoice={ invoice } /> })
  }
  items.push({ label: 'Clone Invoice', modal: <Clone invoice={ invoice } /> })
  if(invoice.status !== 'paid' && invoice.payments.length === 0) {
    items.push({ label: 'Void Invoice', modal: <Void invoice={ invoice } /> })
  }
  if(invoice.status !== 'paid') {
    items.push({ label: 'Receive Payment', modal: <Payment invoice={ invoice } /> })
  }
  items.push({ label: `Send ${entity}`, modal: <Send invoice={ invoice } /> })
  items.push({ label: `View Public ${entity}`, link: `${process.env.ADMIN_HOST}/finance/invoices/${invoice.code}` })
  items.push({ label: 'Download Invoice', url: `${process.env.ADMIN_HOST}/finance/invoices/${invoice.code}/download` })
  return { items }
}
// API endpoints fetched before the page renders, keyed by resource name.
// NOTE(review): the audits path uses 'finance_invoices' while the other two
// use 'finance/invoices' — confirm this asymmetry is intentional.
const mapResourcesToPage = (props, context) => ({
  audits: `/api/admin/finance_invoices/${props.params.id}/audits`,
  invoice: `/api/admin/finance/invoices/${props.params.id}`,
  payments: `/api/admin/finance/invoices/${props.params.id}/payments`
})
const mapPropsToPage = (props, context, resources, page) => ({
title: 'Invoice',
tabs: getTabs(resources, context),
tasks: getTasks(resources, context)
})
export default Page(mapResourcesToPage, mapPropsToPage)
|
fengjixuchui/Push | source/push[exe]/sl/file.c | <filename>source/push[exe]/sl/file.c
#include <sl.h>
#include "push.h"
#include "file.h"
#define FIELD_OFFSET(type, field) ((LONG)(UINT_B)&(((type *)0)->field))
#define PTR_ADD_OFFSET(Pointer, Offset) ((VOID*)((UINT_B)(Pointer) + (UINT_B)(Offset)))
/**
 * Checks whether the reparse point at Name points to the path dest.
 *
 * \param Name The Win32 path of the symbolic link to inspect.
 * \param dest The expected link target (Win32 path, without NT prefix).
 * \return TRUE when the link's substitute name matches dest; FALSE on any
 *         mismatch, open failure, or ioctl failure.
 */
BOOLEAN SymLinkTargetCmp( WCHAR *Name, WCHAR *dest )
{
    VOID *fileHandle;
    WCHAR szName[1024];
    BYTE *reparseData;
    IO_STATUS_BLOCK isb;
    NTSTATUS status;
    BOOLEAN match;
    UINT32 reparseBufferSize = 17000;
    REPARSE_DATA_BUFFER *reparseInfo = Memory_Allocate(reparseBufferSize);

    if (!reparseInfo)
        return FALSE;

    // The original computed FILE_FLAG_BACKUP_SEMANTICS/OPEN_REPARSE_POINT
    // flags from File_GetAttributes(dest) but never passed them anywhere;
    // that dead code has been removed.
    status = File_Create(
        &fileHandle,
        Name,
        FILE_READ_ATTRIBUTES | SYNCHRONIZE,
        FILE_SHARE_READ | FILE_SHARE_WRITE,
        FILE_OPEN,
        FILE_NON_DIRECTORY_FILE | FILE_SYNCHRONOUS_IO_NONALERT,
        NULL
        );

    if (!NT_SUCCESS(status))
    {
        Memory_Free(reparseInfo);  // BUG FIX: was leaked on this path
        return FALSE;
    }

    // NOTE(review): per the documented NtDeviceIoControlFile signature,
    // FSCTL_GET_REPARSE_POINT returns its data in the *output* buffer
    // (parameters 9/10); reparseInfo is kept in its original positional
    // slot here -- confirm the prototype in sl.h matches the intent.
    status = NtDeviceIoControlFile(
        fileHandle,
        NULL,
        NULL,
        NULL,
        &isb,
        FSCTL_GET_REPARSE_POINT,
        reparseInfo,
        reparseBufferSize,  // BUG FIX: was sizeof(reparseBufferSize), i.e. 4 bytes
        NULL,
        0
        );

    if (!NT_SUCCESS(status))
    {
        // BUG FIX: the ioctl status was previously ignored and stale
        // buffer contents were parsed.
        NtClose(fileHandle);
        Memory_Free(reparseInfo);
        return FALSE;
    }

    reparseData = (BYTE *) &reparseInfo->SymbolicLinkReparseBuffer.PathBuffer;

    // NOTE(review): SubstituteNameLength is a byte count in the native API
    // but is used here as a character count/index (preserved from the
    // original) -- TODO confirm against String_CopyN's contract.
    String_CopyN(
        szName,
        (WCHAR *) (reparseData + reparseInfo->SymbolicLinkReparseBuffer.SubstituteNameOffset),
        reparseInfo->SymbolicLinkReparseBuffer.SubstituteNameLength );

    szName[reparseInfo->SymbolicLinkReparseBuffer.SubstituteNameLength] = 0;

    NtClose(fileHandle);

    // Skip the first 6 characters of the substitute name (NT path prefix,
    // preserved from the original) before comparing against dest.
    match = (String_CompareN(dest, szName + 6, String_GetLength(dest)) == 0);

    Memory_Free(reparseInfo);  // BUG FIX: was leaked on the success path too

    return match;
}
/* Joins Path and File with a backslash into a freshly allocated string.
   The caller owns the returned buffer. */
WCHAR* GetPointerToFilePath( WCHAR *Path, WCHAR *File )
{
    WCHAR *joined;
    UINT_B charCount;

    /* +1 for the separator, +1 for the terminator; *2 for UTF-16 units. */
    charCount = String_GetLength(Path) + String_GetLength(File) + 2;
    joined = (WCHAR *) Memory_Allocate(charCount * 2);

    String_Copy(joined, Path);
    String_Concatenate(joined, L"\\");
    String_Concatenate(joined, File);

    return joined;
}
#define PROGRESS_CONTINUE 0
/**
 * Renames FilePath so its file name carries a "cache_" prefix,
 * e.g. C:\dir\a.bin -> C:\dir\cache_a.bin.
 *
 * \param FilePath The Win32 path of the file to mark.
 */
VOID MarkForCache( WCHAR *FilePath )
{
    WCHAR *newPath, *lastSlash, *pszFileName;

    lastSlash = String_FindLastChar(FilePath, '\\');

    // BUG FIX: a path without a backslash previously produced NULL + 1
    // (undefined behaviour); treat it as nothing to do.
    if (!lastSlash)
        return;

    pszFileName = lastSlash + 1;

    // Original length + "cache_" (6 chars) + terminator, in UTF-16 bytes.
    newPath = (WCHAR*) Memory_Allocate( (String_GetLength(FilePath) + 7) * 2 );

    if (!newPath)
        return;

    GetPathOnly(FilePath, newPath);
    String_Concatenate(newPath, L"cache_");
    String_Concatenate(newPath, pszFileName);

    File_Rename(FilePath, newPath);

    Memory_Free(newPath);  // BUG FIX: was leaked
}
/* Returns TRUE when Folder can be opened as an existing directory. */
BOOLEAN FolderExists( WCHAR* Folder )
{
    VOID *directoryHandle;
    BOOLEAN exists = FALSE;

    if (NT_SUCCESS(File_Create(
            &directoryHandle,
            Folder,
            FILE_LIST_DIRECTORY | SYNCHRONIZE,
            FILE_SHARE_DELETE | FILE_SHARE_READ | FILE_SHARE_WRITE,
            FILE_OPEN,
            FILE_DIRECTORY_FILE,
            NULL
            )))
    {
        NtClose(directoryHandle);
        exists = TRUE;
    }

    return exists;
}
#define MEM_RESERVE 0x2000
#define STATUS_END_OF_FILE ((NTSTATUS)0xC0000011L)
#define FILE_DELETE_ON_CLOSE 0x00001000
#define FILE_WRITE_ATTRIBUTES 0x00000100
#define STATUS_NO_MEMORY ((DWORD)0xC0000017)
#define STATUS_COMMITMENT_LIMIT ((DWORD)0xC000012D)
typedef struct _FILE_NETWORK_OPEN_INFORMATION {
LARGE_INTEGER CreationTime;
LARGE_INTEGER LastAccessTime;
LARGE_INTEGER LastWriteTime;
LARGE_INTEGER ChangeTime;
LARGE_INTEGER AllocationSize;
LARGE_INTEGER EndOfFile;
ULONG FileAttributes;
} FILE_NETWORK_OPEN_INFORMATION, *PFILE_NETWORK_OPEN_INFORMATION;
typedef struct _FILE_BASIC_INFORMATION
{
LARGE_INTEGER CreationTime;
LARGE_INTEGER LastAccessTime;
LARGE_INTEGER LastWriteTime;
LARGE_INTEGER ChangeTime;
ULONG FileAttributes;
} FILE_BASIC_INFORMATION, *PFILE_BASIC_INFORMATION;
NTSTATUS __stdcall NtQueryFullAttributesFile(
OBJECT_ATTRIBUTES* ObjectAttributes,
FILE_NETWORK_OPEN_INFORMATION* FileInformation
);
/**
 * Creates or opens a file.
 *
 * \param FileHandle A variable that receives the file handle.
 *        Only written on success; left untouched when the call fails.
 * \param FileName The Win32 file name (converted to an NT path
 *        internally; the temporary NT path buffer is freed here).
 * \param DesiredAccess The desired access to the file.
 * \li \c FILE_READ_ATTRIBUTES | GENERIC_READ | SYNCHRONIZE Grants read
 * access.
 * \li \c FILE_READ_ATTRIBUTES | GENERIC_WRITE | SYNCHRONIZE Grants
 * write access.
 * \li \c DELETE | SYNCHRONIZE Allows you to rename file.
 * \param ShareAccess The file access granted to other threads.
 * \li \c FILE_SHARE_READ Allows other threads to read from the file.
 * \li \c FILE_SHARE_WRITE Allows other threads to write to the file.
 * \li \c FILE_SHARE_DELETE Allows other threads to delete the file.
 * \note: To rename a file use FILE_SHARE_READ | FILE_SHARE_WRITE |
 * FILE_SHARE_DELETE
 * \param CreateDisposition The action to perform if the file does or
 * does not exist.
 * \li \c FILE_OPEN If the file exists, open it. Otherwise, fail.
 * \li \c FILE_OVERWRITE_IF If the file exists, open and overwrite it.
 * Otherwise, create the file.
 * \param CreateOptions The options to apply when the file is opened or
 * created.
 * \li \c FILE_DIRECTORY_FILE Use for directory.
 * \li \c FILE_NON_DIRECTORY_FILE Use for non-directory.
 * \param CreateStatus Optional; receives ioStatusBlock.Information
 *        (the create/open disposition). NOTE(review): it is written even
 *        when NtCreateFile fails, in which case the value may be stale --
 *        confirm callers tolerate this.
 * \return STATUS_OBJECT_NAME_NOT_FOUND when the path cannot be converted,
 *         otherwise the NTSTATUS of NtCreateFile.
 */
NTSTATUS File_Create(
    VOID** FileHandle,
    WCHAR* FileName,
    DWORD DesiredAccess,
    DWORD ShareAccess,
    DWORD CreateDisposition,
    DWORD CreateOptions,
    DWORD* CreateStatus
    )
{
    NTSTATUS status;
    UNICODE_STRING fileName;
    OBJECT_ATTRIBUTES objAttrib;
    IO_STATUS_BLOCK ioStatusBlock;
    VOID *fileHandle;

    // Convert the DOS path ("C:\...") into an NT path ("\??\C:\...").
    if(!RtlDosPathNameToNtPathName_U(
        FileName,
        &fileName,
        NULL,
        NULL
        ))
        return STATUS_OBJECT_NAME_NOT_FOUND;

    objAttrib.Length = sizeof(OBJECT_ATTRIBUTES);
    objAttrib.RootDirectory = NULL;
    objAttrib.ObjectName = &fileName;
    objAttrib.Attributes = OBJ_CASE_INSENSITIVE;
    objAttrib.SecurityDescriptor = NULL;
    objAttrib.SecurityQualityOfService = NULL;

    status = NtCreateFile(
        &fileHandle,
        DesiredAccess,
        &objAttrib,
        &ioStatusBlock,
        NULL,
        FILE_ATTRIBUTE_NORMAL,
        ShareAccess,
        CreateDisposition,
        CreateOptions,
        NULL,
        0
        );

    // The NT path buffer allocated by RtlDosPathNameToNtPathName_U.
    Memory_Free(fileName.Buffer);

    if (NT_SUCCESS(status))
    {
        *FileHandle = fileHandle;
    }

    if (CreateStatus)
        *CreateStatus = ioStatusBlock.Information;

    return status;
}
/* Opens an existing non-directory file for synchronous access and returns
   the handle, or NULL when the open fails. */
HANDLE File_Open( WCHAR* FileName, DWORD DesiredAccess )
{
    HANDLE handle;

    handle = NULL;

    /* On failure File_Create leaves the handle untouched, so NULL
       propagates to the caller. */
    File_Create(
        &handle,
        FileName,
        DesiredAccess,
        FILE_SHARE_READ | FILE_SHARE_WRITE,
        FILE_OPEN,
        FILE_NON_DIRECTORY_FILE | FILE_SYNCHRONOUS_IO_NONALERT,
        NULL
        );

    return handle;
}
/**
 * Checks if a file exists.
 *
 * \param FileName The Win32 file name.
 * \return TRUE when the file could be opened for reading, FALSE otherwise.
 */
BOOLEAN File_Exists( WCHAR* FileName )
{
    VOID *handle;
    NTSTATUS openStatus;

    openStatus = File_Create(
        &handle,
        FileName,
        FILE_READ_ATTRIBUTES |GENERIC_READ | SYNCHRONIZE,
        FILE_SHARE_READ | FILE_SHARE_WRITE,
        FILE_OPEN,
        FILE_NON_DIRECTORY_FILE | FILE_SYNCHRONOUS_IO_NONALERT,
        NULL
        );

    if (!NT_SUCCESS(openStatus))
        return FALSE;

    NtClose(handle);
    return TRUE;
}
/**
 * Copies an existing file to a new file. Missing directories along the
 * destination path are created on demand. On failure the function simply
 * returns; partially written output may remain.
 *
 * \param SourceFileName The Win32 file name of source file.
 * \param DestinationFileName The Win32 file name for the new
 * file.
 * \param ProgressRoutine The address of a callback function of
 * type TYPE_FsProgessRoutine that is called each time another
 * portion of the file has been copied. This parameter can be
 * NULL if no progress routine is required.
 */
VOID File_Copy(
    WCHAR* SourceFileName,
    WCHAR* DestinationFileName,
    TYPE_FsProgessRoutine ProgressRoutine
    )
{
    VOID *fileHandleSource = NULL, *fileHandleDest = NULL;
    UCHAR *buffer = NULL;
    UINT_B regionSize = 0x10000;
    NTSTATUS status;
    UINT64 fileSize = 0, bytesCopied = 0;
    IO_STATUS_BLOCK isb;

    status = File_Create(
        &fileHandleSource,
        SourceFileName,
        SYNCHRONIZE | FILE_READ_ATTRIBUTES | GENERIC_READ,
        FILE_SHARE_READ | FILE_SHARE_WRITE,
        FILE_OPEN,
        FILE_NON_DIRECTORY_FILE | FILE_SYNCHRONOUS_IO_NONALERT,
        NULL
        );

    if (!NT_SUCCESS(status))
        return;

    status = File_Create(
        &fileHandleDest,
        DestinationFileName,
        SYNCHRONIZE | FILE_READ_ATTRIBUTES | GENERIC_WRITE,
        FILE_SHARE_WRITE,
        FILE_CREATE,
        FILE_NON_DIRECTORY_FILE | FILE_SYNCHRONOUS_IO_NONALERT,
        NULL
        );

    if (!NT_SUCCESS(status))
    {
        if (status == STATUS_OBJECT_PATH_NOT_FOUND)
        {
            // The destination directory chain is missing: create every
            // prefix directory, then retry the file creation.
            //TODO: Implement with CreatePath() function.
            HANDLE directoryHandle;
            WCHAR directoryName[260];
            WCHAR *end;

            end = String_FindFirstChar(DestinationFileName, L'\\');

            while (end != NULL)
            {
                String_CopyN(directoryName, DestinationFileName, end - DestinationFileName + 1);
                // BUG FIX: the prefix was previously left unterminated
                // (compare CreatePath, which does terminate it).
                directoryName[end - DestinationFileName + 1] = L'\0';

                if (NT_SUCCESS(File_Create(
                    &directoryHandle,
                    directoryName,
                    FILE_LIST_DIRECTORY | SYNCHRONIZE,
                    FILE_SHARE_READ | FILE_SHARE_WRITE,
                    FILE_CREATE,
                    FILE_DIRECTORY_FILE | FILE_SYNCHRONOUS_IO_NONALERT | FILE_OPEN_FOR_BACKUP_INTENT,
                    NULL
                    )))
                {
                    // BUG FIX: handles of newly created directories were leaked.
                    NtClose(directoryHandle);
                }

                end = String_FindFirstChar(++end, L'\\');
            }

            //try again.
            status = File_Create(
                &fileHandleDest,
                DestinationFileName,
                SYNCHRONIZE | FILE_READ_ATTRIBUTES | GENERIC_WRITE,
                FILE_SHARE_WRITE,
                FILE_CREATE,
                FILE_NON_DIRECTORY_FILE | FILE_SYNCHRONOUS_IO_NONALERT,
                NULL
                );

            //still fails? abort.
            if (!NT_SUCCESS(status))
            {
                NtClose(fileHandleSource);  // BUG FIX: handle was leaked here
                return;
            }
        }
        else
        {
            NtClose(fileHandleSource);  // BUG FIX: handle was leaked here
            return;
        }
    }

    if (ProgressRoutine)
    {
        //get file size, for progress reporting only
        FILE_STANDARD_INFORMATION fileInformation;

        NtQueryInformationFile(
            fileHandleSource,
            &isb,
            &fileInformation,
            sizeof(FILE_STANDARD_INFORMATION),
            FileStandardInformation
            );

        fileSize = fileInformation.EndOfFile.QuadPart;
    }

    // A failed allocation leaves buffer NULL; the first NtReadFile then
    // fails and the loop below exits cleanly.
    NtAllocateVirtualMemory(
        NtCurrentProcess(),
        (VOID**)&buffer,
        0,
        &regionSize,  // was mangled to "(R)ionSize" by an HTML-entity round trip
        MEM_RESERVE | MEM_COMMIT,
        PAGE_READWRITE
        );

    while (TRUE)
    {
        status = NtReadFile(
            fileHandleSource,
            NULL,
            NULL,
            NULL,
            &isb,
            buffer,
            regionSize,
            NULL,
            NULL
            );

        // BUG FIX: only STATUS_END_OF_FILE ended the loop before; any
        // other read failure left isb.Information stale and looped forever.
        if (!NT_SUCCESS(status))
            break;

        NtWriteFile(
            fileHandleDest,
            NULL,
            NULL,
            NULL,
            &isb,
            buffer,
            isb.Information,
            NULL,
            NULL
            );

        bytesCopied += isb.Information;

        if (ProgressRoutine)
            ProgressRoutine(fileSize, bytesCopied);
    }

    NtFreeVirtualMemory(NtCurrentProcess(), (VOID**)&buffer, &regionSize, MEM_RELEASE);

    if (fileHandleSource)
    {
        NtClose(fileHandleSource);
    }

    if (fileHandleDest)
    {
        NtClose(fileHandleDest);
    }
}
// Creates every directory along Path by opening each backslash-delimited
// prefix with FILE_CREATE. Opens of already-existing directories simply
// fail and are ignored. A component after the final backslash (if any)
// is not created.
// NOTE(review): handles of successfully created directories are never
// closed here -- possible handle leak; confirm whether that is intended.
VOID CreatePath( WCHAR* Path )
{
    HANDLE directoryHandle;
    WCHAR directoryName[260];
    WCHAR *end;

    end = String_FindFirstChar(Path, L'\\');

    while (end != NULL)
    {
        // Copy the prefix up to and including the current backslash,
        // then terminate it.
        String_CopyN(directoryName, Path, end - Path + 1);
        directoryName[end - Path + 1] = L'\0';

        File_Create(
            &directoryHandle,
            directoryName,
            FILE_LIST_DIRECTORY | SYNCHRONIZE,
            FILE_SHARE_READ | FILE_SHARE_WRITE,
            FILE_CREATE,
            FILE_DIRECTORY_FILE | FILE_SYNCHRONOUS_IO_NONALERT | FILE_OPEN_FOR_BACKUP_INTENT,
            NULL
            );

        // Advance to the next separator.
        end = String_FindFirstChar(++end, L'\\');
    }
}
/**
 * Copies Size bytes starting at Offset from InputFile into a new file
 * named FileName under OutputPath. Missing output directories are
 * created on demand.
 *
 * \param InputFile Handle to the (synchronous) source file.
 * \param FileName Name of the chunk file to create.
 * \param Offset Byte offset in the source at which the chunk starts.
 * \param Size Number of bytes to copy.
 * \param OutputPath Destination directory prefix.
 */
VOID File_Split( HANDLE InputFile, WCHAR* FileName, DWORD Offset, DWORD Size, WCHAR* OutputPath )
{
    HANDLE outputFile = NULL;
    // NOTE(review): fixed 60-character buffer; assumes OutputPath plus
    // FileName always fits -- TODO confirm callers guarantee this.
    WCHAR fileName[60];
    VOID *buffer = NULL;
    UINT_B regionSize;
    NTSTATUS status;
    IO_STATUS_BLOCK isb;
    UINT32 bytesCopied = 0;
    // BUG FIX: was initialized to 0, which made the first NtReadFile a
    // 0-byte read (bytesRemaining < regionSize picked 0).
    UINT32 bytesRemaining = Size;

    String_Copy(fileName, OutputPath);
    String_Concatenate(fileName, FileName);

    status = File_Create(
        &outputFile,
        fileName,
        FILE_WRITE_ATTRIBUTES | FILE_WRITE_DATA | SYNCHRONIZE,
        FILE_SHARE_READ | FILE_SHARE_WRITE,
        FILE_OVERWRITE_IF,
        FILE_NON_DIRECTORY_FILE | FILE_SYNCHRONOUS_IO_NONALERT,
        NULL
        );

    if (status == STATUS_OBJECT_PATH_NOT_FOUND)
    {
        CreatePath(OutputPath);

        //try again.
        status = File_Create(
            &outputFile,
            fileName,
            FILE_WRITE_ATTRIBUTES | FILE_WRITE_DATA | SYNCHRONIZE,
            FILE_SHARE_READ | FILE_SHARE_WRITE,
            FILE_OVERWRITE_IF,
            FILE_NON_DIRECTORY_FILE | FILE_SYNCHRONOUS_IO_NONALERT,
            NULL
            );
    }

    // BUG FIX: the function previously carried on with a NULL handle
    // when the create (or the retry) failed.
    if (!NT_SUCCESS(status))
        return;

    regionSize = Size;

    status = NtAllocateVirtualMemory(
        NtCurrentProcess(),
        (VOID**)&buffer,
        0,
        &regionSize,  // was mangled to "(R)ionSize" by an HTML-entity round trip
        MEM_RESERVE | MEM_COMMIT,
        PAGE_READWRITE
        );

    if (status == STATUS_NO_MEMORY)
    {
        SYSTEM_BASIC_INFORMATION basicInfo;
        SYSTEM_BASIC_PERFORMANCE_INFORMATION performanceInfo;
        UINT8 percentageOfMemory = 90;

        NtQuerySystemInformation(
            SystemBasicInformation,
            &basicInfo,
            sizeof(SYSTEM_BASIC_INFORMATION),
            0
            );

        // Check available memory
        NtQuerySystemInformation(
            SystemBasicPerformanceInformation,
            &performanceInfo,
            sizeof(SYSTEM_BASIC_PERFORMANCE_INFORMATION),
            NULL
            );

        // Retry with a shrinking fraction of the available memory.
        // BUG FIX: the explicit floor stops the loop before the unsigned
        // percentage wraps around below zero.
        while ((status == STATUS_NO_MEMORY || status == STATUS_COMMITMENT_LIMIT)
            && percentageOfMemory > 0)
        {
            regionSize = performanceInfo.AvailablePages * basicInfo.PageSize;
            regionSize = (FLOAT)((FLOAT)regionSize / (FLOAT)100) * percentageOfMemory;

            status = NtAllocateVirtualMemory(
                NtCurrentProcess(),
                (VOID**)&buffer,
                0,
                &regionSize,
                MEM_RESERVE | MEM_COMMIT,
                PAGE_READWRITE
                );

            percentageOfMemory -= 10;
        }

        if (!buffer)
        {
            NtClose(outputFile);  // BUG FIX: handle was leaked on this path
            return;
        }
    }

    File_SetPointer(InputFile, Offset, FILE_BEGIN);

    while (bytesCopied < Size)
    {
        status = NtReadFile(
            InputFile,
            NULL,
            NULL,
            NULL,
            &isb,
            buffer,
            bytesRemaining < regionSize ? bytesRemaining : regionSize,
            NULL,
            NULL
            );

        // BUG FIX: stop on EOF or any read failure; looping on a stale
        // isb.Information would never terminate.
        if (!NT_SUCCESS(status))
            break;

        status = NtWriteFile(
            outputFile,
            NULL,
            NULL,
            NULL,
            &isb,
            buffer,
            isb.Information,
            NULL,
            NULL
            );

        bytesCopied += isb.Information;
        bytesRemaining = Size - bytesCopied;
    }

    NtFreeVirtualMemory(
        NtCurrentProcess(),
        (VOID**)&buffer,
        &regionSize,
        MEM_RELEASE
        );

    if (outputFile)
        NtClose(outputFile);
}
/**
 * Loads a file into memory and returns the base address.
 * The caller owns the returned buffer.
 *
 * \param FileName The Win32 file name.
 * \param FileSize Optional, returns the file size.
 * \return Base address of the file contents, or NULL when the file
 *         cannot be opened or memory cannot be allocated.
 */
VOID* File_Load( WCHAR* FileName, UINT64* FileSize )
{
    HANDLE fileHandle;
    NTSTATUS status;
    IO_STATUS_BLOCK isb;
    FILE_STANDARD_INFORMATION fileInformation;
    UINT64 fileSize;
    VOID *buffer;

    status = File_Create(
        &fileHandle,
        FileName,
        SYNCHRONIZE | FILE_READ_ATTRIBUTES | GENERIC_READ,
        FILE_SHARE_READ,
        FILE_OPEN,
        FILE_NON_DIRECTORY_FILE | FILE_SYNCHRONOUS_IO_NONALERT,
        NULL
        );

    if (!NT_SUCCESS(status))
        return NULL;

    // Get file size
    NtQueryInformationFile(
        fileHandle,
        &isb,
        &fileInformation,
        sizeof(FILE_STANDARD_INFORMATION),
        FileStandardInformation
        );

    fileSize = fileInformation.EndOfFile.QuadPart;

    // If the user wants it, give it to him
    if (FileSize)
        *FileSize = fileSize;

    // Allocate some memory
    buffer = Memory_Allocate(fileSize);

    // BUG FIX: a failed allocation previously fed a NULL buffer into
    // NtReadFile and leaked the file handle.
    if (!buffer)
    {
        NtClose(fileHandle);
        return NULL;
    }

    // Read the entire file into memory (the handle is synchronous, so a
    // single read covers the whole file).
    status = NtReadFile(
        fileHandle,
        NULL,
        NULL,
        NULL,
        &isb,
        buffer,
        fileSize,
        NULL,
        NULL
        );

    // We got what we need, the file handle is no longer needed.
    NtClose(fileHandle);

    return buffer;
}
/**
 * Writes a memory buffer out to a file, creating or overwriting it.
 * (The previous header was a copy/paste of File_Load's description.)
 *
 * \param FileName The Win32 file name to create/overwrite.
 * \param Buffer The memory to write.
 * \param Size Number of bytes to write.
 */
VOID File_Dump( WCHAR* FileName, VOID* Buffer, UINT32 Size )
{
    HANDLE fileHandle;
    NTSTATUS status;
    IO_STATUS_BLOCK isb;

    status = File_Create(
        &fileHandle,
        FileName,
        FILE_WRITE_ATTRIBUTES | FILE_WRITE_DATA | SYNCHRONIZE,
        FILE_SHARE_READ | FILE_SHARE_WRITE,
        FILE_OVERWRITE_IF,
        FILE_NON_DIRECTORY_FILE | FILE_SYNCHRONOUS_IO_NONALERT,
        NULL
        );

    if (!NT_SUCCESS(status))
        return;

    // Write memory to file
    status = NtWriteFile(
        fileHandle,
        NULL,
        NULL,
        NULL,
        &isb,
        Buffer,
        Size,
        NULL,
        NULL
        );

    NtClose(fileHandle);
}
/**
 * Retrieves the size of a file.
 *
 * \param FileName The Win32 file name.
 * \return End-of-file offset in bytes, or 0 when the file cannot be
 *         opened (indistinguishable from a genuinely empty file).
 */
UINT64 File_GetSize( WCHAR* FileName )
{
    VOID *fileHandle;
    IO_STATUS_BLOCK isb;
    FILE_STANDARD_INFORMATION fileInformation;
    NTSTATUS status;

    status = File_Create(
        &fileHandle,
        FileName,
        SYNCHRONIZE | FILE_READ_ATTRIBUTES | GENERIC_READ,
        FILE_SHARE_READ,
        FILE_OPEN,
        FILE_NON_DIRECTORY_FILE | FILE_SYNCHRONOUS_IO_NONALERT,
        NULL
        );

    if (!NT_SUCCESS(status))
        return 0;

    NtQueryInformationFile(
        fileHandle,
        &isb,
        &fileInformation,
        sizeof(FILE_STANDARD_INFORMATION),
        FileStandardInformation
        );

    NtClose(fileHandle);

    return fileInformation.EndOfFile.QuadPart;
}

/**
 * Copies the file's last-write timestamp into LastWriteTime.
 *
 * \param FileHandle An open file handle.
 * \param LastWriteTime Receives the FILETIME value.
 * \return Always TRUE. NOTE(review): the NtQueryInformationFile status
 *         is ignored, so on failure the output is uninitialized --
 *         confirm whether callers rely on the return value.
 */
BOOLEAN File_GetLastWriteTime( HANDLE FileHandle, FILETIME* LastWriteTime )
{
    IO_STATUS_BLOCK isb;
    FILE_BASIC_INFORMATION fileInformation;

    NtQueryInformationFile(
        FileHandle,
        &isb,
        &fileInformation,
        sizeof(FILE_BASIC_INFORMATION),
        FileBasicInformation
        );

    LastWriteTime->dwLowDateTime = fileInformation.LastWriteTime.u.LowPart;
    LastWriteTime->dwHighDateTime = fileInformation.LastWriteTime.u.HighPart;

    return TRUE;
}
/**
 * Retrieves the attribute flags of a file without opening it.
 *
 * \param FileName The Win32 file name.
 * \return The FILE_ATTRIBUTE_* bit mask, or 0 when the path cannot be
 *         converted or the attributes cannot be queried.
 */
DWORD File_GetAttributes( WCHAR* FileName )
{
    UNICODE_STRING fileName;
    OBJECT_ATTRIBUTES objAttrib;
    FILE_NETWORK_OPEN_INFORMATION fileInformation;
    NTSTATUS status;

    // BUG FIX: the conversion result was previously ignored.
    if (!RtlDosPathNameToNtPathName_U(FileName, &fileName, NULL, NULL))
        return 0;

    objAttrib.Length = sizeof(OBJECT_ATTRIBUTES);
    objAttrib.RootDirectory = NULL;
    objAttrib.ObjectName = &fileName;
    objAttrib.Attributes = OBJ_CASE_INSENSITIVE;
    objAttrib.SecurityDescriptor = NULL;
    objAttrib.SecurityQualityOfService = NULL;

    status = NtQueryFullAttributesFile(&objAttrib, &fileInformation);

    // BUG FIX: the NT path buffer was leaked (File_Create frees it;
    // this function did not).
    Memory_Free(fileName.Buffer);

    // BUG FIX: on query failure, uninitialized stack data was returned.
    if (!NT_SUCCESS(status))
        return 0;

    return fileInformation.FileAttributes;
}
/**
 * Reads up to Length bytes from a synchronous file handle at its
 * current position.
 *
 * \return The number of bytes actually read (isb.Information; may be
 *         stale if the read fails, since the status is not checked).
 */
UINT32 File_Read( HANDLE FileHandle, VOID* Buffer, UINT32 Length )
{
    IO_STATUS_BLOCK isb;

    NtReadFile(FileHandle, NULL, NULL, NULL, &isb, Buffer, Length, NULL, NULL);

    return isb.Information;
}

/**
 * Writes Length bytes to a synchronous file handle at its current
 * position. The write status and byte count are discarded.
 */
VOID File_Write( HANDLE FileHandle, VOID* Buffer, UINT32 Length )
{
    IO_STATUS_BLOCK isb;

    NtWriteFile(FileHandle, NULL, NULL, NULL, &isb, Buffer, Length, NULL, NULL );
}

/**
 * Returns the current byte offset of the file position for FileHandle.
 */
INT64 File_GetPointer( HANDLE FileHandle )
{
    FILE_POSITION_INFORMATION positionInfo;
    IO_STATUS_BLOCK isb;

    NtQueryInformationFile(
        FileHandle,
        &isb,
        &positionInfo,
        sizeof(FILE_POSITION_INFORMATION),
        FilePositionInformation
        );

    return positionInfo.CurrentByteOffset.QuadPart;
}
/**
 * Moves the file position of a synchronous file handle.
 *
 * \param FileHandle Handle opened for synchronous I/O.
 * \param DistanceToMove Signed byte offset relative to MoveMethod.
 * \param MoveMethod FILE_BEGIN (0), FILE_CURRENT (1) or FILE_END (2).
 *        BUG FIX: this parameter was previously ignored and every move
 *        was treated as absolute; all in-file callers pass FILE_BEGIN,
 *        for which behavior is unchanged.
 */
VOID File_SetPointer( HANDLE FileHandle, INT64 DistanceToMove, DWORD MoveMethod )
{
    FILE_POSITION_INFORMATION positionInfo;
    IO_STATUS_BLOCK isb;
    INT64 base = 0;

    if (MoveMethod == 1 /* FILE_CURRENT */)
    {
        base = File_GetPointer(FileHandle);
    }
    else if (MoveMethod == 2 /* FILE_END */)
    {
        FILE_STANDARD_INFORMATION standardInfo;

        NtQueryInformationFile(
            FileHandle,
            &isb,
            &standardInfo,
            sizeof(FILE_STANDARD_INFORMATION),
            FileStandardInformation
            );

        base = standardInfo.EndOfFile.QuadPart;
    }

    positionInfo.CurrentByteOffset.QuadPart = base + DistanceToMove;

    NtSetInformationFile(
        FileHandle,
        &isb,
        &positionInfo,
        sizeof(FILE_POSITION_INFORMATION),
        FilePositionInformation
        );
}
/* Deletes FileName by opening it with DELETE access and the
   delete-on-close option; the deletion takes effect when the handle is
   closed. Failures to open are silently ignored. */
VOID File_Delete( WCHAR* FileName )
{
    HANDLE handle;

    if (!NT_SUCCESS(File_Create(
            &handle,
            FileName,
            DELETE,
            FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE,
            FILE_OPEN,
            FILE_DELETE_ON_CLOSE,
            NULL
            )))
        return;

    NtClose(handle);
}
/**
 * Renames a file.
 *
 * \param FilePath The Win32 file name.
 * \param NewFileName The new file name (must not already exist;
 *        ReplaceIfExists is FALSE).
 */
VOID File_Rename( WCHAR* FilePath, WCHAR* NewFileName )
{
    // BUG FIX: was used uninitialized when the open below failed.
    VOID *fileHandle = NULL;
    UINT32 renameInfoSize;
    IO_STATUS_BLOCK ioStatusBlock;
    FILE_RENAME_INFORMATION *renameInfo;
    UNICODE_STRING newFileName;
    NTSTATUS status;

    status = File_Create(
        &fileHandle,
        FilePath,
        DELETE | SYNCHRONIZE,
        FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE,
        FILE_OPEN,
        FILE_NON_DIRECTORY_FILE | FILE_SYNCHRONOUS_IO_NONALERT,
        NULL
        );

    if (!NT_SUCCESS(status))
        return;

    // BUG FIX: the conversion result was previously ignored.
    if (!RtlDosPathNameToNtPathName_U(NewFileName, &newFileName, NULL, NULL))
    {
        NtClose(fileHandle);
        return;
    }

    // Variable-length FILE_RENAME_INFORMATION: header + NT path bytes.
    renameInfoSize = FIELD_OFFSET(FILE_RENAME_INFORMATION, FileName) + (ULONG)newFileName.Length;
    renameInfo = (FILE_RENAME_INFORMATION*)Memory_Allocate(renameInfoSize);

    if (renameInfo)
    {
        renameInfo->ReplaceIfExists = FALSE;
        renameInfo->RootDirectory = NULL;
        renameInfo->FileNameLength = (ULONG)newFileName.Length;

        memcpy(renameInfo->FileName, newFileName.Buffer, newFileName.Length);

        NtSetInformationFile(
            fileHandle,
            &ioStatusBlock,
            renameInfo,
            renameInfoSize,
            FileRenameInformation
            );

        Memory_Free(renameInfo);
    }

    Memory_Free(newFileName.Buffer);
    NtClose(fileHandle);
}
// Thin wrapper over NtClose, for symmetry with File_Open.
VOID File_Close( HANDLE FileHandle )
{
    NtClose(FileHandle);
}
|
Gradecak/fission-workflows | pkg/controller/monitor/invocation.go | package monitor
import (
// "github.com/gradecak/fission-workflows/pkg/controller/ctrl"
"github.com/prometheus/client_golang/prometheus"
// "github.com/sirupsen/logrus"
"sync"
"time"
)
// InvocationMonitor tracks workflow invocations through two states:
// queued (scheduled but not yet running) and active (running). Each map
// stores the time the invocation entered the queued state, so durations
// can be observed later. The counters mirror the map sizes and must be
// accessed under the corresponding mutex.
type InvocationMonitor struct {
	activeMu          sync.RWMutex
	activeInvocations map[string]time.Time
	activeCount       int

	queuedMu          sync.RWMutex
	queuedCount       int
	queuedInvocations map[string]time.Time
}

var (
	// monitorActiveCount gauges the number of running invocations.
	monitorActiveCount = prometheus.NewGauge(prometheus.GaugeOpts{
		Namespace: "invocation",
		Subsystem: "monitor",
		Name:      "active",
		Help:      "Number of running invocations",
	})

	// monitorScheduledCount gauges the number of queued invocations.
	monitorScheduledCount = prometheus.NewGauge(prometheus.GaugeOpts{
		Namespace: "invocation",
		Subsystem: "monitor",
		Name:      "scheduled",
		Help:      "Number of scheduled",
	})

	// InvocationTime summarizes how long invocations spend in each state,
	// labeled by "type" ("queued" or "running").
	// NOTE(review): callers observe float64(time.Since(t)), i.e. raw
	// nanoseconds -- confirm consumers expect nanoseconds, not seconds.
	InvocationTime = prometheus.NewSummaryVec(prometheus.SummaryOpts{
		Namespace: "invocation",
		Subsystem: "monitor",
		Name:      "time",
		Help:      "Duration of an invocation in various states.",
		Objectives: map[float64]float64{
			0:    0.0001,
			0.01: 0.0001,
			0.1:  0.0001,
			0.25: 0.0001,
			0.5:  0.0001,
			0.75: 0.0001,
			0.9:  0.0001,
			1:    0.0001,
		},
	}, []string{"type"})
)

// init registers the monitor metrics with the default Prometheus registry.
func init() {
	prometheus.MustRegister(monitorScheduledCount, monitorActiveCount, InvocationTime)
}
// NewInvocationMonitor constructs a monitor with empty queued and active
// invocation tables. Mutexes and counters rely on their zero values.
func NewInvocationMonitor() *InvocationMonitor {
	m := &InvocationMonitor{}
	m.activeInvocations = map[string]time.Time{}
	m.queuedInvocations = map[string]time.Time{}
	return m
}
// AddQueued records invID as queued with the current timestamp and
// updates the scheduled gauge. Re-adding an already queued id is a no-op.
func (m *InvocationMonitor) AddQueued(invID string) {
	m.queuedMu.Lock()
	defer m.queuedMu.Unlock()

	if _, seen := m.queuedInvocations[invID]; seen {
		return
	}

	m.queuedInvocations[invID] = time.Now()
	m.queuedCount++
	monitorScheduledCount.Set(float64(m.queuedCount))
}
// RunQueued transitions invID from the queued to the active table,
// keeping its original enqueue timestamp so the queued duration can be
// observed. Unknown ids are ignored. activeMu is taken while queuedMu is
// held; no code path nests the locks in the opposite order. The deferred
// metric updates fire after queuedMu has been released and read the
// counters without holding a lock.
func (m *InvocationMonitor) RunQueued(invID string) {
	m.queuedMu.Lock()
	if t, ok := m.queuedInvocations[invID]; ok {
		delete(m.queuedInvocations, invID)
		m.queuedCount--

		m.activeMu.Lock()
		m.activeInvocations[invID] = t
		m.activeCount++
		m.activeMu.Unlock()

		defer func() {
			monitorScheduledCount.Set(float64(m.queuedCount))
			// NOTE(review): raw nanoseconds observed -- confirm unit.
			InvocationTime.WithLabelValues("queued").Observe(float64(time.Since(t)))
			monitorActiveCount.Set(float64(m.activeCount))
		}()
	}
	m.queuedMu.Unlock()
}

// Remove drops invID from whichever table currently holds it (queued is
// checked first) and records the time spent in that state. Ids present
// in neither table are a no-op.
func (m *InvocationMonitor) Remove(invID string) {
	// if in queuedQueue
	m.queuedMu.Lock()
	if t, ok := m.queuedInvocations[invID]; ok {
		delete(m.queuedInvocations, invID)
		m.queuedCount--
		monitorScheduledCount.Set(float64(m.queuedCount))
		InvocationTime.WithLabelValues("queued").Observe(float64(time.Since(t)))
		m.queuedMu.Unlock()
		return
	}
	m.queuedMu.Unlock()

	// if in activeQueue
	m.activeMu.Lock()
	if t, ok := m.activeInvocations[invID]; ok {
		delete(m.activeInvocations, invID)
		m.activeCount--
		monitorActiveCount.Set(float64(m.activeCount))
		InvocationTime.WithLabelValues("running").Observe(float64(time.Since(t)))
		m.activeMu.Unlock()
		return
	}
	m.activeMu.Unlock()
}
// ActiveCount returns the number of invocations currently marked active.
// BUG FIX: the counter is mutated under activeMu elsewhere in this file;
// the previous unsynchronized read was a data race.
func (m *InvocationMonitor) ActiveCount() int {
	m.activeMu.RLock()
	defer m.activeMu.RUnlock()
	return m.activeCount
}

// QueuedCount returns the number of invocations currently queued.
// BUG FIX: read under queuedMu for the same reason as ActiveCount.
func (m *InvocationMonitor) QueuedCount() int {
	m.queuedMu.RLock()
	defer m.queuedMu.RUnlock()
	return m.queuedCount
}
// func (m *InvocationMonitor) HasBacklog() bool {
// // return float64(m.activeCount/ctrl.MAX_PARALLEL_CONTROLLERS) >= 0.85 && m.queuedCount > 0
// return float64()
// }
|
threadhead/scoutz | db/migrate/20130825180049_add_uuid_seq_ical_to_events.rb | class AddUuidSeqIcalToEvents < ActiveRecord::Migration
def change
add_column :events, :ical_sequence, :integer, default: 0
add_column :events, :ical_uuid, :string
end
end
|
aniketh-deepsource/babel | packages/babel-parser/test/fixtures/experimental/_no-plugin/module-string-names-export/input.js | <reponame>aniketh-deepsource/babel
export { foo as "some exports" };
var foo;
|
Galaxy-VN/Oregen3 | src/main/java/me/banbeucmas/oregen3/handler/event/SyncBlockEventHandler.java | <filename>src/main/java/me/banbeucmas/oregen3/handler/event/SyncBlockEventHandler.java
package me.banbeucmas.oregen3.handler.event;
import org.bukkit.World;
import org.bukkit.block.Block;
/**
 * Block event handler that performs generation synchronously on the
 * calling thread, delegating directly to {@code generate}.
 */
public class SyncBlockEventHandler extends BlockEventHandler {
    /** Runs generation immediately on the caller's thread. */
    @Override
    public void generateBlock(final World world, final Block source, final Block to) {
        generate(world, source, to);
    }

    /** @return {@code false}; this handler never defers work to another thread. */
    @Override
    public boolean isAsync() {
        return false;
    }
}
|
Vladymyr/pr0x79 | src/test/java/tcb/pr0x79/program/SomeClassBody.java | package tcb.pr0x79.program;
import java.util.List;
import java.util.Map;
/**
 * Test program class exercising inner classes and generic method
 * signatures. Comments only; the runtime behavior (including the exact
 * printed output) is relied upon by the tests.
 */
public class SomeClassBody extends SomeClassSignature {
	/** Factory for the inner {@link SomeClass}. */
	public SomeClass create(String val) {
		return new SomeClass(val);
	}

	/** Marker interface implemented by {@link SomeClass}. */
	public interface SomeInnerInterface {
	}

	/** Non-static inner class holding a single string value. */
	public class SomeClass implements SomeInnerInterface {
		private String value;

		public SomeClass(String value) {
			this.value = value;
		}

		/**
		 * Prints a banner, the input and the stored value to stdout;
		 * always returns {@code null}.
		 */
		public <M extends SomeInnerInterface> Map<String, List<M>> print(String input) {
			System.out.println("Running SomeClass#print(String)");
			System.out.println("Input: " + input);
			System.out.println("Value: " + this.value);
			return null;
		}
	}
}
|
university-information-system/uis | src/main/java/at/ac/tuwien/inso/repository/GradeRepository.java | package at.ac.tuwien.inso.repository;
import java.util.List;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;
import at.ac.tuwien.inso.entity.Grade;
import at.ac.tuwien.inso.entity.Student;
/**
 * Spring Data repository for {@link Grade} entities.
 */
public interface GradeRepository extends CrudRepository<Grade, Long> {

    /** Grades of the student whose account has the given id. */
    List<Grade> findByStudentAccountId(Long id);

    /**
     * All grades belonging to the given student.
     * NOTE(review): this JPQL duplicates what a derived query
     * {@code findByStudent(Student)} would generate -- consider replacing.
     */
    @Query("select g " +
            "from Grade g " +
            "where g.student = ?1")
    List<Grade> findAllOfStudent(Student student);

    /** Grades given for the specified course. */
    List<Grade> findByCourseId(Long courseId);

    /** Grades a specific lecturer gave in a specific course. */
    List<Grade> findByLecturerIdAndCourseId(Long lecturerId, Long courseId);

    /** Single grade looked up by its public URL identifier. */
    Grade findByUrlIdentifier(String urlIdentifier);
}
|
fcbg-hnp/mnelab | mnelab/dialogs/eventsdialog.py | from PyQt5.QtWidgets import (QDialog, QVBoxLayout, QDialogButtonBox,
QTableWidget, QTableWidgetItem, QAbstractItemView)
from PyQt5.QtCore import Qt
class IntTableWidgetItem(QTableWidgetItem):
    """Table item that sorts by integer value instead of lexicographically."""

    def __init__(self, value):
        # Store the value as text; numeric interpretation happens in __lt__.
        super().__init__(str(value))

    def __lt__(self, other):
        # Compare as integers so that e.g. "10" sorts after "9".
        return int(self.data(Qt.EditRole)) < int(other.data(Qt.EditRole))


class EventsDialog(QDialog):
    """Modal dialog showing events in a sortable Position/Type table."""

    def __init__(self, parent, pos, desc):
        """Build and populate the dialog.

        Parameters
        ----------
        parent : QWidget
            Parent widget for the dialog.
        pos : iterable of int
            Event positions (presumably sample indices -- confirm with caller).
        desc : iterable of int
            Event type codes, paired elementwise with ``pos``.
        """
        super().__init__(parent)
        self.setWindowTitle("Edit Events")

        # One row per (position, type) pair; both columns sort numerically.
        self.table = QTableWidget(len(pos), 2)

        for row, (p, d) in enumerate(zip(pos, desc)):
            self.table.setItem(row, 0, IntTableWidgetItem(p))
            self.table.setItem(row, 1, IntTableWidgetItem(d))

        self.table.setHorizontalHeaderLabels(["Position", "Type"])
        self.table.horizontalHeader().setStretchLastSection(True)
        self.table.verticalHeader().setVisible(False)
        self.table.setShowGrid(False)
        self.table.setSelectionMode(QAbstractItemView.NoSelection)
        self.table.setSortingEnabled(True)
        self.table.sortByColumn(0, Qt.AscendingOrder)

        vbox = QVBoxLayout(self)
        vbox.addWidget(self.table)

        buttonbox = QDialogButtonBox(QDialogButtonBox.Ok |
                                     QDialogButtonBox.Cancel)
        vbox.addWidget(buttonbox)
        buttonbox.accepted.connect(self.accept)
        buttonbox.rejected.connect(self.reject)

        self.resize(300, 500)
|
Aerondir/gooddata-java | src/test/java/com/gooddata/gdc/GdcTest.java | <reponame>Aerondir/gooddata-java<filename>src/test/java/com/gooddata/gdc/GdcTest.java
package com.gooddata.gdc;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.testng.annotations.Test;
import java.io.InputStream;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.MatcherAssert.assertThat;
public class GdcTest {
@Test
public void deserialize() throws Exception {
final InputStream stream = getClass().getResourceAsStream("/gdc/gdc.json");
final Gdc gdc = new ObjectMapper().readValue(stream, Gdc.class);
assertThat(gdc, is(notNullValue()));
assertThat(gdc.getHomeLink(), is("/gdc/"));
assertThat(gdc.getTokenLink(), is("/gdc/account/token"));
assertThat(gdc.getLoginLink(), is("/gdc/account/login"));
assertThat(gdc.getMetadataLink(), is("/gdc/md"));
assertThat(gdc.getXTabLink(), is("/gdc/xtab2"));
assertThat(gdc.getAvailableElementsLink(), is("/gdc/availableelements"));
assertThat(gdc.getReportExporterLink(), is("/gdc/exporter"));
assertThat(gdc.getAccountLink(), is("/gdc/account"));
assertThat(gdc.getProjectsLink(), is("/gdc/projects"));
assertThat(gdc.getToolLink(), is("/gdc/tool"));
assertThat(gdc.getTemplatesLink(), is("/gdc/templates"));
assertThat(gdc.getReleaseInfoLink(), is("/gdc/releaseInfo"));
assertThat(gdc.getUserStagingLink(), is("/uploads"));
}
} |
tada/dgo | internal/meta.go | package internal
import (
"reflect"
"github.com/tada/dgo/dgo"
)
// reflectTypeType is the reflect.Type of the dgo.Type interface.
var reflectTypeType = reflect.TypeOf((*dgo.Type)(nil)).Elem()

// metaType is the Type returned by a Type. A nil tp is the sentinel for
// the "type of all meta types" (see Type and Assignable).
type metaType struct {
	tp dgo.Type
}

// DefaultMetaType is the unconstrained meta type
var DefaultMetaType = &metaType{tp: DefaultAnyType}

// MetaType creates the meta type for the given type
func MetaType(t dgo.Type) dgo.Meta {
	return &metaType{t}
}
// Type returns the type of this meta type: the nil-tp sentinel. The
// sentinel is its own type, which stops the meta chain from recursing.
func (t *metaType) Type() dgo.Type {
	if t.tp == nil {
		return t // type of meta type is meta type
	}
	return &metaType{nil} // Short circuit meta chain
}

// Assignable reports whether ot can be assigned to this meta type. The
// sentinel accepts only the sentinel; otherwise the described types must
// be equal. Non-meta types are delegated to CheckAssignableTo.
func (t *metaType) Assignable(ot dgo.Type) bool {
	if mt, ok := ot.(*metaType); ok {
		if t.tp == nil {
			// Only MetaTypeType is assignable to MetaTypeType
			return mt.tp == nil
		}
		return t.tp.Equals(mt.tp)
	}
	return CheckAssignableTo(nil, ot, t)
}
// Describes returns the type this meta type describes (nil for the
// sentinel meta-meta type).
func (t *metaType) Describes() dgo.Type {
	return t.tp
}

// Equals reports whether v is a meta type describing an equal type
// (sentinels are only equal to sentinels).
func (t *metaType) Equals(v interface{}) bool {
	if mt, ok := v.(*metaType); ok {
		if t.tp == nil {
			return mt.tp == nil
		}
		return t.tp.Equals(mt.tp)
	}
	return false
}

// HashCode seeds the hash with the meta type identifier and folds in the
// described type's hash when present.
func (t *metaType) HashCode() dgo.Hash {
	h := dgo.Hash(dgo.TiMeta) * 1321
	if t.tp != nil {
		h += t.tp.HashCode()
	}
	return h
}

// Instance reports whether v is an instance of this meta type: for the
// sentinel, any meta type; otherwise any type assignable from the
// described type. Non-type values are never instances.
func (t *metaType) Instance(v interface{}) bool {
	if ot, ok := v.(dgo.Type); ok {
		if t.tp == nil {
			// MetaTypeType
			_, ok = ot.(*metaType)
			return ok
		}
		return t.tp.Assignable(ot)
	}
	return false
}
// New creates the type described by arg: a dgo.Arguments wrapper is
// unwrapped to its single element first, a string is parsed as a type
// expression, and anything else is converted with AsType. Panics with
// IllegalAssignment when the result is not an instance of this meta type.
func (t *metaType) New(arg dgo.Value) dgo.Value {
	if args, ok := arg.(dgo.Arguments); ok {
		args.AssertSize(`type`, 1, 1)
		arg = args.Get(0)
	}
	var tv dgo.Type
	if s, ok := arg.(dgo.String); ok {
		tv = AsType(Parse(s.GoString()))
	} else {
		tv = AsType(arg)
	}
	if !t.Instance(tv) {
		panic(IllegalAssignment(t, tv))
	}
	return tv
}
// Operator returns the meta type operator.
func (t *metaType) Operator() dgo.TypeOp {
	return dgo.OpMeta
}

// Operand returns the described type.
func (t *metaType) Operand() dgo.Type {
	return t.tp
}

// ReflectType returns the reflect.Type of the dgo.Type interface.
func (t *metaType) ReflectType() reflect.Type {
	return reflectTypeType
}

// Resolve replaces the described type via the alias adder. The described
// type is temporarily set to DefaultAnyType while Replace runs --
// presumably to avoid self-referential resolution; confirm before
// changing.
func (t *metaType) Resolve(ap dgo.AliasAdder) {
	tp := t.tp
	t.tp = DefaultAnyType
	t.tp = ap.Replace(tp).(dgo.Type)
}

// String returns the canonical string representation of this type.
func (t *metaType) String() string {
	return TypeString(t)
}

// TypeIdentifier returns dgo.TiMeta.
func (t *metaType) TypeIdentifier() dgo.TypeIdentifier {
	return dgo.TiMeta
}
|
vishakhaakumar/SQAProject_WarehouseAPI | src/main/java/app/model/Shelf.java | package app.model;
import lombok.Data;
import javax.persistence.*;
/**
 * JPA entity mapping the {@code shelf} table. A shelf belongs to exactly
 * one {@link Warehouse} and carries a code string.
 *
 * NOTE(review): Lombok {@code @Data} generates equals/hashCode over all
 * fields, which is generally discouraged on JPA entities (mutable
 * database-generated id) -- confirm this is intended.
 */
@Table(name = "shelf")
@Entity
@Data
public class Shelf {
    // Surrogate primary key, generated by the database (IDENTITY).
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;

    // Shelf code, stored in the "code" column.
    @Column(name = "code")
    private String code;

    // Owning warehouse, joined via the warehouse_id foreign key.
    @ManyToOne
    @JoinColumn(name = "warehouse_id")
    private Warehouse warehouse;
}
|
gingray/rshade | lib/rshade/core_extensions/object/reveal.rb | <filename>lib/rshade/core_extensions/object/reveal.rb
class Object
  # Executes the given block under RShade tracing, then prints the
  # collected trace. Returns whatever Trace#show returns.
  def reveal(&block)
    ::RShade::Trace.reveal { block.call }.show
  end
end
psavery/ParaView | Plugins/GenericIOReader/Readers/LANL/GIO/GenericIO.cxx | /*
* Copyright (C) 2015, UChicago Argonne, LLC
* All Rights Reserved
*
* Generic IO (ANL-15-066)
* <NAME>, Argonne National Laboratory
*
* OPEN SOURCE LICENSE
*
* Under the terms of Contract No. DE-AC02-06CH11357 with UChicago Argonne,
* LLC, the U.S. Government retains certain rights in this software.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. Neither the names of UChicago Argonne, LLC or the Department of Energy
* nor the names of its contributors may be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* *****************************************************************************
*
* DISCLAIMER
* THE SOFTWARE IS SUPPLIED “AS IS” WITHOUT WARRANTY OF ANY KIND. NEITHER THE
 * UNITED STATES GOVERNMENT, NOR THE UNITED STATES DEPARTMENT OF ENERGY, NOR
* UCHICAGO ARGONNE, LLC, NOR ANY OF THEIR EMPLOYEES, MAKES ANY WARRANTY,
* EXPRESS OR IMPLIED, OR ASSUMES ANY LEGAL LIABILITY OR RESPONSIBILITY FOR THE
* ACCURACY, COMPLETENESS, OR USEFULNESS OF ANY INFORMATION, DATA, APPARATUS,
* PRODUCT, OR PROCESS DISCLOSED, OR REPRESENTS THAT ITS USE WOULD NOT INFRINGE
* PRIVATELY OWNED RIGHTS.
*
* *****************************************************************************
*/
#define _XOPEN_SOURCE 600
#include "GenericIO.h"
#include "CRC64.h"
#ifndef LANL_GENERICIO_NO_COMPRESSION
extern "C" {
#include "blosc.h"
}
#endif
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstring>
#include <fstream>
#include <iterator>
#include <sstream>
#include <stdexcept>
#ifndef LANL_GENERICIO_NO_MPI
#include <ctime>
#else
#include <time.h>
#endif
#include <errno.h>
#include <fcntl.h>
#include <sys/stat.h>
#include <sys/types.h>
#ifdef __bgq__
#include <mpix.h>
#endif
namespace lanl
{
#ifndef MPI_UINT64_T
#define MPI_UINT64_T (sizeof(long) == 8 ? MPI_LONG : MPI_LONG_LONG)
#endif
#ifdef _WIN32
#include <Windows.h>
#include <io.h>
#include <stdio.h>
#define S_IRUSR _S_IREAD
#define S_IWUSR _S_IWRITE
#include <direct.h>
#define mkdir(a, b) _mkdir((a))
typedef long long ssize_t;
// Windows-specific functions
void usleep(int waitTime);
int ftruncate(unsigned int fd, size_t size);
int pread(unsigned int fd, void* buf, size_t count, int offset);
int pwrite(unsigned int fd, const void* buf, size_t count, int offset);
void usleep(int waitTime)
{
__int64 time1 = 0, time2 = 0, sysFreq = 0;
QueryPerformanceCounter((LARGE_INTEGER*)&time1);
QueryPerformanceFrequency((LARGE_INTEGER*)&sysFreq);
do
{
QueryPerformanceCounter((LARGE_INTEGER*)&time2);
} while ((time2 - time1) < waitTime);
}
// Convert a POSIX ftruncate to a windows system chsize
int ftruncate(unsigned int fd, size_t size)
{
return _chsize(fd, static_cast<long>(size));
}
// Convert a POSIX read to a windows system read.
// Emulates pread(2) with seek-then-read; unlike real pread this moves the
// file pointer, and the int offset limits addressable range to 2 GiB.
int pread(unsigned int fd, void* buf, size_t count, int offset)
{
  // If the seek does not land exactly on offset, report failure.
  if (_lseek(fd, offset, SEEK_SET) != offset)
    return -1;
  return _read(fd, (char*)buf, static_cast<unsigned int>(count));
}
// Convert a POSIX write to a windows system write.
// Emulates pwrite(2) with seek-then-write; unlike real pwrite this moves the
// file pointer, and the int offset limits addressable range to 2 GiB.
int pwrite(unsigned int fd, const void* buf, size_t count, int offset)
{
  // If the seek does not land exactly on offset, report failure.
  if (_lseek(fd, offset, SEEK_SET) != offset)
    return -1;
  return _write(fd, (char*)buf, static_cast<unsigned int>(count));
}
#endif
using namespace std;
namespace gio
{
#ifndef LANL_GENERICIO_NO_MPI
GenericFileIO_MPI::~GenericFileIO_MPI()
{
  // Best-effort close of the MPI file handle; errors during destruction
  // are deliberately ignored (cannot throw from a destructor safely).
  (void)MPI_File_close(&FH);
}
// Open (ForReading) or create the named file through MPI-IO on Comm.
// Throws runtime_error if MPI_File_open fails.
void GenericFileIO_MPI::open(const std::string& FN, bool ForReading)
{
  FileName = FN;

  const int AccessMode = ForReading ? MPI_MODE_RDONLY : (MPI_MODE_WRONLY | MPI_MODE_CREATE);
  int Err =
    MPI_File_open(Comm, const_cast<char*>(FileName.c_str()), AccessMode, MPI_INFO_NULL, &FH);
  if (Err != MPI_SUCCESS)
  {
    // Note: message text intentionally matches the original error strings.
    const char* What = !ForReading ? "Unable to create the file: " : "Unable to open the file: ";
    throw runtime_error(What + FileName);
  }
}
// Resize (pre-extend or truncate) the open MPI file to sz bytes.
void GenericFileIO_MPI::setSize(size_t sz)
{
  int Err = MPI_File_set_size(FH, sz);
  if (Err != MPI_SUCCESS)
    throw runtime_error("Unable to set size for file: " + FileName);
}
void GenericFileIO_MPI::read(void* buf, size_t count, off_t offset, const std::string& D)
{
while (count > 0)
{
MPI_Status status;
if (MPI_File_read_at(FH, offset, buf, count, MPI_BYTE, &status) != MPI_SUCCESS)
throw runtime_error("Unable to read " + D + " from file: " + FileName);
int scount;
(void)MPI_Get_count(&status, MPI_BYTE, &scount);
count -= scount;
buf = ((char*)buf) + scount;
offset += scount;
}
}
void GenericFileIO_MPI::write(const void* buf, size_t count, off_t offset, const std::string& D)
{
while (count > 0)
{
MPI_Status status;
if (MPI_File_write_at(FH, offset, (void*)buf, count, MPI_BYTE, &status) != MPI_SUCCESS)
throw runtime_error("Unable to write " + D + " to file: " + FileName);
int scount;
(void)MPI_Get_count(&status, MPI_BYTE, &scount);
count -= scount;
buf = ((char*)buf) + scount;
offset += scount;
}
}
void GenericFileIO_MPICollective::read(void* buf, size_t count, off_t offset, const std::string& D)
{
int Continue = 0;
do
{
MPI_Status status;
if (MPI_File_read_at_all(FH, offset, buf, count, MPI_BYTE, &status) != MPI_SUCCESS)
throw runtime_error("Unable to read " + D + " from file: " + FileName);
int scount;
(void)MPI_Get_count(&status, MPI_BYTE, &scount);
count -= scount;
buf = ((char*)buf) + scount;
offset += scount;
int NeedContinue = (count > 0);
MPI_Allreduce(&NeedContinue, &Continue, 1, MPI_INT, MPI_SUM, Comm);
} while (Continue);
}
void GenericFileIO_MPICollective::write(
const void* buf, size_t count, off_t offset, const std::string& D)
{
int Continue = 0;
do
{
MPI_Status status;
if (MPI_File_write_at_all(FH, offset, (void*)buf, count, MPI_BYTE, &status) != MPI_SUCCESS)
throw runtime_error("Unable to write " + D + " to file: " + FileName);
int scount;
(void)MPI_Get_count(&status, MPI_BYTE, &scount);
count -= scount;
buf = ((char*)buf) + scount;
offset += scount;
int NeedContinue = (count > 0);
MPI_Allreduce(&NeedContinue, &Continue, 1, MPI_INT, MPI_SUM, Comm);
} while (Continue);
}
#endif
GenericFileIO_POSIX::~GenericFileIO_POSIX()
{
  // -1 means open() never succeeded; only close a valid descriptor.
  if (FH != -1)
    close(FH);
}
// Open (ForReading) or create FN with a POSIX-style open(); the descriptor
// is stored in the FH member. Throws runtime_error (with strerror text) on
// failure.
void GenericFileIO_POSIX::open(const std::string& FN, bool ForReading)
{
  FileName = FN;

  errno = 0;
#ifdef _WIN32
  // Windows POSIX Must explicitly define O_BINARY otherwise it defaults to text mode
  int flags = ForReading ? (O_RDONLY | O_BINARY) : (O_WRONLY | O_CREAT | O_BINARY);
  int mode = S_IRUSR | S_IWUSR;
  if ((FH = lanl::open(FileName.c_str(), flags, mode)) == -1)
#else
  int flags = ForReading ? O_RDONLY : (O_WRONLY | O_CREAT);
  // New files are created owner read/write plus group read.
  int mode = S_IRUSR | S_IWUSR | S_IRGRP;
  if ((FH = ::open(FileName.c_str(), flags, mode)) == -1)
#endif
    throw runtime_error(
      (!ForReading ? "Unable to create the file: " : "Unable to open the file: ") + FileName +
      ": " + strerror(errno));
}
// Resize (pre-extend or truncate) the open file to sz bytes.
// Consistency fix: the open/read/write paths of this class all append
// strerror(errno) to their error messages; this one previously did not,
// making ftruncate failures (e.g. EFBIG, ENOSPC) harder to diagnose.
void GenericFileIO_POSIX::setSize(size_t sz)
{
  errno = 0;
  if (ftruncate(FH, sz) == -1)
    throw runtime_error("Unable to set size for file: " + FileName + ": " + strerror(errno));
}
// Read exactly count bytes at offset into buf. Short reads are retried, and
// EINTR (interrupted by a signal) is transparently restarted; any other
// pread failure raises runtime_error with strerror text.
void GenericFileIO_POSIX::read(void* buf, size_t count, off_t offset, const std::string& D)
{
  char* Cursor = (char*)buf;
  while (count > 0)
  {
    errno = 0;
    ssize_t Got = pread(FH, Cursor, count, offset);
    if (Got == -1)
    {
      if (errno == EINTR)
        continue; // interrupted before any data moved; just retry
      throw runtime_error(
        "Unable to read " + D + " from file: " + FileName + ": " + strerror(errno));
    }

    Cursor += Got;
    offset += static_cast<off_t>(Got);
    count -= Got;
  }
}
// Write exactly count bytes from buf at offset. Short writes are retried,
// and EINTR is transparently restarted; any other pwrite failure raises
// runtime_error with strerror text.
void GenericFileIO_POSIX::write(const void* buf, size_t count, off_t offset, const std::string& D)
{
  const char* Cursor = (const char*)buf;
  while (count > 0)
  {
    errno = 0;
    ssize_t Put = pwrite(FH, Cursor, count, offset);
    if (Put == -1)
    {
      if (errno == EINTR)
        continue; // interrupted before any data moved; just retry
      throw runtime_error(
        "Unable to write " + D + " to file: " + FileName + ": " + strerror(errno));
    }

    Cursor += Put;
    offset += static_cast<off_t>(Put);
    count -= Put;
  }
}
// Detect host byte order at runtime: store 1 in a 32-bit word and inspect
// the lowest-addressed byte. On little-endian hosts that byte holds the
// least-significant (non-zero) octet, so a zero first byte means big-endian.
static bool isBigEndian()
{
  const uint32_t probe = 1;
  const char* bytes = reinterpret_cast<const char*>(&probe);
  return bytes[0] == 0;
}
// Reverse the byte order of the s-byte buffer at v, in place, by walking
// pointers inward from both ends. A zero-length buffer is a no-op.
static void bswap(void* v, size_t s)
{
  char* lo = (char*)v;
  char* hi = lo + s - 1;
  while (lo < hi)
  {
    char tmp = *lo;
    *lo++ = *hi;
    *hi-- = tmp;
  }
}
// Using #pragma pack here, instead of __attribute__((packed)) because xlc, at
// least as of v12.1, won't take __attribute__((packed)) on non-POD and/or
// templated types.
#pragma pack(1)
// Scalar wrapper whose in-memory/on-disk representation has the fixed byte
// order selected by IsBigEndian. Reads and writes transparently byte-swap
// (via bswap) whenever the host order differs from the stored order, so
// header structs built from these fields are portable across endianness.
template <typename T, bool IsBigEndian>
struct endian_specific_value
{
  // Read access: convert the stored bytes to a host-order value.
  operator T() const
  {
    T rvalue = value;
    if (IsBigEndian != isBigEndian())
      bswap(&rvalue, sizeof(T));

    return rvalue;
  };

  // Write access: store the host-order value in the target byte order.
  endian_specific_value& operator=(T nvalue)
  {
    if (IsBigEndian != isBigEndian())
      bswap(&nvalue, sizeof(T));

    value = nvalue;
    return *this;
  }

  // Compound assignments round-trip through host order using the
  // conversion operator and operator= above.
  endian_specific_value& operator+=(T nvalue)
  {
    *this = *this + nvalue;
    return *this;
  }

  endian_specific_value& operator-=(T nvalue)
  {
    *this = *this - nvalue;
    return *this;
  }

private:
  T value;
};
static const size_t CRCSize = 8;   // bytes of CRC64 appended to each section
static const size_t MagicSize = 8; // bytes reserved for the magic string
static const char* MagicBE = "HACC01B"; // file identifier, big-endian data
static const char* MagicLE = "HACC01L"; // file identifier, little-endian data
// On-disk global file header (packed via the surrounding #pragma pack(1)).
// All multi-byte fields are stored in the byte order named by Magic.
template <bool IsBigEndian>
struct GlobalHeader
{
  char Magic[MagicSize]; // "HACC01B" or "HACC01L"
  endian_specific_value<uint64_t, IsBigEndian> HeaderSize; // header bytes, excluding the trailing header CRC
  endian_specific_value<uint64_t, IsBigEndian> NElems; // The global total
  endian_specific_value<uint64_t, IsBigEndian> Dims[3]; // writer rank decomposition (cartesian dims or NRanks x 1 x 1)
  endian_specific_value<uint64_t, IsBigEndian> NVars;     // number of VariableHeader records
  endian_specific_value<uint64_t, IsBigEndian> VarsSize;  // size of one VariableHeader record
  endian_specific_value<uint64_t, IsBigEndian> VarsStart; // byte offset of the variable records
  endian_specific_value<uint64_t, IsBigEndian> NRanks;    // number of RankHeader records
  endian_specific_value<uint64_t, IsBigEndian> RanksSize; // size of one RankHeader record
  endian_specific_value<uint64_t, IsBigEndian> RanksStart; // byte offset of the rank records
  endian_specific_value<uint64_t, IsBigEndian> GlobalHeaderSize; // sizeof(GlobalHeader) as written; used to detect older, shorter headers
  endian_specific_value<double, IsBigEndian> PhysOrigin[3]; // physical-domain origin
  endian_specific_value<double, IsBigEndian> PhysScale[3];  // physical-domain scale
  endian_specific_value<uint64_t, IsBigEndian> BlocksSize;  // size of one BlockHeader record (0 when no block records)
  endian_specific_value<uint64_t, IsBigEndian> BlocksStart; // byte offset of the block records (0 when no block records)
};
// Bit flags stored in VariableHeader::Flags; set from the corresponding
// attributes of each registered variable at write time.
enum
{
  FloatValue = (1 << 0),         // element type is floating point
  SignedValue = (1 << 1),        // element type is signed
  ValueIsPhysCoordX = (1 << 2),  // variable is the physical x coordinate
  ValueIsPhysCoordY = (1 << 3),  // variable is the physical y coordinate
  ValueIsPhysCoordZ = (1 << 4),  // variable is the physical z coordinate
  ValueMaybePhysGhost = (1 << 5) // set from the variable's MaybePhysGhost attribute
};
static const size_t NameSize = 256; // fixed width of on-disk variable names
// On-disk record describing one variable: NUL-padded name, flag bits from
// the enum above, and the size in bytes of a single element.
template <bool IsBigEndian>
struct VariableHeader
{
  char Name[NameSize];
  endian_specific_value<uint64_t, IsBigEndian> Flags;
  endian_specific_value<uint64_t, IsBigEndian> Size;
};
// On-disk record describing one writer rank's contribution.
template <bool IsBigEndian>
struct RankHeader
{
  endian_specific_value<uint64_t, IsBigEndian> Coords[3]; // cartesian coordinates of the writer rank
  endian_specific_value<uint64_t, IsBigEndian> NElems;    // rows written by this rank
  endian_specific_value<uint64_t, IsBigEndian> Start;     // byte offset of this rank's first data
  endian_specific_value<uint64_t, IsBigEndian> GlobalRank; // rank number in the global communicator
};
static const size_t FilterNameSize = 8; // fixed width of on-disk filter names
static const size_t MaxFilters = 4;     // filter slots per block record
// On-disk record for one (rank, variable) data block; present only when
// block headers are in use (compression or GENERICIO_FORCE_BLOCKS).
template <bool IsBigEndian>
struct BlockHeader
{
  char Filters[MaxFilters][FilterNameSize]; // NUL-padded filter names, e.g. "BLOSC"; unused slots zeroed
  endian_specific_value<uint64_t, IsBigEndian> Start; // byte offset of the block's data
  endian_specific_value<uint64_t, IsBigEndian> Size;  // stored (possibly compressed) byte count, excluding the trailing CRC
};
// Prepended to each compressed block; records the CRC64 of the original
// (uncompressed) data so it can be verified after decompression.
template <bool IsBigEndian>
struct CompressHeader
{
  endian_specific_value<uint64_t, IsBigEndian> OrigCRC;
};
const char* CompressName = "BLOSC"; // filter name written into BlockHeader::Filters
#pragma pack()
// Class-wide defaults; individual instances and environment variables can
// override these at runtime.
unsigned GenericIO::DefaultFileIOType = FileIOPOSIX;
int GenericIO::DefaultPartition = 0;
bool GenericIO::DefaultShouldCompress = false;

#ifndef LANL_GENERICIO_NO_MPI
std::size_t GenericIO::CollectiveMPIIOThreshold = 0;
#endif

// Guards one-time blosc_init() across all writers in this process.
static bool blosc_initialized = false;
#ifndef LANL_GENERICIO_NO_MPI
// Public write entry point: dispatch to the template instantiation whose
// on-disk byte order matches the host.
void GenericIO::write()
{
  isBigEndian() ? write<true>() : write<false>();
}
// Note: writing errors are not currently recoverable (one rank may fail
// while the others don't).
// Collective write of all registered variables in the requested byte order.
// Protocol: Comm is split by Partition into per-file communicators; rank 0
// of each split communicator assembles and writes the header (gathering the
// per-rank and per-block records from its peers, then scattering back the
// computed file offsets), after which every rank writes its own data blocks,
// each followed by an (inverted) CRC64. Optionally BLOSC-compresses each
// block. Gathers/scatters here are matched pairwise with the else-branch at
// the bottom of the SplitRank==0 section; reordering them would deadlock.
template <bool IsBigEndian>
void GenericIO::write()
{
  const char* Magic = IsBigEndian ? MagicBE : MagicLE;

  uint64_t FileSize = 0;

  int NRanks, Rank;
  MPI_Comm_rank(Comm, &Rank);
  MPI_Comm_size(Comm, &NRanks);

#ifdef __bgq__
  MPI_Barrier(Comm);
#endif
  // Ranks sharing a Partition value write to the same physical file.
  MPI_Comm_split(Comm, Partition, Rank, &SplitComm);

  int SplitNRanks, SplitRank;
  MPI_Comm_rank(SplitComm, &SplitRank);
  MPI_Comm_size(SplitComm, &SplitNRanks);

  string LocalFileName;
  if (SplitNRanks != NRanks)
  {
    if (Rank == 0)
    {
      // In split mode, the specified file becomes the rank map, and the real
      // data is partitioned.
      vector<int> MapRank, MapPartition;
      MapRank.resize(NRanks);
      for (int i = 0; i < NRanks; ++i)
        MapRank[i] = i;

      MapPartition.resize(NRanks);
      MPI_Gather(&Partition, 1, MPI_INT, &MapPartition[0], 1, MPI_INT, 0, Comm);

      // Write the rank-map file itself (recursively) from rank 0 only.
      GenericIO GIO(MPI_COMM_SELF, FileName, FileIOType);
      GIO.setNumElems(NRanks);
      GIO.addVariable("$rank", MapRank); /* this is for use by humans; the reading
                                            code assumes that the partitions are in
                                            rank order */
      GIO.addVariable("$partition", MapPartition);

      vector<int> CX, CY, CZ;
      int TopoStatus;
      MPI_Topo_test(Comm, &TopoStatus);
      if (TopoStatus == MPI_CART)
      {
        // Record cartesian coordinates for each rank in the map file.
        CX.resize(NRanks);
        CY.resize(NRanks);
        CZ.resize(NRanks);

        for (int i = 0; i < NRanks; ++i)
        {
          int C[3];
          MPI_Cart_coords(Comm, i, 3, C);

          CX[i] = C[0];
          CY[i] = C[1];
          CZ[i] = C[2];
        }

        GIO.addVariable("$x", CX);
        GIO.addVariable("$y", CY);
        GIO.addVariable("$z", CZ);
      }

      GIO.write();
    }
    else
    {
      // Non-root ranks only feed their Partition into rank 0's gather above.
      MPI_Gather(&Partition, 1, MPI_INT, 0, 0, MPI_INT, 0, Comm);
    }

    // Each partition's data goes to "<FileName>#<Partition>".
    stringstream ss;
    ss << FileName << "#" << Partition;
    LocalFileName = ss.str();
  }
  else
  {
    LocalFileName = FileName;
  }

  // Fill in this rank's on-disk rank record (Start is assigned later by the
  // header-writing rank and scattered back).
  RankHeader<IsBigEndian> RHLocal;
  int Dims[3], Periods[3], Coords[3];

  int TopoStatus;
  MPI_Topo_test(Comm, &TopoStatus);
  if (TopoStatus == MPI_CART)
  {
    MPI_Cart_get(Comm, 3, Dims, Periods, Coords);
  }
  else
  {
    // No cartesian topology: treat the ranks as an NRanks x 1 x 1 line.
    Dims[0] = NRanks;
    std::fill(Dims + 1, Dims + 3, 1);
    std::fill(Periods, Periods + 3, 0);
    Coords[0] = Rank;
    std::fill(Coords + 1, Coords + 3, 0);
  }

  std::copy(Coords, Coords + 3, RHLocal.Coords);
  RHLocal.NElems = NElems;
  RHLocal.Start = 0;
  RHLocal.GlobalRank = Rank;

  // Environment overrides for compression and block-header usage.
  bool ShouldCompress = DefaultShouldCompress;
  const char* EnvStr = getenv("GENERICIO_COMPRESS");
  if (EnvStr)
  {
    int Mod = atoi(EnvStr);
    ShouldCompress = (Mod > 0);
  }

  bool NeedsBlockHeaders = ShouldCompress;
  EnvStr = getenv("GENERICIO_FORCE_BLOCKS");
  if (!NeedsBlockHeaders && EnvStr)
  {
    int Mod = atoi(EnvStr);
    NeedsBlockHeaders = (Mod > 0);
  }

  vector<BlockHeader<IsBigEndian> > LocalBlockHeaders;
  vector<void*> LocalData;
  vector<bool> LocalHasExtraSpace;
  vector<vector<unsigned char> > LocalCData;
  if (NeedsBlockHeaders)
  {
    LocalBlockHeaders.resize(Vars.size());
    LocalData.resize(Vars.size());
    LocalHasExtraSpace.resize(Vars.size());
    if (ShouldCompress)
      LocalCData.resize(Vars.size());

    for (size_t i = 0; i < Vars.size(); ++i)
    {
      // Filters null by default, leave null starting address (needs to be
      // calculated by the header-writing rank).
      memset(&LocalBlockHeaders[i], 0, sizeof(BlockHeader<IsBigEndian>));
      if (ShouldCompress)
      {
        // Compressed layout: CompressHeader (CRC of the raw data) followed
        // by the BLOSC-compressed payload, then room for the block CRC.
        LocalCData[i].resize(sizeof(CompressHeader<IsBigEndian>));

        CompressHeader<IsBigEndian>* CH = (CompressHeader<IsBigEndian>*)&LocalCData[i][0];
        CH->OrigCRC = crc64_omp(Vars[i].Data, Vars[i].Size * NElems);

#ifndef LANL_GENERICIO_NO_COMPRESSION
#ifdef _OPENMP
#pragma omp master
        {
#endif

          if (!blosc_initialized)
          {
            blosc_init();
            blosc_initialized = true;
          }

#ifdef _OPENMP
          blosc_set_nthreads(omp_get_max_threads());
        }
#endif

        LocalCData[i].resize(LocalCData[i].size() + NElems * Vars[i].Size);
        // If compression fails (or would not shrink the data), fall back to
        // writing the block uncompressed via the nocomp label below.
        if (blosc_compress(9, 1, Vars[i].Size, NElems * Vars[i].Size, Vars[i].Data,
              &LocalCData[i][0] + sizeof(CompressHeader<IsBigEndian>), NElems * Vars[i].Size) <= 0)
          goto nocomp;

        strncpy(LocalBlockHeaders[i].Filters[0], CompressName, FilterNameSize);
        size_t CNBytes, CCBytes, CBlockSize;
        blosc_cbuffer_sizes(
          &LocalCData[i][0] + sizeof(CompressHeader<IsBigEndian>), &CNBytes, &CCBytes, &CBlockSize);

        LocalCData[i].resize(CCBytes + sizeof(CompressHeader<IsBigEndian>));

        LocalBlockHeaders[i].Size = LocalCData[i].size();
        LocalCData[i].resize(LocalCData[i].size() + CRCSize);
        LocalData[i] = &LocalCData[i][0];
        LocalHasExtraSpace[i] = true;
#endif // LANL_GENERICIO_NO_COMPRESSION
      }
      else
      {
      nocomp:
        LocalBlockHeaders[i].Size = NElems * Vars[i].Size;
        LocalData[i] = Vars[i].Data;
        LocalHasExtraSpace[i] = Vars[i].HasExtraSpace;
      }
    }
  }

  double StartTime = MPI_Wtime();

  if (SplitRank == 0)
  {
    // Header layout: GlobalHeader, variable records, rank records,
    // optional block records, trailing CRC.
    uint64_t HeaderSize = sizeof(GlobalHeader<IsBigEndian>) +
      Vars.size() * sizeof(VariableHeader<IsBigEndian>) +
      SplitNRanks * sizeof(RankHeader<IsBigEndian>) + CRCSize;
    if (NeedsBlockHeaders)
      HeaderSize += SplitNRanks * Vars.size() * sizeof(BlockHeader<IsBigEndian>);

    vector<char> Header(HeaderSize, 0);
    GlobalHeader<IsBigEndian>* GH = (GlobalHeader<IsBigEndian>*)&Header[0];
    std::copy(Magic, Magic + MagicSize, GH->Magic);
    GH->HeaderSize = HeaderSize - CRCSize;
    GH->NElems = NElems; // This will be updated later
    std::copy(Dims, Dims + 3, GH->Dims);
    GH->NVars = Vars.size();
    GH->VarsSize = sizeof(VariableHeader<IsBigEndian>);
    GH->VarsStart = sizeof(GlobalHeader<IsBigEndian>);
    GH->NRanks = SplitNRanks;
    GH->RanksSize = sizeof(RankHeader<IsBigEndian>);
    GH->RanksStart = GH->VarsStart + Vars.size() * sizeof(VariableHeader<IsBigEndian>);
    GH->GlobalHeaderSize = sizeof(GlobalHeader<IsBigEndian>);
    std::copy(PhysOrigin, PhysOrigin + 3, GH->PhysOrigin);
    std::copy(PhysScale, PhysScale + 3, GH->PhysScale);
    if (!NeedsBlockHeaders)
    {
      GH->BlocksSize = GH->BlocksStart = 0;
    }
    else
    {
      GH->BlocksSize = sizeof(BlockHeader<IsBigEndian>);
      GH->BlocksStart = GH->RanksStart + SplitNRanks * sizeof(RankHeader<IsBigEndian>);
    }

    // Fill the per-variable records; RecordSize accumulates the bytes per
    // row across all variables (used for offset math in the no-blocks case).
    uint64_t RecordSize = 0;
    VariableHeader<IsBigEndian>* VH = (VariableHeader<IsBigEndian>*)&Header[GH->VarsStart];
    for (size_t i = 0; i < Vars.size(); ++i, ++VH)
    {
      string VName(Vars[i].Name);
      VName.resize(NameSize);

      std::copy(VName.begin(), VName.end(), VH->Name);
      uint64_t VFlags = 0;
      if (Vars[i].IsFloat)
        VFlags |= FloatValue;
      if (Vars[i].IsSigned)
        VFlags |= SignedValue;
      if (Vars[i].IsPhysCoordX)
        VFlags |= ValueIsPhysCoordX;
      if (Vars[i].IsPhysCoordY)
        VFlags |= ValueIsPhysCoordY;
      if (Vars[i].IsPhysCoordZ)
        VFlags |= ValueIsPhysCoordZ;
      if (Vars[i].MaybePhysGhost)
        VFlags |= ValueMaybePhysGhost;
      VH->Flags = VFlags;
      RecordSize += VH->Size = Vars[i].Size;
    }

    // Collect every rank's RankHeader (matched by the gather in the else
    // branch below).
    MPI_Gather(&RHLocal, sizeof(RHLocal), MPI_BYTE, &Header[GH->RanksStart], sizeof(RHLocal),
      MPI_BYTE, 0, SplitComm);

    if (NeedsBlockHeaders)
    {
      // Collect the per-(rank, variable) block headers, then lay the blocks
      // out back-to-back (each followed by its CRC) after the header.
      MPI_Gather(&LocalBlockHeaders[0], Vars.size() * sizeof(BlockHeader<IsBigEndian>), MPI_BYTE,
        &Header[GH->BlocksStart], Vars.size() * sizeof(BlockHeader<IsBigEndian>), MPI_BYTE, 0,
        SplitComm);

      BlockHeader<IsBigEndian>* BH = (BlockHeader<IsBigEndian>*)&Header[GH->BlocksStart];
      for (int i = 0; i < SplitNRanks; ++i)
        for (size_t j = 0; j < Vars.size(); ++j, ++BH)
        {
          if (i == 0 && j == 0)
            BH->Start = HeaderSize;
          else
            BH->Start = BH[-1].Start + BH[-1].Size + CRCSize;
        }

      // Each rank starts at its first block; also accumulate the global
      // element total into GH->NElems.
      RankHeader<IsBigEndian>* RH = (RankHeader<IsBigEndian>*)&Header[GH->RanksStart];
      RH->Start = HeaderSize;
      ++RH;
      for (int i = 1; i < SplitNRanks; ++i, ++RH)
      {
        RH->Start = ((BlockHeader<IsBigEndian>*)&Header[GH->BlocksStart])[i * Vars.size()].Start;
        GH->NElems += RH->NElems;
      }

      // Compute the total file size.
      uint64_t LastData = BH[-1].Size + CRCSize;
      FileSize = BH[-1].Start + LastData;
    }
    else
    {
      // No block records: rank data is laid out contiguously; each rank's
      // extent is NElems rows times RecordSize plus one CRC per variable.
      RankHeader<IsBigEndian>* RH = (RankHeader<IsBigEndian>*)&Header[GH->RanksStart];
      RH->Start = HeaderSize;
      ++RH;
      for (int i = 1; i < SplitNRanks; ++i, ++RH)
      {
        uint64_t PrevNElems = RH[-1].NElems;
        uint64_t PrevData = PrevNElems * RecordSize + CRCSize * Vars.size();
        RH->Start = RH[-1].Start + PrevData;
        GH->NElems += RH->NElems;
      }

      // Compute the total file size.
      uint64_t LastNElems = RH[-1].NElems;
      uint64_t LastData = LastNElems * RecordSize + CRCSize * Vars.size();
      FileSize = RH[-1].Start + LastData;
    }

    // Now that the starting offset has been computed, send it back to each rank.
    MPI_Scatter(&Header[GH->RanksStart], sizeof(RHLocal), MPI_BYTE, &RHLocal, sizeof(RHLocal),
      MPI_BYTE, 0, SplitComm);

    if (NeedsBlockHeaders)
      MPI_Scatter(&Header[GH->BlocksStart], sizeof(BlockHeader<IsBigEndian>) * Vars.size(),
        MPI_BYTE, &LocalBlockHeaders[0], sizeof(BlockHeader<IsBigEndian>) * Vars.size(), MPI_BYTE,
        0, SplitComm);

    // Seal the header with an inverted CRC64 (readers CRC the whole header
    // including this field and expect the all-ones result).
    uint64_t HeaderCRC = crc64_omp(&Header[0], HeaderSize - CRCSize);
    crc64_invert(HeaderCRC, &Header[HeaderSize - CRCSize]);

    if (FileIOType == FileIOMPI)
      FH.get() = new GenericFileIO_MPI(MPI_COMM_SELF);
    else if (FileIOType == FileIOMPICollective)
      FH.get() = new GenericFileIO_MPICollective(MPI_COMM_SELF);
    else
      FH.get() = new GenericFileIO_POSIX();

    FH.get()->open(LocalFileName);
    FH.get()->setSize(FileSize);
    FH.get()->write(&Header[0], HeaderSize, 0, "header");

    close();
  }
  else
  {
    // Peer side of the gathers/scatters performed by SplitRank 0 above.
    MPI_Gather(&RHLocal, sizeof(RHLocal), MPI_BYTE, 0, 0, MPI_BYTE, 0, SplitComm);
    if (NeedsBlockHeaders)
      MPI_Gather(&LocalBlockHeaders[0], Vars.size() * sizeof(BlockHeader<IsBigEndian>), MPI_BYTE, 0,
        0, MPI_BYTE, 0, SplitComm);
    MPI_Scatter(0, 0, MPI_BYTE, &RHLocal, sizeof(RHLocal), MPI_BYTE, 0, SplitComm);
    if (NeedsBlockHeaders)
      MPI_Scatter(0, 0, MPI_BYTE, &LocalBlockHeaders[0],
        sizeof(BlockHeader<IsBigEndian>) * Vars.size(), MPI_BYTE, 0, SplitComm);
  }

  MPI_Barrier(SplitComm);

  if (FileIOType == FileIOMPI)
    FH.get() = new GenericFileIO_MPI(SplitComm);
  else if (FileIOType == FileIOMPICollective)
    FH.get() = new GenericFileIO_MPICollective(SplitComm);
  else
    FH.get() = new GenericFileIO_POSIX();

  FH.get()->open(LocalFileName);

  // Write this rank's data for every variable, each block followed by its
  // inverted CRC64.
  uint64_t Offset = RHLocal.Start;
  for (size_t i = 0; i < Vars.size(); ++i)
  {
    uint64_t WriteSize = NeedsBlockHeaders ? LocalBlockHeaders[i].Size : NElems * Vars[i].Size;
    void* Data = NeedsBlockHeaders ? LocalData[i] : Vars[i].Data;
    uint64_t CRC = crc64_omp(Data, WriteSize);
    bool HasExtraSpace = NeedsBlockHeaders ? LocalHasExtraSpace[i] : Vars[i].HasExtraSpace;
    char* CRCLoc = HasExtraSpace ? ((char*)Data) + WriteSize : (char*)&CRC;

    if (NeedsBlockHeaders)
      Offset = LocalBlockHeaders[i].Start;

    // When using extra space for the CRC write, preserve the original contents.
    char CRCSave[CRCSize];
    if (HasExtraSpace)
      std::copy(CRCLoc, CRCLoc + CRCSize, CRCSave);

    crc64_invert(CRC, CRCLoc);
    if (HasExtraSpace)
    {
      // Data and CRC are contiguous: one write suffices.
      FH.get()->write(Data, WriteSize + CRCSize, Offset, Vars[i].Name + " with CRC");
    }
    else
    {
      FH.get()->write(Data, WriteSize, Offset, Vars[i].Name);
      FH.get()->write(CRCLoc, CRCSize, Offset + WriteSize, Vars[i].Name + " CRC");
    }

    if (HasExtraSpace)
      std::copy(CRCSave, CRCSave + CRCSize, CRCLoc);

    Offset += WriteSize + CRCSize;
  }

  close();
  MPI_Barrier(Comm);

  // Timing/throughput report: use the slowest rank's time and, in split
  // mode, sum the per-partition file sizes for the global total.
  double EndTime = MPI_Wtime();
  double TotalTime = EndTime - StartTime;
  double MaxTotalTime;
  MPI_Reduce(&TotalTime, &MaxTotalTime, 1, MPI_DOUBLE, MPI_MAX, 0, Comm);

  if (SplitNRanks != NRanks)
  {
    uint64_t ContribFileSize = (SplitRank == 0) ? FileSize : 0;
    MPI_Reduce(&ContribFileSize, &FileSize, 1, MPI_UINT64_T, MPI_SUM, 0, Comm);
  }

  if (Rank == 0)
  {
    double Rate = ((double)FileSize) / MaxTotalTime / (1024. * 1024.);
    cout << "Wrote " << Vars.size() << " variables to " << FileName << " (" << FileSize
         << " bytes) in " << MaxTotalTime << "s: " << Rate << " MB/s" << endl;
  }

  MPI_Comm_free(&SplitComm);
  SplitComm = MPI_COMM_NULL;
}
#endif // LANL_GENERICIO_NO_MPI
// Rank-0 (per split communicator) half of header reading: validate the
// already-read GlobalHeader (pointed to by GHPtr) against the mismatch
// policy MB, set up redistribution bookkeeping if requested, then read the
// full header (including its CRC) into Header and verify it.
// Throws runtime_error on any mismatch or CRC failure.
template <bool IsBigEndian>
void GenericIO::readHeaderLeader(void* GHPtr, MismatchBehavior MB, int NRanks, int Rank,
  int SplitNRanks, string& LocalFileName, uint64_t& HeaderSize, vector<char>& Header)
{
  // May be unused depending on preprocessor. Since it's a static var, it's
  // initialized here to make sure it's in an executable block so the compiler
  // will accept it.
  (void)blosc_initialized;

  GlobalHeader<IsBigEndian>& GH = *(GlobalHeader<IsBigEndian>*)GHPtr;

  if (MB == MismatchDisallowed)
  {
    // Strict mode: the reader's communicator must match the writer's
    // rank count (and cartesian decomposition, if one is in use).
    if (SplitNRanks != (int)GH.NRanks)
    {
      stringstream ss;
      ss << "Won't read " << LocalFileName << ": communicator-size mismatch: "
         << "current: " << SplitNRanks << ", file: " << GH.NRanks;
      throw runtime_error(ss.str());
    }

#ifndef LANL_GENERICIO_NO_MPI
    int TopoStatus;
    MPI_Topo_test(Comm, &TopoStatus);
    if (TopoStatus == MPI_CART)
    {
      int Dims[3], Periods[3], Coords[3];
      MPI_Cart_get(Comm, 3, Dims, Periods, Coords);

      bool DimsMatch = true;
      for (int i = 0; i < 3; ++i)
      {
        if ((uint64_t)Dims[i] != GH.Dims[i])
        {
          DimsMatch = false;
          break;
        }
      }

      if (!DimsMatch)
      {
        stringstream ss;
        ss << "Won't read " << LocalFileName << ": communicator-decomposition mismatch: "
           << "current: " << Dims[0] << "x" << Dims[1] << "x" << Dims[2] << ", file: " << GH.Dims[0]
           << "x" << GH.Dims[1] << "x" << GH.Dims[2];
        throw runtime_error(ss.str());
      }
    }
#endif
  }
  else if (MB == MismatchRedistribute && !Redistributing)
  {
    // Redistribute mode: assign this reader rank its share of the file's
    // ranks (recorded in SourceRanks for later per-file-rank reads).
    Redistributing = true;

    int NFileRanks = RankMap.empty() ? (int)GH.NRanks : (int)RankMap.size();
    int NFileRanksPerRank = NFileRanks / NRanks;
    int NRemFileRank = NFileRanks % NRanks;

    if (!NFileRanksPerRank)
    {
      // We have only the remainder, so the last NRemFileRank ranks get one
      // file rank, and the others don't.
      if (NRemFileRank && NRanks - Rank <= NRemFileRank)
        SourceRanks.push_back(NRanks - (Rank + 1));
    }
    else
    {
      // Since NRemFileRank < NRanks, and we don't want to put any extra memory
      // load on rank 0 (because rank 0's memory load is normally higher than
      // the other ranks anyway), the last NRemFileRank will each take
      // (NFileRanksPerRank+1) file ranks.

      int FirstFileRank = 0, LastFileRank = NFileRanksPerRank - 1;
      for (int i = 1; i <= Rank; ++i)
      {
        FirstFileRank = LastFileRank + 1;
        LastFileRank = FirstFileRank + NFileRanksPerRank - 1;

        if (NRemFileRank && NRanks - i <= NRemFileRank)
          ++LastFileRank;
      }

      for (int i = FirstFileRank; i <= LastFileRank; ++i)
        SourceRanks.push_back(i);
    }
  }

  // Re-read the complete header (plus CRC) and verify it: CRC64 over data
  // followed by its inverted CRC yields all-ones, i.e. (uint64_t)-1.
  HeaderSize = GH.HeaderSize;
  Header.resize(HeaderSize + CRCSize, '\xFE' /* poison */);
  FH.get()->read(&Header[0], HeaderSize + CRCSize, 0, "header");

  uint64_t CRC = crc64_omp(&Header[0], HeaderSize + CRCSize);
  if (CRC != (uint64_t)-1)
  {
    throw runtime_error("Header CRC check failed: " + LocalFileName);
  }
}
// Note: Errors from this function should be recoverable. This means that if
// one rank throws an exception, then all ranks should.
// Open the (possibly partitioned) file for EffRank and cache its verified
// header in FH. Handles: detecting and loading a rank-map file (when
// CheckPartMap), choosing the per-partition file name, splitting the
// communicator accordingly, reading/validating the header on split-rank 0
// and broadcasting it, then collectively opening the data file.
// The success/failure byte broadcast from split-rank 0 keeps all ranks
// throwing together, so errors here are collectively recoverable.
void GenericIO::openAndReadHeader(MismatchBehavior MB, int EffRank, bool CheckPartMap)
{
  int NRanks, Rank;
#ifndef LANL_GENERICIO_NO_MPI
  MPI_Comm_rank(Comm, &Rank);
  MPI_Comm_size(Comm, &NRanks);
#else
  Rank = 0;
  NRanks = 1;
#endif

  if (EffRank == -1)
    EffRank = MB == MismatchRedistribute ? 0 : Rank;

  if (RankMap.empty() && CheckPartMap)
  {
    // First, check to see if the file is a rank map.
    unsigned long RanksInMap = 0;
    if (Rank == 0)
    {
      try
      {
#ifndef LANL_GENERICIO_NO_MPI
        GenericIO GIO(MPI_COMM_SELF, FileName, FileIOType);
#else
        GenericIO GIO(FileName, FileIOType);
#endif
        GIO.openAndReadHeader(MismatchDisallowed, 0, false);
        RanksInMap = static_cast<unsigned long>(GIO.readNumElems());

        RankMap.resize(RanksInMap + GIO.requestedExtraSpace() / sizeof(int));
        GIO.addVariable("$partition", RankMap, true);

        GIO.readData(0, false);
        RankMap.resize(RanksInMap);
      }
      catch (...)
      {
        // Not a rank map (or unreadable); fall through with an empty map.
        RankMap.clear();
        RanksInMap = 0;
      }
    }

#ifndef LANL_GENERICIO_NO_MPI
    // Share the (possibly empty) rank map with all ranks.
    MPI_Bcast(&RanksInMap, 1, MPI_UNSIGNED_LONG, 0, Comm);
    if (RanksInMap > 0)
    {
      RankMap.resize(RanksInMap);
      MPI_Bcast(&RankMap[0], RanksInMap, MPI_INT, 0, Comm);
    }
#endif
  }

#ifndef LANL_GENERICIO_NO_MPI
  if (SplitComm != MPI_COMM_NULL)
    MPI_Comm_free(&SplitComm);
#endif

  string LocalFileName;
  if (RankMap.empty())
  {
    // Single-file layout: everyone reads FileName.
    LocalFileName = FileName;
#ifndef LANL_GENERICIO_NO_MPI
    MPI_Comm_dup(MB == MismatchRedistribute ? MPI_COMM_SELF : Comm, &SplitComm);
#endif
  }
  else
  {
    // Partitioned layout: this rank reads "<FileName>#<partition>".
    stringstream ss;
    ss << FileName << "#" << RankMap[EffRank];
    LocalFileName = ss.str();
#ifndef LANL_GENERICIO_NO_MPI
    if (MB == MismatchRedistribute)
    {
      MPI_Comm_dup(MPI_COMM_SELF, &SplitComm);
    }
    else
    {
#ifdef __bgq__
      MPI_Barrier(Comm);
#endif
      MPI_Comm_split(Comm, RankMap[EffRank], Rank, &SplitComm);
    }
#endif
  }

  // Already open with a cached header? Nothing more to do.
  if (LocalFileName == OpenFileName)
    return;
  FH.close();

  int SplitNRanks, SplitRank;
#ifndef LANL_GENERICIO_NO_MPI
  MPI_Comm_rank(SplitComm, &SplitRank);
  MPI_Comm_size(SplitComm, &SplitNRanks);
#else
  SplitRank = 0;
  SplitNRanks = 1;
#endif

  uint64_t HeaderSize = 0;
  vector<char> Header;

  if (SplitRank == 0)
  {
#ifndef LANL_GENERICIO_NO_MPI
    if (FileIOType == FileIOMPI)
      FH.get() = new GenericFileIO_MPI(MPI_COMM_SELF);
    else if (FileIOType == FileIOMPICollective)
      FH.get() = new GenericFileIO_MPICollective(MPI_COMM_SELF);
    else
#endif
      FH.get() = new GenericFileIO_POSIX();

#ifndef LANL_GENERICIO_NO_MPI
    char True = 1, False = 0;
#endif

    try
    {
      FH.get()->open(LocalFileName, true);

      // Peek at the magic string to learn the file's endianness, then let
      // the matching instantiation validate and load the full header.
      GlobalHeader<false> GH; // endianness does not matter yet...
      FH.get()->read(&GH, sizeof(GlobalHeader<false>), 0, "global header");

      if (string(GH.Magic, GH.Magic + MagicSize - 1) == MagicLE)
      {
        readHeaderLeader<false>(
          &GH, MB, NRanks, Rank, SplitNRanks, LocalFileName, HeaderSize, Header);
      }
      else if (string(GH.Magic, GH.Magic + MagicSize - 1) == MagicBE)
      {
        readHeaderLeader<true>(
          &GH, MB, NRanks, Rank, SplitNRanks, LocalFileName, HeaderSize, Header);
      }
      else
      {
        string Error = "invalid file-type identifier";
        throw runtime_error("Won't read " + LocalFileName + ": " + Error);
      }

#ifndef LANL_GENERICIO_NO_MPI
      // Tell the other ranks the header was read successfully.
      close();
      MPI_Bcast(&True, 1, MPI_BYTE, 0, SplitComm);
#endif
    }
    catch (...)
    {
#ifndef LANL_GENERICIO_NO_MPI
      // Tell the other ranks to throw too, so all ranks fail collectively.
      MPI_Bcast(&False, 1, MPI_BYTE, 0, SplitComm);
#endif
      close();
      throw;
    }
  }
  else
  {
#ifndef LANL_GENERICIO_NO_MPI
    // Wait for rank 0's success/failure notification.
    char Okay;
    MPI_Bcast(&Okay, 1, MPI_BYTE, 0, SplitComm);
    if (!Okay)
      throw runtime_error("Failure broadcast from rank 0");
#endif
  }

#ifndef LANL_GENERICIO_NO_MPI
  // Distribute the verified header to every rank and cache it.
  MPI_Bcast(&HeaderSize, 1, MPI_UINT64_T, 0, SplitComm);
#endif

  Header.resize(HeaderSize, '\xFD' /* poison */);
#ifndef LANL_GENERICIO_NO_MPI
  MPI_Bcast(&Header[0], HeaderSize, MPI_BYTE, 0, SplitComm);
#endif

  FH.getHeaderCache().clear();

  GlobalHeader<false>* GH = (GlobalHeader<false>*)&Header[0];
  FH.setIsBigEndian(string(GH->Magic, GH->Magic + MagicSize - 1) == MagicBE);

  FH.getHeaderCache().swap(Header);
  OpenFileName = LocalFileName;

#ifndef LANL_GENERICIO_NO_MPI
  // Finally, open the data file collectively; count open failures across
  // ranks so everyone throws if anyone failed.
  if (!DisableCollErrChecking)
    MPI_Barrier(Comm);

  if (FileIOType == FileIOMPI)
    FH.get() = new GenericFileIO_MPI(SplitComm);
  else if (FileIOType == FileIOMPICollective)
    FH.get() = new GenericFileIO_MPICollective(SplitComm);
  else
    FH.get() = new GenericFileIO_POSIX();

  int OpenErr = 0, TotOpenErr;
  try
  {
    FH.get()->open(LocalFileName, true);
    MPI_Allreduce(
      &OpenErr, &TotOpenErr, 1, MPI_INT, MPI_SUM, DisableCollErrChecking ? MPI_COMM_SELF : Comm);
  }
  catch (...)
  {
    OpenErr = 1;
    MPI_Allreduce(
      &OpenErr, &TotOpenErr, 1, MPI_INT, MPI_SUM, DisableCollErrChecking ? MPI_COMM_SELF : Comm);
    throw;
  }

  if (TotOpenErr > 0)
  {
    stringstream ss;
    ss << TotOpenErr << " ranks failed to open file: " << LocalFileName;
    throw runtime_error(ss.str());
  }
#endif
}
// Number of writer ranks recorded in the open file; dispatches on the
// file's stored byte order.
int GenericIO::readNRanks()
{
  return FH.isBigEndian() ? readNRanks<true>() : readNRanks<false>();
}
// Endian-specific implementation: a loaded rank map overrides the count in
// the per-partition file's header.
template <bool IsBigEndian>
int GenericIO::readNRanks()
{
  if (!RankMap.empty())
    return static_cast<int>(RankMap.size());

  assert(FH.getHeaderCache().size() && "HeaderCache must not be empty");
  GlobalHeader<IsBigEndian>* GH = (GlobalHeader<IsBigEndian>*)&FH.getHeaderCache()[0];
  return (int)GH->NRanks;
}
// Fill Dims with the writer decomposition; dispatches on the file's stored
// byte order.
void GenericIO::readDims(int Dims[3])
{
  FH.isBigEndian() ? readDims<true>(Dims) : readDims<false>(Dims);
}
// Endian-specific implementation: copy the three decomposition dimensions
// out of the cached global header.
template <bool IsBigEndian>
void GenericIO::readDims(int Dims[3])
{
  assert(FH.getHeaderCache().size() && "HeaderCache must not be empty");
  GlobalHeader<IsBigEndian>* GH = (GlobalHeader<IsBigEndian>*)&FH.getHeaderCache()[0];
  for (int d = 0; d < 3; ++d)
    Dims[d] = static_cast<int>(GH->Dims[d]);
}
// Global element total recorded in the open file; dispatches on the file's
// stored byte order.
uint64_t GenericIO::readTotalNumElems()
{
  return FH.isBigEndian() ? readTotalNumElems<true>() : readTotalNumElems<false>();
}
// Endian-specific implementation. With a rank map the total lives across
// many partition files, so it is unknown here and reported as (uint64_t)-1.
template <bool IsBigEndian>
uint64_t GenericIO::readTotalNumElems()
{
  if (!RankMap.empty())
    return (uint64_t)-1;

  assert(FH.getHeaderCache().size() && "HeaderCache must not be empty");
  GlobalHeader<IsBigEndian>* GH = (GlobalHeader<IsBigEndian>*)&FH.getHeaderCache()[0];
  return GH->NElems;
}
// Fill Origin with the physical-domain origin; dispatches on the file's
// stored byte order.
void GenericIO::readPhysOrigin(double Origin[3])
{
  FH.isBigEndian() ? readPhysOrigin<true>(Origin) : readPhysOrigin<false>(Origin);
}
// Define a "safe" version of offsetof (offsetof itself might not work for
// non-POD types, and at least xlC v12.1 will complain about this if you try).
#define offsetof_safe(S, F) (size_t(&(S)->F) - size_t(S))
// Endian-specific implementation. Files written before the PhysOrigin field
// existed have a shorter GlobalHeader; report zeros for those.
template <bool IsBigEndian>
void GenericIO::readPhysOrigin(double Origin[3])
{
  assert(FH.getHeaderCache().size() && "HeaderCache must not be empty");
  GlobalHeader<IsBigEndian>* GH = (GlobalHeader<IsBigEndian>*)&FH.getHeaderCache()[0];
  if (offsetof_safe(GH, PhysOrigin) >= GH->GlobalHeaderSize)
  {
    Origin[0] = Origin[1] = Origin[2] = 0.0;
    return;
  }

  for (int d = 0; d < 3; ++d)
    Origin[d] = GH->PhysOrigin[d];
}
// Fill Scale with the physical-domain scale; dispatches on the file's
// stored byte order.
void GenericIO::readPhysScale(double Scale[3])
{
  FH.isBigEndian() ? readPhysScale<true>(Scale) : readPhysScale<false>(Scale);
}
// Endian-specific implementation. Files written before the PhysScale field
// existed have a shorter GlobalHeader; report zeros for those.
template <bool IsBigEndian>
void GenericIO::readPhysScale(double Scale[3])
{
  assert(FH.getHeaderCache().size() && "HeaderCache must not be empty");
  GlobalHeader<IsBigEndian>* GH = (GlobalHeader<IsBigEndian>*)&FH.getHeaderCache()[0];
  if (offsetof_safe(GH, PhysScale) >= GH->GlobalHeaderSize)
  {
    Scale[0] = Scale[1] = Scale[2] = 0.0;
    return;
  }

  for (int d = 0; d < 3; ++d)
    Scale[d] = GH->PhysScale[d];
}
// Map an effective rank number to its index among the file's rank records.
// Without a rank map the records are in rank order, so the rank is its own
// index; with a map, scan the records for a matching GlobalRank field.
template <bool IsBigEndian>
static size_t getRankIndex(
  int EffRank, GlobalHeader<IsBigEndian>* GH, vector<int>& RankMap, vector<char>& HeaderCache)
{
  if (RankMap.empty())
    return EffRank;

  for (size_t Index = 0; Index < GH->NRanks; ++Index)
  {
    char* Record = &HeaderCache[GH->RanksStart + Index * GH->RanksSize];
    RankHeader<IsBigEndian>* RH = (RankHeader<IsBigEndian>*)Record;
    // Records too short to contain GlobalRank (older files) force the
    // positional fallback.
    if (offsetof_safe(RH, GlobalRank) >= GH->RanksSize)
      return EffRank;
    if ((int)RH->GlobalRank == EffRank)
      return Index;
  }

  assert(false && "Index requested of an invalid rank");
  return (size_t)-1;
}
// Global rank number recorded for EffRank's data; dispatches on the file's
// stored byte order.
int GenericIO::readGlobalRankNumber(int EffRank)
{
  return FH.isBigEndian() ? readGlobalRankNumber<true>(EffRank)
                          : readGlobalRankNumber<false>(EffRank);
}
// Endian-specific implementation: open the file for EffRank (defaulting to
// this MPI rank) and return the GlobalRank stored in its rank record, or
// EffRank itself for older files whose rank records lack that field.
template <bool IsBigEndian>
int GenericIO::readGlobalRankNumber(int EffRank)
{
  if (EffRank == -1)
  {
#ifndef LANL_GENERICIO_NO_MPI
    MPI_Comm_rank(Comm, &EffRank);
#else
    EffRank = 0;
#endif
  }

  openAndReadHeader(MismatchAllowed, EffRank, false);

  assert(FH.getHeaderCache().size() && "HeaderCache must not be empty");

  GlobalHeader<IsBigEndian>* GH = (GlobalHeader<IsBigEndian>*)&FH.getHeaderCache()[0];
  size_t RankIndex = getRankIndex<IsBigEndian>(EffRank, GH, RankMap, FH.getHeaderCache());

  assert(RankIndex < GH->NRanks && "Invalid rank specified");

  RankHeader<IsBigEndian>* RH =
    (RankHeader<IsBigEndian>*)&FH.getHeaderCache()[GH->RanksStart + RankIndex * GH->RanksSize];

  // Older files may not store GlobalRank; fall back to the requested rank.
  if (offsetof_safe(RH, GlobalRank) >= GH->RanksSize)
    return EffRank;

  return (int)RH->GlobalRank;
}
// Report the file ranks this reader consumes: the redistribution list when
// redistributing, otherwise just this rank's own number.
void GenericIO::getSourceRanks(vector<int>& SR)
{
  SR.clear();

  if (Redistributing)
  {
    SR.assign(SourceRanks.begin(), SourceRanks.end());
    return;
  }

  int Rank;
#ifndef LANL_GENERICIO_NO_MPI
  MPI_Comm_rank(Comm, &Rank);
#else
  Rank = 0;
#endif

  SR.push_back(Rank);
}
// Number of rows this reader should load. When redistributing with the
// default rank (-1), that is the sum over all assigned source file ranks;
// otherwise dispatch to the endian-specific single-rank lookup.
size_t GenericIO::readNumElems(int EffRank)
{
  if (EffRank == -1 && Redistributing)
  {
    // The per-source opens are rank-local; suspend collective error checks.
    DisableCollErrChecking = true;
    size_t Total = 0;
    for (size_t Idx = 0, NumSources = SourceRanks.size(); Idx != NumSources; ++Idx)
      Total += readNumElems(SourceRanks[Idx]);
    DisableCollErrChecking = false;
    return Total;
  }

  return FH.isBigEndian() ? readNumElems<true>(EffRank) : readNumElems<false>(EffRank);
}
// Implementation of readNumElems for a file of the given endianness:
// return the element count recorded in EffRank's rank header
// (EffRank == -1 means "this process's communicator rank").
template <bool IsBigEndian>
size_t GenericIO::readNumElems(int EffRank)
{
    if (EffRank == -1)
    {
#ifndef LANL_GENERICIO_NO_MPI
        MPI_Comm_rank(Comm, &EffRank);
#else
        EffRank = 0;
#endif
    }
    // While redistributing, header mismatches are handled by redistribution
    // rather than treated as errors.
    openAndReadHeader(Redistributing ? MismatchRedistribute : MismatchAllowed, EffRank, false);
    assert(FH.getHeaderCache().size() && "HeaderCache must not be empty");
    GlobalHeader<IsBigEndian>* GH = (GlobalHeader<IsBigEndian>*)&FH.getHeaderCache()[0];
    size_t RankIndex = getRankIndex<IsBigEndian>(EffRank, GH, RankMap, FH.getHeaderCache());
    assert(RankIndex < GH->NRanks && "Invalid rank specified");
    RankHeader<IsBigEndian>* RH =
        (RankHeader<IsBigEndian>*)&FH.getHeaderCache()[GH->RanksStart + RankIndex * GH->RanksSize];
    return (size_t)RH->NElems;
}
// Endianness dispatch for the low-level section reader.
void GenericIO::readDataSection(size_t readOffset, size_t readNumRows, int EffRank,
    size_t RowOffset, int Rank, uint64_t& TotalReadSize, int NErrs[3])
{
    const bool FileIsBE = FH.isBigEndian();
    if (FileIsBE)
        readDataSection<true>(readOffset, readNumRows, EffRank, RowOffset, Rank,
            TotalReadSize, NErrs);
    else
        readDataSection<false>(readOffset, readNumRows, EffRank, RowOffset, Rank,
            TotalReadSize, NErrs);
}
// Collective, user-facing section read: rows [readOffset, readOffset +
// readNumRows) of every registered variable are read into the caller's
// buffers. I/O, CRC and decompression-CRC error counts are summed across
// all ranks and a runtime_error is thrown (on every rank) if any rank saw
// an error. Timing/size statistics are optionally printed on rank 0.
void GenericIO::readDataSection(
    size_t readOffset, size_t readNumRows, int EffRank, bool PrintStats, bool CollStats)
{
    (void)CollStats; // may be unused depending on preprocessor config.
    int Rank;
#ifndef LANL_GENERICIO_NO_MPI
    MPI_Comm_rank(Comm, &Rank);
#else
    Rank = 0;
#endif
    uint64_t TotalReadSize = 0;
#ifndef LANL_GENERICIO_NO_MPI
    double StartTime = MPI_Wtime();
#else
    double StartTime = double(clock()) / CLOCKS_PER_SEC;
#endif
    int NErrs[3] = { 0, 0, 0 };
    // EffRank == -1 while redistributing means "read from all of my source
    // ranks"; rows from successive sources are appended via RowOffset.
    // Collective error checking is disabled around the per-source reads.
    if (EffRank == -1 && Redistributing)
    {
        DisableCollErrChecking = true;
        size_t RowOffset = 0;
        for (size_t i = 0, ie = SourceRanks.size(); i != ie; ++i)
        {
            readDataSection(
                readOffset, readNumRows, SourceRanks[i], RowOffset, Rank, TotalReadSize, NErrs);
            RowOffset += readNumElems(SourceRanks[i]);
        }
        DisableCollErrChecking = false;
    }
    else
    {
        readDataSection(readOffset, readNumRows, EffRank, 0, Rank, TotalReadSize, NErrs);
    }
    // Sum the per-rank error counters so that every rank throws together.
    int AllNErrs[3];
#ifndef LANL_GENERICIO_NO_MPI
    MPI_Allreduce(NErrs, AllNErrs, 3, MPI_INT, MPI_SUM, Comm);
#else
    AllNErrs[0] = NErrs[0];
    AllNErrs[1] = NErrs[1];
    AllNErrs[2] = NErrs[2];
#endif
    if (AllNErrs[0] > 0 || AllNErrs[1] > 0 || AllNErrs[2] > 0)
    {
        stringstream ss;
        ss << "Experienced " << AllNErrs[0] << " I/O error(s), " << AllNErrs[1] << " CRC error(s) and "
           << AllNErrs[2] << " decompression CRC error(s) reading: " << OpenFileName;
        throw runtime_error(ss.str());
    }
#ifndef LANL_GENERICIO_NO_MPI
    MPI_Barrier(Comm);
#endif
#ifndef LANL_GENERICIO_NO_MPI
    double EndTime = MPI_Wtime();
#else
    double EndTime = double(clock()) / CLOCKS_PER_SEC;
#endif
    double TotalTime = EndTime - StartTime;
    double MaxTotalTime;
    // NOTE: the "else" below deliberately pairs with the assignment after
    // the #endif; in the no-MPI build the assignment is unconditional.
#ifndef LANL_GENERICIO_NO_MPI
    if (CollStats)
        MPI_Reduce(&TotalTime, &MaxTotalTime, 1, MPI_DOUBLE, MPI_MAX, 0, Comm);
    else
#endif
        MaxTotalTime = TotalTime;
    uint64_t AllTotalReadSize;
#ifndef LANL_GENERICIO_NO_MPI
    if (CollStats)
        MPI_Reduce(&TotalReadSize, &AllTotalReadSize, 1, MPI_UINT64_T, MPI_SUM, 0, Comm);
    else
#endif
        AllTotalReadSize = TotalReadSize;
    // With CollStats, the reduced values are only valid on rank 0 — the only
    // rank that prints.
    if (Rank == 0 && PrintStats)
    {
        double Rate = ((double)AllTotalReadSize) / MaxTotalTime / (1024. * 1024.);
        cout << "Read " << Vars.size() << " variables from " << FileName << " (" << AllTotalReadSize
             << " bytes) in " << MaxTotalTime << "s: " << Rate << " MB/s [excluding header read]"
             << endl;
    }
}
// Note: Errors from this function should be recoverable. This means that if
// one rank throws an exception, then all ranks should.
//
// Low-level section reader: for each registered variable, locate its entry
// in the file's variable table, validate size/type flags against the
// in-memory registration, then read rows [readOffset, readOffset +
// readNumRows) of EffRank's data block into the user buffer at row
// RowOffset. I/O failures are retried (GENERICIO_RETRY_COUNT /
// GENERICIO_RETRY_SLEEP environment variables) and counted in NErrs[0];
// TotalReadSize accumulates the bytes read.
template <bool IsBigEndian>
void GenericIO::readDataSection(size_t readOffset, size_t readNumRows, int EffRank,
    size_t RowOffset, int Rank, uint64_t& TotalReadSize, int NErrs[3])
{
    openAndReadHeader(Redistributing ? MismatchRedistribute : MismatchAllowed, EffRank, false);
    assert(FH.getHeaderCache().size() && "HeaderCache must not be empty");
    if (EffRank == -1)
        EffRank = Rank;
    GlobalHeader<IsBigEndian>* GH = (GlobalHeader<IsBigEndian>*)&FH.getHeaderCache()[0];
    size_t RankIndex = getRankIndex<IsBigEndian>(EffRank, GH, RankMap, FH.getHeaderCache());
    assert(RankIndex < GH->NRanks && "Invalid rank specified");
    RankHeader<IsBigEndian>* RH =
        (RankHeader<IsBigEndian>*)&FH.getHeaderCache()[GH->RanksStart + RankIndex * GH->RanksSize];
    for (size_t i = 0; i < Vars.size(); ++i)
    {
        uint64_t Offset = RH->Start;
        bool VarFound = false;
        for (uint64_t j = 0; j < GH->NVars; ++j)
        {
            VariableHeader<IsBigEndian>* VH =
                (VariableHeader<IsBigEndian>*)&FH.getHeaderCache()[GH->VarsStart + j * GH->VarsSize];
            // The on-disk name field is fixed-width; trim at the first NUL.
            string VName(VH->Name, VH->Name + NameSize);
            size_t VNameNull = VName.find('\0');
            if (VNameNull < NameSize)
                VName.resize(VNameNull);
            uint64_t ReadSize = RH->NElems * VH->Size + CRCSize;
            if (VName != Vars[i].Name)
            {
                // Not the variable we want; skip over its data (+CRC).
                Offset += ReadSize;
                continue;
            }
            VarFound = true;
            bool IsFloat = (VH->Flags & FloatValue) != 0, IsSigned = (VH->Flags & SignedValue) != 0;
            if (VH->Size != Vars[i].Size)
            {
                stringstream ss;
                ss << "Size mismatch for variable " << Vars[i].Name << " in: " << OpenFileName
                   << ": current: " << Vars[i].Size << ", file: " << VH->Size;
                throw runtime_error(ss.str());
            }
            else if (IsFloat != Vars[i].IsFloat)
            {
                string Float("float"), Int("integer");
                stringstream ss;
                ss << "Type mismatch for variable " << Vars[i].Name << " in: " << OpenFileName
                   << ": current: " << (Vars[i].IsFloat ? Float : Int)
                   << ", file: " << (IsFloat ? Float : Int);
                throw runtime_error(ss.str());
            }
            else if (IsSigned != Vars[i].IsSigned)
            {
                string Signed("signed"), Uns("unsigned");
                stringstream ss;
                ss << "Type mismatch for variable " << Vars[i].Name << " in: " << OpenFileName
                   << ": current: " << (Vars[i].IsSigned ? Signed : Uns)
                   << ", file: " << (IsSigned ? Signed : Uns);
                throw runtime_error(ss.str());
            }
            size_t VarOffset = RowOffset * Vars[i].Size;
            void* VarData = ((char*)Vars[i].Data) + VarOffset;
            vector<unsigned char> LData;
            void* Data = VarData;
            bool HasExtraSpace = Vars[i].HasExtraSpace;
            (void)HasExtraSpace; // Only used in assert, unused in release builds.
            if (offsetof_safe(GH, BlocksStart) < GH->GlobalHeaderSize && GH->BlocksSize > 0)
            {
                BlockHeader<IsBigEndian>* BH =
                    (BlockHeader<IsBigEndian>*)&FH
                        .getHeaderCache()[GH->BlocksStart + (RankIndex * GH->NVars + j) * GH->BlocksSize];
                ReadSize = BH->Size + CRCSize;
                Offset = BH->Start;
                if (strncmp(BH->Filters[0], CompressName, FilterNameSize) == 0)
                {
                    LData.resize(ReadSize);
                    Data = &LData[0];
                    HasExtraSpace = true;
                }
                else if (BH->Filters[0][0] != '\0')
                {
                    stringstream ss;
                    ss << "Unknown filter \"" << BH->Filters[0] << "\" on variable " << Vars[i].Name;
                    throw runtime_error(ss.str());
                }
            }
            assert(HasExtraSpace && "Extra space required for reading");
            // BUGFIX: the section size/offset were previously recomputed
            // INSIDE the retry loop below, so every failed attempt advanced
            // Offset by another readOffset * VH->Size and retries read from
            // the wrong file position. Compute them exactly once here.
            // NOTE(review): this overrides the compressed-block ReadSize and
            // Offset set above, exactly as the original code did — section
            // reads of compressed blocks look unsupported; confirm before
            // relying on them.
            ReadSize = readNumRows * VH->Size;
            Offset += readOffset * VH->Size;
            int Retry = 0;
            {
                // Retry transient I/O failures; tunable via the environment.
                int RetryCount = 300;
                const char* EnvStr = getenv("GENERICIO_RETRY_COUNT");
                if (EnvStr)
                    RetryCount = atoi(EnvStr);
                int RetrySleep = 100; // ms
                EnvStr = getenv("GENERICIO_RETRY_SLEEP");
                if (EnvStr)
                    RetrySleep = atoi(EnvStr);
                for (; Retry < RetryCount; ++Retry)
                {
                    try
                    {
                        // Read section
                        FH.get()->read(Data, ReadSize, static_cast<off_t>(Offset), Vars[i].Name);
                        break;
                    }
                    catch (...)
                    {
                    }
                    usleep(1000 * RetrySleep);
                }
                if (Retry == RetryCount)
                {
                    // Every attempt failed: count an I/O error and abandon
                    // the variable scan (break leaves the j-loop).
                    ++NErrs[0];
                    break;
                }
                else if (Retry > 0)
                {
                    EnvStr = getenv("GENERICIO_VERBOSE");
                    if (EnvStr)
                    {
                        int Mod = atoi(EnvStr);
                        if (Mod > 0)
                        {
                            int RankTmp;
#ifndef LANL_GENERICIO_NO_MPI
                            MPI_Comm_rank(MPI_COMM_WORLD, &RankTmp);
#else
                            RankTmp = 0;
#endif
                            std::cerr << "Rank " << RankTmp << ": " << Retry
                                      << " I/O retries were necessary for reading " << Vars[i].Name
                                      << " from: " << OpenFileName << "\n";
                            std::cerr.flush();
                        }
                    }
                }
            }
            TotalReadSize += ReadSize;
            // Byte swap the data if necessary. BUGFIX: swap only the rows
            // actually read into this section; the previous bound of
            // RH->NElems walked past the end of the requested section.
            if (IsBigEndian != isBigEndian())
                for (size_t k = 0; k < readNumRows; ++k)
                {
                    char* OffsetTmp = ((char*)VarData) + k * Vars[i].Size;
                    bswap(OffsetTmp, Vars[i].Size);
                }
            break;
        }
        if (!VarFound)
            throw runtime_error("Variable " + Vars[i].Name + " not found in: " + OpenFileName);
    }
}
// Read the 3-D decomposition coordinates of EffRank. While redistributing
// with EffRank == -1 there is no single source rank, so report (0, 0, 0).
void GenericIO::readCoords(int Coords[3], int EffRank)
{
    if (EffRank == -1 && Redistributing)
    {
        Coords[0] = Coords[1] = Coords[2] = 0;
        return;
    }

    if (FH.isBigEndian())
        readCoords<true>(Coords, EffRank);
    else
        readCoords<false>(Coords, EffRank);
}
// Implementation of readCoords for a file of the given endianness: locate
// the rank header belonging to EffRank (-1 meaning "this process's rank")
// and copy out its Coords triple.
template <bool IsBigEndian>
void GenericIO::readCoords(int Coords[3], int EffRank)
{
    if (EffRank == -1)
    {
#ifndef LANL_GENERICIO_NO_MPI
        MPI_Comm_rank(Comm, &EffRank);
#else
        EffRank = 0;
#endif
    }

    openAndReadHeader(MismatchAllowed, EffRank, false);
    assert(FH.getHeaderCache().size() && "HeaderCache must not be empty");

    GlobalHeader<IsBigEndian>* GH = (GlobalHeader<IsBigEndian>*)&FH.getHeaderCache()[0];
    size_t RankIndex = getRankIndex<IsBigEndian>(EffRank, GH, RankMap, FH.getHeaderCache());
    assert(RankIndex < GH->NRanks && "Invalid rank specified");
    RankHeader<IsBigEndian>* RH =
        (RankHeader<IsBigEndian>*)&FH.getHeaderCache()[GH->RanksStart + RankIndex * GH->RanksSize];

    for (int d = 0; d < 3; ++d)
        Coords[d] = static_cast<int>(RH->Coords[d]);
}
// Collective, user-facing full read: every registered variable is read in
// its entirety for EffRank (or from all source ranks when redistributing
// with EffRank == -1). I/O, CRC and decompression-CRC error counts are
// summed across ranks and a runtime_error is thrown (on every rank) if any
// rank saw an error; statistics are optionally printed on rank 0.
void GenericIO::readData(int EffRank, bool PrintStats, bool CollStats)
{
    (void)CollStats; // may be unused depending on preprocessor config.
    int Rank;
#ifndef LANL_GENERICIO_NO_MPI
    MPI_Comm_rank(Comm, &Rank);
#else
    Rank = 0;
#endif
    uint64_t TotalReadSize = 0;
#ifndef LANL_GENERICIO_NO_MPI
    double StartTime = MPI_Wtime();
#else
    double StartTime = double(clock()) / CLOCKS_PER_SEC;
#endif
    int NErrs[3] = { 0, 0, 0 };
    // While redistributing, pull rows from each source rank in turn,
    // appending them via RowOffset; collective error checking is disabled
    // around the per-source reads.
    if (EffRank == -1 && Redistributing)
    {
        DisableCollErrChecking = true;
        size_t RowOffset = 0;
        for (size_t i = 0, ie = SourceRanks.size(); i != ie; ++i)
        {
            readData(SourceRanks[i], RowOffset, Rank, TotalReadSize, NErrs);
            RowOffset += readNumElems(SourceRanks[i]);
        }
        DisableCollErrChecking = false;
    }
    else
    {
        readData(EffRank, 0, Rank, TotalReadSize, NErrs);
    }
    // Sum the per-rank error counters so that every rank throws together.
    int AllNErrs[3];
#ifndef LANL_GENERICIO_NO_MPI
    MPI_Allreduce(NErrs, AllNErrs, 3, MPI_INT, MPI_SUM, Comm);
#else
    AllNErrs[0] = NErrs[0];
    AllNErrs[1] = NErrs[1];
    AllNErrs[2] = NErrs[2];
#endif
    if (AllNErrs[0] > 0 || AllNErrs[1] > 0 || AllNErrs[2] > 0)
    {
        stringstream ss;
        ss << "Experienced " << AllNErrs[0] << " I/O error(s), " << AllNErrs[1] << " CRC error(s) and "
           << AllNErrs[2] << " decompression CRC error(s) reading: " << OpenFileName;
        throw runtime_error(ss.str());
    }
#ifndef LANL_GENERICIO_NO_MPI
    MPI_Barrier(Comm);
#endif
#ifndef LANL_GENERICIO_NO_MPI
    double EndTime = MPI_Wtime();
#else
    double EndTime = double(clock()) / CLOCKS_PER_SEC;
#endif
    double TotalTime = EndTime - StartTime;
    double MaxTotalTime;
    // NOTE: the "else" below deliberately pairs with the assignment after
    // the #endif; in the no-MPI build the assignment is unconditional.
#ifndef LANL_GENERICIO_NO_MPI
    if (CollStats)
        MPI_Reduce(&TotalTime, &MaxTotalTime, 1, MPI_DOUBLE, MPI_MAX, 0, Comm);
    else
#endif
        MaxTotalTime = TotalTime;
    uint64_t AllTotalReadSize;
#ifndef LANL_GENERICIO_NO_MPI
    if (CollStats)
        MPI_Reduce(&TotalReadSize, &AllTotalReadSize, 1, MPI_UINT64_T, MPI_SUM, 0, Comm);
    else
#endif
        AllTotalReadSize = TotalReadSize;
    // With CollStats, the reduced values are only valid on rank 0 — the only
    // rank that prints.
    if (Rank == 0 && PrintStats)
    {
        double Rate = ((double)AllTotalReadSize) / MaxTotalTime / (1024. * 1024.);
        cout << "Read " << Vars.size() << " variables from " << FileName << " (" << AllTotalReadSize
             << " bytes) in " << MaxTotalTime << "s: " << Rate << " MB/s [excluding header read]"
             << endl;
    }
}
// Endianness dispatch for the low-level per-rank reader.
void GenericIO::readData(
    int EffRank, size_t RowOffset, int Rank, uint64_t& TotalReadSize, int NErrs[3])
{
    const bool FileIsBE = FH.isBigEndian();
    if (!FileIsBE)
        readData<false>(EffRank, RowOffset, Rank, TotalReadSize, NErrs);
    else
        readData<true>(EffRank, RowOffset, Rank, TotalReadSize, NErrs);
}
// Note: Errors from this function should be recoverable. This means that if
// one rank throws an exception, then all ranks should.
//
// Low-level per-rank reader: for each registered variable, locate its entry
// in the file's variable table, validate size/type flags, read the whole
// data block (with environment-tunable retries), verify the trailing CRC,
// decompress if a blosc filter is present, and byte-swap into host
// endianness. Errors are counted in NErrs (I/O, CRC, decompression CRC)
// rather than thrown, so the public wrapper can aggregate them collectively.
template <bool IsBigEndian>
void GenericIO::readData(
    int EffRank, size_t RowOffset, int Rank, uint64_t& TotalReadSize, int NErrs[3])
{
    openAndReadHeader(Redistributing ? MismatchRedistribute : MismatchAllowed, EffRank, false);
    assert(FH.getHeaderCache().size() && "HeaderCache must not be empty");
    if (EffRank == -1)
        EffRank = Rank;
    GlobalHeader<IsBigEndian>* GH = (GlobalHeader<IsBigEndian>*)&FH.getHeaderCache()[0];
    size_t RankIndex = getRankIndex<IsBigEndian>(EffRank, GH, RankMap, FH.getHeaderCache());
    assert(RankIndex < GH->NRanks && "Invalid rank specified");
    RankHeader<IsBigEndian>* RH =
        (RankHeader<IsBigEndian>*)&FH.getHeaderCache()[GH->RanksStart + RankIndex * GH->RanksSize];
    for (size_t i = 0; i < Vars.size(); ++i)
    {
        uint64_t Offset = RH->Start;
        bool VarFound = false;
        for (uint64_t j = 0; j < GH->NVars; ++j)
        {
            VariableHeader<IsBigEndian>* VH =
                (VariableHeader<IsBigEndian>*)&FH.getHeaderCache()[GH->VarsStart + j * GH->VarsSize];
            // The on-disk name field is fixed-width; trim at the first NUL.
            string VName(VH->Name, VH->Name + NameSize);
            size_t VNameNull = VName.find('\0');
            if (VNameNull < NameSize)
                VName.resize(VNameNull);
            uint64_t ReadSize = RH->NElems * VH->Size + CRCSize;
            if (VName != Vars[i].Name)
            {
                // Not the variable we want; skip over its data (+CRC).
                Offset += ReadSize;
                continue;
            }
            VarFound = true;
            bool IsFloat = (VH->Flags & FloatValue) != 0, IsSigned = (VH->Flags & SignedValue) != 0;
            if (VH->Size != Vars[i].Size)
            {
                stringstream ss;
                ss << "Size mismatch for variable " << Vars[i].Name << " in: " << OpenFileName
                   << ": current: " << Vars[i].Size << ", file: " << VH->Size;
                throw runtime_error(ss.str());
            }
            else if (IsFloat != Vars[i].IsFloat)
            {
                string Float("float"), Int("integer");
                stringstream ss;
                ss << "Type mismatch for variable " << Vars[i].Name << " in: " << OpenFileName
                   << ": current: " << (Vars[i].IsFloat ? Float : Int)
                   << ", file: " << (IsFloat ? Float : Int);
                throw runtime_error(ss.str());
            }
            else if (IsSigned != Vars[i].IsSigned)
            {
                string Signed("signed"), Uns("unsigned");
                stringstream ss;
                ss << "Type mismatch for variable " << Vars[i].Name << " in: " << OpenFileName
                   << ": current: " << (Vars[i].IsSigned ? Signed : Uns)
                   << ", file: " << (IsSigned ? Signed : Uns);
                throw runtime_error(ss.str());
            }
            size_t VarOffset = RowOffset * Vars[i].Size;
            void* VarData = ((char*)Vars[i].Data) + VarOffset;
            vector<unsigned char> LData;
            void* Data = VarData;
            bool HasExtraSpace = Vars[i].HasExtraSpace;
            // Newer files carry per-(rank, variable) block headers with their
            // own size/offset and an optional filter (compression) name.
            if (offsetof_safe(GH, BlocksStart) < GH->GlobalHeaderSize && GH->BlocksSize > 0)
            {
                BlockHeader<IsBigEndian>* BH =
                    (BlockHeader<IsBigEndian>*)&FH
                        .getHeaderCache()[GH->BlocksStart + (RankIndex * GH->NVars + j) * GH->BlocksSize];
                ReadSize = BH->Size + CRCSize;
                Offset = BH->Start;
                if (strncmp(BH->Filters[0], CompressName, FilterNameSize) == 0)
                {
                    // Compressed: read into a local buffer; decompress below.
                    LData.resize(ReadSize);
                    Data = &LData[0];
                    HasExtraSpace = true;
                }
                else if (BH->Filters[0][0] != '\0')
                {
                    stringstream ss;
                    ss << "Unknown filter \"" << BH->Filters[0] << "\" on variable " << Vars[i].Name;
                    throw runtime_error(ss.str());
                }
            }
            assert(HasExtraSpace && "Extra space required for reading");
            // The CRC trailer is read into the caller's "extra space"; save
            // those bytes so they can be restored after verification.
            char CRCSave[CRCSize];
            char* CRCLoc = ((char*)Data) + ReadSize - CRCSize;
            if (HasExtraSpace)
                std::copy(CRCLoc, CRCLoc + CRCSize, CRCSave);
            int Retry = 0;
            {
                // Retry transient I/O failures; tunable via the environment.
                int RetryCount = 300;
                const char* EnvStr = getenv("GENERICIO_RETRY_COUNT");
                if (EnvStr)
                    RetryCount = atoi(EnvStr);
                int RetrySleep = 100; // ms
                EnvStr = getenv("GENERICIO_RETRY_SLEEP");
                if (EnvStr)
                    RetrySleep = atoi(EnvStr);
                for (; Retry < RetryCount; ++Retry)
                {
                    try
                    {
                        FH.get()->read(Data, ReadSize, static_cast<off_t>(Offset), Vars[i].Name);
                        break;
                    }
                    catch (...)
                    {
                    }
                    usleep(1000 * RetrySleep);
                }
                if (Retry == RetryCount)
                {
                    // Every attempt failed: count an I/O error and abandon
                    // the variable scan (break leaves the j-loop).
                    ++NErrs[0];
                    break;
                }
                else if (Retry > 0)
                {
                    EnvStr = getenv("GENERICIO_VERBOSE");
                    if (EnvStr)
                    {
                        int Mod = atoi(EnvStr);
                        if (Mod > 0)
                        {
                            int RankTmp;
#ifndef LANL_GENERICIO_NO_MPI
                            MPI_Comm_rank(MPI_COMM_WORLD, &RankTmp);
#else
                            RankTmp = 0;
#endif
                            std::cerr << "Rank " << RankTmp << ": " << Retry
                                      << " I/O retries were necessary for reading " << Vars[i].Name
                                      << " from: " << OpenFileName << "\n";
                            std::cerr.flush();
                        }
                    }
                }
            }
            TotalReadSize += ReadSize;
            // The block ends in an inverted CRC64 (see crc64_invert below),
            // so a checksum over payload + trailer must come out as all-ones.
            uint64_t CRC = crc64_omp(Data, ReadSize);
            if (CRC != (uint64_t)-1)
            {
                // CRC mismatch: count it, then dump the buffer and a report
                // to gio_crc_errors/ for offline diagnosis.
                ++NErrs[1];
                int RankTmp;
#ifndef LANL_GENERICIO_NO_MPI
                MPI_Comm_rank(MPI_COMM_WORLD, &RankTmp);
#else
                RankTmp = 0;
#endif
                // All ranks will do this and have a good time!
                string dn = "gio_crc_errors";
                mkdir(dn.c_str(), 0777);
                srand(static_cast<unsigned int>(time(0)));
                int DumpNum = rand();
                stringstream ssd;
                ssd << dn << "/gio_crc_error_dump." << RankTmp << "." << DumpNum << ".bin";
                stringstream ss;
                ss << dn << "/gio_crc_error_log." << RankTmp << ".txt";
                ofstream ofs(ss.str().c_str(), ofstream::out | ofstream::app);
                ofs << "On-Disk CRC Error Report:\n";
                ofs << "Variable: " << Vars[i].Name << "\n";
                ofs << "File: " << OpenFileName << "\n";
                ofs << "I/O Retries: " << Retry << "\n";
                ofs << "Size: " << ReadSize << " bytes\n";
                ofs << "Offset: " << Offset << " bytes\n";
                ofs << "CRC: " << CRC << " (expected is -1)\n";
                ofs << "Dump file: " << ssd.str() << "\n";
                ofs << "\n";
                ofs.close();
                ofstream dofs(ssd.str().c_str(), ofstream::out);
                dofs.write((const char*)Data, ReadSize);
                dofs.close();
                // Recompute and re-append the trailer over the payload only
                // (diagnostic; leaves the buffer internally consistent).
                uint64_t RawCRC = crc64_omp(Data, ReadSize - CRCSize);
                unsigned char* UData = (unsigned char*)Data;
                crc64_invert(RawCRC, &UData[ReadSize - CRCSize]);
#if 1
                crc64_omp(Data, ReadSize);
#else // Commenting because NewCRC cannot == -1 (uint64) and this is debugging code.
                // uint64_t NewCRC = crc64_omp(Data, ReadSize);
                // std::cerr << "Recalculated CRC: " << NewCRC << ((NewCRC == -1) ? "ok" : "bad") << "\n";
#endif
                break;
            }
            // Restore the caller's bytes that were overwritten by the trailer.
            if (HasExtraSpace)
                std::copy(CRCSave, CRCSave + CRCSize, CRCLoc);
            // Decompress into the user buffer and verify the CRC of the
            // original (uncompressed) data from the compression header.
            if (LData.size())
            {
                CompressHeader<IsBigEndian>* CH = (CompressHeader<IsBigEndian>*)&LData[0];
#ifndef LANL_GENERICIO_NO_COMPRESSION
#ifdef _OPENMP
#pragma omp master
                {
#endif
                    if (!blosc_initialized)
                    {
                        blosc_init();
                        blosc_initialized = true;
                    }
#ifdef _OPENMP
                    blosc_set_nthreads(omp_get_max_threads());
                }
#endif
                blosc_decompress(
                    &LData[0] + sizeof(CompressHeader<IsBigEndian>), VarData, Vars[i].Size * RH->NElems);
#endif // LANL_GENERICIO_NO_COMPRESSION
                if (CH->OrigCRC != crc64_omp(VarData, Vars[i].Size * RH->NElems))
                {
                    ++NErrs[2];
                    break;
                }
            }
            // Byte swap the data if necessary.
            if (IsBigEndian != isBigEndian())
                for (size_t k = 0; k < RH->NElems; ++k)
                {
                    char* OffsetTmp = ((char*)VarData) + k * Vars[i].Size;
                    bswap(OffsetTmp, Vars[i].Size);
                }
            break;
        }
        if (!VarFound)
            throw runtime_error("Variable " + Vars[i].Name + " not found in: " + OpenFileName);
        // This is for debugging.
        if (NErrs[0] || NErrs[1] || NErrs[2])
        {
            const char* EnvStr = getenv("GENERICIO_VERBOSE");
            if (EnvStr)
            {
                int Mod = atoi(EnvStr);
                if (Mod > 0)
                {
                    int RankTmp;
#ifndef LANL_GENERICIO_NO_MPI
                    MPI_Comm_rank(MPI_COMM_WORLD, &RankTmp);
#else
                    RankTmp = 0;
#endif
                    std::cerr << "Rank " << RankTmp << ": " << NErrs[0] << " I/O error(s), " << NErrs[1]
                              << " CRC error(s) and " << NErrs[2]
                              << " decompression CRC error(s) reading: " << Vars[i].Name
                              << " from: " << OpenFileName << "\n";
                    std::cerr.flush();
                }
            }
        }
        // Stop at the first variable that failed; the caller aggregates.
        if (NErrs[0] || NErrs[1] || NErrs[2])
            break;
    }
}
// Endianness dispatch for the variable-table decoder.
void GenericIO::getVariableInfo(vector<VariableInfo>& VI)
{
    const bool FileIsBE = FH.isBigEndian();
    if (FileIsBE)
        getVariableInfo<true>(VI);
    else
        getVariableInfo<false>(VI);
}
// Decode the cached variable table into VI. Each entry carries the
// variable's name, element size, and the flag bits describing its type and
// physical-coordinate role.
template <bool IsBigEndian>
void GenericIO::getVariableInfo(vector<VariableInfo>& VI)
{
    assert(FH.getHeaderCache().size() && "HeaderCache must not be empty");
    GlobalHeader<IsBigEndian>* GH = (GlobalHeader<IsBigEndian>*)&FH.getHeaderCache()[0];

    for (uint64_t Idx = 0; Idx < GH->NVars; ++Idx)
    {
        VariableHeader<IsBigEndian>* VH =
            (VariableHeader<IsBigEndian>*)&FH.getHeaderCache()[GH->VarsStart + Idx * GH->VarsSize];

        // The on-disk name field is fixed-width; trim at the first NUL.
        string Name(VH->Name, VH->Name + NameSize);
        size_t NullPos = Name.find('\0');
        if (NullPos < NameSize)
            Name.resize(NullPos);

        uint64_t Flags = VH->Flags;
        VI.push_back(VariableInfo(Name, (size_t)VH->Size,
            (Flags & FloatValue) != 0,
            (Flags & SignedValue) != 0,
            (Flags & ValueIsPhysCoordX) != 0,
            (Flags & ValueIsPhysCoordY) != 0,
            (Flags & ValueIsPhysCoordZ) != 0,
            (Flags & ValueMaybePhysGhost) != 0));
    }
}
// Choose a default file partition id for this process. On Blue Gene/Q the
// I/O link id is used directly; otherwise a one-byte sum of the MPI
// processor name groups ranks (roughly by node) into ~256 partitions.
// GENERICIO_PARTITIONS_USE_NAME=0 disables the name heuristic;
// GENERICIO_RANK_PARTITIONS=N fans ranks out across N extra partitions
// (debugging aid). No-op in non-MPI, non-BG/Q builds.
void GenericIO::setNaturalDefaultPartition()
{
#ifdef __bgq__
    DefaultPartition = MPIX_IO_link_id();
#else
#ifndef LANL_GENERICIO_NO_MPI
    bool UseName = true;
    const char* EnvStr = getenv("GENERICIO_PARTITIONS_USE_NAME");
    if (EnvStr)
    {
        int Mod = atoi(EnvStr);
        UseName = (Mod != 0);
    }
    if (UseName)
    {
        // This is a heuristic to generate ~256 partitions based on the
        // names of the nodes.
        char Name[MPI_MAX_PROCESSOR_NAME];
        int Len = 0;
        MPI_Get_processor_name(Name, &Len);
        // unsigned char arithmetic wraps mod 256, bounding the partition id.
        unsigned char color = 0;
        for (int i = 0; i < Len; ++i)
            color += (unsigned char)Name[i];
        DefaultPartition = color;
    }
    // This is for debugging.
    EnvStr = getenv("GENERICIO_RANK_PARTITIONS");
    if (EnvStr)
    {
        int Mod = atoi(EnvStr);
        if (Mod > 0)
        {
            int Rank;
            MPI_Comm_rank(MPI_COMM_WORLD, &Rank);
            DefaultPartition += Rank % Mod;
        }
    }
#endif
#endif
}
} /* END namespace cosmotk */
} /* END namespace lanl */
|
i10416/SLInC | slinc/src/io/gitlab/mhammons/slinc/components/Linker.scala | <filename>slinc/src/io/gitlab/mhammons/slinc/components/Linker.scala
package io.gitlab.mhammons.slinc.components

import jdk.incubator.foreign.CLinker

/** Holds the process-wide [[CLinker]] instance used for native bindings. */
object Linker:
   val linker: CLinker = CLinker.getInstance
|
ClammyMantis488/Blockus | src/main/java/com/brand/blockus/content/WaterBricks.java | <filename>src/main/java/com/brand/blockus/content/WaterBricks.java
package com.brand.blockus.content;
import com.brand.blockus.blocks.Water.WaterBlockBase;
import com.brand.blockus.blocks.Water.WaterSlabBase;
import com.brand.blockus.blocks.Water.WaterStairsBase;
import com.brand.blockus.blocks.Water.WaterWallBase;
/**
 * Registry for the water-brick block family: the base block plus its wall,
 * stairs, slab and chiseled variants. {@link #init()} must run before any
 * of the public static fields are read.
 */
public class WaterBricks {
    public static WaterBlockBase WATER_BRICKS;
    public static WaterWallBase WATER_BRICKS_WALL;
    public static WaterStairsBase WATER_BRICKS_STAIRS;
    public static WaterSlabBase WATER_BRICKS_SLAB;
    public static WaterBlockBase CHISELED_WATER_BRICKS;

    /**
     * Instantiates every water-brick variant. {@code WATER_BRICKS} is
     * created first because the stairs variant is built from its default
     * state — keep that ordering if this method is ever modified.
     * The two float arguments are presumably hardness/resistance —
     * TODO(review): confirm against the {@code *Base} constructors.
     */
    public static void init() {
        WATER_BRICKS = new WaterBlockBase("water_bricks", 1.5f, 6.0f);
        WATER_BRICKS_WALL = new WaterWallBase("water_bricks_wall", 1.5f, 6.0f);
        WATER_BRICKS_STAIRS = new WaterStairsBase(WATER_BRICKS.getDefaultState(), "water_bricks_stairs", 1.5f, 6.0f);
        WATER_BRICKS_SLAB = new WaterSlabBase("water_bricks_slab", 2.0f, 6.0f);
        CHISELED_WATER_BRICKS = new WaterBlockBase("chiseled_water_bricks", 1.5f, 6.0f);
    }
}
|
magagnon/clio_client | lib/clio_client/support/custom_field_date_value.rb | <gh_stars>1-10
module ClioClient
  # A custom field value whose +value+ attribute is typed as a date.
  class CustomFieldDateValue < CustomFieldValue
    # Register :value as a :date-typed attribute on the class-level
    # attributes table. NOTE(review): self.attributes appears to come from
    # (and may be shared with) CustomFieldValue — confirm that mutating it
    # here does not affect sibling subclasses.
    self.attributes[:value] = {type: :date}
  end
end
|
baruchsiach/musdk-marvell | apps/include/utils.h | /*******************************************************************************
* Copyright (C) Marvell International Ltd. and its affiliates
*
* This software file (the "File") is owned and distributed by Marvell
* International Ltd. and/or its affiliates ("Marvell") under the following
* alternative licensing terms. Once you have made an election to distribute the
* File under one of the following license alternatives, please (i) delete this
* introductory statement regarding license alternatives, (ii) delete the three
* license alternatives that you have not elected to use and (iii) preserve the
* Marvell copyright notice above.
*
********************************************************************************
* Marvell Commercial License Option
*
* If you received this File from Marvell and you have entered into a commercial
* license agreement (a "Commercial License") with Marvell, the File is licensed
* to you under the terms of the applicable Commercial License.
*
********************************************************************************
* Marvell GPL License Option
*
* If you received this File from Marvell, you may opt to use, redistribute and/or
* modify this File in accordance with the terms and conditions of the General
* Public License Version 2, June 1991 (the "GPL License"), a copy of which is
* available along with the File in the license.txt file or by writing to the Free
* Software Foundation, Inc., or on the worldwide web at http://www.gnu.org/licenses/gpl.txt.
*
* THE FILE IS DISTRIBUTED AS-IS, WITHOUT WARRANTY OF ANY KIND, AND THE IMPLIED
* WARRANTIES OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE ARE EXPRESSLY
* DISCLAIMED. The GPL License provides additional details about this warranty
* disclaimer.
*
********************************************************************************
* Marvell GNU General Public License FreeRTOS Exception
*
* If you received this File from Marvell, you may opt to use, redistribute and/or
* modify this File in accordance with the terms and conditions of the Lesser
* General Public License Version 2.1 plus the following FreeRTOS exception.
* An independent module is a module which is not derived from or based on
* FreeRTOS.
* Clause 1:
* Linking FreeRTOS statically or dynamically with other modules is making a
* combined work based on FreeRTOS. Thus, the terms and conditions of the GNU
* General Public License cover the whole combination.
* As a special exception, the copyright holder of FreeRTOS gives you permission
* to link FreeRTOS with independent modules that communicate with FreeRTOS solely
* through the FreeRTOS API interface, regardless of the license terms of these
* independent modules, and to copy and distribute the resulting combined work
* under terms of your choice, provided that:
* 1. Every copy of the combined work is accompanied by a written statement that
* details to the recipient the version of FreeRTOS used and an offer by yourself
* to provide the FreeRTOS source code (including any modifications you may have
* made) should the recipient request it.
* 2. The combined work is not itself an RTOS, scheduler, kernel or related
* product.
* 3. The independent modules add significant and primary functionality to
* FreeRTOS and do not merely extend the existing functionality already present in
* FreeRTOS.
* Clause 2:
* FreeRTOS may not be used for any competitive or comparative purpose, including
* the publication of any form of run time or compile time metric, without the
* express permission of Real Time Engineers Ltd. (this is the norm within the
* industry and is intended to ensure information accuracy).
*
********************************************************************************
* Marvell BSD License Option
*
* If you received this File from Marvell, you may opt to use, redistribute and/or
* modify this File under the following licensing terms.
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of Marvell nor the names of its contributors may be
* used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*******************************************************************************/
#ifndef __MVUTILS_H__
#define __MVUTILS_H__
#include <stdbool.h>
#include <sys/sysinfo.h>
#include "mv_std.h"
#include "mv_net.h"
/* CA-72 prefetch command */
/*
 * prefetch() - hint the CPU to pull @ptr's cache line toward L1.
 *
 * The 64-bit (AArch64) build issues PRFM PLDL1KEEP; the 32-bit build
 * issues PLD. Both are hints only, with no architectural side effects.
 */
#if __WORDSIZE == 64
static inline void prefetch(const void *ptr)
{
	asm volatile("prfm pldl1keep, %a0\n" : : "p" (ptr));
}
#else
static inline void prefetch(const void *ptr)
{
	__asm__ __volatile__("pld\t%a0" : : "p" (ptr));
}
#endif
#define MVAPPS_INVALID_COOKIE_HIGH_BITS (~0)
/** Get rid of path in filename - only for unix-type paths using '/' */
#define MVAPPS_NO_PATH(file_name) (strrchr((file_name), '/') ? \
			strrchr((file_name), '/') + 1 : (file_name))
/* Maximum size of port name */
#define MVAPPS_PPIO_NAME_MAX 20
#define MV_MAX_BUF_STR_LEN 256
#define MVAPPS_MAX_BURST_SIZE 256
/* Maximum number of CPU cores used by application */
#define MVAPPS_NUM_CORES_PER_AP 8
#define MVAPPS_MAX_NUM_AP 2
#define MVAPPS_MAX_NUM_CORES (MVAPPS_MAX_NUM_AP*MVAPPS_NUM_CORES_PER_AP)
#define MVAPPS_MAX_MEM_REGIONS 2
/* Defaults and sentinel ("invalid") values for the CLI/affinity options. */
#define MVAPPS_DEFAULT_AFFINITY 1
#define MVAPPS_INVALID_AFFINITY -1
#define MVAPPS_DEFAULT_CORE_LOAD 100
#define MVAPPS_INVALID_MEMREGIONS -1
/* Special packet-size selectors (negative values are not literal sizes). */
#define MVAPPS_PKT_SIZE_INC (-1)
#define MVAPPS_PKT_SIZE_RAND (-2)
#define MVAPPS_PKT_SIZE_IMIX (-3)
#define MVAPPS_PLD_MIN_SIZE 8
#define MVAPPS_PLD_WATERMARK 0xfefafefa
/* JSON Serialization definitions */
#define SER_FILE_VAR_DIR "/tmp/"
#define SER_FILE_NAME_PREFIX "musdk-serial-cfg"
#define SER_MAX_FILE_NAME 64
#define SER_MAX_FILE_SIZE (30 * 1024)
/* Default MTU */
#define DEFAULT_MTU 1500
/* VLAN header length */
#define VLAN_HLEN 4
/* Ethernet header length */
#define ETH_HLEN 14
/* Ethernet address length */
#define ETH_ALEN 6
/* Ethernet FCS length */
#define ETH_FCS_LEN 4
/* Ethernet IPG length */
#define ETH_IPG_LEN 20
/* IP version 4 */
#define IP_VERSION_4 4
/* IPv6 version */
#define IP_VERSION_6 6
/* Returns IPv4 version */
#define IPV4_HDR_VER(ver_ihl) (((ver_ihl) & 0xf0) >> 4)
/* IPv6 address length in bytes */
#define IPV6_ADDR_LEN 16
#define IPV4_HDR_LEN 20
#define UDP_HDR_LEN 8
/* Packet Header defines */
#define IP_PROTOCOL_TCP 0x06
#define IP_PROTOCOL_UDP 0x11
/* Macro to convert MTU to MRU */
#define MVAPPS_MTU_TO_MRU(mtu) \
	((mtu) + MV_MH_SIZE + VLAN_HLEN + \
	ETH_HLEN + ETH_FCS_LEN)
/* Macro to convert MRU to MTU */
#define MVAPPS_MRU_TO_MTU(mru) \
	((mru) - MV_MH_SIZE - VLAN_HLEN - \
	ETH_HLEN - ETH_FCS_LEN)
/* GNU flavor of num_cpus */
#define system_ncpus() get_nprocs()
/* High-order bits OR'ed onto buffer cookies; see app_get/set_high_addr(). */
extern uintptr_t cookie_high_bits;
/*
 * Operational mode of the application: stand-alone single process,
 * master/guest process pairs, and the NMP-based variants of the latter.
 */
enum pp2_op_mode_type {
	PP2_OP_MODE_SINGLE_PROCESS = 0, /* App operational mode is single process, only one process allowed*/
	PP2_OP_MODE_MASTER, /* App operational mode is master */
	PP2_OP_MODE_GUEST, /* App operational mode is guest */
	PP2_OP_MODE_NMP_MASTER, /* App operational mode is master and NMP is started*/
	PP2_OP_MODE_NMP_GUEST /* App operational mode is NMP guest */
};
/*
 * Common application arguments/state kept once per process. largs[] links
 * to each worker's per-thread block (struct local_arg, declared elsewhere);
 * per-thread copies of the shared fields live in struct local_common_args.
 */
struct glb_common_args {
	u64 cores_mask;
	u64 qs_map;
	int prefetch_shift;
	int num_ports;
	int echo;
	u16 burst;
	int cpus;
	int affinity;
	int num_cpu_hash_qs;
	int verbose;
	int cli;
	int (*cli_unregister_cb)(void *); /* callback to tear down CLI registration */
	int qs_map_shift;
	u16 pkt_offset; /* Zero maintains default pkt_offset */
	u16 mtu;
	int ctrl_thresh;
	struct timeval ctrl_trd_last_time; /* last time the control thread ran */
	u64 last_rx_cnt;
	u64 last_tx_cnt;
	u64 last_enc_cnt;
	u64 last_dec_cnt;
	u32 busy_wait;
	pthread_mutex_t thread_lock; /* General Purpose Lock, not intended for data-path */
	bool shared_hifs; /* Indicates system has shared hifs. */
	struct local_arg *largs[MVAPPS_MAX_NUM_CORES];
	void *plat; /* platform-specific argument block */
	u32 op_mode; /* enum pp2_op_mode_type */
	u32 guest_id;
	char nmp_cfg_location[SER_MAX_FILE_NAME];
	struct mv_sys_dma_mem_region *mem_region[MVAPPS_MAX_NUM_AP];
	int num_mem_regions;
	int num_clusters;
	int port_forwarding;
	int min_sg_frag;
	int max_sg_frag;
	int core_load;
};
/* A single buffer descriptor: virtual/physical address pair plus length. */
struct buffer_desc {
	void *virt_addr;
	dma_addr_t phy_addr;
	u16 size;
	u16 res; /* NOTE(review): presumably reserved/padding — confirm */
};
/* An IPv4 address range with an associated L4 port range.
 * NOTE(review): curr/port_curr presumably track the current position when
 * sweeping the range — confirm with users of this struct. */
struct ip_range {
	u32 start, end, curr; /* same as struct in_addr */
	u16 port0, port1, port_curr;
};
/* An inclusive range of Ethernet MAC addresses. */
struct mac_range {
	eth_addr_t start;
	eth_addr_t end;
};
/* Packet/operation counters (rx/tx/encrypt/decrypt/drop). */
struct perf_cmn_cntrs {
	u64 rx_cnt;
	u64 tx_cnt;
	u64 enc_cnt;
	u64 dec_cnt;
	u64 drop_cnt;
};
/* Per-thread copy of the common arguments (cf. struct glb_common_args). */
struct local_common_args {
	u64 qs_map;
	int prefetch_shift;
	int num_ports;
	int id; /* this worker's index */
	int echo;
	u16 burst;
	int verbose;
	u32 busy_wait;
	struct perf_cmn_cntrs perf_cntrs;
	struct glob_arg *garg; /* back-pointer to the global argument block */
	void *plat;
	int min_sg_frag;
	int max_sg_frag;
};
/*
 * Swap source and destination MAC addresses
 *
 * The destination MAC occupies bytes 0-5 of the Ethernet header and the
 * source MAC bytes 6-11; exchange the two 6-byte fields in place.
 */
static inline void swap_l2(char *buf)
{
	int i;

	for (i = 0; i < 6; i++) {
		char tmp = buf[i];

		buf[i] = buf[i + 6];
		buf[i + 6] = tmp;
	}
}
/*
 * Swap source and destination IPv4 addresses, in place. 'buf' points
 * at the start of the Ethernet header; the addresses sit at fixed
 * offsets 26 and 30 (14-byte Ethernet header + 12 bytes into the IPv4
 * header), i.e. no VLAN tag and no IP options are accounted for.
 */
static inline void swap_l3(char *buf)
{
	uint32_t *addrs = (uint32_t *)(buf + 14 + 12);
	register u32 hold = addrs[0];

	addrs[0] = addrs[1];
	addrs[1] = hold;
}
/*
 * app_get_line()
 *
 * Read one line from stdin into 'buff' (printing the optional prompt
 * 'prmpt' first) and tokenize it on spaces into argv[]/argc for a
 * subsequent getopt() pass.
 *
 * Returns 0 on success, -EINVAL on EOF/read failure, and -EFAULT when
 * the input exceeded 'sz' - 1 characters (the excess is drained from
 * stdin so it cannot leak into the next call).
 */
static inline int app_get_line(char *prmpt, char *buff, size_t sz, int *argc, char *argv[])
{
	int ch, extra;
	char *p2;

	/* because getopt starts parsing at argument 1 we skip argument zero */
	*argc = 1;

	/* Get line with buffer overrun protection */
	if (prmpt) {
		printf("%s", prmpt);
		fflush(stdout);
	}
	if (!fgets(buff, sz, stdin))
		return -EINVAL;

	/*
	 * if it was too long, there'll be no newline. In that case, we flush
	 * to end of line so that excess doesn't affect the next call.
	 */
	if (buff[strlen(buff) - 1] != '\n') {
		extra = 0;
		while (((ch = getchar()) != '\n') && (ch != EOF))
			extra = 1;
		return (extra == 1) ? -EFAULT : 0;
	}

	/* otherwise remove newline and give string back to caller */
	buff[strlen(buff) - 1] = '\0';
	pr_info("input: %s\n", buff);
	/* NOTE(review): unconditional 1s pause after every CLI line - presumably
	 * for log readability; confirm it is intentional. */
	sleep(1);

	p2 = strtok(buff, " ");
	/*
	 * NOTE(review): the bound below reuses 'sz' (the byte capacity of
	 * 'buff') as the capacity of argv[]; callers must provide an argv[]
	 * with at least that many slots or risk an overrun - confirm.
	 */
	while (p2 && *argc < sz - 1) {
		argv[(*argc)++] = p2;
		p2 = strtok(NULL, " ");
	}
	argv[*argc] = NULL;

	return 0;
}
/* Return the process-wide high address bits stored by app_set_high_addr() */
static inline uintptr_t app_get_high_addr(void)
{
	return cookie_high_bits;
}
/* Record the process-wide high address bits in cookie_high_bits (declared above) */
static inline void app_set_high_addr(uintptr_t high_addr)
{
	cookie_high_bits = high_addr;
}
int apps_perf_dump(struct glb_common_args *cmn_args);
int app_ctrl_cb(void *arg);
int apps_prefetch_cmd_cb(void *arg, int argc, char *argv[]);
void app_print_horizontal_line(u32 char_count, const char *char_val);
int apps_cores_mask_create(int cpus, int affinity);
int apps_thread_to_cpu(struct glb_common_args *cmn_args, int thread);
int apps_cpu_to_thread(struct glb_common_args *cmn_args, int cpu);
int app_parse_mac_address(char *buf, u8 *macaddr_parts);
int app_range_validate(int value, int min, int max);
int app_build_pkt_pool(void **mem,
struct buffer_desc *buffs,
u16 num_buffs,
u16 min_pkt_size,
u16 max_pkt_size,
int pkt_size,
struct ip_range *src_ipr,
struct ip_range *dst_ipr,
eth_addr_t src_mac,
eth_addr_t dst_mac
);
#endif /*__MVUTILS_H__*/
|
dbadia/sqrl-server-base | src/test/java/com/github/sqrlserverjava/MockSqrlHttpRequestBuilder.java | <reponame>dbadia/sqrl-server-base<gh_stars>1-10
package com.github.sqrlserverjava;
import java.net.URI;
import java.net.URISyntaxException;
import org.springframework.mock.web.MockHttpServletRequest;
/**
 * Fluent builder that assembles a Spring {@link MockHttpServletRequest}
 * representing a SQRL client request, for use in unit tests.
 */
public class MockSqrlHttpRequestBuilder {
	private final MockHttpServletRequest mockRequest = new MockHttpServletRequest();

	/**
	 * Creates a builder pre-populated with the scheme, host, port and path of
	 * the given SQRL request URI.
	 */
	public MockSqrlHttpRequestBuilder(URI uri) {
		mockRequest.setScheme(uri.getScheme());
		mockRequest.setServerName(uri.getHost());
		mockRequest.setServerPort(uri.getPort());
		mockRequest.setRequestURI(uri.getPath());
	}

	public MockSqrlHttpRequestBuilder(String sqrlRequestUrl) throws URISyntaxException {
		this(new URI(sqrlRequestUrl));
	}

	/** Sets the remote (client) IP address on the mocked request. */
	public MockSqrlHttpRequestBuilder fromIP(String ip) {
		mockRequest.setRemoteAddr(ip);
		return this;
	}

	/**
	 * @param mockDataParams
	 *            a URI string from the GRC client log such as
	 *            "client=123&server=456&ids=789"
	 */
	public MockSqrlHttpRequestBuilder withQueryParams(String mockDataParams) {
		for (final String nameValuePair : mockDataParams.split("&")) {
			// Split on the first '=' only: base64url SQRL payloads can contain
			// trailing '=' padding which must remain part of the value. The
			// previous split("=") both dropped such padding and threw
			// ArrayIndexOutOfBoundsException on a valueless parameter.
			final String[] parts = nameValuePair.split("=", 2);
			mockRequest.addParameter(parts[0], parts.length > 1 ? parts[1] : "");
		}
		return this;
	}

	/** @return the fully configured mock servlet request */
	public MockHttpServletRequest build() {
		return mockRequest;
	}
}
|
DSWhite96/SI506-2021Fall | lab_exercise_08/lab_exercise_08_solution.py | import json
import requests
# LAB EXERCISE 08
# SETUP CODE
ENDPOINT = 'https://swapi.py4e.com/api'
# END SETUP
# PROBLEM 01 (5 Points)
def get_swapi_resource(url, params=None, timeout=10):
    """Returns a response object decoded into a dictionary. If query string < params > are
    provided the response object body is returned in the form on an "envelope" with the data
    payload of one or more SWAPI entities to be found in ['results'] list; otherwise, response
    object body is returned as a single dictionary representation of the SWAPI entity.

    Parameters:
        url (str): a url that specifies the resource.
        params (dict): optional dictionary of querystring arguments.
        timeout (int): timeout value in seconds

    Returns:
        dict: dictionary representation of the decoded JSON.
    """
    # requests treats params=None (and an empty dict) as "no query string",
    # so a single call covers both branches of the previous if/else.
    return requests.get(url, params=params, timeout=timeout).json()
# END PROBLEM 01
# PROBLEM 02 (2 Points)
# Template record for Owen Lars; the None placeholders are filled in by
# main() from the SWAPI "people" payload plus the resolved species name.
lars = {
    'name': None,
    'hair_color': None,
    'eye_color': None,
    'species_name': None
}
# END PROBLEM 02
# PROBLEM 03 (5 Points)
def write_json(filepath, data, encoding='utf-8', ensure_ascii=False, indent=2):
    """Serialize < data > as JSON and write the result to < filepath >.

    Parameters:
        filepath (str): the path to the file
        data (dict)/(list): the data to be encoded as JSON and written to the file
        encoding (str): name of encoding used to encode the file
        ensure_ascii (str): if False non-ASCII characters are printed as is; otherwise
                            non-ASCII characters are escaped.
        indent (int): number of "pretty printed" indention spaces applied to encoded JSON

    Returns:
        None
    """
    # Encode first, then write: identical output to json.dump() streaming
    # directly into the file handle.
    encoded = json.dumps(data, ensure_ascii=ensure_ascii, indent=indent)
    with open(filepath, 'w', encoding=encoding) as out_file:
        out_file.write(encoded)
# END PROBLEM 03
# PROBLEM 04 (8 Points)
def main():
    """ This function calls <get_swapi_resource> function to retrieve SWAPI character data. Update values in < lars >.
    Then it calls <write_json> function to store data to a json file.

    Parameters:
        None

    Return:
        None
    """
    # Search the /people endpoint; the payload is an "envelope" whose
    # matches live in the 'results' list - take the first (best) match.
    lars_data = get_swapi_resource(ENDPOINT + '/people', {'search': 'owen lars'})
    lars_data = lars_data['results'][0]
    print(f"Owen Lars data = {lars_data}")

    # 'species' holds a list of resource URLs; resolve the first one and
    # keep only its human-readable name.
    lars_species = get_swapi_resource(lars_data['species'][0])
    lars_species = lars_species['name']
    lars_data['species_name'] = lars_species
    print(f"Owen Lars data w/Species = {lars_data}")

    # Copy across only the keys declared by the module-level < lars > template.
    for key in lars_data.keys():
        if key in lars.keys():
            lars[key] = lars_data[key]
    print(f"Owen Lars Dictionary = {lars}")

    # Persist the completed dictionary to a local JSON file.
    write_json("owen_lars.json", lars)
# END PROBLEM 04
# Run the exercise only when executed as a script (not when imported).
if __name__ == '__main__':
    main()
|
folio-org/mod-notes | src/main/java/org/folio/rest/exceptions/NoteExceptionHandlers.java | package org.folio.rest.exceptions;
import static org.apache.http.HttpStatus.SC_BAD_REQUEST;
import static org.apache.http.HttpStatus.SC_UNPROCESSABLE_ENTITY;
import static org.folio.common.pf.PartialFunctions.pf;
import static org.folio.rest.exc.ExceptionPredicates.instanceOf;
import javax.ws.rs.core.Response;
import org.folio.common.pf.PartialFunction;
import org.folio.rest.ResponseHelper;
import org.folio.rest.jaxrs.model.Errors;
import org.folio.rest.persist.cql.CQLQueryValidationException;
import org.folio.rest.tools.utils.ValidationHelper;
/**
 * Factory for {@link PartialFunction} exception handlers that translate
 * note-related exceptions into HTTP {@link Response} objects.
 * Stateless utility class.
 */
public class NoteExceptionHandlers {

	private NoteExceptionHandlers() {
		// static utility class - not instantiable
	}

	/**
	 * @return a handler that maps {@link InputValidationException} to a 422
	 *         (Unprocessable Entity) response carrying a validation error body
	 */
	public static PartialFunction<Throwable, Response> entityValidationHandler() {
		return pf(instanceOf(InputValidationException.class), throwable -> {
			final InputValidationException validationExc = (InputValidationException) throwable;
			final Errors errorBody = ValidationHelper.createValidationErrorMessage(
				validationExc.getField(), validationExc.getValue(), validationExc.getMessage());
			return ResponseHelper.statusWithJson(SC_UNPROCESSABLE_ENTITY, errorBody);
		});
	}

	/**
	 * @return a handler that maps {@link CQLQueryValidationException} to a 400
	 *         (Bad Request) response with a generic "Invalid query" message
	 */
	public static PartialFunction<Throwable, Response> cqlValidationHandler() {
		return pf(instanceOf(CQLQueryValidationException.class),
			throwable -> ResponseHelper.statusWithJson(SC_BAD_REQUEST, "Invalid query"));
	}
}
|
yizhe-ang/MMSceneGraph | mmdet/models/captioners/updown_captioner.py | <filename>mmdet/models/captioners/updown_captioner.py
# ---------------------------------------------------------------
# Updown_caption_head.py
# Set-up time: 2021/1/3 16:10
# Copyright (c) 2020 ICT
# Licensed under The MIT License [see LICENSE for details]
# Written by Kenneth-Wong (Wenbin-Wang) @ VIPL.ICT
# Contact: <EMAIL> [OR] <EMAIL>
# ---------------------------------------------------------------
from ..registry import CAPTIONERS
import torch
import torch.nn as nn
import torch.nn.functional as F
from .att_base_captioner import AttBaseCaptioner
from mmdet.models.captioners.utils import Attention
@CAPTIONERS.register_module
class UpDownCaptioner(AttBaseCaptioner):
    """Bottom-Up/Top-Down ("Up-Down") attention captioning decoder.

    Two stacked LSTM cells: the first ("attention") LSTM consumes the
    previous language-LSTM hidden state, a global visual feature and the
    current word embedding; its output queries the per-region attention
    module, and the attended feature drives the second ("language") LSTM.

    Config objects (head_config, word_embed_config, attention_feat_config)
    and layers such as < word_embed > are set up by AttBaseCaptioner
    (not visible in this file) - TODO confirm their exact contracts there.
    """

    def __init__(self, **kwargs):
        super(UpDownCaptioner, self).__init__(**kwargs)
        # Two LSTM cells => the recurrent state tracks two (h, c) pairs.
        self.num_layers = 2
        # First LSTM layer: input is [h2_{t-1}; global feat; word embedding]
        rnn_input_size = self.head_config.rnn_size + self.word_embed_config.word_embed_dim + self.att_dim
        self.lstm1 = nn.LSTMCell(rnn_input_size, self.head_config.rnn_size)
        # Second LSTM Layer: input is [attended feature; h1_t]
        self.lstm2 = nn.LSTMCell(self.head_config.rnn_size + self.att_dim, self.head_config.rnn_size)
        self.att = Attention(self.head_config, self.attention_feat_config)
        # Optional input dropout per LSTM; disabled (None) when the rate is 0.
        if self.head_config.dropout_first_input > 0:
            self.dropout1 = nn.Dropout(self.head_config.dropout_first_input)
        else:
            self.dropout1 = None
        if self.head_config.dropout_sec_input > 0:
            self.dropout2 = nn.Dropout(self.head_config.dropout_sec_input)
        else:
            self.dropout2 = None

    # state[0] -- h, state[1] -- c (each stacked over the two LSTM layers)
    def Forward(self, gv_feat, att_feats, att_mask, p_att_feats, state, wt):
        """Run one decoding step.

        Args (assumed shapes - TODO confirm against AttBaseCaptioner):
            gv_feat: global visual feature; a trailing dim of size 1 marks
                it "empty", in which case the mean of att_feats over dim 1
                is used instead.
            att_feats: per-region attention features, presumably (B, R, D).
            att_mask: validity mask over the R regions.
            p_att_feats: pre-projected attention features (attention keys).
            state: [h, c], each stacked over the two layers.
            wt: previous-step word indices.

        Returns:
            (h2_t, state): language-LSTM hidden state (used downstream for
            word prediction) and the updated recurrent state.

        NOTE(review): the capitalized name "Forward" (vs. nn.Module.forward)
        is presumably the step hook invoked by the base class - confirm.
        """
        if gv_feat.shape[-1] == 1:  # empty gv_feat placeholder
            gv_feat = torch.mean(att_feats, 1)
        xt = self.word_embed(wt)

        # lstm1 (attention LSTM): conditioned on the language LSTM's
        # previous hidden state h2_{t-1} (= last row of the stacked h).
        h2_tm1 = state[0][-1]
        input1 = torch.cat([h2_tm1, gv_feat, xt], 1)
        if self.dropout1 is not None:
            input1 = self.dropout1(input1)
        h1_t, c1_t = self.lstm1(input1, (state[0][0], state[1][0]))

        # Attend over regions using h1_t as the query.
        att = self.att(h1_t, att_feats, att_mask, p_att_feats)

        # lstm2 (language LSTM)
        input2 = torch.cat([att, h1_t], 1)
        if self.dropout2 is not None:
            input2 = self.dropout2(input2)
        h2_t, c2_t = self.lstm2(input2, (state[0][1], state[1][1]))

        state = [torch.stack([h1_t, h2_t]), torch.stack([c1_t, c2_t])]
        return h2_t, state
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.