repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
hfbin/LibraPlatform | libra-common/libra-common-mybatis/src/main/java/cn/hfbin/common/database/MybatisPlusConfig.java | /*
* Copyright [2021] [LibraPlatform of copyright huangfubin]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hfbin.common.database;
import cn.hfbin.common.core.constant.ConfigValueConstant;
import cn.hfbin.common.core.context.SpringContextUtils;
import cn.hutool.core.collection.CollectionUtil;
import com.baomidou.mybatisplus.annotation.DbType;
import com.baomidou.mybatisplus.core.injector.AbstractMethod;
import com.baomidou.mybatisplus.core.injector.DefaultSqlInjector;
import com.baomidou.mybatisplus.core.injector.ISqlInjector;
import com.baomidou.mybatisplus.extension.injector.methods.AlwaysUpdateSomeColumnById;
import com.baomidou.mybatisplus.extension.injector.methods.InsertBatchSomeColumn;
import com.baomidou.mybatisplus.extension.plugins.MybatisPlusInterceptor;
import com.baomidou.mybatisplus.extension.plugins.handler.TenantLineHandler;
import com.baomidou.mybatisplus.extension.plugins.inner.PaginationInnerInterceptor;
import com.baomidou.mybatisplus.extension.plugins.inner.TenantLineInnerInterceptor;
import net.sf.jsqlparser.expression.Expression;
import net.sf.jsqlparser.expression.StringValue;
import net.sf.jsqlparser.statement.select.Select;
import net.sf.jsqlparser.statement.select.WithItem;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
/**
* @Description
* @anthor hfbin
* @date 2019/9/30
*/
@Configuration
public class MybatisPlusConfig {

    /**
     * Comma separated list of table names that must NOT have the tenant
     * condition appended (raw value of the configuration property).
     */
    @Value("${" + ConfigValueConstant.MYBATIS_PLUS_IGNORE_TABLE + ":table}")
    private String ignoreTable;

    /** Whether multi-tenant SQL rewriting is enabled at all. */
    @Value("${" + ConfigValueConstant.MYBATIS_PLUS_OPEN_TENANT + ":true}")
    private boolean openTenant;

    /**
     * Registers the MyBatis-Plus inner-interceptor chain.
     * <p>
     * The tenant interceptor is added before the pagination interceptor so
     * that generated COUNT queries are also tenant-scoped.
     *
     * @return the configured interceptor bean
     */
    @Bean
    public MybatisPlusInterceptor mybatisPlusInterceptor() {
        MybatisPlusInterceptor interceptor = new MybatisPlusInterceptor();
        // Tenant line interceptor, see
        // https://mp.baomidou.com/guide/interceptor-tenant-line.html
        if (openTenant) {
            interceptor.addInnerInterceptor(new TenantLineInnerInterceptor(tenantLineHandler()));
        }
        // Pagination plugin
        interceptor.addInnerInterceptor(new PaginationInnerInterceptor(DbType.MYSQL));
        return interceptor;
    }

    /**
     * Builds the tenant handler that decides how (and whether) the tenant
     * condition is appended to each statement.
     * <p>
     * To skip the tenant condition per mapper, annotate the mapper with
     * {@code @InterceptorIgnore(tenantLine = "1")}.
     *
     * @return the tenant line handler
     */
    private TenantLineHandler tenantLineHandler() {
        // Parse the comma separated property once into a set so table names
        // are matched EXACTLY. The previous implementation called
        // String#contains on the raw property value, which wrongly ignored
        // any table whose name is a substring of a listed table
        // (e.g. "user" matched when the list contained "sys_user").
        final Set<String> ignoredTables = Arrays.stream(ignoreTable.split(","))
                .map(String::trim)
                .filter(name -> !name.isEmpty())
                .collect(Collectors.toSet());
        return new TenantLineHandler() {

            /**
             * @return the tenant id value expression; only a single id value
             *         is supported
             */
            @Override
            public Expression getTenantId() {
                return new StringValue(SpringContextUtils.getTenantCode());
            }

            /**
             * @return the tenant column name, fixed to {@code tenant_code}
             */
            @Override
            public String getTenantIdColumn() {
                return "tenant_code";
            }

            /**
             * Decides per table whether the tenant condition is skipped.
             *
             * @param tableName table being rewritten
             * @return true to skip appending the tenant condition,
             *         false to append it
             */
            @Override
            public boolean ignoreTable(String tableName) {
                return ignoredTables.contains(tableName);
            }
        };
    }

    /**
     * SQL injector that adds batch-insert and always-update-by-id methods
     * to every mapper on top of the default method set.
     *
     * @return the enhanced SQL injector bean
     */
    @Bean
    public ISqlInjector sqlInjector() {
        return new DefaultSqlInjector() {
            @Override
            public List<AbstractMethod> getMethodList(Class<?> mapperClass) {
                List<AbstractMethod> methodList = super.getMethodList(mapperClass);
                methodList.add(new InsertBatchSomeColumn());
                methodList.add(new AlwaysUpdateSomeColumnById());
                return methodList;
            }
        };
    }
}
|
nard-tech/nard_rails | app/validators/nard/rails/text.rb | <filename>app/validators/nard/rails/text.rb
# テキストに関連するバリデーションのクラスとエラーのクラスを格納する名前空間
module Nard::Rails::Text
  # Intentionally empty: this module only serves as the namespace container
  # for text-related validation classes and their error classes.
end
|
ragupta-git/ucscentralsdk | ucscentralsdk/mometa/trig/TrigMeta.py | <reponame>ragupta-git/ucscentralsdk<gh_stars>0
"""This module contains the general information for TrigMeta ManagedObject."""
from ...ucscentralmo import ManagedObject
from ...ucscentralcoremeta import UcsCentralVersion, MoPropertyMeta, MoMeta
from ...ucscentralmeta import VersionMeta
class TrigMetaConsts():
    """Enumerated string constants for TrigMeta properties.

    Each group mirrors the allowed values declared for the corresponding
    property in ``TrigMeta.prop_meta``.
    """
    # admin_state values
    ADMIN_STATE_TRIGGER = "trigger"
    ADMIN_STATE_TRIGGER_IMMEDIATE = "trigger-immediate"
    ADMIN_STATE_TRIGGERED = "triggered"
    ADMIN_STATE_UNTRIGGERED = "untriggered"
    ADMIN_STATE_USER_ACK = "user-ack"
    # int_id values
    INT_ID_NONE = "none"
    # oper_state values
    OPER_STATE_CAP_REACHED = "cap-reached"
    OPER_STATE_FAILED = "failed"
    OPER_STATE_IN_PROGRESS = "in-progress"
    OPER_STATE_PENDING = "pending"
    OPER_STATE_PENDING_ACK = "pending-ack"
    OPER_STATE_TRIGGERED = "triggered"
    # policy_owner values
    POLICY_OWNER_LOCAL = "local"
    POLICY_OWNER_PENDING_POLICY = "pending-policy"
    POLICY_OWNER_POLICY = "policy"
    POLICY_OWNER_UNSPECIFIED = "unspecified"
class TrigMeta(ManagedObject):
    """This is TrigMeta class.

    Managed object metadata for the ``trigMeta`` XML class. The class-level
    attributes below are consumed by the SDK core to map between XML
    attribute names and Python property names and to validate values.
    """

    consts = TrigMetaConsts()
    # Properties that participate in the object's RN/DN (naming).
    naming_props = set([u'schedName'])

    # MoMeta(name, xml_attribute, rn_format, min_version, access, mask,
    #        io, permissions, parent classes, child classes, verbs)
    mo_meta = MoMeta("TrigMeta", "trigMeta", "meta-trig-[sched_name]", VersionMeta.Version101a, "InputOutput", 0xff, [], ["read-only"], [u'orgDomainGroup', u'storageCloud', u'topSystem'], [u'trigServerToken', u'trigTokenRequestor', u'trigTriggered'], ["Get"])

    # Per-property metadata: access mode, dirty-mask bit, length bounds,
    # validation regex, allowed enum values and numeric ranges.
    prop_meta = {
        "admin_state": MoPropertyMeta("admin_state", "adminState", "string", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x2, None, None, None, ["trigger", "trigger-immediate", "triggered", "untriggered", "user-ack"], []),
        "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version101a, MoPropertyMeta.INTERNAL, None, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
        "descr": MoPropertyMeta("descr", "descr", "string", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x4, None, None, r"""[ !#$%&\(\)\*\+,\-\./:;\?@\[\]_\{\|\}~a-zA-Z0-9]{0,256}""", [], []),
        "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version101a, MoPropertyMeta.READ_ONLY, 0x8, 0, 256, None, [], []),
        "int_id": MoPropertyMeta("int_id", "intId", "string", VersionMeta.Version101a, MoPropertyMeta.INTERNAL, None, None, None, None, ["none"], ["0-4294967295"]),
        "job_count": MoPropertyMeta("job_count", "jobCount", "uint", VersionMeta.Version101a, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
        "name": MoPropertyMeta("name", "name", "string", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x10, None, None, r"""[\-\.:_a-zA-Z0-9]{0,16}""", [], []),
        "oper_state": MoPropertyMeta("oper_state", "operState", "string", VersionMeta.Version101a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["cap-reached", "failed", "in-progress", "pending", "pending-ack", "triggered"], []),
        "policy_level": MoPropertyMeta("policy_level", "policyLevel", "uint", VersionMeta.Version101a, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
        "policy_owner": MoPropertyMeta("policy_owner", "policyOwner", "string", VersionMeta.Version101a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["local", "pending-policy", "policy", "unspecified"], []),
        "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version101a, MoPropertyMeta.READ_ONLY, 0x20, 0, 256, None, [], []),
        "sched_name": MoPropertyMeta("sched_name", "schedName", "string", VersionMeta.Version101a, MoPropertyMeta.NAMING, 0x40, None, None, r"""[\-\.:_a-zA-Z0-9]{1,256}""", [], []),
        "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x80, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
        "trig_time": MoPropertyMeta("trig_time", "trigTime", "string", VersionMeta.Version101a, MoPropertyMeta.READ_ONLY, None, None, None, r"""([0-9]){4}-([0-9]){2}-([0-9]){2}T([0-9]){2}:([0-9]){2}:([0-9]){2}((\.([0-9]){3})){0,1}""", [], []),
        "window_dn": MoPropertyMeta("window_dn", "windowDn", "string", VersionMeta.Version101a, MoPropertyMeta.READ_ONLY, None, 0, 256, None, [], []),
    }

    # XML attribute name -> Python property name.
    prop_map = {
        "adminState": "admin_state",
        "childAction": "child_action",
        "descr": "descr",
        "dn": "dn",
        "intId": "int_id",
        "jobCount": "job_count",
        "name": "name",
        "operState": "oper_state",
        "policyLevel": "policy_level",
        "policyOwner": "policy_owner",
        "rn": "rn",
        "schedName": "sched_name",
        "status": "status",
        "trigTime": "trig_time",
        "windowDn": "window_dn",
    }

    def __init__(self, parent_mo_or_dn, sched_name, **kwargs):
        """Create a TrigMeta managed object.

        :param parent_mo_or_dn: parent managed object or its DN string
        :param sched_name: naming property (schedName) of this object
        :param kwargs: optional initial values for the remaining properties
        """
        self._dirty_mask = 0
        self.sched_name = sched_name
        # All non-naming properties default to None until set or fetched.
        self.admin_state = None
        self.child_action = None
        self.descr = None
        self.int_id = None
        self.job_count = None
        self.name = None
        self.oper_state = None
        self.policy_level = None
        self.policy_owner = None
        self.status = None
        self.trig_time = None
        self.window_dn = None

        ManagedObject.__init__(self, "TrigMeta", parent_mo_or_dn, **kwargs)
|
kucjac/clego | xminio/xminiomock/object_gen.go | <filename>xminio/xminiomock/object_gen.go
// Code generated by MockGen. DO NOT EDIT.
// Source: github.com/kucjac/cleango/xminio (interfaces: ObjectPutterGetter,Object)
// Package xminiomock is a generated GoMock package.
package xminiomock
import (
context "context"
io "io"
reflect "reflect"
gomock "github.com/golang/mock/gomock"
xminio "github.com/kucjac/cleango/xminio"
minio "github.com/minio/minio-go/v7"
)
// MockObjectPutterGetter is a mock of ObjectPutterGetter interface.
type MockObjectPutterGetter struct {
ctrl *gomock.Controller
recorder *MockObjectPutterGetterMockRecorder
}
// MockObjectPutterGetterMockRecorder is the mock recorder for MockObjectPutterGetter.
type MockObjectPutterGetterMockRecorder struct {
mock *MockObjectPutterGetter
}
// NewMockObjectPutterGetter creates a new mock instance.
func NewMockObjectPutterGetter(ctrl *gomock.Controller) *MockObjectPutterGetter {
mock := &MockObjectPutterGetter{ctrl: ctrl}
mock.recorder = &MockObjectPutterGetterMockRecorder{mock}
return mock
}
// EXPECT returns an object that allows the caller to indicate expected use.
func (m *MockObjectPutterGetter) EXPECT() *MockObjectPutterGetterMockRecorder {
return m.recorder
}
// GetObject mocks base method.
func (m *MockObjectPutterGetter) GetObject(arg0 context.Context, arg1, arg2 string, arg3 minio.GetObjectOptions) (xminio.Object, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetObject", arg0, arg1, arg2, arg3)
ret0, _ := ret[0].(xminio.Object)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// GetObject indicates an expected call of GetObject.
func (mr *MockObjectPutterGetterMockRecorder) GetObject(arg0, arg1, arg2, arg3 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetObject", reflect.TypeOf((*MockObjectPutterGetter)(nil).GetObject), arg0, arg1, arg2, arg3)
}
// PutObject mocks base method.
func (m *MockObjectPutterGetter) PutObject(arg0 context.Context, arg1, arg2 string, arg3 io.Reader, arg4 int64, arg5 minio.PutObjectOptions) (minio.UploadInfo, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "PutObject", arg0, arg1, arg2, arg3, arg4, arg5)
ret0, _ := ret[0].(minio.UploadInfo)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// PutObject indicates an expected call of PutObject.
func (mr *MockObjectPutterGetterMockRecorder) PutObject(arg0, arg1, arg2, arg3, arg4, arg5 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PutObject", reflect.TypeOf((*MockObjectPutterGetter)(nil).PutObject), arg0, arg1, arg2, arg3, arg4, arg5)
}
// MockObject is a mock of Object interface.
type MockObject struct {
ctrl *gomock.Controller
recorder *MockObjectMockRecorder
}
// MockObjectMockRecorder is the mock recorder for MockObject.
type MockObjectMockRecorder struct {
mock *MockObject
}
// NewMockObject creates a new mock instance.
func NewMockObject(ctrl *gomock.Controller) *MockObject {
mock := &MockObject{ctrl: ctrl}
mock.recorder = &MockObjectMockRecorder{mock}
return mock
}
// EXPECT returns an object that allows the caller to indicate expected use.
func (m *MockObject) EXPECT() *MockObjectMockRecorder {
return m.recorder
}
// Close mocks base method.
func (m *MockObject) Close() error {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "Close")
ret0, _ := ret[0].(error)
return ret0
}
// Close indicates an expected call of Close.
func (mr *MockObjectMockRecorder) Close() *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Close", reflect.TypeOf((*MockObject)(nil).Close))
}
// Read mocks base method.
func (m *MockObject) Read(arg0 []byte) (int, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "Read", arg0)
ret0, _ := ret[0].(int)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// Read indicates an expected call of Read.
func (mr *MockObjectMockRecorder) Read(arg0 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Read", reflect.TypeOf((*MockObject)(nil).Read), arg0)
}
// ReadAt mocks base method.
func (m *MockObject) ReadAt(arg0 []byte, arg1 int64) (int, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "ReadAt", arg0, arg1)
ret0, _ := ret[0].(int)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// ReadAt indicates an expected call of ReadAt.
func (mr *MockObjectMockRecorder) ReadAt(arg0, arg1 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ReadAt", reflect.TypeOf((*MockObject)(nil).ReadAt), arg0, arg1)
}
// Seek mocks base method.
func (m *MockObject) Seek(arg0 int64, arg1 int) (int64, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "Seek", arg0, arg1)
ret0, _ := ret[0].(int64)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// Seek indicates an expected call of Seek.
func (mr *MockObjectMockRecorder) Seek(arg0, arg1 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Seek", reflect.TypeOf((*MockObject)(nil).Seek), arg0, arg1)
}
// Stat mocks base method.
func (m *MockObject) Stat() (minio.ObjectInfo, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "Stat")
ret0, _ := ret[0].(minio.ObjectInfo)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// Stat indicates an expected call of Stat.
func (mr *MockObjectMockRecorder) Stat() *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Stat", reflect.TypeOf((*MockObject)(nil).Stat))
}
|
dansebmey/iaf | core/src/main/java/nl/nn/adapterframework/configuration/classloaders/IConfigurationClassLoader.java | <filename>core/src/main/java/nl/nn/adapterframework/configuration/classloaders/IConfigurationClassLoader.java
/*
Copyright 2019 Nationale-Nederlanden
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package nl.nn.adapterframework.configuration.classloaders;
import nl.nn.adapterframework.configuration.ConfigurationException;
import nl.nn.adapterframework.configuration.IbisContext;
/**
* Interface for IBIS Configuration ClassLoaders.
*
* @author <NAME>
*
*/
public interface IConfigurationClassLoader extends ReloadAware {

    /**
     * Severity at which problems raised by
     * {@link IConfigurationClassLoader#configure(IbisContext, String) configure}
     * are reported.
     */
    public enum ReportLevel {
        DEBUG, INFO, WARN, ERROR;
    }

    /**
     * Configure the {@link IConfigurationClassLoader}'s implementation
     * @throws ConfigurationException when the {@link IConfigurationClassLoader}'s implementation cannot retrieve or load the configuration files
     */
    public void configure(IbisContext ibisContext, String configurationName) throws ConfigurationException;

    /**
     * Retrieve the IbisContext from the ClassLoader which is set when the {@link IConfigurationClassLoader#configure(IbisContext, String) configure} method is called
     */
    public IbisContext getIbisContext();

    /**
     * Retrieve the name of the configuration that uses this {@link IConfigurationClassLoader}
     */
    public String getConfigurationName();

    /**
     * Defines the log level for errors caused by the {@link IConfigurationClassLoader#configure(IbisContext, String) configure} method
     * @param level ReportLevel in string format to be parsed by the ClassLoaderManager digester
     */
    public void setReportLevel(String level);

    /**
     * @return the {@link ReportLevel} set for this {@link IConfigurationClassLoader}
     */
    public ReportLevel getReportLevel();
}
|
cnm06/Competitive-Programming | Practice/Coding Blocks/Competitive Warriors Challenge 1.0/Simple Input.py | <filename>Practice/Coding Blocks/Competitive Warriors Challenge 1.0/Simple Input.py
sum = 0
while 1:
n = int(raw_input())
sum += n
if sum < 0:
break
print n
|
alizeait/fela | packages/fela-plugin-theme-value/src/__tests__/themeValue-test.js | <gh_stars>1000+
import themeValue from '../index'
// Shared fixture theme used by every test in this file: nested colour groups
// (looked up via dotted paths such as 'foreground.primary') and a flat font map.
const theme = {
  colors: {
    foreground: {
      primary: 'red',
      secondary: 'blue',
    },
    background: {
      primary: 'black',
      secondary: 'white',
    },
  },
  fonts: {
    text: 'Helvetica Neue, Arial, sans-serif',
    heading: 'Impact, serif',
  },
}
describe('Theme value plugin', () => {
  it('should resolve theme values', () => {
    // Each mapped property selects the sub-tree of the theme in which its
    // style value is looked up (dotted paths for nested groups).
    const themeMapping = {
      color: (t) => t.colors,
      backgroundColor: (t) => t.colors,
      fontFamily: (t) => t.fonts,
    }
    const style = {
      color: 'foreground.primary',
      backgroundColor: 'background.secondary',
      fontFamily: 'heading',
    }
    expect(
      themeValue(themeMapping)(style, undefined, undefined, { theme })
    ).toEqual({
      color: 'red',
      backgroundColor: 'white',
      fontFamily: 'Impact, serif',
    })
  })
  it('should fallback to strings if no value is found', () => {
    const themeMapping = {
      color: (t) => t.colors,
      backgroundColor: (t) => t.colors,
      fontFamily: (t) => t.fonts,
    }
    // 'yellow' and 'Arial' do not exist in the theme, so the raw strings
    // must pass through untouched while 'background.secondary' resolves.
    const style = {
      color: 'yellow',
      backgroundColor: 'background.secondary',
      fontFamily: 'Arial',
    }
    expect(
      themeValue(themeMapping)(style, undefined, undefined, { theme })
    ).toEqual({
      color: 'yellow',
      backgroundColor: 'white',
      fontFamily: 'Arial',
    })
  })
})
|
kebernet/erigo | ios/app/Erigo/com/google/common/hash/Hashing.h | <gh_stars>1-10
//
// Generated by the J2ObjC translator. DO NOT EDIT!
// source: /Volumes/Personal/Documents/raspi-config/client-framework/build/j2oSources/com/google/common/hash/Hashing.java
//
#include "J2ObjC_header.h"
#pragma push_macro("INCLUDE_ALL_ComGoogleCommonHashHashing")
#ifdef RESTRICT_ComGoogleCommonHashHashing
#define INCLUDE_ALL_ComGoogleCommonHashHashing 0
#else
#define INCLUDE_ALL_ComGoogleCommonHashHashing 1
#endif
#undef RESTRICT_ComGoogleCommonHashHashing
#if __has_feature(nullability)
#pragma clang diagnostic push
#pragma GCC diagnostic ignored "-Wnullability-completeness"
#endif
#if !defined (ComGoogleCommonHashHashing_) && (INCLUDE_ALL_ComGoogleCommonHashHashing || defined(INCLUDE_ComGoogleCommonHashHashing))
#define ComGoogleCommonHashHashing_
@class ComGoogleCommonHashHashCode;
@class IOSByteArray;
@class IOSObjectArray;
@protocol ComGoogleCommonHashHashFunction;
@protocol JavaLangIterable;
@protocol JavaSecurityKey;
@interface ComGoogleCommonHashHashing : NSObject
#pragma mark Public
+ (id<ComGoogleCommonHashHashFunction>)adler32;
+ (ComGoogleCommonHashHashCode *)combineOrderedWithJavaLangIterable:(id<JavaLangIterable>)hashCodes;
+ (ComGoogleCommonHashHashCode *)combineUnorderedWithJavaLangIterable:(id<JavaLangIterable>)hashCodes;
+ (id<ComGoogleCommonHashHashFunction>)concatenatingWithComGoogleCommonHashHashFunction:(id<ComGoogleCommonHashHashFunction>)first
withComGoogleCommonHashHashFunction:(id<ComGoogleCommonHashHashFunction>)second
withComGoogleCommonHashHashFunctionArray:(IOSObjectArray *)rest;
+ (id<ComGoogleCommonHashHashFunction>)concatenatingWithJavaLangIterable:(id<JavaLangIterable>)hashFunctions;
+ (jint)consistentHashWithComGoogleCommonHashHashCode:(ComGoogleCommonHashHashCode *)hashCode
withInt:(jint)buckets;
+ (jint)consistentHashWithLong:(jlong)input
withInt:(jint)buckets;
+ (id<ComGoogleCommonHashHashFunction>)crc32;
+ (id<ComGoogleCommonHashHashFunction>)crc32c;
+ (id<ComGoogleCommonHashHashFunction>)farmHashFingerprint64;
+ (id<ComGoogleCommonHashHashFunction>)goodFastHashWithInt:(jint)minimumBits;
+ (id<ComGoogleCommonHashHashFunction>)hmacMd5WithByteArray:(IOSByteArray *)key;
+ (id<ComGoogleCommonHashHashFunction>)hmacMd5WithJavaSecurityKey:(id<JavaSecurityKey>)key;
+ (id<ComGoogleCommonHashHashFunction>)hmacSha1WithByteArray:(IOSByteArray *)key;
+ (id<ComGoogleCommonHashHashFunction>)hmacSha1WithJavaSecurityKey:(id<JavaSecurityKey>)key;
+ (id<ComGoogleCommonHashHashFunction>)hmacSha256WithByteArray:(IOSByteArray *)key;
+ (id<ComGoogleCommonHashHashFunction>)hmacSha256WithJavaSecurityKey:(id<JavaSecurityKey>)key;
+ (id<ComGoogleCommonHashHashFunction>)hmacSha512WithByteArray:(IOSByteArray *)key;
+ (id<ComGoogleCommonHashHashFunction>)hmacSha512WithJavaSecurityKey:(id<JavaSecurityKey>)key;
+ (id<ComGoogleCommonHashHashFunction>)md5;
+ (id<ComGoogleCommonHashHashFunction>)murmur3_128;
+ (id<ComGoogleCommonHashHashFunction>)murmur3_128WithInt:(jint)seed;
+ (id<ComGoogleCommonHashHashFunction>)murmur3_32;
+ (id<ComGoogleCommonHashHashFunction>)murmur3_32WithInt:(jint)seed;
+ (id<ComGoogleCommonHashHashFunction>)sha1;
+ (id<ComGoogleCommonHashHashFunction>)sha256;
+ (id<ComGoogleCommonHashHashFunction>)sha384;
+ (id<ComGoogleCommonHashHashFunction>)sha512;
+ (id<ComGoogleCommonHashHashFunction>)sipHash24;
+ (id<ComGoogleCommonHashHashFunction>)sipHash24WithLong:(jlong)k0
withLong:(jlong)k1;
#pragma mark Package-Private
+ (jint)checkPositiveAndMakeMultipleOf32WithInt:(jint)bits;
@end
J2OBJC_STATIC_INIT(ComGoogleCommonHashHashing)
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_goodFastHashWithInt_(jint minimumBits);
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_murmur3_32WithInt_(jint seed);
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_murmur3_32();
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_murmur3_128WithInt_(jint seed);
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_murmur3_128();
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_sipHash24();
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_sipHash24WithLong_withLong_(jlong k0, jlong k1);
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_md5();
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_sha1();
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_sha256();
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_sha384();
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_sha512();
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_hmacMd5WithJavaSecurityKey_(id<JavaSecurityKey> key);
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_hmacMd5WithByteArray_(IOSByteArray *key);
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_hmacSha1WithJavaSecurityKey_(id<JavaSecurityKey> key);
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_hmacSha1WithByteArray_(IOSByteArray *key);
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_hmacSha256WithJavaSecurityKey_(id<JavaSecurityKey> key);
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_hmacSha256WithByteArray_(IOSByteArray *key);
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_hmacSha512WithJavaSecurityKey_(id<JavaSecurityKey> key);
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_hmacSha512WithByteArray_(IOSByteArray *key);
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_crc32c();
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_crc32();
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_adler32();
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_farmHashFingerprint64();
FOUNDATION_EXPORT jint ComGoogleCommonHashHashing_consistentHashWithComGoogleCommonHashHashCode_withInt_(ComGoogleCommonHashHashCode *hashCode, jint buckets);
FOUNDATION_EXPORT jint ComGoogleCommonHashHashing_consistentHashWithLong_withInt_(jlong input, jint buckets);
FOUNDATION_EXPORT ComGoogleCommonHashHashCode *ComGoogleCommonHashHashing_combineOrderedWithJavaLangIterable_(id<JavaLangIterable> hashCodes);
FOUNDATION_EXPORT ComGoogleCommonHashHashCode *ComGoogleCommonHashHashing_combineUnorderedWithJavaLangIterable_(id<JavaLangIterable> hashCodes);
FOUNDATION_EXPORT jint ComGoogleCommonHashHashing_checkPositiveAndMakeMultipleOf32WithInt_(jint bits);
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_concatenatingWithComGoogleCommonHashHashFunction_withComGoogleCommonHashHashFunction_withComGoogleCommonHashHashFunctionArray_(id<ComGoogleCommonHashHashFunction> first, id<ComGoogleCommonHashHashFunction> second, IOSObjectArray *rest);
FOUNDATION_EXPORT id<ComGoogleCommonHashHashFunction> ComGoogleCommonHashHashing_concatenatingWithJavaLangIterable_(id<JavaLangIterable> hashFunctions);
J2OBJC_TYPE_LITERAL_HEADER(ComGoogleCommonHashHashing)
#endif
#if !defined (ComGoogleCommonHashHashing_ChecksumType_) && (INCLUDE_ALL_ComGoogleCommonHashHashing || defined(INCLUDE_ComGoogleCommonHashHashing_ChecksumType))
#define ComGoogleCommonHashHashing_ChecksumType_
#define RESTRICT_JavaLangEnum 1
#define INCLUDE_JavaLangEnum 1
#include "java/lang/Enum.h"
#define RESTRICT_ComGoogleCommonBaseSupplier 1
#define INCLUDE_ComGoogleCommonBaseSupplier 1
#include "com/google/common/base/Supplier.h"
@class IOSObjectArray;
@protocol JavaUtilZipChecksum;
typedef NS_ENUM(NSUInteger, ComGoogleCommonHashHashing_ChecksumType_Enum) {
ComGoogleCommonHashHashing_ChecksumType_Enum_CRC_32 = 0,
ComGoogleCommonHashHashing_ChecksumType_Enum_ADLER_32 = 1,
};
@interface ComGoogleCommonHashHashing_ChecksumType : JavaLangEnum < NSCopying, ComGoogleCommonBaseSupplier >
#pragma mark Public
- (id<JavaUtilZipChecksum>)get;
+ (ComGoogleCommonHashHashing_ChecksumType *)valueOfWithNSString:(NSString *)name;
+ (IOSObjectArray *)values;
#pragma mark Package-Private
- (id)copyWithZone:(NSZone *)zone;
@end
J2OBJC_STATIC_INIT(ComGoogleCommonHashHashing_ChecksumType)
/*! INTERNAL ONLY - Use enum accessors declared below. */
FOUNDATION_EXPORT ComGoogleCommonHashHashing_ChecksumType *ComGoogleCommonHashHashing_ChecksumType_values_[];
inline ComGoogleCommonHashHashing_ChecksumType *ComGoogleCommonHashHashing_ChecksumType_get_CRC_32();
J2OBJC_ENUM_CONSTANT(ComGoogleCommonHashHashing_ChecksumType, CRC_32)
inline ComGoogleCommonHashHashing_ChecksumType *ComGoogleCommonHashHashing_ChecksumType_get_ADLER_32();
J2OBJC_ENUM_CONSTANT(ComGoogleCommonHashHashing_ChecksumType, ADLER_32)
FOUNDATION_EXPORT IOSObjectArray *ComGoogleCommonHashHashing_ChecksumType_values();
FOUNDATION_EXPORT ComGoogleCommonHashHashing_ChecksumType *ComGoogleCommonHashHashing_ChecksumType_valueOfWithNSString_(NSString *name);
FOUNDATION_EXPORT ComGoogleCommonHashHashing_ChecksumType *ComGoogleCommonHashHashing_ChecksumType_fromOrdinal(NSUInteger ordinal);
J2OBJC_TYPE_LITERAL_HEADER(ComGoogleCommonHashHashing_ChecksumType)
#endif
#if __has_feature(nullability)
#pragma clang diagnostic pop
#endif
#pragma pop_macro("INCLUDE_ALL_ComGoogleCommonHashHashing")
|
muxanick/scala-native-java-stubs | javax/management/ObjectInstance.scala | package javax.management
import java.io.Serializable
import java.lang.{Object, String}
import scala.scalanative.annotation.stub
/** Used to represent the object name of an MBean and its class name.
* If the MBean is a Dynamic MBean the class name should be retrieved from
* the MBeanInfo it provides.
*/
class ObjectInstance extends Object with Serializable {

    /** Allows an object instance to be created given an object name and
     * the full class name, including the package name.
     */
    @stub
    def this(objectName: ObjectName, className: String) = ???

    /** Allows an object instance to be created given a string representation of
     * an object name and the full class name, including the package name.
     */
    @stub
    def this(objectName: String, className: String) = ???

    /** Compares the current object instance with another object instance. */
    // Fixed: `object` is a reserved word in Scala and must be written in
    // backticks to be usable as a parameter name; `override` is mandatory
    // because this overrides the concrete Object#equals.
    @stub
    override def equals(`object`: Any): Boolean = ???

    /** Returns the class part. */
    @stub
    def getClassName(): String = ???

    /** Returns the object name part. */
    @stub
    def getObjectName(): ObjectName = ???

    /** Returns a hash code value for the object. */
    // `override` is mandatory: overrides the concrete Object#hashCode.
    @stub
    override def hashCode(): Int = ???

    /** Returns a string representing this ObjectInstance object. */
    // `override` is mandatory: overrides the concrete Object#toString.
    @stub
    override def toString(): String = ???
}
|
Tinindo/api-jupiter | dist/src/useCases/Users/LIstUsers/ListUsersUseCase.js | "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ListUsersUseCase = void 0;
class ListUsersUseCase {
constructor(usersRepository) {
this.usersRepository = usersRepository;
}
async execute(limit, offset) {
const users = await this.usersRepository.list(limit, offset);
return users;
}
}
exports.ListUsersUseCase = ListUsersUseCase;
|
ishiura-compiler/CF3 | testsuite/EXP_5/test798.c |
/*
CF3
Copyright (c) 2015 ishiura-lab.
Released under the MIT license.
https://github.com/ishiura-compiler/CF3/MIT-LICENSE.md
*/
#include<stdio.h>
#include<stdint.h>
#include<stdlib.h>
#include"test1.h"
static uint64_t t0 = 3084LLU;
int16_t x6 = INT16_MAX;
uint64_t x11 = UINT64_MAX;
uint64_t x12 = 480977073566383751LLU;
static volatile uint32_t x16 = 123310U;
volatile uint64_t t3 = 67658961LLU;
int64_t x17 = -1LL;
volatile int32_t t5 = INT32_MAX;
static volatile uint64_t x38 = UINT64_MAX;
int16_t x40 = INT16_MAX;
int32_t x46 = INT32_MIN;
static uint32_t x47 = 322U;
uint32_t t11 = 659U;
/*
 * Auto-generated test fixture data (compiler integer-arithmetic tests).
 * Each xNNN is an operand and each tNN a result slot consumed by the fN
 * test functions later in this file. Storage classes (`static`) and
 * `volatile` qualifiers vary deliberately: `volatile` blocks constant
 * folding so the arithmetic is evaluated at run time, and the mix of
 * widths/signedness exercises C's usual arithmetic conversions.
 * NOTE(review): further operands/results referenced by the fN functions
 * (e.g. t0, x6, x11) are declared elsewhere in this file, outside this chunk.
 */
uint32_t x50 = 42178303U;
static volatile int64_t x53 = INT64_MIN;
int32_t x55 = INT32_MIN;
int64_t t14 = 153LL;
int32_t x61 = INT32_MIN;
uint32_t x66 = UINT32_MAX;
int64_t t16 = -481LL;
int64_t x75 = INT64_MAX;
int32_t x78 = -1;
uint16_t x80 = UINT16_MAX;
volatile int16_t x88 = INT16_MAX;
static volatile int16_t x94 = -1;
int32_t t25 = -519339276;
volatile int8_t x109 = INT8_MAX;
uint8_t x112 = 7U;
int16_t x115 = -1;
static volatile uint64_t t28 = 1583875539399329073LLU;
int32_t x127 = INT32_MIN;
uint64_t x143 = UINT64_MAX;
uint8_t x145 = 118U;
uint32_t x147 = UINT32_MAX;
volatile uint32_t t32 = 861306U;
static volatile int8_t x166 = 1;
uint64_t t37 = 74960LLU;
uint16_t x173 = 71U;
int16_t x180 = -1;
volatile uint16_t x185 = 48U;
int32_t x193 = -3;
int64_t x195 = 1351939772LL;
static volatile uint32_t x198 = 3804U;
uint16_t x200 = UINT16_MAX;
static uint32_t x202 = 8840U;
int16_t x213 = 1;
int32_t x214 = 989425;
static uint16_t x219 = 2U;
volatile uint64_t t49 = 3712005011587532LLU;
volatile int32_t x234 = INT32_MIN;
static uint32_t t51 = 14U;
static uint8_t x242 = 0U;
volatile int16_t x248 = INT16_MIN;
int64_t t53 = -2134534783678383LL;
int8_t x255 = -1;
static int16_t x259 = -545;
volatile int64_t x263 = INT64_MIN;
volatile uint64_t t57 = 13024LLU;
int32_t x266 = -29606;
int32_t x268 = 156;
int32_t t58 = -12;
uint16_t x271 = 101U;
static volatile uint16_t x276 = 0U;
static uint8_t x279 = 2U;
int32_t x281 = INT32_MIN;
uint8_t x289 = 0U;
int16_t x291 = -1;
volatile int64_t x293 = INT64_MIN;
uint64_t x310 = UINT64_MAX;
uint64_t x311 = 39038LLU;
int64_t x314 = INT64_MIN;
int64_t t70 = -116323189245LL;
uint16_t x322 = UINT16_MAX;
int32_t x328 = INT32_MIN;
volatile uint8_t x332 = 3U;
int32_t x336 = INT32_MIN;
volatile int16_t x338 = INT16_MAX;
volatile uint16_t x345 = 369U;
uint32_t x346 = UINT32_MAX;
int32_t x361 = 243559;
uint8_t x382 = 17U;
volatile int64_t x384 = INT64_MIN;
int8_t x392 = INT8_MIN;
uint8_t x398 = 37U;
int16_t x404 = 14;
volatile int8_t x405 = INT8_MIN;
static int16_t x407 = -1;
static int64_t x414 = -6LL;
uint8_t x417 = 0U;
static uint32_t t93 = 252U;
uint8_t x427 = 81U;
int8_t x434 = -1;
int64_t x447 = INT64_MIN;
uint32_t x452 = 14910230U;
static uint32_t x457 = 353691U;
int64_t x461 = 1095LL;
volatile uint32_t x464 = 48864U;
static int32_t x469 = -1;
volatile int8_t x470 = 13;
uint64_t x474 = 71357131LLU;
static uint64_t t105 = 902112LLU;
int16_t x481 = INT16_MAX;
int16_t x488 = -1;
int64_t t108 = 254228307553418500LL;
volatile int64_t x489 = -34LL;
static volatile int16_t x490 = -1;
int8_t x491 = INT8_MIN;
int64_t t110 = -120933LL;
int8_t x498 = -1;
int8_t x499 = 3;
uint8_t x504 = UINT8_MAX;
uint32_t x505 = 205U;
uint8_t x508 = UINT8_MAX;
uint32_t t113 = 1675001123U;
volatile int16_t x509 = INT16_MIN;
volatile int32_t x511 = INT32_MIN;
volatile int16_t x512 = INT16_MIN;
int8_t x513 = INT8_MIN;
static int32_t x514 = -1;
int64_t x515 = -1LL;
volatile uint64_t t116 = 15275LLU;
uint8_t x525 = 20U;
uint16_t x530 = 23U;
int16_t x533 = -1;
int8_t x535 = -1;
int8_t x536 = -1;
volatile int32_t x539 = INT32_MIN;
int8_t x545 = INT8_MIN;
int64_t x569 = INT64_MIN;
volatile int32_t x575 = INT32_MIN;
uint8_t x581 = 80U;
uint16_t x587 = UINT16_MAX;
int32_t x593 = INT32_MIN;
static uint64_t t133 = 7522608698LLU;
volatile int64_t t134 = -2076849526604LL;
int8_t x608 = INT8_MAX;
uint64_t t135 = 6048462697LLU;
int32_t x609 = INT32_MAX;
static uint8_t x611 = 74U;
uint32_t x614 = UINT32_MAX;
int64_t x615 = 257164157614LL;
volatile uint16_t x616 = 20U;
volatile int64_t t140 = 8528LL;
uint16_t x629 = 5U;
uint8_t x649 = UINT8_MAX;
volatile int64_t t145 = 2471661033LL;
uint64_t x654 = 6404729LLU;
uint8_t x656 = UINT8_MAX;
static volatile uint32_t t147 = 146U;
int64_t x664 = INT64_MIN;
uint8_t x665 = 27U;
uint32_t x677 = 6448U;
uint32_t x680 = 22312173U;
int64_t x681 = INT64_MIN;
int16_t x688 = 10;
uint16_t x695 = 95U;
int32_t x697 = INT32_MIN;
int32_t x700 = INT32_MAX;
int64_t t158 = 1163828595985784LL;
int64_t x707 = INT64_MIN;
int32_t x714 = -1;
int64_t x715 = INT64_MIN;
uint64_t t162 = 10LLU;
uint64_t x727 = 392023LLU;
static int16_t x728 = 7790;
uint32_t x732 = 10U;
static volatile uint32_t t164 = 53U;
int16_t x742 = -19;
volatile int16_t x743 = -4812;
int16_t x744 = -1;
int16_t x747 = INT16_MIN;
volatile uint32_t t167 = 786478U;
int16_t x749 = -1;
volatile uint8_t x753 = 1U;
int64_t x755 = INT64_MAX;
volatile uint64_t t169 = 6733942549626666274LLU;
static int8_t x760 = -1;
uint32_t t170 = 7207709U;
volatile int16_t x763 = INT16_MIN;
int32_t t171 = 520409;
uint8_t x774 = UINT8_MAX;
int64_t x777 = 4983267LL;
volatile int32_t x779 = INT32_MIN;
static uint64_t x782 = 3003295300LLU;
int16_t x783 = INT16_MIN;
int64_t x796 = -183585909101327LL;
volatile int64_t t179 = -409939815883LL;
int16_t x799 = INT16_MAX;
static volatile int16_t x807 = INT16_MAX;
static volatile int64_t x813 = -8884211821100675LL;
int8_t x815 = INT8_MIN;
int64_t t184 = 172830521117842LL;
int64_t x831 = 133496681LL;
uint32_t x838 = 248385U;
int8_t x851 = INT8_MIN;
int32_t x860 = 140894;
int16_t x862 = -1;
static int16_t x869 = INT16_MAX;
volatile uint64_t t196 = 9514682208LLU;
int16_t x877 = -1120;
static volatile int32_t t198 = 145082;
volatile uint64_t x886 = 67LLU;
volatile uint64_t t199 = 21447261331537329LLU;
/*
 * Auto-generated tests f0-f45. Each fN evaluates tN = (a ^ ((b % c) + d))
 * over mixed-width signed/unsigned operands (exercising C's integer
 * promotions and usual arithmetic conversions) and calls NG() if the result
 * differs from the precomputed expected value. The operands/results and
 * NG() that are not declared locally are file-scope names defined elsewhere
 * in this file. Do not edit expressions or expected constants by hand: the
 * constants are the generated oracle for these exact expressions.
 */
void f0(void) {
    uint32_t x1 = 3U;
    static uint64_t x2 = 521LLU;
    volatile uint8_t x3 = 44U;
    volatile uint64_t x4 = 3461299198313975459LLU;
    t0 = (x1^((x2%x3)+x4));
    if (t0 != 3461299198313975499LLU) { NG(); } else { ; }
}
void f1(void) {
    static uint8_t x5 = UINT8_MAX;
    static int32_t x7 = 201;
    volatile uint8_t x8 = 0U;
    volatile int32_t t1 = 531;
    t1 = (x5^((x6%x7)+x8));
    if (t1 != 251) { NG(); } else { ; }
}
void f2(void) {
    static int16_t x9 = INT16_MIN;
    volatile uint16_t x10 = 1013U;
    volatile uint64_t t2 = 592036093975133LLU;
    t2 = (x9^((x10%x11)+x12));
    if (t2 != 17965767000143135356LLU) { NG(); } else { ; }
}
void f3(void) {
    uint64_t x13 = UINT64_MAX;
    uint64_t x14 = UINT64_MAX;
    int32_t x15 = 12298388;
    t3 = (x13^((x14%x15)+x16));
    if (t3 != 18446744073708901706LLU) { NG(); } else { ; }
}
void f4(void) {
    static int64_t x18 = 40800199684LL;
    volatile int8_t x19 = -4;
    static int64_t x20 = -16048958739LL;
    int64_t t4 = -7815398420965LL;
    t4 = (x17^((x18%x19)+x20));
    if (t4 != 16048958738LL) { NG(); } else { ; }
}
void f5(void) {
    int32_t x21 = INT32_MIN;
    static int16_t x22 = -1628;
    static int16_t x23 = -1;
    int8_t x24 = -1;
    t5 = (x21^((x22%x23)+x24));
    if (t5 != INT32_MAX) { NG(); } else { ; }
}
void f6(void) {
    uint64_t x25 = UINT64_MAX;
    int64_t x26 = -38LL;
    int32_t x27 = -2;
    static int32_t x28 = INT32_MIN;
    uint64_t t6 = 4096546631350164574LLU;
    t6 = (x25^((x26%x27)+x28));
    if (t6 != 2147483647LLU) { NG(); } else { ; }
}
void f7(void) {
    int16_t x29 = INT16_MAX;
    int8_t x30 = INT8_MAX;
    int32_t x31 = 249;
    uint64_t x32 = 2828029737676LLU;
    uint64_t t7 = 1590886728966165LLU;
    t7 = (x29^((x30%x31)+x32));
    if (t7 != 2828029740212LLU) { NG(); } else { ; }
}
void f8(void) {
    uint32_t x33 = UINT32_MAX;
    int32_t x34 = INT32_MIN;
    uint8_t x35 = 29U;
    static int16_t x36 = INT16_MAX;
    static volatile uint32_t t8 = 33083709U;
    t8 = (x33^((x34%x35)+x36));
    if (t8 != 4294934536U) { NG(); } else { ; }
}
void f9(void) {
    int32_t x37 = 6879;
    uint32_t x39 = UINT32_MAX;
    volatile uint64_t t9 = 1047586457570LLU;
    t9 = (x37^((x38%x39)+x40));
    if (t9 != 25888LLU) { NG(); } else { ; }
}
void f10(void) {
    static uint32_t x41 = UINT32_MAX;
    uint64_t x42 = 2738590974132005LLU;
    static int16_t x43 = INT16_MAX;
    int16_t x44 = -15176;
    static volatile uint64_t t10 = 54485559853117924LLU;
    t10 = (x41^((x42%x43)+x44));
    if (t10 != 4294952064LLU) { NG(); } else { ; }
}
void f11(void) {
    int32_t x45 = -1;
    int16_t x48 = INT16_MIN;
    t11 = (x45^((x46%x47)+x48));
    if (t11 != 32485U) { NG(); } else { ; }
}
void f12(void) {
    volatile int64_t x49 = -1LL;
    volatile uint16_t x51 = 1990U;
    int64_t x52 = INT64_MIN;
    volatile int64_t t12 = 6527182LL;
    t12 = (x49^((x50%x51)+x52));
    if (t12 != 9223372036854775554LL) { NG(); } else { ; }
}
void f13(void) {
    int16_t x54 = 1;
    int32_t x56 = -1;
    static volatile int64_t t13 = INT64_MIN;
    t13 = (x53^((x54%x55)+x56));
    if (t13 != INT64_MIN) { NG(); } else { ; }
}
void f14(void) {
    static uint16_t x57 = 342U;
    int16_t x58 = -48;
    int64_t x59 = -1LL;
    volatile int64_t x60 = INT64_MAX;
    t14 = (x57^((x58%x59)+x60));
    if (t14 != 9223372036854775465LL) { NG(); } else { ; }
}
void f15(void) {
    int64_t x62 = -2261812429412671LL;
    uint64_t x63 = 63313992LLU;
    int8_t x64 = INT8_MAX;
    volatile uint64_t t15 = 1279632627LLU;
    t15 = (x61^((x62%x63)+x64));
    if (t15 != 18446744071617274008LLU) { NG(); } else { ; }
}
void f16(void) {
    int64_t x65 = -1LL;
    volatile uint16_t x67 = 1814U;
    volatile uint8_t x68 = UINT8_MAX;
    t16 = (x65^((x66%x67)+x68));
    if (t16 != -1473LL) { NG(); } else { ; }
}
void f17(void) {
    uint64_t x69 = 36629683644814772LLU;
    static volatile int32_t x70 = -1;
    uint8_t x71 = UINT8_MAX;
    uint32_t x72 = 12999873U;
    volatile uint64_t t17 = 6018098657LLU;
    t17 = (x69^((x70%x71)+x72));
    if (t17 != 36629683632343412LLU) { NG(); } else { ; }
}
void f18(void) {
    int8_t x73 = INT8_MIN;
    volatile int8_t x74 = -33;
    int8_t x76 = INT8_MIN;
    int64_t t18 = 44LL;
    t18 = (x73^((x74%x75)+x76));
    if (t18 != 223LL) { NG(); } else { ; }
}
void f19(void) {
    uint16_t x77 = 10116U;
    volatile int16_t x79 = 12;
    volatile int32_t t19 = 9;
    t19 = (x77^((x78%x79)+x80));
    if (t19 != 55418) { NG(); } else { ; }
}
void f20(void) {
    int64_t x81 = INT64_MAX;
    int8_t x82 = -2;
    static uint16_t x83 = UINT16_MAX;
    uint8_t x84 = 0U;
    int64_t t20 = -4LL;
    t20 = (x81^((x82%x83)+x84));
    if (t20 != -9223372036854775807LL) { NG(); } else { ; }
}
void f21(void) {
    volatile uint8_t x85 = 80U;
    volatile uint64_t x86 = UINT64_MAX;
    volatile int64_t x87 = INT64_MIN;
    uint64_t t21 = 6283826LLU;
    t21 = (x85^((x86%x87)+x88));
    if (t21 != 9223372036854808494LLU) { NG(); } else { ; }
}
void f22(void) {
    static uint16_t x89 = 143U;
    volatile int64_t x90 = -543982LL;
    uint32_t x91 = 229U;
    static int32_t x92 = INT32_MIN;
    static int64_t t22 = 2LL;
    t22 = (x89^((x90%x91)+x92));
    if (t22 != -2147483878LL) { NG(); } else { ; }
}
void f23(void) {
    int64_t x93 = -1LL;
    volatile int32_t x95 = INT32_MIN;
    int8_t x96 = -1;
    static volatile int64_t t23 = -75614LL;
    t23 = (x93^((x94%x95)+x96));
    if (t23 != 1LL) { NG(); } else { ; }
}
void f24(void) {
    int32_t x97 = INT32_MAX;
    volatile int64_t x98 = -61573032666116LL;
    int8_t x99 = INT8_MIN;
    volatile int64_t x100 = INT64_MAX;
    int64_t t24 = -46001LL;
    t24 = (x97^((x98%x99)+x100));
    if (t24 != 9223372034707292164LL) { NG(); } else { ; }
}
void f25(void) {
    static int8_t x101 = -1;
    int32_t x102 = INT32_MIN;
    int32_t x103 = 1937764;
    uint16_t x104 = 144U;
    t25 = (x101^((x102%x103)+x104));
    if (t25 != 440991) { NG(); } else { ; }
}
void f26(void) {
    volatile int32_t x110 = -1;
    int16_t x111 = -1;
    int32_t t26 = 0;
    t26 = (x109^((x110%x111)+x112));
    if (t26 != 120) { NG(); } else { ; }
}
void f27(void) {
    int32_t x113 = 439326601;
    uint16_t x114 = UINT16_MAX;
    int64_t x116 = INT64_MIN;
    int64_t t27 = -29614LL;
    t27 = (x113^((x114%x115)+x116));
    if (t27 != -9223372036415449207LL) { NG(); } else { ; }
}
void f28(void) {
    uint64_t x121 = 338004050362LLU;
    volatile int32_t x122 = -1;
    uint32_t x123 = UINT32_MAX;
    uint8_t x124 = 88U;
    t28 = (x121^((x122%x123)+x124));
    if (t28 != 338004050402LLU) { NG(); } else { ; }
}
void f29(void) {
    int64_t x125 = -8978127749478069LL;
    volatile int64_t x126 = INT64_MAX;
    int8_t x128 = INT8_MAX;
    static int64_t t29 = 2367468325087LL;
    t29 = (x125^((x126%x127)+x128));
    if (t29 != -8978129896961739LL) { NG(); } else { ; }
}
void f30(void) {
    static int64_t x133 = INT64_MIN;
    int64_t x134 = INT64_MAX;
    int64_t x135 = 7594920LL;
    volatile int16_t x136 = -1900;
    static volatile int64_t t30 = 744998709645527718LL;
    t30 = (x133^((x134%x135)+x136));
    if (t30 != -9223372036850803661LL) { NG(); } else { ; }
}
void f31(void) {
    uint8_t x141 = 0U;
    uint8_t x142 = 15U;
    volatile int64_t x144 = INT64_MIN;
    volatile uint64_t t31 = 104LLU;
    t31 = (x141^((x142%x143)+x144));
    if (t31 != 9223372036854775823LLU) { NG(); } else { ; }
}
void f32(void) {
    volatile int32_t x146 = INT32_MIN;
    int16_t x148 = INT16_MIN;
    t32 = (x145^((x146%x147)+x148));
    if (t32 != 2147450998U) { NG(); } else { ; }
}
void f33(void) {
    volatile uint64_t x149 = 26995927512LLU;
    uint32_t x150 = UINT32_MAX;
    static int32_t x151 = -1;
    static volatile int32_t x152 = -46302571;
    static volatile uint64_t t33 = 1905LLU;
    t33 = (x149^((x150%x151)+x152));
    if (t33 != 28792345421LLU) { NG(); } else { ; }
}
void f34(void) {
    int64_t x153 = INT64_MAX;
    volatile int64_t x154 = INT64_MIN;
    int64_t x155 = -1LL;
    volatile uint64_t x156 = UINT64_MAX;
    uint64_t t34 = 22598LLU;
    t34 = (x153^((x154%x155)+x156));
    if (t34 != 9223372036854775808LLU) { NG(); } else { ; }
}
void f35(void) {
    uint8_t x157 = 15U;
    int16_t x158 = INT16_MIN;
    int32_t x159 = -80028304;
    int16_t x160 = 295;
    static volatile int32_t t35 = -10556969;
    t35 = (x157^((x158%x159)+x160));
    if (t35 != -32472) { NG(); } else { ; }
}
void f36(void) {
    uint8_t x161 = 15U;
    int64_t x162 = 58637284624LL;
    static int16_t x163 = INT16_MIN;
    int16_t x164 = -736;
    int64_t t36 = 7618920941690LL;
    t36 = (x161^((x162%x163)+x164));
    if (t36 != 29247LL) { NG(); } else { ; }
}
void f37(void) {
    int64_t x165 = 2158470465101479649LL;
    static volatile uint64_t x167 = UINT64_MAX;
    uint32_t x168 = 0U;
    t37 = (x165^((x166%x167)+x168));
    if (t37 != 2158470465101479648LLU) { NG(); } else { ; }
}
void f38(void) {
    volatile uint8_t x174 = 6U;
    int32_t x175 = INT32_MIN;
    int16_t x176 = INT16_MIN;
    int32_t t38 = 38442;
    t38 = (x173^((x174%x175)+x176));
    if (t38 != -32703) { NG(); } else { ; }
}
void f39(void) {
    static int64_t x177 = -19228018306706LL;
    static volatile int16_t x178 = -1;
    static int8_t x179 = INT8_MIN;
    int64_t t39 = -1314063LL;
    t39 = (x177^((x178%x179)+x180));
    if (t39 != 19228018306704LL) { NG(); } else { ; }
}
void f40(void) {
    int64_t x181 = -1LL;
    int16_t x182 = INT16_MIN;
    int64_t x183 = INT64_MAX;
    uint32_t x184 = UINT32_MAX;
    volatile int64_t t40 = 83601LL;
    t40 = (x181^((x182%x183)+x184));
    if (t40 != -4294934528LL) { NG(); } else { ; }
}
void f41(void) {
    int16_t x186 = 236;
    int64_t x187 = -1LL;
    int64_t x188 = INT64_MAX;
    static int64_t t41 = 449LL;
    t41 = (x185^((x186%x187)+x188));
    if (t41 != 9223372036854775759LL) { NG(); } else { ; }
}
void f42(void) {
    uint8_t x194 = 1U;
    int32_t x196 = -1;
    volatile int64_t t42 = 21194176015LL;
    t42 = (x193^((x194%x195)+x196));
    if (t42 != -3LL) { NG(); } else { ; }
}
void f43(void) {
    static volatile int64_t x197 = 0LL;
    int32_t x199 = -134;
    int64_t t43 = 91052644868505985LL;
    t43 = (x197^((x198%x199)+x200));
    if (t43 != 69339LL) { NG(); } else { ; }
}
void f44(void) {
    int16_t x201 = 1670;
    int16_t x203 = INT16_MIN;
    int16_t x204 = 252;
    uint32_t t44 = 31551138U;
    t44 = (x201^((x202%x203)+x204));
    if (t44 != 9474U) { NG(); } else { ; }
}
void f45(void) {
    static int64_t x215 = INT64_MIN;
    uint8_t x216 = 9U;
    volatile int64_t t45 = 40372003833508LL;
    t45 = (x213^((x214%x215)+x216));
    if (t45 != 989435LL) { NG(); } else { ; }
}
/*
 * Auto-generated tests f46-f90. Each fN evaluates tN = (a ^ ((b % c) + d))
 * over mixed-width signed/unsigned operands (exercising C's integer
 * promotions and usual arithmetic conversions) and calls NG() if the result
 * differs from the precomputed expected value. The operands/results and
 * NG() that are not declared locally are file-scope names defined elsewhere
 * in this file. Do not edit expressions or expected constants by hand: the
 * constants are the generated oracle for these exact expressions.
 */
void f46(void) {
    int32_t x217 = 1;
    static int64_t x218 = INT64_MAX;
    int8_t x220 = INT8_MAX;
    volatile int64_t t46 = 18LL;
    t46 = (x217^((x218%x219)+x220));
    if (t46 != 129LL) { NG(); } else { ; }
}
void f47(void) {
    int8_t x221 = -1;
    volatile uint64_t x222 = 5588923164LLU;
    int8_t x223 = -1;
    int64_t x224 = INT64_MAX;
    volatile uint64_t t47 = 265285615030LLU;
    t47 = (x221^((x222%x223)+x224));
    if (t47 != 9223372031265852644LLU) { NG(); } else { ; }
}
void f48(void) {
    uint64_t x225 = UINT64_MAX;
    static uint32_t x226 = 3U;
    int32_t x227 = INT32_MAX;
    volatile int32_t x228 = -55179655;
    uint64_t t48 = 732225976037870LLU;
    t48 = (x225^((x226%x227)+x228));
    if (t48 != 18446744069469763971LLU) { NG(); } else { ; }
}
void f49(void) {
    static volatile int8_t x229 = INT8_MIN;
    int64_t x230 = INT64_MIN;
    static volatile uint64_t x231 = UINT64_MAX;
    int32_t x232 = INT32_MAX;
    t49 = (x229^((x230%x231)+x232));
    if (t49 != 9223372034707292287LLU) { NG(); } else { ; }
}
void f50(void) {
    static int32_t x233 = INT32_MAX;
    int8_t x235 = -1;
    int64_t x236 = INT64_MIN;
    int64_t t50 = -276729103890613544LL;
    t50 = (x233^((x234%x235)+x236));
    if (t50 != -9223372034707292161LL) { NG(); } else { ; }
}
void f51(void) {
    static int16_t x237 = INT16_MIN;
    uint32_t x238 = UINT32_MAX;
    uint8_t x239 = 42U;
    uint16_t x240 = 383U;
    t51 = (x237^((x238%x239)+x240));
    if (t51 != 4294934914U) { NG(); } else { ; }
}
void f52(void) {
    volatile uint32_t x241 = 923927565U;
    static uint16_t x243 = UINT16_MAX;
    int8_t x244 = -1;
    static volatile uint32_t t52 = 1025685U;
    t52 = (x241^((x242%x243)+x244));
    if (t52 != 3371039730U) { NG(); } else { ; }
}
void f53(void) {
    uint8_t x245 = UINT8_MAX;
    volatile int64_t x246 = INT64_MIN;
    uint16_t x247 = UINT16_MAX;
    t53 = (x245^((x246%x247)+x248));
    if (t53 != -65281LL) { NG(); } else { ; }
}
void f54(void) {
    int64_t x249 = -147249380987692LL;
    uint32_t x250 = 127563U;
    uint8_t x251 = 100U;
    static int16_t x252 = INT16_MIN;
    volatile int64_t t54 = 57792631LL;
    t54 = (x249^((x250%x251)+x252));
    if (t54 != -147252231508757LL) { NG(); } else { ; }
}
void f55(void) {
    int64_t x253 = INT64_MAX;
    uint16_t x254 = 9U;
    uint16_t x256 = 3U;
    static volatile int64_t t55 = 1LL;
    t55 = (x253^((x254%x255)+x256));
    if (t55 != 9223372036854775804LL) { NG(); } else { ; }
}
void f56(void) {
    static int32_t x257 = INT32_MIN;
    int64_t x258 = -1LL;
    static int8_t x260 = 14;
    static int64_t t56 = -3400007373719LL;
    t56 = (x257^((x258%x259)+x260));
    if (t56 != -2147483635LL) { NG(); } else { ; }
}
void f57(void) {
    static uint64_t x261 = UINT64_MAX;
    uint8_t x262 = UINT8_MAX;
    static uint8_t x264 = 6U;
    t57 = (x261^((x262%x263)+x264));
    if (t57 != 18446744073709551354LLU) { NG(); } else { ; }
}
void f58(void) {
    int16_t x265 = INT16_MIN;
    uint8_t x267 = 6U;
    t58 = (x265^((x266%x267)+x268));
    if (t58 != -32614) { NG(); } else { ; }
}
void f59(void) {
    uint8_t x269 = 3U;
    static uint8_t x270 = UINT8_MAX;
    static uint8_t x272 = 3U;
    volatile int32_t t59 = 9060;
    t59 = (x269^((x270%x271)+x272));
    if (t59 != 59) { NG(); } else { ; }
}
void f60(void) {
    int8_t x273 = INT8_MIN;
    int16_t x274 = -63;
    int32_t x275 = -2529331;
    int32_t t60 = -455161;
    t60 = (x273^((x274%x275)+x276));
    if (t60 != 65) { NG(); } else { ; }
}
void f61(void) {
    volatile int8_t x277 = 1;
    int32_t x278 = INT32_MIN;
    uint16_t x280 = UINT16_MAX;
    volatile int32_t t61 = 96441;
    t61 = (x277^((x278%x279)+x280));
    if (t61 != 65534) { NG(); } else { ; }
}
void f62(void) {
    volatile uint32_t x282 = UINT32_MAX;
    static int8_t x283 = INT8_MIN;
    static int32_t x284 = INT32_MIN;
    uint32_t t62 = 1953960342U;
    t62 = (x281^((x282%x283)+x284));
    if (t62 != 127U) { NG(); } else { ; }
}
void f63(void) {
    int16_t x285 = INT16_MAX;
    int64_t x286 = -2016LL;
    uint16_t x287 = UINT16_MAX;
    volatile int32_t x288 = INT32_MIN;
    int64_t t63 = 3422459834799002LL;
    t63 = (x285^((x286%x287)+x288));
    if (t63 != -2147514401LL) { NG(); } else { ; }
}
void f64(void) {
    int16_t x290 = -1;
    int32_t x292 = INT32_MAX;
    volatile int32_t t64 = INT32_MAX;
    t64 = (x289^((x290%x291)+x292));
    if (t64 != INT32_MAX) { NG(); } else { ; }
}
void f65(void) {
    uint64_t x294 = 4600254603LLU;
    volatile int32_t x295 = -1;
    int64_t x296 = INT64_MIN;
    volatile uint64_t t65 = 7464216187079LLU;
    t65 = (x293^((x294%x295)+x296));
    if (t65 != 4600254603LLU) { NG(); } else { ; }
}
void f66(void) {
    int64_t x297 = INT64_MAX;
    int64_t x298 = INT64_MAX;
    uint8_t x299 = UINT8_MAX;
    int32_t x300 = -5076;
    int64_t t66 = 117477496895LL;
    t66 = (x297^((x298%x299)+x300));
    if (t66 != -9223372036854770860LL) { NG(); } else { ; }
}
void f67(void) {
    uint8_t x301 = 2U;
    volatile int16_t x302 = INT16_MAX;
    uint16_t x303 = UINT16_MAX;
    int16_t x304 = INT16_MAX;
    volatile int32_t t67 = 4741141;
    t67 = (x301^((x302%x303)+x304));
    if (t67 != 65532) { NG(); } else { ; }
}
void f68(void) {
    static uint64_t x305 = 338LLU;
    int16_t x306 = INT16_MIN;
    volatile uint8_t x307 = 1U;
    volatile int16_t x308 = 12;
    volatile uint64_t t68 = 2551369306LLU;
    t68 = (x305^((x306%x307)+x308));
    if (t68 != 350LLU) { NG(); } else { ; }
}
void f69(void) {
    volatile int16_t x309 = INT16_MIN;
    static uint64_t x312 = UINT64_MAX;
    uint64_t t69 = 512148017142073LLU;
    t69 = (x309^((x310%x311)+x312));
    if (t69 != 18446744073709530570LLU) { NG(); } else { ; }
}
void f70(void) {
    int32_t x313 = INT32_MIN;
    static volatile uint16_t x315 = 830U;
    int32_t x316 = INT32_MIN;
    t70 = (x313^((x314%x315)+x316));
    if (t70 != 4294967278LL) { NG(); } else { ; }
}
void f71(void) {
    int8_t x321 = -2;
    int32_t x323 = 76838;
    uint32_t x324 = 49U;
    uint32_t t71 = 217385U;
    t71 = (x321^((x322%x323)+x324));
    if (t71 != 4294901710U) { NG(); } else { ; }
}
void f72(void) {
    static volatile int32_t x325 = -249955;
    int8_t x326 = 0;
    volatile int8_t x327 = 1;
    int32_t t72 = 60;
    t72 = (x325^((x326%x327)+x328));
    if (t72 != 2147233693) { NG(); } else { ; }
}
void f73(void) {
    int32_t x329 = INT32_MIN;
    int16_t x330 = INT16_MIN;
    static int8_t x331 = INT8_MAX;
    static int32_t t73 = 150;
    t73 = (x329^((x330%x331)+x332));
    if (t73 != -2147483647) { NG(); } else { ; }
}
void f74(void) {
    volatile int8_t x333 = INT8_MAX;
    int64_t x334 = 9628LL;
    uint64_t x335 = 434776546617252LLU;
    uint64_t t74 = 25823199LLU;
    t74 = (x333^((x334%x335)+x336));
    if (t74 != 18446744071562077667LLU) { NG(); } else { ; }
}
void f75(void) {
    int8_t x337 = -1;
    static int16_t x339 = INT16_MIN;
    volatile int64_t x340 = INT64_MIN;
    int64_t t75 = 52924LL;
    t75 = (x337^((x338%x339)+x340));
    if (t75 != 9223372036854743040LL) { NG(); } else { ; }
}
void f76(void) {
    int16_t x341 = INT16_MAX;
    volatile uint32_t x342 = UINT32_MAX;
    volatile uint32_t x343 = 1U;
    volatile uint16_t x344 = 0U;
    volatile uint32_t t76 = 23630U;
    t76 = (x341^((x342%x343)+x344));
    if (t76 != 32767U) { NG(); } else { ; }
}
void f77(void) {
    int8_t x347 = INT8_MIN;
    int8_t x348 = -30;
    uint32_t t77 = 1U;
    t77 = (x345^((x346%x347)+x348));
    if (t77 != 272U) { NG(); } else { ; }
}
void f78(void) {
    volatile int32_t x349 = -587330851;
    int64_t x350 = 1LL;
    int32_t x351 = INT32_MAX;
    uint16_t x352 = 157U;
    int64_t t78 = -101740LL;
    t78 = (x349^((x350%x351)+x352));
    if (t78 != -587331005LL) { NG(); } else { ; }
}
void f79(void) {
    int64_t x357 = INT64_MIN;
    int16_t x358 = INT16_MIN;
    static int32_t x359 = -1;
    static int64_t x360 = INT64_MIN;
    volatile int64_t t79 = 988972971932LL;
    t79 = (x357^((x358%x359)+x360));
    if (t79 != 0LL) { NG(); } else { ; }
}
void f80(void) {
    static int16_t x362 = 3605;
    int64_t x363 = INT64_MIN;
    volatile int16_t x364 = -1272;
    volatile int64_t t80 = 14035100494038LL;
    t80 = (x361^((x362%x363)+x364));
    if (t80 != 245370LL) { NG(); } else { ; }
}
void f81(void) {
    volatile uint16_t x365 = 1294U;
    static int64_t x366 = -11008225293943LL;
    uint64_t x367 = 8LLU;
    static volatile uint16_t x368 = UINT16_MAX;
    static uint64_t t81 = 921445LLU;
    t81 = (x365^((x366%x367)+x368));
    if (t81 != 66830LLU) { NG(); } else { ; }
}
void f82(void) {
    static uint16_t x369 = 5U;
    int64_t x370 = -1LL;
    int64_t x371 = INT64_MIN;
    uint8_t x372 = UINT8_MAX;
    int64_t t82 = 2166146397941040LL;
    t82 = (x369^((x370%x371)+x372));
    if (t82 != 251LL) { NG(); } else { ; }
}
void f83(void) {
    uint32_t x381 = UINT32_MAX;
    uint64_t x383 = 161097LLU;
    uint64_t t83 = 4531627877682293135LLU;
    t83 = (x381^((x382%x383)+x384));
    if (t83 != 9223372041149743086LLU) { NG(); } else { ; }
}
void f84(void) {
    int8_t x385 = -1;
    uint64_t x386 = 15159905298LLU;
    uint32_t x387 = 286034632U;
    int64_t x388 = -1LL;
    volatile uint64_t t84 = 1770777210LLU;
    t84 = (x385^((x386%x387)+x388));
    if (t84 != 18446744073709481814LLU) { NG(); } else { ; }
}
void f85(void) {
    int32_t x389 = 124;
    static int32_t x390 = INT32_MAX;
    uint32_t x391 = 15569U;
    volatile uint32_t t85 = 31144U;
    t85 = (x389^((x390%x391)+x392));
    if (t85 != 4702U) { NG(); } else { ; }
}
void f86(void) {
    uint64_t x393 = UINT64_MAX;
    int16_t x394 = -1;
    int32_t x395 = -1;
    static int8_t x396 = INT8_MIN;
    volatile uint64_t t86 = 896574530LLU;
    t86 = (x393^((x394%x395)+x396));
    if (t86 != 127LLU) { NG(); } else { ; }
}
void f87(void) {
    static int64_t x397 = -31508LL;
    static volatile int8_t x399 = -2;
    int32_t x400 = INT32_MIN;
    volatile int64_t t87 = -8604191768391LL;
    t87 = (x397^((x398%x399)+x400));
    if (t87 != 2147452141LL) { NG(); } else { ; }
}
void f88(void) {
    static uint64_t x401 = 133715LLU;
    int16_t x402 = INT16_MAX;
    int64_t x403 = -1LL;
    uint64_t t88 = 517LLU;
    t88 = (x401^((x402%x403)+x404));
    if (t88 != 133725LLU) { NG(); } else { ; }
}
void f89(void) {
    static uint8_t x406 = 2U;
    uint16_t x408 = 49U;
    int32_t t89 = -502794557;
    t89 = (x405^((x406%x407)+x408));
    if (t89 != -79) { NG(); } else { ; }
}
void f90(void) {
    int64_t x409 = -1LL;
    static volatile uint32_t x410 = UINT32_MAX;
    int8_t x411 = -1;
    int64_t x412 = INT64_MIN;
    volatile int64_t t90 = INT64_MAX;
    t90 = (x409^((x410%x411)+x412));
    if (t90 != INT64_MAX) { NG(); } else { ; }
}
/*
 * Auto-generated tests f91-f145. Each fN evaluates tN = (a ^ ((b % c) + d))
 * over mixed-width signed/unsigned operands (exercising C's integer
 * promotions and usual arithmetic conversions) and calls NG() if the result
 * differs from the precomputed expected value. The operands/results and
 * NG() that are not declared locally are file-scope names defined elsewhere
 * in this file. Do not edit expressions or expected constants by hand: the
 * constants are the generated oracle for these exact expressions.
 */
void f91(void) {
    int8_t x413 = -1;
    int32_t x415 = -327;
    int32_t x416 = INT32_MAX;
    int64_t t91 = 2238526225787462LL;
    t91 = (x413^((x414%x415)+x416));
    if (t91 != -2147483642LL) { NG(); } else { ; }
}
void f92(void) {
    int32_t x418 = 3;
    static int16_t x419 = -1;
    uint64_t x420 = UINT64_MAX;
    uint64_t t92 = UINT64_MAX;
    t92 = (x417^((x418%x419)+x420));
    if (t92 != UINT64_MAX) { NG(); } else { ; }
}
void f93(void) {
    int16_t x421 = -1438;
    volatile uint32_t x422 = 799U;
    uint8_t x423 = 28U;
    static uint32_t x424 = UINT32_MAX;
    t93 = (x421^((x422%x423)+x424));
    if (t93 != 4294965868U) { NG(); } else { ; }
}
void f94(void) {
    uint8_t x425 = 117U;
    static uint16_t x426 = 115U;
    static uint16_t x428 = UINT16_MAX;
    volatile int32_t t94 = 7;
    t94 = (x425^((x426%x427)+x428));
    if (t94 != 65620) { NG(); } else { ; }
}
void f95(void) {
    uint64_t x429 = UINT64_MAX;
    int16_t x430 = INT16_MAX;
    static int32_t x431 = -1;
    uint32_t x432 = 210180U;
    volatile uint64_t t95 = 32LLU;
    t95 = (x429^((x430%x431)+x432));
    if (t95 != 18446744073709341435LLU) { NG(); } else { ; }
}
void f96(void) {
    uint16_t x433 = 14U;
    static uint16_t x435 = 1U;
    volatile int32_t x436 = 265339075;
    int32_t t96 = 2393;
    t96 = (x433^((x434%x435)+x436));
    if (t96 != 265339085) { NG(); } else { ; }
}
void f97(void) {
    static volatile int16_t x441 = INT16_MIN;
    int16_t x442 = INT16_MIN;
    volatile uint64_t x443 = UINT64_MAX;
    int32_t x444 = -140;
    volatile uint64_t t97 = 21900257670194LLU;
    t97 = (x441^((x442%x443)+x444));
    if (t97 != 65396LLU) { NG(); } else { ; }
}
void f98(void) {
    int64_t x445 = INT64_MAX;
    int32_t x446 = -1;
    int16_t x448 = 292;
    volatile int64_t t98 = 92037945LL;
    t98 = (x445^((x446%x447)+x448));
    if (t98 != 9223372036854775516LL) { NG(); } else { ; }
}
void f99(void) {
    int8_t x449 = INT8_MIN;
    int64_t x450 = 2059718313LL;
    int32_t x451 = INT32_MAX;
    volatile int64_t t99 = 3LL;
    t99 = (x449^((x450%x451)+x452));
    if (t99 != -2074628545LL) { NG(); } else { ; }
}
void f100(void) {
    uint8_t x453 = 117U;
    int16_t x454 = INT16_MAX;
    uint16_t x455 = UINT16_MAX;
    int8_t x456 = -55;
    volatile int32_t t100 = 256011515;
    t100 = (x453^((x454%x455)+x456));
    if (t100 != 32701) { NG(); } else { ; }
}
void f101(void) {
    int16_t x458 = 0;
    int64_t x459 = INT64_MAX;
    int16_t x460 = -1;
    int64_t t101 = 25365731457680565LL;
    t101 = (x457^((x458%x459)+x460));
    if (t101 != -353692LL) { NG(); } else { ; }
}
void f102(void) {
    static int64_t x462 = 1715786757LL;
    int64_t x463 = INT64_MIN;
    int64_t t102 = 59295681118LL;
    t102 = (x461^((x462%x463)+x464));
    if (t102 != 1715836578LL) { NG(); } else { ; }
}
void f103(void) {
    uint8_t x465 = 7U;
    uint64_t x466 = 7185117968684LLU;
    static int64_t x467 = -1LL;
    static uint32_t x468 = 15437U;
    uint64_t t103 = 182567638450894LLU;
    t103 = (x465^((x466%x467)+x468));
    if (t103 != 7185117984126LLU) { NG(); } else { ; }
}
void f104(void) {
    uint8_t x471 = 1U;
    uint64_t x472 = 15036LLU;
    static volatile uint64_t t104 = 19528721285382LLU;
    t104 = (x469^((x470%x471)+x472));
    if (t104 != 18446744073709536579LLU) { NG(); } else { ; }
}
void f105(void) {
    volatile int64_t x473 = -1LL;
    int8_t x475 = -1;
    static int32_t x476 = INT32_MIN;
    t105 = (x473^((x474%x475)+x476));
    if (t105 != 2076126516LLU) { NG(); } else { ; }
}
void f106(void) {
    static int8_t x477 = INT8_MAX;
    int16_t x478 = INT16_MAX;
    uint32_t x479 = 202049602U;
    static uint16_t x480 = 5U;
    volatile uint32_t t106 = 5875212U;
    t106 = (x477^((x478%x479)+x480));
    if (t106 != 32891U) { NG(); } else { ; }
}
void f107(void) {
    int64_t x482 = -1LL;
    int8_t x483 = INT8_MAX;
    int8_t x484 = -1;
    int64_t t107 = 119LL;
    t107 = (x481^((x482%x483)+x484));
    if (t107 != -32767LL) { NG(); } else { ; }
}
void f108(void) {
    int64_t x485 = -1LL;
    int8_t x486 = -1;
    int32_t x487 = 1297813;
    t108 = (x485^((x486%x487)+x488));
    if (t108 != 1LL) { NG(); } else { ; }
}
void f109(void) {
    uint32_t x492 = 11U;
    static volatile int64_t t109 = 313LL;
    t109 = (x489^((x490%x491)+x492));
    if (t109 != -44LL) { NG(); } else { ; }
}
void f110(void) {
    int64_t x493 = INT64_MIN;
    int16_t x494 = -2930;
    static int32_t x495 = INT32_MIN;
    int64_t x496 = INT64_MAX;
    t110 = (x493^((x494%x495)+x496));
    if (t110 != -2931LL) { NG(); } else { ; }
}
void f111(void) {
    uint8_t x497 = 12U;
    volatile int8_t x500 = INT8_MIN;
    int32_t t111 = -53;
    t111 = (x497^((x498%x499)+x500));
    if (t111 != -141) { NG(); } else { ; }
}
void f112(void) {
    int32_t x501 = INT32_MIN;
    int8_t x502 = 10;
    uint8_t x503 = 28U;
    static volatile int32_t t112 = 962550;
    t112 = (x501^((x502%x503)+x504));
    if (t112 != -2147483383) { NG(); } else { ; }
}
void f113(void) {
    static uint16_t x506 = 675U;
    volatile int8_t x507 = INT8_MIN;
    t113 = (x505^((x506%x507)+x508));
    if (t113 != 495U) { NG(); } else { ; }
}
void f114(void) {
    static uint16_t x510 = 71U;
    int32_t t114 = -1;
    t114 = (x509^((x510%x511)+x512));
    if (t114 != 71) { NG(); } else { ; }
}
void f115(void) {
    volatile uint16_t x516 = 12U;
    int64_t t115 = -1528LL;
    t115 = (x513^((x514%x515)+x516));
    if (t115 != -116LL) { NG(); } else { ; }
}
void f116(void) {
    static int16_t x517 = INT16_MIN;
    volatile uint64_t x518 = UINT64_MAX;
    uint64_t x519 = UINT64_MAX;
    uint32_t x520 = 69394U;
    t116 = (x517^((x518%x519)+x520));
    if (t116 != 18446744073709457170LLU) { NG(); } else { ; }
}
void f117(void) {
    volatile uint8_t x521 = 1U;
    volatile int16_t x522 = INT16_MAX;
    int8_t x523 = -1;
    static int8_t x524 = 1;
    static volatile int32_t t117 = -4;
    t117 = (x521^((x522%x523)+x524));
    if (t117 != 0) { NG(); } else { ; }
}
void f118(void) {
    uint8_t x526 = 0U;
    int32_t x527 = INT32_MIN;
    volatile int32_t x528 = INT32_MIN;
    int32_t t118 = -258166585;
    t118 = (x525^((x526%x527)+x528));
    if (t118 != -2147483628) { NG(); } else { ; }
}
void f119(void) {
    uint64_t x529 = UINT64_MAX;
    int16_t x531 = 589;
    volatile int64_t x532 = 2388884597LL;
    static volatile uint64_t t119 = 136460296456LLU;
    t119 = (x529^((x530%x531)+x532));
    if (t119 != 18446744071320666995LLU) { NG(); } else { ; }
}
void f120(void) {
    static volatile int64_t x534 = 494LL;
    volatile int64_t t120 = -479184LL;
    t120 = (x533^((x534%x535)+x536));
    if (t120 != 0LL) { NG(); } else { ; }
}
void f121(void) {
    int64_t x537 = INT64_MAX;
    int16_t x538 = -127;
    static int8_t x540 = -7;
    volatile int64_t t121 = 5LL;
    t121 = (x537^((x538%x539)+x540));
    if (t121 != -9223372036854775675LL) { NG(); } else { ; }
}
void f122(void) {
    static int32_t x541 = -1;
    static int32_t x542 = -9;
    int64_t x543 = 45646409063LL;
    uint32_t x544 = UINT32_MAX;
    int64_t t122 = 294289LL;
    t122 = (x541^((x542%x543)+x544));
    if (t122 != -4294967287LL) { NG(); } else { ; }
}
void f123(void) {
    volatile int32_t x546 = INT32_MIN;
    uint8_t x547 = UINT8_MAX;
    uint64_t x548 = UINT64_MAX;
    uint64_t t123 = 8174773704487LLU;
    t123 = (x545^((x546%x547)+x548));
    if (t123 != 255LLU) { NG(); } else { ; }
}
void f124(void) {
    volatile uint64_t x549 = 2878422444LLU;
    uint64_t x550 = 111LLU;
    uint16_t x551 = 1U;
    volatile uint8_t x552 = 0U;
    static uint64_t t124 = 7048433LLU;
    t124 = (x549^((x550%x551)+x552));
    if (t124 != 2878422444LLU) { NG(); } else { ; }
}
void f125(void) {
    int32_t x561 = -1;
    int16_t x562 = -1;
    uint8_t x563 = 58U;
    int8_t x564 = INT8_MIN;
    volatile int32_t t125 = 11535193;
    t125 = (x561^((x562%x563)+x564));
    if (t125 != 128) { NG(); } else { ; }
}
void f126(void) {
    static int32_t x565 = INT32_MIN;
    static volatile int64_t x566 = -1LL;
    int32_t x567 = INT32_MAX;
    int8_t x568 = -1;
    int64_t t126 = 3509493799LL;
    t126 = (x565^((x566%x567)+x568));
    if (t126 != 2147483646LL) { NG(); } else { ; }
}
void f127(void) {
    volatile int8_t x570 = 1;
    volatile int64_t x571 = -2426095980473083LL;
    int32_t x572 = -1847324;
    int64_t t127 = 110LL;
    t127 = (x569^((x570%x571)+x572));
    if (t127 != 9223372036852928485LL) { NG(); } else { ; }
}
void f128(void) {
    int64_t x573 = -1LL;
    uint64_t x574 = 3766442808LLU;
    int64_t x576 = INT64_MAX;
    volatile uint64_t t128 = 494LLU;
    t128 = (x573^((x574%x575)+x576));
    if (t128 != 9223372033088333000LLU) { NG(); } else { ; }
}
void f129(void) {
    uint64_t x582 = 552LLU;
    static int8_t x583 = INT8_MAX;
    uint64_t x584 = 7487847096670156303LLU;
    volatile uint64_t t129 = 87305LLU;
    t129 = (x581^((x582%x583)+x584));
    if (t129 != 7487847096670156395LLU) { NG(); } else { ; }
}
void f130(void) {
    volatile int32_t x585 = INT32_MIN;
    int8_t x586 = INT8_MAX;
    volatile uint32_t x588 = 471609U;
    volatile uint32_t t130 = 8773137U;
    t130 = (x585^((x586%x587)+x588));
    if (t130 != 2147955384U) { NG(); } else { ; }
}
void f131(void) {
    int16_t x589 = INT16_MIN;
    int32_t x590 = -2;
    volatile int16_t x591 = 1271;
    int8_t x592 = INT8_MIN;
    volatile int32_t t131 = -11610;
    t131 = (x589^((x590%x591)+x592));
    if (t131 != 32638) { NG(); } else { ; }
}
void f132(void) {
    int16_t x594 = -4;
    int32_t x595 = INT32_MIN;
    volatile int8_t x596 = -2;
    int32_t t132 = 6;
    t132 = (x593^((x594%x595)+x596));
    if (t132 != 2147483642) { NG(); } else { ; }
}
void f133(void) {
    uint64_t x597 = 473998LLU;
    int8_t x598 = -16;
    volatile int8_t x599 = -1;
    int32_t x600 = -1;
    t133 = (x597^((x598%x599)+x600));
    if (t133 != 18446744073709077617LLU) { NG(); } else { ; }
}
void f134(void) {
    int16_t x601 = -1;
    volatile int64_t x602 = INT64_MIN;
    volatile int8_t x603 = -15;
    int64_t x604 = -1LL;
    t134 = (x601^((x602%x603)+x604));
    if (t134 != 8LL) { NG(); } else { ; }
}
void f135(void) {
    int32_t x605 = INT32_MIN;
    uint64_t x606 = 4427731LLU;
    int64_t x607 = -332051122098989LL;
    t135 = (x605^((x606%x607)+x608));
    if (t135 != 18446744071566495826LLU) { NG(); } else { ; }
}
void f136(void) {
    static uint64_t x610 = UINT64_MAX;
    uint64_t x612 = 12176LLU;
    uint64_t t136 = 76007526880262LLU;
    t136 = (x609^((x610%x611)+x612));
    if (t136 != 2147471460LLU) { NG(); } else { ; }
}
void f137(void) {
    uint8_t x613 = UINT8_MAX;
    int64_t t137 = 226LL;
    t137 = (x613^((x614%x615)+x616));
    if (t137 != 4294967532LL) { NG(); } else { ; }
}
void f138(void) {
    int32_t x617 = -1;
    uint16_t x618 = 0U;
    int64_t x619 = INT64_MIN;
    int64_t x620 = INT64_MIN;
    int64_t t138 = INT64_MAX;
    t138 = (x617^((x618%x619)+x620));
    if (t138 != INT64_MAX) { NG(); } else { ; }
}
void f139(void) {
    volatile int64_t x621 = INT64_MIN;
    int32_t x622 = INT32_MIN;
    int32_t x623 = INT32_MIN;
    static int32_t x624 = -1;
    volatile int64_t t139 = INT64_MAX;
    t139 = (x621^((x622%x623)+x624));
    if (t139 != INT64_MAX) { NG(); } else { ; }
}
void f140(void) {
    uint16_t x625 = 29U;
    int16_t x626 = -1;
    int64_t x627 = INT64_MIN;
    uint8_t x628 = 10U;
    t140 = (x625^((x626%x627)+x628));
    if (t140 != 20LL) { NG(); } else { ; }
}
void f141(void) {
    int16_t x630 = INT16_MIN;
    static uint32_t x631 = 2996499U;
    volatile int32_t x632 = -1;
    volatile uint32_t t141 = 5U;
    t141 = (x629^((x630%x631)+x632));
    if (t141 != 951457U) { NG(); } else { ; }
}
void f142(void) {
    int8_t x637 = INT8_MIN;
    int32_t x638 = -61024;
    int16_t x639 = 52;
    int16_t x640 = 2;
    int32_t t142 = 1747;
    t142 = (x637^((x638%x639)+x640));
    if (t142 != 102) { NG(); } else { ; }
}
void f143(void) {
    volatile int16_t x641 = 5;
    int8_t x642 = -1;
    volatile uint16_t x643 = 3351U;
    int32_t x644 = INT32_MAX;
    int32_t t143 = -475000933;
    t143 = (x641^((x642%x643)+x644));
    if (t143 != 2147483643) { NG(); } else { ; }
}
void f144(void) {
    int64_t x645 = INT64_MIN;
    int32_t x646 = INT32_MAX;
    int64_t x647 = INT64_MAX;
    uint64_t x648 = 569317LLU;
    volatile uint64_t t144 = 3551610789934349LLU;
    t144 = (x645^((x646%x647)+x648));
    if (t144 != 9223372039002828772LLU) { NG(); } else { ; }
}
void f145(void) {
    static int8_t x650 = 12;
    static int64_t x651 = INT64_MIN;
    int16_t x652 = -1;
    t145 = (x649^((x650%x651)+x652));
    if (t145 != 244LL) { NG(); } else { ; }
}
void f146(void) {
int64_t x653 = INT64_MIN;
int8_t x655 = INT8_MIN;
volatile uint64_t t146 = 1618230413585LLU;
t146 = (x653^((x654%x655)+x656));
if (t146 != 9223372036861180792LLU) { NG(); } else { ; }
}
void f147(void) {
uint32_t x657 = UINT32_MAX;
volatile int16_t x658 = -1;
int8_t x659 = -13;
uint8_t x660 = UINT8_MAX;
t147 = (x657^((x658%x659)+x660));
if (t147 != 4294967041U) { NG(); } else { ; }
}
void f148(void) {
int16_t x661 = -1;
volatile int16_t x662 = INT16_MAX;
int64_t x663 = -1LL;
volatile int64_t t148 = INT64_MAX;
t148 = (x661^((x662%x663)+x664));
if (t148 != INT64_MAX) { NG(); } else { ; }
}
void f149(void) {
int8_t x666 = INT8_MIN;
volatile uint16_t x667 = UINT16_MAX;
int8_t x668 = -54;
int32_t t149 = 248584;
t149 = (x665^((x666%x667)+x668));
if (t149 != -175) { NG(); } else { ; }
}
void f150(void) {
int32_t x669 = 36311;
int16_t x670 = INT16_MAX;
int16_t x671 = INT16_MAX;
static volatile int16_t x672 = -1;
volatile int32_t t150 = 665;
t150 = (x669^((x670%x671)+x672));
if (t150 != -36312) { NG(); } else { ; }
}
void f151(void) {
uint8_t x673 = 5U;
static volatile int16_t x674 = -1;
int64_t x675 = -1LL;
uint8_t x676 = 87U;
int64_t t151 = -1LL;
t151 = (x673^((x674%x675)+x676));
if (t151 != 82LL) { NG(); } else { ; }
}
void f152(void) {
uint8_t x678 = 37U;
volatile int32_t x679 = INT32_MIN;
uint32_t t152 = 448158U;
t152 = (x677^((x678%x679)+x680));
if (t152 != 22309922U) { NG(); } else { ; }
}
void f153(void) {
uint8_t x682 = 3U;
int8_t x683 = -1;
static volatile int8_t x684 = INT8_MIN;
volatile int64_t t153 = 11298945381876476LL;
t153 = (x681^((x682%x683)+x684));
if (t153 != 9223372036854775680LL) { NG(); } else { ; }
}
void f154(void) {
int8_t x685 = INT8_MIN;
int8_t x686 = 22;
static uint16_t x687 = 7U;
int32_t t154 = 6847;
t154 = (x685^((x686%x687)+x688));
if (t154 != -117) { NG(); } else { ; }
}
void f155(void) {
int16_t x689 = -1;
int32_t x690 = INT32_MAX;
int16_t x691 = -1848;
int32_t x692 = INT32_MIN;
volatile int32_t t155 = -33750617;
t155 = (x689^((x690%x691)+x692));
if (t155 != 2147483184) { NG(); } else { ; }
}
void f156(void) {
volatile int32_t x693 = 225258538;
uint16_t x694 = 5927U;
volatile int32_t x696 = -250;
volatile int32_t t156 = 40;
t156 = (x693^((x694%x695)+x696));
if (t156 != -225258751) { NG(); } else { ; }
}
void f157(void) {
static uint32_t x698 = UINT32_MAX;
static int8_t x699 = INT8_MAX;
uint32_t t157 = 1108755U;
t157 = (x697^((x698%x699)+x700));
if (t157 != 14U) { NG(); } else { ; }
}
void f158(void) {
int32_t x701 = 586528;
static int8_t x702 = INT8_MIN;
volatile uint32_t x703 = 645U;
int64_t x704 = INT64_MIN;
t158 = (x701^((x702%x703)+x704));
if (t158 != -9223372036854189771LL) { NG(); } else { ; }
}
void f159(void) {
static int32_t x705 = -8064004;
static int64_t x706 = -68079LL;
int16_t x708 = INT16_MIN;
volatile int64_t t159 = -67028LL;
t159 = (x705^((x706%x707)+x708));
if (t159 != 8029677LL) { NG(); } else { ; }
}
void f160(void) {
static int16_t x713 = -5549;
int8_t x716 = 10;
static volatile int64_t t160 = 438334521015LL;
t160 = (x713^((x714%x715)+x716));
if (t160 != -5542LL) { NG(); } else { ; }
}
void f161(void) {
volatile uint32_t x717 = UINT32_MAX;
uint32_t x718 = UINT32_MAX;
static uint64_t x719 = 100619059398714LLU;
volatile int64_t x720 = 281263502571446LL;
uint64_t t161 = 29539402LLU;
t161 = (x717^((x718%x719)+x720));
if (t161 != 281265544054858LLU) { NG(); } else { ; }
}
void f162(void) {
int8_t x721 = INT8_MAX;
uint64_t x722 = 25511554LLU;
volatile uint8_t x723 = 7U;
int8_t x724 = 0;
t162 = (x721^((x722%x723)+x724));
if (t162 != 122LLU) { NG(); } else { ; }
}
void f163(void) {
int16_t x725 = INT16_MIN;
int32_t x726 = INT32_MIN;
volatile uint64_t t163 = 599035LLU;
t163 = (x725^((x726%x727)+x728));
if (t163 != 18446744073709444258LLU) { NG(); } else { ; }
}
void f164(void) {
static uint8_t x729 = UINT8_MAX;
int8_t x730 = 19;
int16_t x731 = INT16_MAX;
t164 = (x729^((x730%x731)+x732));
if (t164 != 226U) { NG(); } else { ; }
}
void f165(void) {
int32_t x737 = INT32_MAX;
uint8_t x738 = UINT8_MAX;
int8_t x739 = INT8_MAX;
int64_t x740 = INT64_MIN;
int64_t t165 = 420806934902247408LL;
t165 = (x737^((x738%x739)+x740));
if (t165 != -9223372034707292162LL) { NG(); } else { ; }
}
void f166(void) {
int64_t x741 = INT64_MIN;
volatile int64_t t166 = -1137LL;
t166 = (x741^((x742%x743)+x744));
if (t166 != 9223372036854775788LL) { NG(); } else { ; }
}
void f167(void) {
volatile uint16_t x745 = 2U;
volatile uint32_t x746 = 812608U;
int16_t x748 = INT16_MIN;
t167 = (x745^((x746%x747)+x748));
if (t167 != 779842U) { NG(); } else { ; }
}
void f168(void) {
uint64_t x750 = 6848437675037263LLU;
int16_t x751 = -1;
int8_t x752 = -1;
static uint64_t t168 = 715308237540856LLU;
t168 = (x749^((x750%x751)+x752));
if (t168 != 18439895636034514353LLU) { NG(); } else { ; }
}
void f169(void) {
uint64_t x754 = 3085255938LLU;
static volatile uint32_t x756 = 1995789U;
t169 = (x753^((x754%x755)+x756));
if (t169 != 3087251726LLU) { NG(); } else { ; }
}
void f170(void) {
uint16_t x757 = UINT16_MAX;
uint32_t x758 = UINT32_MAX;
uint8_t x759 = UINT8_MAX;
t170 = (x757^((x758%x759)+x760));
if (t170 != 4294901760U) { NG(); } else { ; }
}
void f171(void) {
static int8_t x761 = INT8_MIN;
volatile int16_t x762 = -1;
uint16_t x764 = 1166U;
t171 = (x761^((x762%x763)+x764));
if (t171 != -1267) { NG(); } else { ; }
}
void f172(void) {
int32_t x765 = INT32_MIN;
int64_t x766 = INT64_MIN;
int8_t x767 = 5;
uint32_t x768 = 45116U;
volatile int64_t t172 = 103187629989863853LL;
t172 = (x765^((x766%x767)+x768));
if (t172 != -2147438535LL) { NG(); } else { ; }
}
void f173(void) {
int8_t x769 = 3;
int32_t x770 = -378604;
int8_t x771 = -13;
int16_t x772 = -1;
static volatile int32_t t173 = -10323284;
t173 = (x769^((x770%x771)+x772));
if (t173 != -7) { NG(); } else { ; }
}
void f174(void) {
static uint32_t x773 = 117203U;
volatile uint64_t x775 = UINT64_MAX;
int32_t x776 = -1;
uint64_t t174 = 100606860975LLU;
t174 = (x773^((x774%x775)+x776));
if (t174 != 117037LLU) { NG(); } else { ; }
}
void f175(void) {
int32_t x778 = INT32_MIN;
int32_t x780 = INT32_MIN;
volatile int64_t t175 = -780LL;
t175 = (x777^((x778%x779)+x780));
if (t175 != -2142500381LL) { NG(); } else { ; }
}
void f176(void) {
int32_t x781 = INT32_MAX;
int8_t x784 = INT8_MIN;
uint64_t t176 = 2125761806LLU;
t176 = (x781^((x782%x783)+x784));
if (t176 != 3439155771LLU) { NG(); } else { ; }
}
void f177(void) {
int8_t x785 = INT8_MIN;
int16_t x786 = 1;
int32_t x787 = -601;
static uint64_t x788 = 38557178105642843LLU;
volatile uint64_t t177 = 795LLU;
t177 = (x785^((x786%x787)+x788));
if (t177 != 18408186895603908828LLU) { NG(); } else { ; }
}
void f178(void) {
uint32_t x789 = UINT32_MAX;
uint8_t x790 = 9U;
uint16_t x791 = UINT16_MAX;
volatile int64_t x792 = 208046LL;
volatile int64_t t178 = 44LL;
t178 = (x789^((x790%x791)+x792));
if (t178 != 4294759240LL) { NG(); } else { ; }
}
void f179(void) {
int32_t x793 = INT32_MIN;
volatile int8_t x794 = INT8_MAX;
int64_t x795 = 1005373928729763781LL;
t179 = (x793^((x794%x795)+x796));
if (t179 != 183584402582896LL) { NG(); } else { ; }
}
void f180(void) {
volatile int32_t x797 = INT32_MAX;
int32_t x798 = -1;
int8_t x800 = 1;
volatile int32_t t180 = INT32_MAX;
t180 = (x797^((x798%x799)+x800));
if (t180 != INT32_MAX) { NG(); } else { ; }
}
void f181(void) {
static volatile int8_t x801 = 6;
volatile uint8_t x802 = 3U;
int32_t x803 = -13282;
int32_t x804 = INT32_MIN;
int32_t t181 = -1;
t181 = (x801^((x802%x803)+x804));
if (t181 != -2147483643) { NG(); } else { ; }
}
void f182(void) {
int32_t x805 = -1;
static int64_t x806 = INT64_MIN;
uint64_t x808 = 1636544LLU;
volatile uint64_t t182 = 6007610861399348542LLU;
t182 = (x805^((x806%x807)+x808));
if (t182 != 18446744073707915079LLU) { NG(); } else { ; }
}
void f183(void) {
volatile int8_t x809 = -1;
static uint64_t x810 = 364940305LLU;
static uint64_t x811 = UINT64_MAX;
int32_t x812 = -254408;
uint64_t t183 = 3286796139678522LLU;
t183 = (x809^((x810%x811)+x812));
if (t183 != 18446744073344865718LLU) { NG(); } else { ; }
}
void f184(void) {
int64_t x814 = INT64_MIN;
int64_t x816 = -1LL;
t184 = (x813^((x814%x815)+x816));
if (t184 != 8884211821100674LL) { NG(); } else { ; }
}
void f185(void) {
int8_t x817 = INT8_MAX;
uint16_t x818 = UINT16_MAX;
uint32_t x819 = 9U;
int8_t x820 = -1;
uint32_t t185 = 48U;
t185 = (x817^((x818%x819)+x820));
if (t185 != 122U) { NG(); } else { ; }
}
void f186(void) {
volatile int64_t x821 = -1962038114446990LL;
uint64_t x822 = 22769682LLU;
int16_t x823 = -1;
volatile int32_t x824 = -817098399;
volatile uint64_t t186 = 143LLU;
t186 = (x821^((x822%x823)+x824));
if (t186 != 1962038628749313LLU) { NG(); } else { ; }
}
void f187(void) {
int64_t x829 = INT64_MIN;
int16_t x830 = -1;
int64_t x832 = INT64_MAX;
int64_t t187 = -428307LL;
t187 = (x829^((x830%x831)+x832));
if (t187 != -2LL) { NG(); } else { ; }
}
void f188(void) {
int16_t x833 = INT16_MIN;
volatile int32_t x834 = 175515;
int64_t x835 = INT64_MIN;
static uint16_t x836 = UINT16_MAX;
int64_t t188 = 467079876601LL;
t188 = (x833^((x834%x835)+x836));
if (t188 != -250470LL) { NG(); } else { ; }
}
void f189(void) {
int64_t x837 = -1LL;
static volatile int8_t x839 = -10;
uint8_t x840 = UINT8_MAX;
int64_t t189 = 86254895LL;
t189 = (x837^((x838%x839)+x840));
if (t189 != -248641LL) { NG(); } else { ; }
}
void f190(void) {
static int32_t x841 = INT32_MAX;
static uint8_t x842 = 29U;
int32_t x843 = INT32_MIN;
uint8_t x844 = UINT8_MAX;
volatile int32_t t190 = 737190479;
t190 = (x841^((x842%x843)+x844));
if (t190 != 2147483363) { NG(); } else { ; }
}
void f191(void) {
uint16_t x849 = 1636U;
int32_t x850 = INT32_MIN;
static uint8_t x852 = 0U;
int32_t t191 = 2000;
t191 = (x849^((x850%x851)+x852));
if (t191 != 1636) { NG(); } else { ; }
}
void f192(void) {
int64_t x853 = -31572LL;
volatile int64_t x854 = -1LL;
uint32_t x855 = 597522U;
volatile int8_t x856 = INT8_MIN;
int64_t t192 = -2848144929LL;
t192 = (x853^((x854%x855)+x856));
if (t192 != 31699LL) { NG(); } else { ; }
}
void f193(void) {
volatile uint16_t x857 = UINT16_MAX;
volatile int16_t x858 = INT16_MIN;
int16_t x859 = -52;
int32_t t193 = -221999;
t193 = (x857^((x858%x859)+x860));
if (t193 != 186793) { NG(); } else { ; }
}
void f194(void) {
uint32_t x861 = 28054U;
int64_t x863 = -94427LL;
static int16_t x864 = 0;
volatile int64_t t194 = -14101472464721888LL;
t194 = (x861^((x862%x863)+x864));
if (t194 != -28055LL) { NG(); } else { ; }
}
void f195(void) {
static uint8_t x870 = 46U;
uint32_t x871 = 8344U;
volatile int32_t x872 = INT32_MIN;
uint32_t t195 = 5173429U;
t195 = (x869^((x870%x871)+x872));
if (t195 != 2147516369U) { NG(); } else { ; }
}
void f196(void) {
int16_t x873 = INT16_MIN;
int8_t x874 = INT8_MIN;
uint64_t x875 = 667033134LLU;
int8_t x876 = INT8_MIN;
t196 = (x873^((x874%x875)+x876));
if (t196 != 18446744073318328320LLU) { NG(); } else { ; }
}
void f197(void) {
volatile int64_t x878 = INT64_MIN;
int16_t x879 = INT16_MAX;
int32_t x880 = -1;
static int64_t t197 = 2970LL;
t197 = (x877^((x878%x879)+x880));
if (t197 != 1111LL) { NG(); } else { ; }
}
void f198(void) {
int8_t x881 = INT8_MIN;
static int8_t x882 = INT8_MIN;
static volatile int32_t x883 = INT32_MAX;
int8_t x884 = INT8_MAX;
t198 = (x881^((x882%x883)+x884));
if (t198 != 127) { NG(); } else { ; }
}
void f199(void) {
uint32_t x885 = 124U;
int16_t x887 = INT16_MAX;
static volatile uint32_t x888 = 2866U;
t199 = (x885^((x886%x887)+x888));
if (t199 != 2825LLU) { NG(); } else { ; }
}
int main(void) {
f0();
f1();
f2();
f3();
f4();
f5();
f6();
f7();
f8();
f9();
f10();
f11();
f12();
f13();
f14();
f15();
f16();
f17();
f18();
f19();
f20();
f21();
f22();
f23();
f24();
f25();
f26();
f27();
f28();
f29();
f30();
f31();
f32();
f33();
f34();
f35();
f36();
f37();
f38();
f39();
f40();
f41();
f42();
f43();
f44();
f45();
f46();
f47();
f48();
f49();
f50();
f51();
f52();
f53();
f54();
f55();
f56();
f57();
f58();
f59();
f60();
f61();
f62();
f63();
f64();
f65();
f66();
f67();
f68();
f69();
f70();
f71();
f72();
f73();
f74();
f75();
f76();
f77();
f78();
f79();
f80();
f81();
f82();
f83();
f84();
f85();
f86();
f87();
f88();
f89();
f90();
f91();
f92();
f93();
f94();
f95();
f96();
f97();
f98();
f99();
f100();
f101();
f102();
f103();
f104();
f105();
f106();
f107();
f108();
f109();
f110();
f111();
f112();
f113();
f114();
f115();
f116();
f117();
f118();
f119();
f120();
f121();
f122();
f123();
f124();
f125();
f126();
f127();
f128();
f129();
f130();
f131();
f132();
f133();
f134();
f135();
f136();
f137();
f138();
f139();
f140();
f141();
f142();
f143();
f144();
f145();
f146();
f147();
f148();
f149();
f150();
f151();
f152();
f153();
f154();
f155();
f156();
f157();
f158();
f159();
f160();
f161();
f162();
f163();
f164();
f165();
f166();
f167();
f168();
f169();
f170();
f171();
f172();
f173();
f174();
f175();
f176();
f177();
f178();
f179();
f180();
f181();
f182();
f183();
f184();
f185();
f186();
f187();
f188();
f189();
f190();
f191();
f192();
f193();
f194();
f195();
f196();
f197();
f198();
f199();
return 0;
}
|
cmri/BC-BSP | src/java/com/chinamobile/bcbsp/sync/GeneralSSController.java | <gh_stars>1-10
/**
* CopyRight by Chinamobile
*
* GeneralSSController.java
*/
package com.chinamobile.bcbsp.sync;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.logging.LogFactory;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.ZooDefs.Ids;
import org.apache.zookeeper.data.Stat;
import org.mortbay.log.Log;
import com.chinamobile.bcbsp.BSPConfiguration;
import com.chinamobile.bcbsp.Constants;
import com.chinamobile.bcbsp.bspcontroller.JobInProgressControlInterface;
import com.chinamobile.bcbsp.util.BSPJobID;
/**
* GeneralSSController
*
* GeneralSSController for completing the general SuperStep synchronization
* control. This class is connected to JobInProgress.
*
* @author
* @version
*/
public class GeneralSSController implements Watcher,
GeneralSSControllerInterface {
private static final org.apache.commons.logging.Log LOG = LogFactory.getLog(GeneralSSController.class);
private BSPConfiguration conf;
private JobInProgressControlInterface jip;
private BSPJobID jobId;
private int superStepCounter = 0;
private int faultSuperStepCounter = 0;
private int checkNumBase;
private ZooKeeper zk = null;
private final String zookeeperAddr;
private final String bspZKRoot;
private volatile Integer mutex = 0;
private int stageFlag = 1;
private ZooKeeperRun zkRun = new ZooKeeperRun();
public class ZooKeeperRun extends Thread {
public void startNextSuperStep(SuperStepCommand ssc) throws Exception {
int nextSuperStep = ssc.getNextSuperStepNum();
jip.reportLOG(jobId.toString()
+ "the next superstepnum is : "
+ nextSuperStep);
Stat s = null;
s = zk.exists(bspZKRoot + "/" + jobId.toString().substring(17)
+ "-ss" + "/" + nextSuperStep, false);
if (s == null) {
zk.create(bspZKRoot + "/" + jobId.toString().substring(17)
+ "-ss" + "/" + nextSuperStep, new byte[0],
Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
} else {
jip.reportLOG("The node hash exists"
+ bspZKRoot + "/" + jobId.toString().substring(17)
+ "-ss" + "/" + nextSuperStep);
List<String> tmpList = new ArrayList<String>();
Stat tmpStat = null;
tmpList = zk.getChildren(bspZKRoot + "/"
+ jobId.toString().substring(17) + "-ss" + "/"
+ nextSuperStep, false);
for (String e : tmpList) {
tmpStat = zk.exists(bspZKRoot + "/"
+ jobId.toString().substring(17) + "-ss" + "/"
+ nextSuperStep + "/" + e, false);
zk.delete(bspZKRoot + "/" + jobId.toString().substring(17)
+ "-ss" + "/" + nextSuperStep + "/" + e, tmpStat
.getAversion());
}
}
s = zk.exists(bspZKRoot + "/" + jobId.toString().substring(17)
+ "-sc" + "/" + nextSuperStep, false);
if (s == null) {
zk.create(bspZKRoot + "/" + jobId.toString().substring(17)
+ "-sc" + "/" + nextSuperStep, new byte[0],
Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
} else {
List<String> tmpList = new ArrayList<String>();
Stat tmpStat = null;
tmpList = zk.getChildren(bspZKRoot + "/"
+ jobId.toString().substring(17) + "-sc" + "/"
+ nextSuperStep, false);
for (String e : tmpList) {
tmpStat = zk.exists(bspZKRoot + "/"
+ jobId.toString().substring(17) + "-sc" + "/"
+ nextSuperStep + "/" + e, false);
zk.delete(bspZKRoot + "/" + jobId.toString().substring(17)
+ "-sc" + "/" + nextSuperStep + "/" + e, tmpStat
.getAversion());
}
}
zk.create(bspZKRoot + "/" + jobId.toString().substring(17) + "-sc"
+ "/" + superStepCounter + "/" + Constants.COMMAND_NAME,
ssc.toString().getBytes(), Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT);
jip.reportLOG(jobId.toString() + " command of next is "
+ ssc.toString());
jip.reportLOG(jobId.toString() + " [Write Command Path] "
+ bspZKRoot + "/" + jobId.toString().substring(17) + "-sc"
+ "/" + superStepCounter + "/" + Constants.COMMAND_NAME);
jip.reportLOG(jobId.toString() + " leave the barrier of "
+ superStepCounter);
}
public void stopNextSuperStep(String command) throws Exception {
zk.create(bspZKRoot + "/" + jobId.toString().substring(17) + "-sc"
+ "/" + superStepCounter + "/" + Constants.COMMAND_NAME,
command.getBytes(), Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT);
jip.reportLOG(jobId.toString() + " command of next is " + command);
jip.reportLOG(jobId.toString() + " prepare to quit");
}
public void cleanReadHistory(int ableCheckPoint) {
List<String> tmpList = new ArrayList<String>();
Stat tmpStat = null;
try {
tmpList = zk.getChildren(bspZKRoot + "/"
+ jobId.toString().substring(17) + "-ss" + "/"
+ ableCheckPoint, false);
for (String e : tmpList) {
tmpStat = zk.exists(bspZKRoot + "/"
+ jobId.toString().substring(17) + "-ss" + "/"
+ ableCheckPoint + "/" + e, false);
zk.delete(bspZKRoot + "/" + jobId.toString().substring(17)
+ "-ss" + "/" + ableCheckPoint + "/" + e, tmpStat
.getAversion());
jip.reportLOG("The node hash exists"
+ bspZKRoot + "/" + jobId.toString().substring(17)
+ "-ss" + "/" + ableCheckPoint + "/" + e);
}
} catch (Exception exc) {
jip.reportLOG(jobId.toString() + " [cleanReadHistory]" + exc.getMessage());
}
}
/**
* This is a thread and execute the logic control
*/
public void run() {
Stat s = null;
boolean jobEndFlag = true;
// create the directory for the 0th SuperStep
try {
s = zk.exists(bspZKRoot + "/" + jobId.toString().substring(17)
+ "-ss" + "/" + superStepCounter, false);
if (s == null) {
zk.create(bspZKRoot + "/" + jobId.toString().substring(17)
+ "-ss" + "/" + superStepCounter, new byte[0],
Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
}
s = zk.exists(bspZKRoot + "/" + jobId.toString().substring(17)
+ "-sc" + "/" + superStepCounter, false);
if (s == null) {
zk.create(bspZKRoot + "/" + jobId.toString().substring(17)
+ "-sc" + "/" + superStepCounter, new byte[0],
Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
}
} catch (Exception e) {
jip.reportLOG(jobId.toString() + " [run]" + e.getMessage());
}
while (jobEndFlag) {
try {
setStageFlag(Constants.SUPERSTEP_STAGE.FIRST_STAGE);
generalSuperStepBarrier(checkNumBase * 2);
setStageFlag(Constants.SUPERSTEP_STAGE.SECOND_STAGE);
SuperStepCommand ssc = getSuperStepCommand(checkNumBase);
switch (ssc.getCommandType()) {
case Constants.COMMAND_TYPE.START:
startNextSuperStep(ssc);
superStepCounter = ssc.getNextSuperStepNum();
jip.setSuperStepCounter(superStepCounter);
break;
case Constants.COMMAND_TYPE.START_AND_CHECKPOINT:
startNextSuperStep(ssc);
generalSuperStepBarrier(checkNumBase * 3);
jip.setAbleCheckPoint(superStepCounter);
LOG.info("ableCheckPoint: " + superStepCounter);
superStepCounter = ssc.getNextSuperStepNum();
jip.setSuperStepCounter(superStepCounter);
break;
case Constants.COMMAND_TYPE.START_AND_RECOVERY:
cleanReadHistory(ssc.getAbleCheckPoint());
startNextSuperStep(ssc);
setCheckNumBase();
superStepCounter = ssc.getAbleCheckPoint();
generalSuperStepBarrier(checkNumBase * 1);
superStepCounter = ssc.getNextSuperStepNum();
jip.setSuperStepCounter(superStepCounter);
break;
case Constants.COMMAND_TYPE.STOP:
stopNextSuperStep(ssc.toString());
jobEndFlag = quitBarrier();
break;
default:
jip.reportLOG(jobId.toString()
+ " Unkonwn command of "
+ ssc.getCommandType());
}
} catch (Exception e) {
jip.reportLOG(jobId.toString() + "error: " + e.toString());
}
}// while(jobEndFlag)
}// run
}
/**
* Generate the GeneralSSController to control the synchronization between
* SuperSteps
*
* @param jobId
*/
@SuppressWarnings("unused")
public GeneralSSController(BSPJobID jobId) {
this.jobId = jobId;
this.conf = new BSPConfiguration();
this.zookeeperAddr = conf.get(Constants.ZOOKEEPER_QUORUM)
+ ":"
+ conf.getInt(Constants.ZOOKEPER_CLIENT_PORT,
Constants.DEFAULT_ZOOKEPER_CLIENT_PORT);
this.bspZKRoot = Constants.BSPJOB_ZOOKEEPER_DIR_ROOT;
// adjust the location. This function must be located there.
// If it is located in run(), may be crashed on ZooKeeper cluster.
setup();
}
@Override
public boolean isCommandBarrier() {
try {
List<String> list = new ArrayList<String>();
list = zk.getChildren(bspZKRoot + "/"
+ jobId.toString().substring(17) + "-sc" + "/"
+ faultSuperStepCounter, false);
jip.reportLOG("[isCommandBarrier] path: " + bspZKRoot + "/"
+ jobId.toString().substring(17) + "-sc" + "/"
+ faultSuperStepCounter);
if (list.size() < checkNumBase + 1) {
jip.reportLOG("[isCommandBarrier] " + list.size() + " instead of " + (checkNumBase + 1));
jip.reportLOG("[isCommandBarrier] " + list.toString());
return false;
} else {
jip.reportLOG("[isCommandBarrier] " + list.size());
return true;
}
} catch (Exception e) {
jip.reportLOG("[isCommandBarrier] " + e.getMessage());
return false;
}
}
@Override
public void setJobInProgressControlInterface(
JobInProgressControlInterface jip) {
this.jip = jip;
this.superStepCounter = jip.getSuperStepCounter();
}
@Override
public void setCheckNumBase() {
this.checkNumBase = jip.getCheckNum();
}
public int getStageFlag() {
return stageFlag;
}
public void setStageFlag(int stageFlag) {
this.stageFlag = stageFlag;
}
/**
* Connect to ZooKeeper cluster and create the root directory for the job
*/
@Override
public void setup() {
try {
this.zk = new ZooKeeper(this.zookeeperAddr, 3000, this);
if (zk != null) {
Stat s = null;
// create the directory for scheduler
s = zk.exists(this.bspZKRoot + "/"
+ this.jobId.toString().substring(17) + "-s", false);
if (s == null) {
zk.create(this.bspZKRoot + "/"
+ this.jobId.toString().substring(17) + "-s",
new byte[0], Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT);
}
// create the directory for load data
s = zk.exists(this.bspZKRoot + "/"
+ this.jobId.toString().substring(17) + "-d", false);
if (s == null) {
zk.create(this.bspZKRoot + "/"
+ this.jobId.toString().substring(17) + "-d",
new byte[0], Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT);
}
// create the directory for SuperStep
s = zk.exists(this.bspZKRoot + "/"
+ this.jobId.toString().substring(17) + "-ss", false);
if (s == null) {
zk.create(this.bspZKRoot + "/"
+ this.jobId.toString().substring(17) + "-ss",
new byte[0], Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT);
}
// create the directory for SuperStep Command
s = zk.exists(this.bspZKRoot + "/"
+ this.jobId.toString().substring(17) + "-sc", false);
if (s == null) {
zk.create(this.bspZKRoot + "/"
+ this.jobId.toString().substring(17) + "-sc",
new byte[0], Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT);
}
}
} catch (Exception e) {
jip.reportLOG(jobId.toString() + " [setup]" + e.getMessage());
}
}
/**
* Connect to ZooKeeper cluster and delete the directory for the job
*/
@Override
public void cleanup() {
Stat statJob = null;
Stat statStaff = null;
Stat tmpStat = null;
List<String> list = new ArrayList<String>();
List<String> tmpList = new ArrayList<String>();
try {
// cleanup the directory of scheduler
try {
list.clear();
list = zk.getChildren(this.bspZKRoot + "/"
+ jobId.toString().substring(17) + "-s", false);
for (String e : list) {
statStaff = zk.exists(this.bspZKRoot + "/"
+ jobId.toString().substring(17) + "-s" + "/" + e,
false);
zk.delete(this.bspZKRoot + "/"
+ jobId.toString().substring(17) + "-s" + "/" + e,
statStaff.getVersion());
}
} catch (Exception e) {
// Undo
} finally {
statJob = zk.exists(this.bspZKRoot + "/"
+ jobId.toString().substring(17) + "-s", false);
zk.delete(this.bspZKRoot + "/" + jobId.toString().substring(17)
+ "-s", statJob.getVersion());
}
jip.reportLOG(jobId.toString() + "delete the -s");
// cleanup the directory of load data
try {
list.clear();
list = zk.getChildren(this.bspZKRoot + "/"
+ jobId.toString().substring(17) + "-d", false);
for (String e : list) {
statStaff = zk.exists(this.bspZKRoot + "/"
+ jobId.toString().substring(17) + "-d" + "/" + e,
false);
zk.delete(this.bspZKRoot + "/"
+ jobId.toString().substring(17) + "-d" + "/" + e,
statStaff.getVersion());
}
} catch (Exception e) {
// Undo
} finally {
statJob = zk.exists(this.bspZKRoot + "/"
+ jobId.toString().substring(17) + "-d", false);
zk.delete(this.bspZKRoot + "/" + jobId.toString().substring(17)
+ "-d", statJob.getVersion());
}
jip.reportLOG(jobId.toString() + "delete the -d");
// cleanup the directory of SuperStep control
list.clear();
list = zk.getChildren(this.bspZKRoot + "/"
+ jobId.toString().substring(17) + "-ss", false);
for (String e : list) {
try {
tmpList.clear();
tmpList = zk.getChildren(this.bspZKRoot + "/"
+ jobId.toString().substring(17) + "-ss" + "/" + e,
false);
for (String ee : tmpList) {
tmpStat = zk.exists(this.bspZKRoot + "/"
+ jobId.toString().substring(17) + "-ss" + "/"
+ e + "/" + ee, false);
zk.delete(this.bspZKRoot + "/"
+ jobId.toString().substring(17) + "-ss" + "/"
+ e + "/" + ee, tmpStat.getAversion());
}
} catch (Exception exc) {
// Undo
} finally {
statStaff = zk.exists(this.bspZKRoot + "/"
+ jobId.toString().substring(17) + "-ss" + "/" + e,
false);
zk.delete(this.bspZKRoot + "/"
+ jobId.toString().substring(17) + "-ss" + "/" + e,
statStaff.getVersion());
}
}
statJob = zk.exists(this.bspZKRoot + "/"
+ jobId.toString().substring(17) + "-ss", false);
zk.delete(this.bspZKRoot + "/" + jobId.toString().substring(17)
+ "-ss", statJob.getVersion());
jip.reportLOG(jobId.toString() + "delete the -ss");
// cleanup the directory of SuperStep command
list.clear();
list = zk.getChildren(this.bspZKRoot + "/"
+ jobId.toString().substring(17) + "-sc", false);
for (String e : list) {
try {
tmpList.clear();
tmpList = zk.getChildren(this.bspZKRoot + "/"
+ jobId.toString().substring(17) + "-sc" + "/" + e,
false);
for (String ee : tmpList) {
tmpStat = zk.exists(this.bspZKRoot + "/"
+ jobId.toString().substring(17) + "-sc" + "/"
+ e + "/" + ee, false);
zk.delete(this.bspZKRoot + "/"
+ jobId.toString().substring(17) + "-sc" + "/"
+ e + "/" + ee, tmpStat.getAversion());
}
} catch (Exception exc) {
// Undo
} finally {
statStaff = zk.exists(this.bspZKRoot + "/"
+ jobId.toString().substring(17) + "-sc" + "/" + e,
false);
zk.delete(this.bspZKRoot + "/"
+ jobId.toString().substring(17) + "-sc" + "/" + e,
statStaff.getVersion());
}
}
statJob = zk.exists(this.bspZKRoot + "/"
+ jobId.toString().substring(17) + "-sc", false);
zk.delete(this.bspZKRoot + "/" + jobId.toString().substring(17)
+ "-sc", statJob.getVersion());
jip.reportLOG(jobId.toString() + "delete the -sc");
} catch (KeeperException e) {
jip.reportLOG(jobId.toString() + "delet error: " + e.toString());
} catch (InterruptedException e) {;
jip.reportLOG(jobId.toString() + "delet error: " + e.toString());
}
}
@Override
public void start() {
this.zkRun.start();
}
@Override
@SuppressWarnings("deprecation")
public void stop() {
this.zkRun.stop();
}
@Override
public boolean generalSuperStepBarrier(int checkNum) {
List<String> list = new ArrayList<String>();
try {
// make sure that all staffs complete the computation and
// receiving-messages
jip.reportLOG(jobId.toString() + " enter the barrier of "
+ superStepCounter);
while (true) {
synchronized (mutex) {
list.clear();
list = zk.getChildren(bspZKRoot + "/"
+ jobId.toString().substring(17) + "-ss" + "/"
+ superStepCounter, true);
if (list.size() < checkNum) {
mutex.wait();
} else {
break;
}
}
}// while(true)
return true;
} catch (KeeperException e) {
jip.reportLOG(jobId.toString() + "error: " + e.toString());
return false;
} catch (InterruptedException e) {
jip.reportLOG(jobId.toString() + "error: " + e.toString());
return false;
}
}
@Override
public SuperStepCommand getSuperStepCommand(int checkNum) {
Stat s = null;
List<String> list = new ArrayList<String>();
try {
// make sure that all staffs have reported the info
while (true) {
synchronized (mutex) {
list.clear();
list = zk.getChildren(bspZKRoot + "/"
+ jobId.toString().substring(17) + "-sc" + "/"
+ superStepCounter, true);
if (list.size() < checkNum) {
jip.reportLOG("[getSuperStepCommand]: " + list.size() + " instead of " + checkNum);
mutex.wait();
} else {
jip.reportLOG("[getSuperStepCommand]: " + list.size());
break;
}
}
}// while(true)
// give the command to all staffs according to the report info
SuperStepReportContainer[] ssrcs = new SuperStepReportContainer[checkNumBase];
int counter = 0;
for (String e : list) {
s = zk.exists(bspZKRoot + "/" + jobId.toString().substring(17)
+ "-sc" + "/" + superStepCounter + "/" + e, false);
byte[] b = zk.getData(bspZKRoot + "/"
+ jobId.toString().substring(17) + "-sc" + "/"
+ superStepCounter + "/" + e, false, s);
ssrcs[counter++] = new SuperStepReportContainer(new String(b));
}
SuperStepCommand ssc = jip.generateCommand(ssrcs);
return ssc;
} catch (KeeperException e) {
e.printStackTrace();
jip.reportLOG(jobId.toString() + "error: " + e.toString());
return null;
} catch (InterruptedException e) {
e.printStackTrace();
jip.reportLOG(jobId.toString() + "error: " + e.toString());
return null;
}
}
@SuppressWarnings("finally")
@Override
public boolean quitBarrier() {
List<String> list = new ArrayList<String>();
try {
while (true) {
synchronized (mutex) {
list.clear();
list = zk.getChildren(bspZKRoot + "/"
+ jobId.toString().substring(17) + "-ss" + "/"
+ superStepCounter, true);
if (list.size() > 0) {
mutex.wait();
} else {
break;
}
}
}// while(true)
} catch (KeeperException e) {
e.printStackTrace();
jip.reportLOG(jobId.toString() + "error: " + e.toString());
} catch (InterruptedException e) {
e.printStackTrace();
jip.reportLOG(jobId.toString() + "error: " + e.toString());
} finally {
jip.completedJob();
return false;
}
}
/**
 * ZooKeeper watcher callback: any watched event (child created/deleted, etc.)
 * simply wakes up whoever is blocked in {@code mutex.wait()} so they can
 * re-check their barrier condition.
 */
@Override
public void process(WatchedEvent event) {
    synchronized (mutex) {
        mutex.notify();
    }
}
/**
 * Re-creates the barrier znodes on behalf of staffs that failed, so that the
 * barriers for the current superstep can still be released during recovery.
 * The action depends on which barrier stage the failure happened in.
 *
 * Fix: the default branch logged the typo "Unkonwn command of " with no
 * further detail; it now spells "Unknown" and includes the offending stage
 * flag to aid diagnosis.
 *
 * @param WMNames names of the worker managers participating in recovery
 */
@Override
public void recoveryBarrier(List<String> WMNames) {
    Log.info("recoveryBarrier: this.superStepCounter " + superStepCounter);
    faultSuperStepCounter = superStepCounter;
    int base = WMNames.size();
    switch (this.stageFlag) {
        case Constants.SUPERSTEP_STAGE.FIRST_STAGE :
            try{
                jip.reportLOG("recoveried: " + this.jobId.toString()
                        + " enter the firstStageSuperStepBarrier of " + Integer.toString(superStepCounter));
                // NOTE(review): all 2*base first-stage znodes are named after
                // WMNames.get(0) (see original inline comment) — confirm this is
                // intentional rather than a get(i) slip.
                for(int i=0; i<base*2; i++) {// WMNames.get(0)
                    zk.create(bspZKRoot + "/" + jobId.toString().substring(17) + "-ss"
                            + "/" + Integer.toString(superStepCounter) + "/"
                            + WMNames.get(0) + "-recovery" + i, new byte[0], Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);//slave2-recovery0
                    Log.info("first--recoveryBarrier: " + "recovery" + i);
                }
                jip.reportLOG("recoveried: " + this.jobId.toString()
                        + " enter the secondStageSuperStepBarrier(first) of " + Integer.toString(superStepCounter));
                // One placeholder report per worker manager for the second stage.
                for(int i=0; i<base; i++) {
                    zk.create(bspZKRoot + "/" + jobId.toString().substring(17) + "-sc"
                            + "/" + Integer.toString(superStepCounter) + "/" + WMNames.get(i) + "-recovery" + i,
                            "RECOVERY".getBytes(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
                    Log.info("second-(first)--recoveryBarrier: " + "recovery" + i);
                }
            }catch(KeeperException e){
                e.printStackTrace();
            }catch(InterruptedException e){
                e.printStackTrace();
            }
            break;
        case Constants.SUPERSTEP_STAGE.SECOND_STAGE ://int 4
            try{
                jip.reportLOG("recoveried " + this.jobId.toString()
                        + " enter the secondStageSuperStepBarrier(second) of superStepCounter: " + Integer.toString(superStepCounter));
                for(int i=0; i<base; i++) {
                    zk.create(bspZKRoot + "/" + jobId.toString().substring(17) + "-sc"
                            + "/" + Integer.toString(superStepCounter) + "/" + WMNames.get(i) + "-recovery" + i,
                            "RECOVERY".getBytes(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
                    Log.info("second--recoveryBarrier: " + "recovery" + i);
                }
            }catch(KeeperException e){
                e.printStackTrace();
            }catch(InterruptedException e){
                e.printStackTrace();
            }
            break;
        default :
            jip.reportLOG(jobId.toString() + " Unknown superstep stage: " + this.stageFlag);
    }//switch
}
}
|
Xzg-github/web_service | src/api/signature/account_management/enterprise_account_management/config.js | import {pageSize, pageSizeType, description, searchConfig} from '../../../gloablConfig';
// Dropdown-search endpoint used by the "authorized person" picker (tab three).
const person = '/api/signature/account_management/enterprise_account_management/dropPerson';

// --- Tab one (账号设置 / account settings) ---
// Form controls for the corporate bank-account section.
const oneControls = [
  {key:'dgyhzh',title:'对公银行账号',type:'text',required:true, btn:{title:"获取验证码",key:'code'}},
  {key:'dgyhmc',title:'对公银行名称',type:'text',required:true},
  {key:'dgyykhzh',title:'对公开户支行',type:'text',required:true,props:{
    placeholder:'请填写完整的支行名称'
  }},
];
// Read-only / editable account labels shown at the top of tab one.
const LABELS = [
  {key: 'account', title: '企业账号'},
  {key: 'companyOrder', title: '企业编号'},
  {key: 'companyName', title: '企业名称'},
  {key: 'notifyEmail', title: '电子邮件',type:'edit'},
  {key: 'managerPersonName', title: '管理人姓名'},
  {key: 'notifyPhone', title: '手机号码',type:'edit'},
];

// --- Tab two (签章管理 / seal management) ---
const two_buttons = [
  {key:'add',title:'新增',bsStyle:'primary'},
  {key:'edit',title:'编辑'},
  {key:'del',title:'删除'},
];
const two_controls = [
  {key:'signSealName',title:'签章名称',type:'text',required:true},
];

// --- Tab three (授权管理 / authorization management) ---
const three_buttons = [
  {key:'add',title:'新增',bsStyle:'primary'},
  {key:'edit',title:'编辑'},
  {key:'del',title:'删除'},
  {key:'enable',title:'启用'},
  {key:'disable',title:'禁用'},
];
// 0 = disabled, 1 = enabled.
const authOptions = [
  {value:0,title:'禁用'},
  {value:1,title:'启用'},
];
const three_tableCols = [
  {key:'authStatus',title:'状态',options:authOptions},
  {key:'userAccountId',title:'真实姓名'},
  {key:'account',title:'账号'},
  {key:'signSealId',title:'授权签章名称'},
  {key:'updateUser',title:'操作人'},
  {key:'updateTime',title:'操作时间'},
  {key:'insertUser',title:'创建人'},
  {key:'insertTime',title:'创建时间'},
];
const three_controls = [
  {key:'userAccountId',title:'请选择被授权人',type:'search',searchUrl:person,required:true},
  {key:'account',title:'授权人账号',type:'readonly'},
  {key:'signSealId',title:'授权签章',type:'select',required:true},
];

// --- Tab four (订单管理 / order management) ---
// Order payment status: 0 = unpaid, 1 = paid.
const statusOptions = [
  {value:0,title:'未支付'},
  {value:1,title:'已支付'}
];
// Invoice status shares the numeric codes but uses different labels.
const invoiceOptions = [
  {value:0,title:'未支付'},
  {value:1,title:'已支付成功'},
];
// Payment channels: 1 = WeChat, 2 = Alipay, 3 = online banking.
const payOptions = [
  {value:1,title:'微信'},
  {value:2,title:'支付宝'},
  {value:3,title:'网银'}
];
const four_tableCols = [
  {key:'orderStatus',title:'订单状态',options:statusOptions},
  {key:'nativeOrderNo',title:'订单编号'},
  {key:'orderMoney',title:'订购金额'},
  {key:'insertTime',title:'订购时间'},
  {key:'payWay',title:'支付方式',options:payOptions},
  {key:'outerOrderNo',title:'付款流水号'},
  {key:'payTime',title:'付款时间'},
  {key:'payDescription',title:'付款备注'},
  {key:'invoiceStatus',title:'发票状态',options:invoiceOptions},
  {key:'usedMoney',title:'已用金额'},
  {key:'leftMoney',title:'剩余金额'},
];
const four_controls = [
  {key:'orderMoney',title:'订购金额',type:'number',required:true},
];
const four_buttons = [
  {key:'order',title:'订购',bsStyle:'primary'},
  // {key:'pay',title:'支付订单'},
  {key:'look',title:'查看消费记录'},
];
const four_filters = [
  {key:'nativeOrderNo',title:'订单编号',type:'text'},
  {key:'orderStatus',title:'订单状态',type:'select',options:statusOptions},
  {key:'orderTimeFrom',title:'订购时间',type:'date',props:{showTime:true}},
  {key:'orderTimeTo',title:'至',type:'date',props:{showTime:true}},
];
// Billing unit for package items: per use ('second' = 次) or per record ('strip' = 条).
const unitOptions = [
  {value:'second',title:'次'},
  {value:'strip',title:'条'},
];
const effectOptions = [
  {value:1,title:'长期有效'}
];
const four_cols = [
  {key:'ruleName',title:'套餐名称'},
  {key:'businessItemId',title:'业务项目'},
  {key:'price',title:'价格(元)'},
  {key:'unitType',title:'单位',options:unitOptions},
  {key:'effectiveType',title:'有效期',options:effectOptions},
];
const four_pay_cols = [
  {key:'unitPrice',title:'文件签署单价(元/份)'},
  {key:'number',title:'数量(份)'},
  {key:'orderMoney',title:'订购金额(元)'},
];
// Payment-method radio options shown in the pay dialog.
const payOption = [
  {value:'zhifubao',title:'支付宝'},
  {value:'weixin',title:'微信'},
  {value:'dgzz',title:'对公转账'},
];
const four_pay_controls = [
  {key:'b',title:'请选择支付方式',type:'radioGroup',options:payOption,required:true},
];
// Manual payment-entry dialog ("录入付款").
const four_input_controls = [
  {key:'a',title:'付款流水号',type:'text',required:true},
  {key:'b',title:'付款时间',type:'date'},
  {key:'c',title:'支付备注',type:'textArea'},
];
// Consumption-record viewer ("查看消费记录"): header fields plus two tables.
const four_look_controls = [
  {key:'nativeOrderNo',title:'订单编号',type:"readonly"},
  {key:'orderTime',title:'订购日期',type:"readonly"},
  {key:'orderMoney',title:'订购金额',type:"readonly"},
  {key:'consumerTotalAmount',title:'消费总额',type:"readonly"},
];
const four_look_tableCols1 = [
  {key:'itemName',title:'业务项目'},
  {key:'totalNum',title:'数量合计'},
  {key:'totalAmount',title:'金额合计'},
];
const four_look_tableCols2 = [
  {key:'itemName',title:'业务项目'},
  {key:'consumerTime',title:'时间'},
  {key:'predictPrice',title:'价格'},
  {key:'unitType',title:'单位',options:unitOptions},
  {key:'price',title:'实际扣费'},
  {key:'fileNo',title:'关联系统编号'},
  {key:'executor',title:'发起人'},
];
// Invoice application ("开票申请"): type selector, two alternative field sets
// (VAT special invoice vs. ordinary invoice), and the shipping address block.
const optionApp = [
  {title:'增值税专用发票',value:'1'},
  {title:'普通发票',value:'2'},
];
const four_application_controls_title = [
  {key:'a',title:'请选择开票类型',type:'radioGroup',options:optionApp,required:true},
];
const four_application_controls1 = [
  {key:'b',title:'发票抬头',type:'text',required:true},
  {key:'c',title:'纳税人识别号',type:'text',required:true},
  {key:'d',title:'对公账号开户银行',type:'text',required:true},
  {key:'e',title:'对公银行账号',type:'text',required:true},
  {key:'f',title:'公司电话',type:'text',required:true},
  {key:'g',title:'公司地址',type:'text',required:true},
];
const four_application_controls2 = [
  {key:'b',title:'发票抬头',type:'text',required:true},
  {key:'c',title:'纳税人识别号',type:'text',required:true},
];
const four_application_controls3 = [
  {key:'j',title:'收件人',type:'text',required:true},
  {key:'h',title:'收件人手机号',type:'text',required:true},
  {key:'o',title:'省',type:'select',required:true},
  {key:'x',title:'市',type:'select',required:true},
  {key:'k',title:'区/县',type:'select',required:true},
  {key:'y',title:'门牌号',type:'textArea',required:true},
];

// Aggregated page configuration, keyed by tab (one..four) plus the tab list.
const config = {
  one:{
    controls:oneControls,
    LABELS,
    checkItems:[
      {key: 'isNotifiedByEmail', title: '邮件通知'},
      {key: 'isNotifiedByPhone', title: '短信通知'},
    ],
    // Dialog: change password (with verification code).
    diaLogOne:{
      title:'修改密码',
      controls:[
        {key: 'oldPassword', title: '旧密码',type:'text',required:true},
        {key: 'newPassword', title: '新密码',type:'text',required:true},
        {key: 'codeType', title: '验证码通知方式',type:'select',options:[{value:'0',title:'邮件'},{value:'1',title:'短信'}],required:true},
        {key: 'code', title: '验证码',type:'text',required:true, btn:{title:"获取验证码",key:'obtain'}},
      ]
    },
    // Dialog: change phone number (old + new phone verification).
    diaLogTwo:{
      title:'修改手机号码',
      controls:[
        {key: 'a', title: '请输入旧手机验证码',type:'text',required:true, btn:{title:"获取验证码",key:'obtain'}},
        {key: 'b', title: '请输入新手机号码',type:'text',required:true},
        {key: 'c', title: '请输入新手机号码收到的验证码',type:'text',required:true, btn:{title:"获取验证码",key:'obtain'}},
      ]
    },
    // Dialog: alternate change-password variant (placeholder keys a-d).
    diaLogThree:{
      title:'修改密码',
      controls:[
        {key: 'a', title: '旧密码',type:'text',required:true},
        {key: 'b', title: '新密码',type:'text',required:true},
        {key: 'c', title: '验证码通知方式',type:'select',options:[{value:'1',title:'邮件'},{value:'2',title:'短信'}],required:true},
        {key: 'd', title: '验证码',type:'text',required:true, btn:{title:"获取验证码",key:'obtain'}},
      ]
    }
  },
  two:{
    buttons:two_buttons,
    edit:{
      controls:two_controls
    }
  },
  three:{
    tableCols:three_tableCols,
    controls:three_controls,
    buttons:three_buttons,
    pageSize,
    pageSizeType,
    description,
    searchConfig,
    filters:[],
    searchData:{},
    tableItems:[]
  },
  four:{
    tableCols:four_tableCols,
    order: {
      controls:four_controls,
      cols:four_cols
    },
    pay: {
      controls:four_pay_controls,
      cols:four_pay_cols
    },
    input:{
      controls:four_input_controls,
      title:'录入付款',
      width:350
    },
    look:{
      controls:four_look_controls,
      cols1:four_look_tableCols1,
      cols2:four_look_tableCols2,
    },
    application:{
      controls:{
        one:four_application_controls_title,
        two_1:four_application_controls1,
        two_2:four_application_controls2,
        three:four_application_controls3,
      }
    },
    buttons:four_buttons,
    filters:four_filters,
    pageSize,
    pageSizeType,
    description,
    searchConfig,
    searchData:{},
    tableItems:[]
  },
  tabs:[
    {key: 'one', title:'账号设置', close: false},
    {key: 'two', title:'签章管理', close: false},
    {key: 'three',title:'授权管理', close: false},
    {key: 'four', title:'订单管理', close: false},
  ]
};
export default config;
|
andela/ah-backend-prime | authors/apps/social_auth/twitter_auth.py | <reponame>andela/ah-backend-prime
import twitter
from authors.settings import TWITTER_CONSUMER_API_KEY, TWITTER_CONSUMER_API_SECRET
class TwitterAuthHandler:
    """Decodes and verifies Twitter OAuth access tokens."""

    @staticmethod
    def validate_twitter_auth_tokens(tokens):
        """Verify a space-separated "<key> <secret>" access-token pair.

        The pair is combined with the app's consumer key/secret and checked
        against the Twitter API. On success the authenticated user's profile
        is returned as a dict; on failure an error string is returned.
        """
        parts = tokens.split(" ")
        if len(parts) != 2:
            return "Invalid. Please provide two tokens!"
        token_key, token_secret = parts
        try:
            api = twitter.Api(
                consumer_key=TWITTER_CONSUMER_API_KEY,
                consumer_secret=TWITTER_CONSUMER_API_SECRET,
                access_token_key=token_key,
                access_token_secret=token_secret
            )
            # VerifyCredentials raises TwitterError for bad tokens.
            return api.VerifyCredentials(include_email=True).__dict__
        except twitter.error.TwitterError:
            return 'Please provide valid access tokens'
|
Ptival/semtk | sparqlGraphLibrary/src/main/java/com/ge/research/semtk/edcquerygen/client/EdcQueryGenClient.java | <gh_stars>10-100
/**
** Copyright 2016-2020 General Electric Company
**
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
** http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/
package com.ge.research.semtk.edcquerygen.client;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import com.ge.research.semtk.querygen.client.QueryGenClient;
import com.ge.research.semtk.resultSet.Table;
import com.ge.research.semtk.resultSet.TableResultSet;
import com.ge.research.semtk.services.client.RestClientConfig;
import com.ge.research.semtk.sparqlX.dispatch.QueryFlags;
/**
* Client for the EdcQueryGenerationService
*/
/**
 * Client for the EdcQueryGenerationService
 */
public class EdcQueryGenClient extends QueryGenClient {

    /**
     * Builds a client that talks to the service described by the given config.
     */
    public EdcQueryGenClient(RestClientConfig conf){
        this.conf = conf;
    }

    /**
     * Create a params JSON object. This client populates its parameters per
     * call in {@link #execute}, so there is nothing to pre-build here.
     */
    @Override
    public void buildParametersJSON() throws Exception {
        // intentionally empty
    }

    /**
     * Generate queries by posting the location/value table (plus optional
     * runtime constraints and query flags) to the service.
     *
     * @param locationAndValueInfoTable table describing query locations/values
     * @param runtimeConstraints optional constraints JSON (may be null)
     * @param flags optional query flags (may be null or empty)
     * @return the service reply wrapped in a TableResultSet
     */
    @SuppressWarnings("unchecked")
    public TableResultSet execute(Table locationAndValueInfoTable, JSONObject runtimeConstraints, QueryFlags flags) throws Exception {
        // Mandatory parameter: the table, serialized as JSON text.
        parametersJSON.put("locationAndValueInfoTableJsonStr", locationAndValueInfoTable.toJson().toString());

        // Optional parameters are only sent when present.
        if(runtimeConstraints != null){
            parametersJSON.put("constraintsJsonStr", runtimeConstraints.toJSONString());
        }
        if(flags != null && !flags.isEmpty()){
            parametersJSON.put("flagsJsonArrayStr", flags.toJSONString());
        }

        // Call the service and wrap the JSON reply.
        JSONObject responseJson = (JSONObject) super.execute();
        TableResultSet tableResultSet = new TableResultSet();
        tableResultSet.readJson(responseJson);
        return tableResultSet;
    }
}
|
ljcservice/autumn | src/main/java/com/hitzd/his/RowMapperBeans/LabTestItemsMapper.java | <reponame>ljcservice/autumn
package com.hitzd.his.RowMapperBeans;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.springframework.jdbc.core.RowMapper;
import com.hitzd.his.Beans.TLabTestItems;
/**
* 检验项目
* @author Administrator
*
*/
/**
 * Row mapper that converts one result-set row into a {@link TLabTestItems}
 * bean (检验项目 — a single lab test item).
 */
public class LabTestItemsMapper implements RowMapper
{
    @Override
    public Object mapRow(ResultSet rs, int rowNum) throws SQLException
    {
        // Copy the four item columns of the current row into a fresh bean.
        TLabTestItems item = new TLabTestItems();
        item.setTEST_NO(rs.getString("TEST_NO"));
        item.setITEM_NO(rs.getString("ITEM_NO"));
        item.setITEM_NAME(rs.getString("ITEM_NAME"));
        item.setITEM_CODE(rs.getString("ITEM_CODE"));
        return item;
    }
}
|
masud-technope/ACER-Replication-Package-ASE2017 | corpus/class/ecf/1030.java | /****************************************************************************
* Copyright (c) 2004 Composent, Inc. and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Composent, Inc. - initial API and implementation
*****************************************************************************/
package org.eclipse.ecf.ui.dialogs;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.core.runtime.*;
import org.eclipse.ecf.internal.ui.Activator;
import org.eclipse.ecf.internal.ui.wizards.IWizardRegistryConstants;
import org.eclipse.ecf.ui.ContainerConfigurationResult;
import org.eclipse.ecf.ui.IConnectWizard;
import org.eclipse.jface.wizard.WizardDialog;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.ui.IWorkbench;
/**
* Dialog to show container connect wizard.
*/
/**
 * Dialog to show container connect wizard.
 */
public class ContainerConnectWizardDialog extends WizardDialog {

    public ContainerConnectWizardDialog(Shell parentShell, IWorkbench workbench, ContainerConfigurationResult containerHolder) throws CoreException {
        super(parentShell, getWizard(workbench, containerHolder));
    }

    /**
     * Creates and initializes the first connect wizard registered for the
     * given container type. Callers should ensure a wizard exists (see
     * {@link #hasConnectWizard(ContainerConfigurationResult)}) — with no
     * registered wizard the [0] access below throws
     * ArrayIndexOutOfBoundsException.
     */
    protected static IConnectWizard getWizard(IWorkbench workbench, ContainerConfigurationResult containerHolder) throws CoreException {
        IConnectWizard connectWizard = null;
        IConfigurationElement ce = findConnectWizardConfigurationElements(containerHolder)[0];
        connectWizard = (IConnectWizard) ce.createExecutableExtension(IWizardRegistryConstants.ATT_CLASS);
        connectWizard.init(workbench, containerHolder.getContainer());
        return connectWizard;
    }

    /**
     * Returns the configuration elements of all connect wizards registered
     * for the container type of the given holder. Never returns null.
     */
    protected static IConfigurationElement[] findConnectWizardConfigurationElements(ContainerConfigurationResult containerHolder) {
        List result = new ArrayList();
        IExtensionRegistry reg = Activator.getDefault().getExtensionRegistry();
        if (reg != null) {
            IExtensionPoint extensionPoint = reg.getExtensionPoint(IWizardRegistryConstants.CONNECT_EPOINT_ID);
            if (extensionPoint == null) {
                // Fix: previously returned null here, which made
                // hasConnectWizard() throw NullPointerException on .length and
                // getWizard() NPE on [0]. An empty array keeps the
                // "no wizard available" contract consistent with the other paths.
                return new IConfigurationElement[0];
            }
            // Keep only elements whose container type name matches the holder's.
            IConfigurationElement[] ce = extensionPoint.getConfigurationElements();
            for (int i = 0; i < ce.length; i++) {
                String value = ce[i].getAttribute(IWizardRegistryConstants.ATT_CONTAINER_TYPE_NAME);
                if (value != null && value.equals(containerHolder.getContainerTypeDescription().getName()))
                    result.add(ce[i]);
            }
            return (IConfigurationElement[]) result.toArray(new IConfigurationElement[] {});
        }
        return new IConfigurationElement[0];
    }

    /**
     * @return true if at least one connect wizard is registered for the
     *         holder's container type
     */
    public boolean hasConnectWizard(ContainerConfigurationResult containerHolder) {
        return (findConnectWizardConfigurationElements(containerHolder).length > 0);
    }
}
|
jorgefilipecosta/g2 | packages/components/src/Checkbox/Checkbox.styles.js | <gh_stars>100-1000
import { css, styled, ui } from '@wp-g2/styles';
// Centers the checkbox within a control-height inline-flex box.
export const CheckboxWrapper = css`
	${ui.alignment.content.center};
	display: inline-flex;
	height: ${ui.get('controlHeight')};
	position: relative;
	vertical-align: middle;
`;

// Styled-component wrapper variant of CheckboxWrapper.
export const CheckboxWrapperView = styled.div`
	${CheckboxWrapper};
`;

// Styles for the <input type="checkbox"> itself: appearance reset, sizing,
// and focus/checked/disabled states.
export const Checkbox = css`
	${ui.border.control.subtle};
	${ui.borderRadius.round};
	appearance: none;
	box-shadow: ${ui.get('checkboxBoxShadow')};
	cursor: pointer;
	display: block;
	height: ${ui.get('checkboxSize')};
	line-height: 0;
	margin: 0;
	min-height: ${ui.get('checkboxSize')};
	min-width: ${ui.get('checkboxSize')};
	outline: none;
	padding: 0;
	transition: background ${ui.get('transitionDurationFastest')} linear;
	width: ${ui.get('checkboxSize')};

	&::before,
	&::after {
		display: none;
	}

	&:focus {
		${ui.border.control.focus};
	}

	&:checked {
		${ui.background.admin};
		${ui.border.control.focus};
	}

	&:disabled {
		${ui.opacity.muted};
	}
`;

// Check-mark overlay: absolutely positioned over the input, hidden until the
// sibling input is :checked (see the "input:checked + &" selector).
export const CheckboxIcon = css`
	${ui.font.color.white};
	${ui.alignment.content.center};
	bottom: 0;
	left: 0;
	opacity: 0;
	pointer-events: none;
	position: absolute;
	right: 0;
	top: 0;
	transition: opacity ${ui.get('transitionDurationFastest')} linear;

	input:checked + & {
		opacity: 1;
	}
`;

// Styled-component wrapper variant of CheckboxIcon.
export const CheckboxIconView = styled.div`
	${CheckboxIcon};
`;
|
zsoltmester/bs4-finals | framework/src/main/java/hu/bsmart/framework/gcm/pushmessage/TaskRevokedPushMessage.java | <reponame>zsoltmester/bs4-finals
package hu.bsmart.framework.gcm.pushmessage;
import android.content.Context;
import android.support.annotation.NonNull;
import hu.bsmart.framework.R;
import hu.bsmart.framework.communication.annotations.Required;
import static hu.bsmart.framework.communication.data.Constants.UNSET_INTEGER;
/**
 * Push message indicating that one or more tasks have been revoked.
 *
 * Required fields (see {@link #validate()}): comment, gameSessionId and a
 * non-empty taskInstanceIds array; playerIds is optional.
 */
public class TaskRevokedPushMessage extends PushMessage {

	// Free-text reason for the revocation, shown in the event details.
	@Required
	private String comment;

	// Id of the game session the revoked tasks belong to.
	@Required
	private int gameSessionId = UNSET_INTEGER;

	// Optional: ids of the players affected by the revocation.
	private int[] playerIds = null;

	// Ids of the revoked task instances; must contain at least one entry.
	@Required
	private int[] taskInstanceIds = null;

	public String getComment() {
		return comment;
	}

	public void setComment(String comment) {
		this.comment = comment;
	}

	public int getGameSessionId() {
		return gameSessionId;
	}

	public void setGameSessionId(int gameSessionId) {
		this.gameSessionId = gameSessionId;
	}

	public int[] getPlayerIds() {
		return playerIds;
	}

	public void setPlayerIds(int[] playerIds) {
		this.playerIds = playerIds;
	}

	public int[] getTaskInstanceIds() {
		return taskInstanceIds;
	}

	public void setTaskInstanceIds(int[] taskInstanceIds) {
		this.taskInstanceIds = taskInstanceIds;
	}

	@Override
	public PushMessageType getPushType() {
		return PushMessageType.TASK_REVOKED;
	}

	@Override
	@NonNull
	public String getEventSummary(Context context) {
		return context.getString(R.string.task_revoked_push_message_summary);
	}

	@NonNull
	@Override
	public String getEventDetails(Context context) {
		// Pick singular vs. plural wording based on how many tasks were revoked.
		int resource = taskInstanceIds.length == 1 ? R.string.task_revoked_push_message_single_details :
				R.string.task_revoked_push_message_multi_details;
		return context.getString(resource, comment);
	}

	/**
	 * Checks that every required field is set and that at least one task
	 * instance id is present.
	 *
	 * @throws ValidationException if a required field is missing or empty
	 */
	@Override
	public void validate() throws ValidationException {
		if (comment == null) {
			throw ValidationException.missingField(this, "comment");
		}
		if (gameSessionId == UNSET_INTEGER) {
			throw ValidationException.missingField(this, "gameSessionId");
		}
		if (taskInstanceIds == null) {
			throw ValidationException.missingField(this, "taskInstanceIds");
		}
		if (taskInstanceIds.length == 0) {
			throw new ValidationException("No revoked taskInstanceId in the revoke push message!");
		}
	}
}
|
dhtech/fboss | fboss/agent/Main.cpp | /*
* Copyright (c) 2004-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*/
#include "fboss/agent/Main.h"
#include <folly/MacAddress.h>
#include <folly/ScopeGuard.h>
#include <folly/SocketAddress.h>
#include <folly/String.h>
#include <folly/experimental/FunctionScheduler.h>
#include <folly/io/async/AsyncSignalHandler.h>
#include <folly/io/async/AsyncTimeout.h>
#include <folly/io/async/EventBase.h>
#include <folly/logging/Init.h>
#include <folly/logging/xlog.h>
#include <thrift/lib/cpp2/server/ThriftServer.h>
#include "common/stats/ServiceData.h"
#include "fboss/agent/ApplyThriftConfig.h"
#include "fboss/agent/HwSwitch.h"
#include "fboss/agent/Platform.h"
#include "fboss/agent/SwSwitch.h"
#include "fboss/agent/SwitchStats.h"
#include "fboss/agent/ThriftHandler.h"
#include "fboss/agent/TunManager.h"
#include <chrono>
#include <condition_variable>
#include <csignal>
#include <cstdio>
#include <future>
#include <functional>
#include <string>
#include <mutex>
#include <gflags/gflags.h>
using folly::FunctionScheduler;
using apache::thrift::ThriftServer;
using folly::AsyncSignalHandler;
using folly::AsyncTimeout;
using folly::EventBase;
using folly::SocketAddress;
using std::shared_ptr;
using std::unique_ptr;
using std::mutex;
using std::chrono::seconds;
using std::condition_variable;
using std::string;
using namespace std::chrono;
// ---- Command-line flags for the agent process ----
DEFINE_int32(port, 5909, "The thrift server port");
DEFINE_int32(stat_publish_interval_ms, 1000,
             "How frequently to publish thread-local stats back to the "
             "global store. This should generally be less than 1 second.");
DEFINE_int32(thrift_idle_timeout, 60, "Thrift idle timeout in seconds.");
// Programming 16K routes can take 20+ seconds
DEFINE_int32(thrift_task_expire_timeout, 30,
             "Thrift task expire timeout in seconds.");
DEFINE_bool(tun_intf, true,
            "Create tun interfaces to allow other processes to "
            "send and receive traffic via the switch ports");
DEFINE_bool(enable_lacp, false, "Run LACP in agent");
DEFINE_bool(enable_lldp, true,
            "Run LLDP protocol in agent");
DEFINE_bool(publish_boot_type, true,
            "Publish boot type on startup");
DEFINE_bool(enable_nhops_prober, true,
            "Enables prober for unresolved next hops");
DEFINE_int32(flush_warmboot_cache_secs, 60,
             "Seconds to wait before flushing warm boot cache");
using facebook::fboss::SwSwitch;
using facebook::fboss::ThriftHandler;
namespace {
/*
 * This function is executed periodically by the UpdateStats thread
 * (scheduled by Initializer via FunctionScheduler).
 * It calls the hardware-specific function of the same name.
 */
void updateStats(SwSwitch *swSwitch) {
  swSwitch->updateStats();
}
} // anonymous namespace
FOLLY_INIT_LOGGING_CONFIG("fboss=DBG2; default:async=true");
namespace facebook { namespace fboss {
/*
 * Performs switch initialization on a detached background thread and owns the
 * FunctionScheduler that periodically publishes stats afterwards.
 *
 * Fix: fs_ was previously left uninitialized; stopFunctionScheduler() reads it
 * ("if (fs_)") and can run before initImpl() has assigned it, which is a read
 * of an indeterminate pointer (undefined behavior). It is now value-initialized
 * to nullptr.
 */
class Initializer {
 public:
  Initializer(SwSwitch* sw, Platform* platform)
    : sw_(sw),
      platform_(platform) {}

  // Kick off initImpl() on a detached background thread.
  void start() {
    std::thread t(&Initializer::initThread, this);
    t.detach();
  }

  // Blocks until the switch is fully initialized, then stops the stats
  // scheduler (if it was started).
  void stopFunctionScheduler() {
    std::unique_lock<mutex> lk(initLock_);
    initCondition_.wait(lk, [&] { return sw_->isFullyInitialized();});
    if (fs_) {
      fs_->shutdown();
    }
  }

 private:
  // Thread entry point: any initialization failure is fatal for the agent.
  void initThread() {
    try {
      initImpl();
    } catch (const std::exception& ex) {
      XLOG(FATAL) << "switch initialization failed: "
        << folly::exceptionStr(ex);
    }
  }

  // Translate the command-line flags into SwSwitch feature flags.
  SwitchFlags setupFlags() {
    SwitchFlags flags = SwitchFlags::DEFAULT;
    if (FLAGS_enable_lacp) {
      flags |= SwitchFlags::ENABLE_LACP;
    }
    if (FLAGS_tun_intf) {
      flags |= SwitchFlags::ENABLE_TUN;
    }
    if (FLAGS_enable_lldp) {
      flags |= SwitchFlags::ENABLE_LLDP;
    }
    if (FLAGS_publish_boot_type) {
      flags |= SwitchFlags::PUBLISH_STATS;
    }
    if (FLAGS_enable_nhops_prober) {
      flags |= SwitchFlags::ENABLE_NHOPS_PROBER;
    }
    return flags;
  }

  void initImpl() {
    auto startTime = steady_clock::now();
    std::lock_guard<mutex> g(initLock_);
    // Determining the local MAC address can also take a few seconds the first
    // time it is called, so perform this operation asynchronously, in parallel
    // with the switch initialization.
    auto ret = std::async(std::launch::async,
        &Platform::getLocalMac, platform_);

    // Initialize the switch. This operation can take close to a minute
    // on some of our current platforms.
    sw_->init(nullptr, setupFlags());

    // Wait for the local MAC address to be available.
    ret.wait();
    auto localMac = ret.get();
    XLOG(INFO) << "local MAC is " << localMac;

    sw_->applyConfig("apply initial config");
    // Enable route update logging for all routes so that when we are told
    // the first set of routes after a warm boot, we can log any changes
    // from what was programmed before the warm boot.
    // e.g. any routes that were removed while the agent was restarting
    if (sw_->getBootType() == BootType::WARM_BOOT) {
      sw_->logRouteUpdates("::", 0, "fboss-agent-warmboot");
      sw_->logRouteUpdates("0.0.0.0", 0, "fboss-agent-warmboot");
    }
    sw_->initialConfigApplied(startTime);

    // Start the UpdateSwitchStatsThread. The scheduler is intentionally kept
    // alive for the lifetime of the process (shut down, not deleted, in
    // stopFunctionScheduler()).
    fs_ = new FunctionScheduler();
    fs_->setThreadName("UpdateStatsThread");
    std::function<void()> callback(std::bind(updateStats, sw_));
    auto timeInterval = std::chrono::seconds(1);
    const string& nameID = "updateStats";
    fs_->addFunction(callback, timeInterval, nameID);
    // Schedule function to signal to SwSwitch that all
    // initial programming is now complete. We typically
    // do that at the end of syncFib call from BGP but
    // in case that does not arrive for 30 secs after
    // applying config use the below function.
    const string flushWarmboot = "flushWarmBoot";
    auto flushWarmbootFunc = [=]() {
      sw_->clearWarmBootCache();
      fs_->cancelFunction(flushWarmboot);
    };
    // Call flushWarmBootFunc 30 seconds after applying config
    fs_->addFunction(flushWarmbootFunc, seconds(1), flushWarmboot,
        seconds(FLAGS_flush_warmboot_cache_secs)/*initial delay*/);
    fs_->start();
    XLOG(INFO) << "Started background thread: UpdateStatsThread";
    initCondition_.notify_all();
  }

  SwSwitch *sw_;
  Platform *platform_;
  // Fix: value-initialize so stopFunctionScheduler() never reads an
  // indeterminate pointer if it runs before initImpl() assigns it.
  FunctionScheduler *fs_{nullptr};
  mutex initLock_;
  condition_variable initCondition_;
};
/*
 * Self-rescheduling EventBase timeout that calls SwSwitch::publishStats()
 * once every 'interval' (driven by --stat_publish_interval_ms in fbossMain).
 */
class StatsPublisher : public AsyncTimeout {
 public:
  StatsPublisher(EventBase* eventBase, SwSwitch* sw,
                 std::chrono::milliseconds interval)
    : AsyncTimeout(eventBase),
      sw_(sw),
      interval_(interval) {}

  // Arm the first timeout; subsequent ones are re-armed in timeoutExpired().
  void start() {
    scheduleTimeout(interval_);
  }

  void timeoutExpired() noexcept override {
    sw_->publishStats();
    // Re-arm so stats keep publishing until cancelTimeout() is called.
    scheduleTimeout(interval_);
  }

 private:
  SwSwitch* sw_{nullptr};
  std::chrono::milliseconds interval_;
};
/*
 * Handles SIGINT/SIGTERM from the EventBase thread: stops auxiliary services
 * via the supplied callback, performs a graceful (warm-boot-friendly) switch
 * exit, logs the timing of each phase, and terminates the process.
 */
class SignalHandler : public AsyncSignalHandler {
  typedef std::function<void()> StopServices;

 public:
  SignalHandler(EventBase* eventBase, SwSwitch* sw,
      StopServices stopServices) :
    AsyncSignalHandler(eventBase), sw_(sw), stopServices_(stopServices) {
    registerSignalHandler(SIGINT);
    registerSignalHandler(SIGTERM);
  }

  void signalReceived(int /*signum*/) noexcept override {
    XLOG(INFO) << "[Exit] Signal received ";
    steady_clock::time_point begin = steady_clock::now();
    // Stop stats publishing / background schedulers before touching the switch.
    stopServices_();
    steady_clock::time_point servicesStopped = steady_clock::now();
    XLOG(INFO)
        << "[Exit] Services stop time "
        << duration_cast<duration<float>>(servicesStopped - begin).count();
    sw_->gracefulExit();
    steady_clock::time_point switchGracefulExit = steady_clock::now();
    XLOG(INFO)
        << "[Exit] Switch Graceful Exit time "
        << duration_cast<duration<float>>(switchGracefulExit - servicesStopped)
               .count()
        << std::endl
        << "[Exit] Total graceful Exit time "
        << duration_cast<duration<float>>(switchGracefulExit - begin).count();
    // Terminate the process; the EventBase loop never resumes after this.
    exit(0);
  }

 private:
  SwSwitch* sw_;
  StopServices stopServices_;
};
/*
 * Agent entry point: sets up logging/flags, builds the Platform and SwSwitch,
 * starts the Thrift server, launches asynchronous switch initialization, and
 * runs the EventBase loop forever (exit happens via SignalHandler).
 */
int fbossMain(int argc, char** argv, PlatformInitFn initPlatform) {
  fbossInit(argc, argv);

  // Internally we use a modified version of gflags that only shows VLOG
  // messages if --minloglevel is set to 0. We pretty much always want to see
  // VLOG messages, so set minloglevel to 0 by default, unless overridden on
  // the command line.
  gflags::SetCommandLineOptionWithMode(
      "minloglevel", "0", gflags::SET_FLAGS_DEFAULT);

  // Allow the fb303 setOption() call to update the command line flag
  // settings.  This allows us to change the log levels on the fly using
  // setOption().
  fbData->setUseOptionsAsFlags(true);

  // Redirect stdin to /dev/null. This is really a extra precaution
  // we already disallow access to linux shell as a result of
  // executing thrift calls. Redirecting to /dev/null is done so that
  // if somehow a client did manage to get into the shell, the shell
  // would read EOF immediately and exit.
  freopen("/dev/null", "r", stdin);

  // Now that we have parsed the command line flags, create the Platform object
  unique_ptr<Platform> platform = initPlatform();

  // Create the SwSwitch and thrift handler
  SwSwitch sw(std::move(platform));
  auto platformPtr = sw.getPlatform();
  auto handler =
      std::shared_ptr<ThriftHandler>(platformPtr->createHandler(&sw));

  EventBase eventBase;

  // Start the thrift server. It shares 'eventBase' with the stats publisher
  // and signal handler below.
  ThriftServer server;
  server.setTaskExpireTime(std::chrono::milliseconds(
        FLAGS_thrift_task_expire_timeout * 1000));
  server.getEventBaseManager()->setEventBase(&eventBase, false);
  server.setInterface(handler);
  server.setDuplex(true);
  handler->setEventBaseManager(server.getEventBaseManager());

  // When a thrift connection closes, we need to clean up the associated
  // callbacks.
  server.setServerEventHandler(handler);

  SocketAddress address;
  address.setFromLocalPort(FLAGS_port);
  server.setAddress(address);
  server.setIdleTimeout(std::chrono::seconds(FLAGS_thrift_idle_timeout));
  handler->setIdleTimeout(FLAGS_thrift_idle_timeout);
  server.setup();

  // Create an Initializer to initialize the switch in a background thread.
  Initializer init(&sw, platformPtr);

  // At this point, we are guaranteed no other agent process will initialize
  // the ASIC because such a process would have crashed attempting to bind to
  // the Thrift port 5909
  init.start();

  // Create a timeout to call sw->publishStats() once every second.
  StatsPublisher statsPublisher(
      &eventBase, &sw,
      std::chrono::milliseconds(FLAGS_stat_publish_interval_ms));
  statsPublisher.start();

  // Invoked from SignalHandler on SIGINT/SIGTERM before graceful exit.
  auto stopServices = [&]() {
    statsPublisher.cancelTimeout();
    init.stopFunctionScheduler();
    fbossFinalize();
  };
  SignalHandler signalHandler(&eventBase, &sw, stopServices);

  SCOPE_EXIT { server.cleanUp(); };
  XLOG(INFO) << "serving on localhost on port " << FLAGS_port;

  // Run the EventBase main loop
  eventBase.loopForever();

  return 0;
}
}} // facebook::fboss
|
esasiela/hc-swarm | src/main/java/com/hedgecourt/swarm/impl2/UpDownSwarm.java | package com.hedgecourt.swarm.impl2;
import com.hedgecourt.swarm.Position;
import com.hedgecourt.swarm.Speck;
import com.hedgecourt.swarm.Swarm;
import com.hedgecourt.swarm.Velocity;
public class UpDownSwarm extends AbstractSwarm implements Swarm {

    /** Topmost y coordinate a speck may reach before its direction reverses. */
    private static final int MIN_Y = 50;
    /** Bottommost y coordinate a speck may reach before its direction reverses. */
    private static final int MAX_Y = 150;
    /** Vertical speed, in field units per tick. */
    private static final int SPEED = 5;

    /**
     * Seeds the swarm with one speck every 5 units along the x axis.
     * Starting y positions zig-zag between {@link #MIN_Y} and {@link #MAX_Y}
     * so the column of specks forms a wave; each speck starts with a purely
     * vertical velocity of magnitude {@link #SPEED}.
     */
    public void initializeSwarm() {
        int y = MIN_Y;
        int vel = SPEED;
        for (int x = 5; x < this.getFieldDimensionX(); x += 5) {
            this.specks.add(new Speck(new Position(x, y), new Velocity(0, vel)));
            y += vel;
            // Reverse the zig-zag direction when either bound is touched.
            if (y == MAX_Y || y == MIN_Y) {
                vel = -1 * vel;
            }
        }
    }

    /**
     * Queues the next-tick velocity for every speck: force downward motion at
     * or above {@link #MIN_Y}, force upward motion at or below {@link #MAX_Y},
     * and otherwise keep the speck on its current heading.
     */
    public void queueVelocities() {
        double GO = SPEED;
        for (Speck s : this.specks) {
            if (s.position().y() <= MIN_Y) {
                // too high, always move down
                s.queuedVelocity().setY(GO);
                s.queuedVelocity().setX(0);
            } else if (s.position().y() >= MAX_Y) {
                // too low, always move up
                s.queuedVelocity().setY(-1 * GO);
                s.queuedVelocity().setX(0);
            }
            // otherwise: leave the velocity alone, continue on current heading
        }
    }
}
|
Alchyr/EvilWithin | src/main/java/downfall/rooms/HeartShopRoom.java | <reponame>Alchyr/EvilWithin
//
// Source code recreated from a .class file by IntelliJ IDEA
// (powered by Fernflower decompiler)
//
package downfall.rooms;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.megacrit.cardcrawl.dungeons.AbstractDungeon;
import com.megacrit.cardcrawl.monsters.AbstractMonster;
import com.megacrit.cardcrawl.monsters.MonsterGroup;
import com.megacrit.cardcrawl.rooms.AbstractRoom;
import com.megacrit.cardcrawl.rooms.ShopRoom;
import com.megacrit.cardcrawl.shop.Merchant;
import downfall.monsters.FleeingMerchant;
import downfall.patches.EvilModeCharacterSelect;
import downfall.util.HeartMerchant;
public class HeartShopRoom extends ShopRoom {
    /** Merchant shown once the FleeingMerchant fight is over or skipped. */
    public HeartMerchant heartMerchant;
    /** True once the heart merchant should be rendered/updated. */
    public boolean heartMerchantShown;
    /** True if entering this room started the FleeingMerchant combat. */
    public boolean startedCombat = false;
    /** Whether entering this room may trigger the FleeingMerchant fight. */
    public boolean yesFight = true;

    public HeartShopRoom() {
        super();
    }

    /**
     * @param yesFight whether the FleeingMerchant fight may start on entry
     *                 (only relevant while the merchant is still alive).
     */
    public HeartShopRoom(boolean yesFight) {
        super();
        this.yesFight = yesFight;
    }

    @Override
    public void setMerchant(Merchant merc) {
        super.setMerchant(merc);
    }

    public void setHeartMerchant(HeartMerchant merc) {
        this.heartMerchant = merc;
    }

    /**
     * On entry: in evil mode, fight the fleeing merchant if it is still alive
     * and fighting is enabled; otherwise spawn and show the heart merchant's
     * shop. Shop music only plays outside TheEnding when fighting is disabled.
     */
    public void onPlayerEntry() {
        // Braces added for clarity: the whole if/else belongs to evil mode.
        if (EvilModeCharacterSelect.evilMode) {
            if (!FleeingMerchant.DEAD && yesFight) {
                startCombat();
                startedCombat = true;
            } else {
                this.setHeartMerchant(new HeartMerchant());
                this.heartMerchant.spawnHitbox();
                showHeartMerchant();
            }
        }
        if (!AbstractDungeon.id.equals("TheEnding") && !yesFight) {
            this.playBGM("SHOP");
        }
        AbstractDungeon.overlayMenu.proceedButton.setLabel(TEXT[0]);
    }

    /** Flips the current room into combat against the FleeingMerchant. */
    private static void startCombat() {
        AbstractDungeon.closeCurrentScreen();
        AbstractDungeon.getCurrRoom().phase = AbstractRoom.RoomPhase.COMBAT;
        AbstractDungeon.lastCombatMetricKey = FleeingMerchant.ID;
        AbstractDungeon.getCurrRoom().monsters = new MonsterGroup(new FleeingMerchant());
        AbstractDungeon.getCurrRoom().rewards.clear();
        AbstractDungeon.getCurrRoom().monsters.init();
        for (AbstractMonster m : AbstractDungeon.getCurrRoom().monsters.monsters) {
            m.usePreBattleAction();
            m.useUniversalPreBattleAction();
        }
        // Pre-create the merchant so it can be shown when the fight ends.
        ((HeartShopRoom) AbstractDungeon.getCurrRoom()).heartMerchant = new HeartMerchant();
        AbstractRoom.waitTimer = 0.1f;
        AbstractDungeon.player.preBattlePrep();
    }

    public void render(SpriteBatch sb) {
        if (this.heartMerchant != null && this.heartMerchantShown) {
            this.heartMerchant.render(sb);
        }
        super.render(sb);
        this.renderTips(sb);
    }

    public void dispose() {
        super.dispose();
        if (this.heartMerchant != null) {
            this.heartMerchant.dispose();
            this.heartMerchant = null;
        }
    }

    public void update() {
        super.update();
        if (this.heartMerchant != null && this.heartMerchantShown) {
            this.heartMerchant.update();
        }
    }

    /** Makes the heart merchant interactive and visible. */
    public void showHeartMerchant() {
        this.heartMerchantShown = true;
        this.heartMerchant.spawnHitbox();
    }
}
|
SmarterApp/TechnologyReadinessTool | readiness/src/main/java/net/techreadiness/customer/datagrid/FileByIDsItemProviderImpl.java | package net.techreadiness.customer.datagrid;
import java.util.Collection;
import java.util.List;
import javax.inject.Inject;
import net.techreadiness.persistence.criteriaquery.Criteria;
import net.techreadiness.persistence.criteriaquery.CriteriaQuery;
import net.techreadiness.persistence.criteriaquery.QueryResult;
import net.techreadiness.persistence.domain.FileDO;
import net.techreadiness.service.common.DataGrid;
import net.techreadiness.service.common.DataGridItemProviderImpl;
import net.techreadiness.service.object.File;
import net.techreadiness.service.object.mapping.MappingService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
/**
 * Prototype-scoped, read-only data-grid item provider that pages over
 * {@code FileDO} rows restricted to an explicitly supplied set of file IDs.
 */
@Component
@Scope("prototype")
@Transactional(readOnly = true)
public class FileByIDsItemProviderImpl extends DataGridItemProviderImpl<File> implements FileByIDsItemProvider {
    // IDs used to restrict the query; must be set before getPage() is called.
    private Collection<Long> fileIds;
    @Inject
    MappingService mappingService;
    @Autowired
    CriteriaQuery<FileDO> criteriaQuery;

    /**
     * Fetches one page of files whose IDs are in {@code fileIds} and records
     * the total matching row count on the provider for paging.
     */
    @Override
    public List<File> getPage(DataGrid<File> grid) {
        Criteria criteria = createCriteria(grid);
        // Restrict the base query to the supplied IDs via a named parameter.
        criteriaQuery.setBaseWhere("main.file_id in (:file_id_list)");
        criteria.getParameters().putAll("file_id_list", fileIds);
        QueryResult<FileDO> result = criteriaQuery.getData(criteria, FileDO.class);
        setTotalNumberOfItems(result.getTotalRowCount());
        // Map persistence objects to service-layer File objects.
        return mappingService.mapFromDOList(result.getRows());
    }

    public Collection<Long> getFileIds() {
        return fileIds;
    }

    @Override
    public void setFileIds(Collection<Long> fileIds) {
        this.fileIds = fileIds;
    }
}
sumon-dey/JavaConcepts | src/test/java/com/javaconcepts/unitTesting/junit/JUnitTestSuite.java | package com.javaconcepts.unitTesting.junit;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
/*
 * If we have several test classes, we can combine them into a test suite.
 * Running a test suite executes all test classes in that suite in the
 * specified order. A test suite can also contain other test suites.
 */
@RunWith(Suite.class)
@Suite.SuiteClasses({ JUnitTestSuiteDemoTest1.class, JUnitTestSuiteDemoTest2.class })
public class JUnitTestSuite {
    // Intentionally empty: the suite is defined entirely by the annotations above.
}
|
krajkumard/flynt | test/integration/samples_in/percent_numerics.py | a, b, c, d, e = tuple(range(5))
print('%d %f %e %g %s' % (a,b,c,d,e)) |
fractal520/dbops | dbops_venv/lib/python3.5/site-packages/engineio/async_eventlet.py | import importlib
import sys
from eventlet import sleep
from eventlet.websocket import WebSocketWSGI as _WebSocketWSGI
class WebSocketWSGI(_WebSocketWSGI):
    """Eventlet WebSocket WSGI app that remembers the connection's raw socket.

    Subclasses eventlet's ``WebSocketWSGI`` so that the underlying socket of
    the current connection is captured in ``self._sock`` on each call.
    """

    def __init__(self, *args, **kwargs):
        super(WebSocketWSGI, self).__init__(*args, **kwargs)
        # Raw socket of the active connection; populated in __call__.
        self._sock = None

    def __call__(self, environ, start_response):
        # 'eventlet.input' is only set when served by the eventlet WSGI server.
        if 'eventlet.input' not in environ:
            raise RuntimeError('You need to use the eventlet server. '
                               'See the Deployment section of the '
                               'documentation for more information.')
        self._sock = environ['eventlet.input'].get_socket()
        return super(WebSocketWSGI, self).__call__(environ, start_response)
# Async backend description consumed by engine.io when running under eventlet:
# green threading primitives, green queue, the websocket wrapper defined above,
# and eventlet's cooperative sleep.
_async = {
    'threading': importlib.import_module('eventlet.green.threading'),
    'thread_class': 'Thread',
    'queue': importlib.import_module('eventlet.queue'),
    'queue_class': 'Queue',
    'websocket': sys.modules[__name__],
    'websocket_class': 'WebSocketWSGI',
    'sleep': sleep
}
|
atlasapi/atlas-model | src/main/java/org/atlasapi/equiv/ChannelRef.java | <gh_stars>1-10
package org.atlasapi.equiv;
import com.google.common.base.Objects;
import org.atlasapi.media.channel.Channel;
import org.atlasapi.media.entity.Publisher;
import static com.google.common.base.Preconditions.checkNotNull;
/**
 * Lightweight reference to a {@link Channel}: its numeric id, URI and source
 * publisher. Mutable bean form (no-arg constructor plus setters) is kept for
 * serialization frameworks; prefer the static factories for normal use.
 */
public class ChannelRef {

    private long id;
    private String uri;
    private Publisher publisher;

    public ChannelRef() {
    }

    private ChannelRef(long id, String uri, Publisher publisher) {
        // id is a primitive long and can never be null, so the previous
        // checkNotNull(id) was a no-op (it only forced pointless autoboxing).
        this.id = id;
        this.uri = checkNotNull(uri);
        this.publisher = checkNotNull(publisher);
    }

    /** Creates a reference from its raw parts. */
    public static ChannelRef create(long id, String uri, Publisher publisher) {
        return new ChannelRef(id, uri, publisher);
    }

    /** Creates a reference pointing at the given channel. */
    public static ChannelRef fromChannel(Channel channel) {
        return new ChannelRef(channel.getId(), channel.getUri(), channel.getSource());
    }

    public long getId() {
        return id;
    }

    public void setId(long id) {
        this.id = id;
    }

    public String getUri() {
        return uri;
    }

    public void setUri(String uri) {
        this.uri = uri;
    }

    public Publisher getPublisher() {
        return publisher;
    }

    public void setPublisher(Publisher publisher) {
        this.publisher = publisher;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj instanceof ChannelRef) {
            ChannelRef target = (ChannelRef) obj;
            // Compare the primitive directly instead of boxing via Objects.equal.
            return id == target.getId()
                    && Objects.equal(uri, target.getUri())
                    && Objects.equal(publisher, target.getPublisher());
        }
        return false;
    }

    @Override
    public int hashCode() {
        return Objects.hashCode(id, uri, publisher);
    }

    @Override
    public String toString() {
        return Objects.toStringHelper(ChannelRef.class)
                .add("id", id)
                .add("uri", uri)
                .add("publisher", publisher.key())
                .toString();
    }
}
|
Franklin-Siqueira/mooc-lab | fcsSimpleMooc/accounts/forms.py | '''
Created on Jul 30, 2019
@author: franklincarrilho
'''
#
import datetime
from django import forms
from django.http import request
from django.contrib.sites.requests import RequestSite
from django.contrib.sites.shortcuts import get_current_site
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.auth import get_user_model
from django.conf import settings
#
from fcsSimpleMooc.core.mail import send_mail_template
from fcsSimpleMooc.core.utils import generate_hash_key
from .models import PasswordReset
# Resolve the active user model so the forms also work with a custom
# AUTH_USER_MODEL, not just django.contrib.auth.models.User.
User = get_user_model()
class LoginForm(AuthenticationForm):
    """Stock Django login form with Bootstrap-styled widgets and placeholders."""

    username = forms.CharField(widget = forms.TextInput(attrs ={'class':'form-control',
                                    'placeholder':' Please, enter your username...'}))
    password = forms.CharField(widget = forms.PasswordInput(attrs = {'class':'form-control',
                                    'placeholder':' Please, enter your password...'}))
class PasswordResetForm(forms.Form):
    """Validate a user's e-mail address and mail them a password-reset key."""

    email = forms.EmailField(label = 'E-mail',
                             widget = forms.TextInput(attrs = {'class':'form-control',
                                    'placeholder':' Please, enter your email...'}))

    def clean_email(self):
        """Ensure the address belongs to an existing user."""
        email = self.cleaned_data['email']
        if User.objects.filter(email = email).exists():
            return email
        raise forms.ValidationError('Sorry! No User found for the informed e-mail!')

    def save(self, request=None):
        """Create a PasswordReset record for the user and e-mail its key.

        ``request`` is the current HttpRequest, used to resolve the site
        domain for the e-mail body. BUG FIX: the previous code passed the
        imported ``django.http.request`` *module* to ``get_current_site``;
        callers should now pass the real request. ``get_current_site(None)``
        falls back to the SITE_ID setting when the sites framework is
        installed, keeping the no-argument call backward compatible.
        """
        user = User.objects.get(email = self.cleaned_data['email'])
        key = generate_hash_key(user.username)
        reset = PasswordReset(key = key, user = user)
        reset.save()
        template_name = 'accounts/password_reset_mail.html'
        subject = 'fcSMOOC new password creation request'
        context = {
            'dateValue': datetime.datetime.now(),
            'reset': reset,
            'user': user,
            'baseDir': get_current_site(request),
        }
        send_mail_template(subject, template_name, context, [user.email])
#
class RegisterForm(forms.ModelForm):
    """Sign-up form with a confirmed password pair.

    BUG FIX: the password fields were previously read via the mangled key
    ``"<PASSWORD>"`` (an anonymization artifact), which made the match check
    always pass and stored the literal placeholder as the password. The real
    field names ``password1``/``password2`` are restored below.
    """

    username = forms.CharField(label = 'Name', max_length=100,
              widget= forms.TextInput(attrs = {'placeholder':' Type your username...'}))
    email = forms.EmailField(label = 'E-mail',
              widget= forms.TextInput(attrs = {'placeholder':' add your e-mail'}))
    password1 = forms.CharField(label = 'Password',
              widget = forms.PasswordInput(attrs = {'placeholder':' your password...'}))
    password2 = forms.CharField(label = 'Confirm password',
              widget = forms.PasswordInput(attrs = {'placeholder':' and confirm your password.'}))

    class Meta:
        model = User
        fields = ['username', 'email']

    def clean_password2(self):
        """Reject the form when the two typed passwords differ."""
        password1 = self.cleaned_data.get("password1")
        password2 = self.cleaned_data.get("password2")
        if password1 and password2 and password1 != password2:
            raise forms.ValidationError("Sorry! Passwords don't match")
        return password2

    def save(self, commit = True):
        """Create the user, hashing the chosen password before saving."""
        user = super(RegisterForm, self).save(commit = False)
        user.set_password(self.cleaned_data['password1'])
        if commit:
            user.save()
        return user
class EditAccountForm(forms.ModelForm):
    """Profile-editing form for the active user model.

    NOTE(review): the 'name' field assumes a custom user model declaring it —
    confirm against settings.AUTH_USER_MODEL.
    """

    class Meta:
        model = User
        fields = ['username', 'email', 'name']
##########################################################
########## END ###########
########################################################## |
ucdavis/VGL_htsjdk | src/main/java/htsjdk/samtools/SAMRecordDuplicateComparator.java | /*
* The MIT License
*
* Copyright (c) 2015 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package htsjdk.samtools;
import htsjdk.samtools.DuplicateScoringStrategy.ScoringStrategy;
import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Compares records based on if they should be considered PCR Duplicates (see MarkDuplicates).
*
* There are three orderings provided by this comparator: compare, duplicateSetCompare, and fileOrderCompare.
*
* Specify the headers when constructing this comparator if you would like to consider the library as the major sort key.
* The records being compared must also have non-null SAMFileHeaders.
*
* @author nhomer
*/
public class SAMRecordDuplicateComparator implements SAMRecordComparator, Serializable {
    private static final long serialVersionUID = 1L;

    /** An enum to provide type-safe keys for transient attributes the comparator puts on SAMRecords. */
    private static enum Attr {
        LibraryId, ReadCoordinate, MateCoordinate
    }

    /** Orientation codes: paired (FF/FR/RF/RR) and fragment (F/R). */
    private static final byte FF = 0, FR = 1, F = 2, RF = 3, RR = 4, R = 5;

    private final Map<String, Short> libraryIds = new HashMap<String, Short>(); // from library string to library id
    private short nextLibraryId = 1;

    private ScoringStrategy scoringStrategy = ScoringStrategy.TOTAL_MAPPED_REFERENCE_LENGTH;

    public SAMRecordDuplicateComparator() {}

    public SAMRecordDuplicateComparator(final List<SAMFileHeader> headers) {
        // pre-populate the library names
        for (final SAMFileHeader header : headers) {
            for (final SAMReadGroupRecord readGroup : header.getReadGroups()) {
                final String libraryName = readGroup.getLibrary();
                if (null != libraryName) {
                    final short libraryId = this.nextLibraryId++;
                    this.libraryIds.put(libraryName, libraryId);
                }
            }
        }
    }

    public void setScoringStrategy(final ScoringStrategy scoringStrategy) {
        this.scoringStrategy = scoringStrategy;
    }

    /**
     * Populates the set of transient attributes on SAMRecords if they are not already there.
     */
    private void populateTransientAttributes(final SAMRecord... recs) {
        for (final SAMRecord rec : recs) {
            if (rec.getTransientAttribute(Attr.LibraryId) != null) continue;
            rec.setTransientAttribute(Attr.LibraryId, getLibraryId(rec));
            rec.setTransientAttribute(Attr.ReadCoordinate, rec.getReadNegativeStrandFlag() ? rec.getUnclippedEnd() : rec.getUnclippedStart());
            rec.setTransientAttribute(Attr.MateCoordinate, getMateCoordinate(rec));
        }
    }

    /**
     * Gets the library name from the header for the record. If the RG tag is not present on
     * the record, or the library isn't denoted on the read group, a constant string is
     * returned.
     */
    private static String getLibraryName(final SAMRecord rec) {
        final String readGroupId = (String) rec.getAttribute("RG");
        if (readGroupId != null) {
            final SAMFileHeader samHeader = rec.getHeader();
            if (null != samHeader) {
                final SAMReadGroupRecord rg = samHeader.getReadGroup(readGroupId);
                if (rg != null) {
                    final String libraryName = rg.getLibrary();
                    if (null != libraryName) return libraryName;
                }
            }
        }
        return "Unknown Library";
    }

    /** Get the library ID for the given SAM record, assigning a new one if unseen. */
    private short getLibraryId(final SAMRecord rec) {
        final String library = getLibraryName(rec);
        Short libraryId = this.libraryIds.get(library);
        if (libraryId == null) {
            libraryId = this.nextLibraryId++;
            this.libraryIds.put(library, libraryId);
        }
        return libraryId;
    }

    /**
     * Convenience method for comparing two orientation bytes. This is critical if we have mapped reads compared to fragment reads.
     */
    private int compareOrientationByteCollapseOrientation(final int orientation1, final int orientation2) {
        // F == FR, F == FF
        // R == RF, R == RR
        if (F == orientation1 || R == orientation1) { // first orientation is fragment
            /**
             * We want
             *   F == FR, F == FF
             *   R == RF, R == RR
             */
            if (F == orientation1) {
                if (F == orientation2 || FR == orientation2 || FF == orientation2) {
                    return 0;
                }
            }
            else { // R == orientation1
                if (R == orientation2 || RF == orientation2 || RR == orientation2) {
                    return 0;
                }
            }
        }
        else if (F == orientation2 || R == orientation2) { // first orientation is paired, second is fragment
            return -compareOrientationByteCollapseOrientation(orientation2, orientation1);
        }
        return orientation1 - orientation2;
    }

    /**
     * Returns a single byte that encodes the orientation of the two reads in a pair.
     */
    private static byte getPairedOrientationByte(final boolean read1NegativeStrand, final boolean read2NegativeStrand) {
        if (read1NegativeStrand) {
            if (read2NegativeStrand) return SAMRecordDuplicateComparator.RR;
            else return SAMRecordDuplicateComparator.RF;
        } else {
            if (read2NegativeStrand) return SAMRecordDuplicateComparator.FR;
            else return SAMRecordDuplicateComparator.FF;
        }
    }

    private int getFragmentOrientation(final SAMRecord record) {
        return record.getReadNegativeStrandFlag() ? SAMRecordDuplicateComparator.R : SAMRecordDuplicateComparator.F;
    }

    private int getPairedOrientation(final SAMRecord record) {
        if (record.getReadPairedFlag() && !record.getReadUnmappedFlag() && !record.getMateUnmappedFlag()) {
            return getPairedOrientationByte(record.getReadNegativeStrandFlag(), record.getMateNegativeStrandFlag());
        } else {
            return getFragmentOrientation(record);
        }
    }

    private int getMateReferenceIndex(final SAMRecord record) {
        if (record.getReadPairedFlag() && !record.getReadUnmappedFlag() && !record.getMateUnmappedFlag()) {
            return record.getMateReferenceIndex();
        } else {
            return -1;
        }
    }

    private int getMateCoordinate(final SAMRecord record) {
        if (record.getReadPairedFlag() && !record.getReadUnmappedFlag() && !record.getMateUnmappedFlag()) {
            return record.getMateNegativeStrandFlag() ? SAMUtils.getMateUnclippedEnd(record) : SAMUtils.getMateUnclippedStart(record);
        } else {
            return -1;
        }
    }

    /** Is one end of a pair, or the fragment, unmapped? */
    private boolean hasUnmappedEnd(final SAMRecord record) {
        return (record.getReadUnmappedFlag() || (record.getReadPairedFlag() && record.getMateUnmappedFlag()));
    }

    /** Are both ends of a pair, or the fragment, mapped? */
    private boolean hasMappedEnd(final SAMRecord record) {
        return (!record.getReadUnmappedFlag() || (record.getReadPairedFlag() && !record.getMateUnmappedFlag()));
    }

    /** Is this paired end and are both ends of a pair mapped */
    private boolean pairedEndAndBothMapped(final SAMRecord record) {
        return (record.getReadPairedFlag() && !record.getReadUnmappedFlag() && !record.getMateUnmappedFlag());
    }

    /**
     * Most stringent comparison.
     *
     * Two records are compared based on if they are duplicates of each other, and then based
     * on if they should be prioritized for being the most "representative". Typically, the representative
     * is the record in the SAM file that is *not* marked as a duplicate within a set of duplicates.
     *
     * Compare by file order, then duplicate scoring strategy, read name.
     *
     * If both reads are paired and both ends mapped, always prefer the first end over the second end. This is needed to
     * properly choose the first end for optical duplicate identification when both ends are mapped to the same position etc.
     */
    @Override
    public int compare(final SAMRecord samRecord1, final SAMRecord samRecord2) {
        populateTransientAttributes(samRecord1, samRecord2);
        int cmp;

        // temporary variables for comparisons
        int samRecord1Value, samRecord2Value;

        cmp = fileOrderCompare(samRecord1, samRecord2);
        // the duplicate scoring strategy
        if (cmp == 0) {
            cmp = DuplicateScoringStrategy.compare(samRecord1, samRecord2, this.scoringStrategy, true);
        }
        // the read name
        if (cmp == 0) {
            cmp = samRecord1.getReadName().compareTo(samRecord2.getReadName());
        }
        // needed for optical duplicate detection when both ends are mapped to the same position.
        if (cmp == 0) {
            if (samRecord1.getReadPairedFlag() && samRecord2.getReadPairedFlag()) {
                samRecord1Value = samRecord1.getFirstOfPairFlag() ? 0 : 1;
                samRecord2Value = samRecord2.getFirstOfPairFlag() ? 0 : 1;
                cmp = samRecord1Value - samRecord2Value;
            }
        }
        return cmp;
    }

    /**
     * Compares: Library identifier, reference index, read coordinate, orientation of the read (or read pair), mate's coordinate (if paired and mapped),
     * mapped ends, ...
     *
     * collapseOrientation - true if we want cases where fragment orientation to paired end orientation can be equal (ex. F == FR), false otherwise
     * considerNumberOfEndsMappedAndPairing - true if we want to prefer paired ends with both ends mapped over paired ends with only one end mapped, or paired ends with end
     * mapped over fragment reads, false otherwise.
     */
    private int fileOrderCompare(final SAMRecord samRecord1, final SAMRecord samRecord2, final boolean collapseOrientation, final boolean considerNumberOfEndsMappedAndPairing) {
        populateTransientAttributes(samRecord1, samRecord2);
        int cmp;

        if (null == samRecord1.getHeader() || null == samRecord2.getHeader()) {
            throw new IllegalArgumentException("Records must have non-null SAMFileHeaders to be compared");
        }

        // temporary variables for comparisons
        int samRecord1Value, samRecord2Value;

        // library identifier
        {
            samRecord1Value = (Short) samRecord1.getTransientAttribute(Attr.LibraryId);
            samRecord2Value = (Short) samRecord2.getTransientAttribute(Attr.LibraryId);
            cmp = samRecord1Value - samRecord2Value;
        }
        // reference index
        if (cmp == 0) {
            samRecord1Value = samRecord1.getReferenceIndex();
            samRecord2Value = samRecord2.getReferenceIndex();
            // NB: this accounts for unmapped reads to be placed at the ends of the file
            if (samRecord1Value == -1) {
                cmp = (samRecord2Value == -1) ? 0 : 1;
            }
            else if (samRecord2Value == -1) {
                cmp = -1;
            }
            else {
                cmp = samRecord1Value - samRecord2Value;
            }
        }
        // read coordinate
        if (cmp == 0) {
            samRecord1Value = (Integer) samRecord1.getTransientAttribute(Attr.ReadCoordinate);
            samRecord2Value = (Integer) samRecord2.getTransientAttribute(Attr.ReadCoordinate);
            cmp = samRecord1Value - samRecord2Value;
        }
        // orientation
        if (cmp == 0) {
            samRecord1Value = getPairedOrientation(samRecord1);
            samRecord2Value = getPairedOrientation(samRecord2);

            if (collapseOrientation) {
                cmp = compareOrientationByteCollapseOrientation(samRecord1Value, samRecord2Value);
            }
            else {
                cmp = samRecord1Value - samRecord2Value;
            }
        }
        // both ends need to be mapped
        if (pairedEndAndBothMapped(samRecord1) && pairedEndAndBothMapped(samRecord2)) {
            // mate's reference index
            if (cmp == 0) {
                samRecord1Value = getMateReferenceIndex(samRecord1);
                samRecord2Value = getMateReferenceIndex(samRecord2);
                cmp = samRecord1Value - samRecord2Value;
            }
            // mate's coordinate
            if (cmp == 0) {
                samRecord1Value = (Integer) samRecord1.getTransientAttribute(Attr.MateCoordinate);
                samRecord2Value = (Integer) samRecord2.getTransientAttribute(Attr.MateCoordinate); // stray ';;' removed
                cmp = samRecord1Value - samRecord2Value;
            }
        }
        if (cmp == 0) {
            samRecord1Value = hasMappedEnd(samRecord1) ? 0 : 1;
            samRecord2Value = hasMappedEnd(samRecord2) ? 0 : 1;
            cmp = samRecord1Value - samRecord2Value;
        }
        // if both paired or both unpaired, then check if one of the two ends (or single end) is unmapped
        // else prefer the one that is paired end
        if (cmp == 0 && considerNumberOfEndsMappedAndPairing) {
            if (samRecord1.getReadPairedFlag() == samRecord2.getReadPairedFlag()) {
                // Is this unmapped or its mate?
                samRecord1Value = hasUnmappedEnd(samRecord1) ? 1 : 0;
                samRecord2Value = hasUnmappedEnd(samRecord2) ? 1 : 0;
                cmp = samRecord1Value - samRecord2Value;
            }
            else { // if we care if one is paired and the other is not
                cmp = samRecord1.getReadPairedFlag() ? -1 : 1;
            }
        }
        return cmp;
    }

    /**
     * Less stringent than compare, such that two records are equal enough such that their ordering within their duplicate set would be arbitrary.
     *
     * Major difference between this and fileOrderCompare is how we compare the orientation byte. Here we want:
     *   F == FR, F == FF
     *   R == RF, R == RR
     */
    public int duplicateSetCompare(final SAMRecord samRecord1, final SAMRecord samRecord2) {
        return fileOrderCompare(samRecord1, samRecord2, true, false);
    }

    /**
     * Less stringent than duplicateSetCompare, such that two records are equal enough such that their ordering in a sorted SAM file would be arbitrary.
     */
    @Override
    public int fileOrderCompare(final SAMRecord samRecord1, final SAMRecord samRecord2) {
        return fileOrderCompare(samRecord1, samRecord2, false, true);
    }
}
|
JeffYFHuang/jt808demo | jt808app/src/main/java/com/liteon/javacint/common/Bytes.java | <filename>jt808app/src/main/java/com/liteon/javacint/common/Bytes.java
package com.liteon.javacint.common;
import java.io.ByteArrayOutputStream;
import java.util.Vector;
import com.liteon.javacint.logging.Logger;
/**
* Bytes management class.
*
* @author <NAME> / www.webingenia.com
*/
public class Bytes {
/**
 * Appends the uppercase hex rendering of the whole of {@code buf} to {@code sb}.
 * Delegates to the (offset, length) overload, which truncates at 256 bytes.
 *
 * @param sb  destination buffer
 * @param buf bytes to render
 */
public static void byteArrayToHexString(StringBuffer sb, byte buf[]) {
    byteArrayToHexString(sb, buf, 0, buf.length);
}
/**
 * Appends the uppercase hex form of a slice of {@code buf} to {@code sb}.
 * At most 256 bytes are rendered; longer slices are silently truncated
 * (this helper is geared towards log output).
 *
 * @param sb     destination buffer
 * @param buf    source bytes
 * @param offset index of the first byte to render
 * @param length number of bytes requested (capped at 256)
 */
public static void byteArrayToHexString(StringBuffer sb, byte buf[], int offset, int length) {
    final int count = Math.min(length, 256);
    for (int pos = offset; pos < offset + count; pos++) {
        final int value = buf[pos] & 0xff;
        if (value < 0x10) {
            sb.append("0");
        }
        sb.append(Long.toString(value, 16).toUpperCase());
    }
}
/**
 * Returns the uppercase hex rendering of the whole of {@code buf}
 * (truncated at 256 bytes by the underlying overload).
 */
public static String byteArrayToHexString(byte[] buf) {
    return byteArrayToHexString(buf, 0, buf.length);
}
/**
 * Returns the uppercase hex rendering of a slice of {@code buf}
 * (truncated at 256 bytes by the StringBuffer-based overload).
 */
public static String byteArrayToHexString(byte[] buf, int offset, int length) {
    final StringBuffer hex = new StringBuffer();
    byteArrayToHexString(hex, buf, offset, length);
    return hex.toString();
}
/**
 * Parses a hex string (two characters per byte, e.g. "0AFF") into bytes.
 * The input is expected to have even length and contain only hex digits.
 */
public static byte[] hexStringToByteArray(String str) {
    final byte[] out = new byte[str.length() / 2];
    for (int pos = 0; pos < str.length(); pos += 2) {
        out[pos / 2] = (byte) Integer.parseInt(str.substring(pos, pos + 2), 16);
    }
    return out;
}
/**
 * Writes the low 32 bits of {@code l} into {@code out} as four big-endian
 * bytes starting at index {@code i}.
 *
 * @param l   value whose unsigned-32-bit representation is written
 * @param out destination array (must have room for 4 bytes at {@code i})
 * @param i   index of the most significant byte
 */
public static void longToUInt32Bytes(long l, byte[] out, int i) {
    out[i] = (byte) (l >>> 24);
    out[i + 1] = (byte) (l >>> 16);
    out[i + 2] = (byte) (l >>> 8);
    out[i + 3] = (byte) l;
}
/**
 * Writes the low 16 bits of {@code s} into {@code out} as two big-endian
 * bytes starting at index {@code i}.
 *
 * @param s   value whose unsigned-16-bit representation is written
 * @param out destination array (must have room for 2 bytes at {@code i})
 * @param i   index of the most significant byte
 */
public static void intTo2Bytes(int s, byte[] out, int i) {
    out[i] = (byte) (s >>> 8);
    out[i + 1] = (byte) s;
}
/**
 * Reads a big-endian unsigned 16-bit value from {@code data} at {@code offset}.
 *
 * @param data   source array
 * @param offset position of the most significant byte
 * @return the value in [0, 65535], or -1 if the offset is out of range
 */
public static int bytesToShort(byte[] data, int offset) {
    try {
        final int hi = data[offset] & 0xFF;
        final int lo = data[offset + 1] & 0xFF;
        return (hi << 8) | lo;
    } catch (Exception ex) {
        if (Logger.BUILD_CRITICAL) {
            Logger.log("Common.bytesToShort( " + Bytes.byteArrayToPrettyString(data) + ", " + offset + " )", ex, true);
        }
        return -1;
    }
}
/**
 * Reads a big-endian unsigned 32-bit value from {@code data} at {@code offset}.
 */
public static long bytesToLong(byte[] data, int offset) {
    long value = 0;
    for (int i = 0; i < 4; i++) {
        // Shift by one byte and fold in the next unsigned byte.
        value = (value << 8) + byteToInt(data[offset + i]);
    }
    return value;
}
/**
 * Writes the low 8 bits of {@code l} into {@code out[i]}.
 *
 * @param l   value whose low byte is stored
 * @param out destination array
 * @param i   index to write at
 */
public static void intTo1Byte(int l, byte[] out, int i) {
    out[i] = (byte) (l & 0xFF);
}
/**
 * Returns the unsigned value of a byte as an int in [0, 255].
 *
 * @param b byte to widen
 * @return unsigned value of {@code b}
 */
public static int byteToInt(byte b) {
    // Masking is equivalent to the classic "add 256 if negative" idiom.
    return b & 0xFF;
}
/**
 * Writes the IEEE-754 bit pattern of {@code f} into {@code out} as four
 * big-endian bytes starting at index {@code i}.
 *
 * @param f   float to encode
 * @param out destination array (must have room for 4 bytes at {@code i})
 * @param i   index of the most significant byte
 */
public static void floatToBytes(float f, byte[] out, int i) {
    final int bits = Float.floatToIntBits(f);
    out[i] = (byte) (bits >>> 24);
    out[i + 1] = (byte) (bits >>> 16);
    out[i + 2] = (byte) (bits >>> 8);
    out[i + 3] = (byte) bits;
}
/**
 * Returns the bit at absolute bit position {@code pos} within the byte array,
 * counting bits MSB-first inside each byte, starting {@code offset} bytes in.
 *
 * @return 0 or 1, or -1 if the position is out of range
 */
public static int getBit(byte[] data, int pos, int offset) {
    try {
        final int byteIndex = pos / 8;
        final int bitIndex = pos % 8;
        final byte b = data[byteIndex + offset];
        // Bit 0 is the most significant bit of the byte.
        return (b >> (7 - bitIndex)) & 0x0001;
    } catch (Exception ex) {
        if (Logger.BUILD_CRITICAL) {
            Logger.log("Common.getBit( " + Bytes.byteArrayToPrettyString(data) + ", " + pos + " )", ex, true);
        }
        return -1;
    }
}
/**
 * Tests whether bit {@code bit} (LSB = 0) of {@code b} is set.
 * Negative bit indices are shifted up by 128, mirroring the original
 * behaviour — NOTE(review): intent of that adjustment is unclear; confirm
 * against callers before relying on negative indices.
 */
public static boolean isBitSet(byte b, int bit) {
    final int index = (bit < 0) ? bit + 128 : bit;
    return (b & (1 << index)) != 0;
}
/**
 * Converts a byte to a two-character uppercase hex string (zero-padded).
 *
 * @param b byte to convert
 * @return two-character hex string
 */
public static String byteToHex(byte b) {
    final int value = b & 0xFF;
    final String hex = Integer.toHexString(value).toUpperCase();
    return (hex.length() < 2) ? "0" + hex : hex;
}
/**
 * Renders an array of bytes for debug logging, e.g. "[ 2 0x0A 0xFF ]":
 * the element count followed by each byte in 0x-prefixed hex.
 *
 * @param data bytes to show
 * @return human-readable representation of the array
 */
public static String byteArrayToPrettyString(byte[] data) {
    final StringBuffer out = new StringBuffer();
    out.append("[ ").append(data.length);
    for (int idx = 0; idx < data.length; idx++) {
        out.append(" 0x").append(byteToHex(data[idx]));
    }
    out.append(" ]");
    return out.toString();
}
/**
* Calculate the LRC of the selected frame
*
* @param data Data to calculate the LRC from
* @param start Index to calculate the LRC from
* @param end Index to calculate the LRC to
* @return Value of the LRC
*/
public static byte calculateLrc(byte[] data, int start, int end) {
byte total = 0;
for (int i = start; i < end; i++) {
total += data[i];
}
total *= -1;
return total;
}
public static boolean checkLrc(byte[] data, int start, int end) {
byte total = 0;
for (int i = start; i < end; i++) {
total += data[i];
}
return total == 0;
}
    /**
     * Convert an array of strings to an array of arrays of bytes.
     *
     * @param str Array of strings
     * @return Array of arrays of bytes
     */
    public static byte[][] stringsToBytes(String[] str) {
        // NOTE(review): String.getBytes() encodes with the platform default
        // charset — confirm this data never crosses machines with different
        // defaults, otherwise an explicit charset should be used.
        byte[][] bytes = new byte[str.length][];
        for (int i = 0; i < str.length; i++) {
            bytes[i] = str[i].getBytes();
        }
        return bytes;
    }
    /**
     * Convert an array of bytes to an array of strings.
     *
     * @param bytes Array of bytes to convert
     * @return Array of strings
     */
    public static String[] bytesToStrings(byte[][] bytes) {
        // NOTE(review): decodes with the platform default charset; must match
        // whatever charset produced the bytes (see stringsToBytes).
        String[] strings = new String[bytes.length];
        for (int i = 0; i < strings.length; i++) {
            strings[i] = new String(bytes[i]);
        }
        return strings;
    }
    /**
     * Convert a vector of strings to an array of array of bytes.
     *
     * @param vect Vector of strings (raw legacy type; every element must be
     *             a String or a ClassCastException is thrown)
     * @return Array of arrays of bytes
     */
    public static byte[][] stringsToBytes(Vector vect) {
        byte[][] bytes = new byte[vect.size()][];
        for (int i = 0; i < bytes.length; i++) {
            bytes[i] = ((String) vect.elementAt(i)).getBytes();
        }
        return bytes;
    }
// public static byte[] append(byte[] src, byte[] add, int offset, int length) {
// if (src == null) {
// src = new byte[0];
// }
// byte[] dst = new byte[src.length + add.length];
// System.arraycopy(src, 0, dst, 0, src.length);
// System.arraycopy(add, 0, dst, src.length, add.length);
// return dst;
// }
//
// public static byte[] extend(byte[] src, int newSize) {
// byte[] dst = new byte[src.length + newSize];
// System.arraycopy(src, 0, dst, 0, src.length);
// return dst;
// }
}
|
liangklfangl/structor-usage | node_modules/_rc-queue-anim@0.12.6@rc-queue-anim/lib/QueueAnim.js | 'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
var _reactDom = require('react-dom');
var _utils = require('./utils');
var _animTypes = require('./animTypes');
var _animTypes2 = _interopRequireDefault(_animTypes);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
function _defaults(obj, defaults) { var keys = Object.getOwnPropertyNames(defaults); for (var i = 0; i < keys.length; i++) { var key = keys[i]; var value = Object.getOwnPropertyDescriptor(defaults, key); if (value && value.configurable && obj[key] === undefined) { Object.defineProperty(obj, key, value); } } return obj; }
function _objectWithoutProperties(obj, keys) { var target = {}; for (var i in obj) { if (keys.indexOf(i) >= 0) continue; if (!Object.prototype.hasOwnProperty.call(obj, i)) continue; target[i] = obj[i]; } return target; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : _defaults(subClass, superClass); }
// Supplemental easing functions registered with Velocity below. Each takes
// the normalized progress p in [0, 1] (the elastic variants also take the
// amplitude o and period t options) and returns the eased progress.
var _ease = {
  easeInElastic: function easeInElastic(_p, o, t) {
    var p = _p;
    var _p1 = o >= 1 ? o : 1;
    var _p2 = (t || 1) / (o < 1 ? o : 1);
    var _p3 = _p2 / Math.PI * 2 * (Math.asin(1 / _p1) || 0);
    return -(_p1 * Math.pow(2, 10 * (p -= 1)) * Math.sin((p - _p3) * _p2));
  },
  easeOutElastic: function easeOutElastic(p, o, t) {
    var _p1 = o >= 1 ? o : 1;
    var _p2 = (t || 1) / (o < 1 ? o : 1);
    var _p3 = _p2 / Math.PI * 2 * (Math.asin(1 / _p1) || 0);
    return _p1 * Math.pow(2, -10 * p) * Math.sin((p - _p3) * _p2) + 1;
  },
  easeInOutElastic: function easeInOutElastic(_p, o, t) {
    var p = _p;
    var _p1 = o >= 1 ? o : 1;
    var _p2 = (t || 1) / (o < 1 ? o : 1);
    var _p3 = _p2 / Math.PI * 2 * (Math.asin(1 / _p1) || 0);
    p *= 2;
    return p < 1 ? -0.5 * (_p1 * Math.pow(2, 10 * (p -= 1)) * Math.sin((p - _p3) * _p2)) : _p1 * Math.pow(2, -10 * (p -= 1)) * Math.sin((p - _p3) * _p2) * 0.5 + 1;
  },
  easeInBounce: function easeInBounce(_p) {
    // easeInBounce is the mirror of easeOutBounce:
    //   easeInBounce(p) === 1 - easeOutBounce(1 - p).
    // BUG FIX: the original computed the mirrored progress (1 - p) but then
    // evaluated most branches against the un-mirrored p, which broke the
    // curve (easeInBounce(1) returned -6.5625 instead of 1). All branches
    // now consistently use the mirrored progress q.
    var q = 1 - _p;
    if (q < 1 / 2.75) {
      return 1 - 7.5625 * q * q;
    } else if (q < 2 / 2.75) {
      return 1 - (7.5625 * (q -= 1.5 / 2.75) * q + 0.75);
    } else if (q < 2.5 / 2.75) {
      return 1 - (7.5625 * (q -= 2.25 / 2.75) * q + 0.9375);
    }
    return 1 - (7.5625 * (q -= 2.625 / 2.75) * q + 0.984375);
  },
  easeOutBounce: function easeOutBounce(_p) {
    var p = _p;
    if (p < 1 / 2.75) {
      return 7.5625 * p * p;
    } else if (p < 2 / 2.75) {
      return 7.5625 * (p -= 1.5 / 2.75) * p + 0.75;
    } else if (p < 2.5 / 2.75) {
      return 7.5625 * (p -= 2.25 / 2.75) * p + 0.9375;
    }
    return 7.5625 * (p -= 2.625 / 2.75) * p + 0.984375;
  },
  easeInOutBounce: function easeInOutBounce(_p) {
    // First half: mirrored ease-in; second half: ease-out, each scaled to 0.5.
    var p = _p;
    var invert = p < 0.5;
    if (invert) {
      p = 1 - p * 2;
    } else {
      p = p * 2 - 1;
    }
    if (p < 1 / 2.75) {
      p = 7.5625 * p * p;
    } else if (p < 2 / 2.75) {
      p = 7.5625 * (p -= 1.5 / 2.75) * p + 0.75;
    } else if (p < 2.5 / 2.75) {
      p = 7.5625 * (p -= 2.25 / 2.75) * p + 0.9375;
    } else {
      p = 7.5625 * (p -= 2.625 / 2.75) * p + 0.984375;
    }
    return invert ? (1 - p) * 0.5 : p * 0.5 + 0.5;
  }
};
// Velocity needs a real browser environment; on the server a stub is
// substituted so server-side rendering does not crash.
var velocity = void 0;
if (typeof document !== 'undefined' && typeof window !== 'undefined') {
  // only load velocity on the client
  velocity = require('velocity-animate');
  // Register the supplemental easings defined above with Velocity.
  Object.keys(_ease).forEach(function (key) {
    if (velocity.Easings) {
      velocity.Easings[key] = _ease[key];
    }
  });
} else {
  // provide a velocity stub for the server
  velocity = function velocityServerDummy() {
    var callback = arguments[arguments.length - 1];
    // call after stack flushes
    // in case your app depends on the asynchronous nature of this function
    setImmediate(function () {
      return callback();
    });
  };
}
// Back easings expressed as cubic-bezier control points (Velocity accepts a
// 4-number array); looked up by name in getVelocityEasing.
var BackEase = {
  easeInBack: [0.6, -0.28, 0.735, 0.045],
  easeOutBack: [0.175, 0.885, 0.32, 1.275],
  easeInOutBack: [0.68, -0.55, 0.265, 1.55]
};
// Ref/key prefix for the placeholder <div> rendered in a child's position
// before its enter animation starts.
var placeholderKeyPrefix = 'ant-queue-anim-placeholder-';
// Shared no-op default.
var noop = function noop() {};
var QueueAnim = function (_React$Component) {
_inherits(QueueAnim, _React$Component);
function QueueAnim() {
_classCallCheck(this, QueueAnim);
var _this = _possibleConstructorReturn(this, _React$Component.apply(this, arguments));
_this.getInitAnimType = function (node, velocityConfig) {
/*
* enterForcedRePlay 为 false 时:
* 强行结束后,获取当前 dom 里是否有 data 里的 key 值,
* 如果有,出场开始启动为 dom 里的值
* 而不是 animTypes 里的初始值,如果是初始值将会跳动。
*/
var data = _extends({}, (0, _utils.assignChild)(velocityConfig));
var transformsBase = velocity && velocity.prototype.constructor && velocity.prototype.constructor.CSS.Lists.transformsBase || [];
var setPropertyValue = velocity && velocity.prototype.constructor && velocity.prototype.constructor.CSS.setPropertyValue || noop;
var getUnitType = velocity && velocity.prototype.constructor && velocity.prototype.constructor.CSS.Values.getUnitType || noop;
var nodeStyle = node.style;
Object.keys(data).forEach(function (dataKey) {
var cssName = dataKey;
if (transformsBase.indexOf(dataKey) >= 0) {
cssName = 'transform';
var transformString = nodeStyle[(0, _utils.checkStyleName)(cssName)];
if (transformString && transformString !== 'none') {
if (transformString.match(dataKey)) {
var rep = new RegExp('^.*' + dataKey + '\\(([^\\)]+?)\\).*', 'i');
var transformData = transformString.replace(rep, '$1');
data[dataKey][1] = parseFloat(transformData);
}
}
} else if (nodeStyle[dataKey] && parseFloat(nodeStyle[dataKey])) {
data[dataKey][1] = parseFloat(nodeStyle[dataKey]);
}
// 先把初始值设进 style 里。免得跳动;把下面的设置放到这里。
setPropertyValue(node, cssName, '' + data[dataKey][1] + getUnitType(dataKey));
});
return data;
};
_this.keysToEnter = [];
_this.keysToLeave = [];
_this.keysAnimating = [];
_this.placeholderTimeoutIds = {};
// 第一次进入,默认进场
var children = (0, _utils.toArrayChildren)((0, _utils.getChildrenFromProps)(_this.props));
children.forEach(function (child) {
if (!child || !child.key) {
return;
}
_this.keysToEnter.push(child.key);
});
_this.originalChildren = (0, _utils.toArrayChildren)((0, _utils.getChildrenFromProps)(_this.props));
_this.state = {
children: children,
childrenShow: {}
};
['performEnter', 'performLeave', 'enterBegin', 'leaveComplete'].forEach(function (method) {
return _this[method] = _this[method].bind(_this);
});
return _this;
}
QueueAnim.prototype.componentDidMount = function componentDidMount() {
this.componentDidUpdate();
};
QueueAnim.prototype.componentWillReceiveProps = function componentWillReceiveProps(nextProps) {
var _this2 = this;
var nextChildren = (0, _utils.toArrayChildren)(nextProps.children);
var currentChildren = this.originalChildren;
var newChildren = (0, _utils.mergeChildren)(currentChildren, nextChildren);
var childrenShow = !newChildren.length ? {} : this.state.childrenShow;
// 在出场没结束时,childrenShow 里的值将不会清除。再触发进场时, childrenShow 里的值是保留着的, 设置了 enterForcedRePlay 将重新播放进场。
this.keysToLeave.forEach(function (key) {
// 将所有在出场里的停止掉。避免间隔性出现
// 因为进场是用的间隔性进入,这里不做 stop 处理将会在这间隔里继续出场的动画。。
var node = (0, _reactDom.findDOMNode)(_this2.refs[key]);
velocity(node, 'stop');
if (nextProps.enterForcedRePlay) {
// 清掉所有出场的。
delete childrenShow[key];
}
});
this.keysToEnter = [];
this.keysToLeave = [];
this.keysAnimating = [];
// need render to avoid update
this.setState({
childrenShow: childrenShow,
children: newChildren
});
nextChildren.forEach(function (c) {
if (!c) {
return;
}
var key = c.key;
var hasPrev = (0, _utils.findChildInChildrenByKey)(currentChildren, key);
if (!hasPrev && key) {
_this2.keysToEnter.push(key);
}
});
currentChildren.forEach(function (c) {
if (!c) {
return;
}
var key = c.key;
var hasNext = (0, _utils.findChildInChildrenByKey)(nextChildren, key);
if (!hasNext && key) {
_this2.keysToLeave.push(key);
}
});
};
QueueAnim.prototype.componentDidUpdate = function componentDidUpdate() {
this.originalChildren = (0, _utils.toArrayChildren)((0, _utils.getChildrenFromProps)(this.props));
var keysToEnter = Array.prototype.slice.call(this.keysToEnter);
var keysToLeave = Array.prototype.slice.call(this.keysToLeave);
if (this.keysAnimating.length === 0) {
this.keysAnimating = keysToEnter.concat(keysToLeave);
}
keysToEnter.forEach(this.performEnter);
keysToLeave.forEach(this.performLeave);
};
QueueAnim.prototype.componentWillUnmount = function componentWillUnmount() {
var _this3 = this;
[].concat(this.keysToEnter, this.keysToLeave, this.keysAnimating).forEach(function (key) {
return _this3.refs[key] && velocity((0, _reactDom.findDOMNode)(_this3.refs[key]), 'stop');
});
Object.keys(this.placeholderTimeoutIds).forEach(function (key) {
clearTimeout(_this3.placeholderTimeoutIds[key]);
});
this.keysToEnter = [];
this.keysToLeave = [];
this.keysAnimating = [];
};
QueueAnim.prototype.getVelocityConfig = function getVelocityConfig(index) {
for (var _len = arguments.length, args = Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
args[_key - 1] = arguments[_key];
}
if (this.props.animConfig) {
return _utils.transformArguments.apply(undefined, [this.props.animConfig].concat(args))[index];
}
return _animTypes2["default"][_utils.transformArguments.apply(undefined, [this.props.type].concat(args))[index]];
};
QueueAnim.prototype.getVelocityEnterConfig = function getVelocityEnterConfig() {
for (var _len2 = arguments.length, args = Array(_len2), _key2 = 0; _key2 < _len2; _key2++) {
args[_key2] = arguments[_key2];
}
return this.getVelocityConfig.apply(this, [0].concat(args));
};
QueueAnim.prototype.getVelocityLeaveConfig = function getVelocityLeaveConfig() {
for (var _len3 = arguments.length, args = Array(_len3), _key3 = 0; _key3 < _len3; _key3++) {
args[_key3] = arguments[_key3];
}
var config = this.getVelocityConfig.apply(this, [1].concat(args));
var ret = {};
Object.keys(config).forEach(function (key) {
if (Array.isArray(config[key])) {
ret[key] = Array.prototype.slice.call(config[key]).reverse();
} else {
ret[key] = config[key];
}
});
return ret;
};
QueueAnim.prototype.getVelocityEasing = function getVelocityEasing() {
for (var _len4 = arguments.length, args = Array(_len4), _key4 = 0; _key4 < _len4; _key4++) {
args[_key4] = arguments[_key4];
}
return _utils.transformArguments.apply(undefined, [this.props.ease].concat(args)).map(function (easeName) {
if (typeof easeName === 'string') {
return BackEase[easeName] || easeName;
}
return easeName;
});
};
QueueAnim.prototype.performEnter = function performEnter(key, i) {
var interval = (0, _utils.transformArguments)(this.props.interval, key, i)[0];
var delay = (0, _utils.transformArguments)(this.props.delay, key, i)[0];
this.placeholderTimeoutIds[key] = setTimeout(this.performEnterBegin.bind(this, key, i), interval * i + delay);
if (this.keysToEnter.indexOf(key) >= 0) {
this.keysToEnter.splice(this.keysToEnter.indexOf(key), 1);
}
};
QueueAnim.prototype.performEnterBegin = function performEnterBegin(key, i) {
var childrenShow = this.state.childrenShow;
childrenShow[key] = true;
this.setState({ childrenShow: childrenShow }, this.realPerformEnter.bind(this, key, i));
};
QueueAnim.prototype.realPerformEnter = function realPerformEnter(key, i) {
var node = (0, _reactDom.findDOMNode)(this.refs[key]);
if (!node) {
return;
}
var duration = (0, _utils.transformArguments)(this.props.duration, key, i)[0];
velocity(node, 'stop');
var data = this.props.enterForcedRePlay ? this.getVelocityEnterConfig(key, i) : this.getInitAnimType(node, this.getVelocityEnterConfig(key, i));
if (this.props.enterForcedRePlay) {
node.style.visibility = 'hidden';
}
velocity(node, data, {
duration: duration,
easing: this.getVelocityEasing(key, i)[0],
visibility: 'visible',
begin: this.enterBegin.bind(this, key),
complete: this.enterComplete.bind(this, key)
});
};
QueueAnim.prototype.performLeave = function performLeave(key, i) {
clearTimeout(this.placeholderTimeoutIds[key]);
delete this.placeholderTimeoutIds[key];
var node = (0, _reactDom.findDOMNode)(this.refs[key]);
if (!node) {
return;
}
var interval = (0, _utils.transformArguments)(this.props.interval, key, i)[1];
var delay = (0, _utils.transformArguments)(this.props.delay, key, i)[1];
var duration = (0, _utils.transformArguments)(this.props.duration, key, i)[1];
var order = this.props.leaveReverse ? this.keysToLeave.length - i - 1 : i;
velocity(node, 'stop');
node.style.visibility = 'visible';
var data = this.getInitAnimType(node, this.getVelocityLeaveConfig(key, i));
velocity(node, data, {
delay: interval * order + delay,
duration: duration,
easing: this.getVelocityEasing(key, i)[1],
begin: this.leaveBegin.bind(this, key),
complete: this.leaveComplete.bind(this, key)
});
};
QueueAnim.prototype.enterBegin = function enterBegin(key, elements) {
var _this4 = this;
elements.forEach(function (elem) {
var animatingClassName = _this4.props.animatingClassName;
elem.className = elem.className.replace(animatingClassName[1], '');
if (elem.className.indexOf(animatingClassName[0]) === -1) {
elem.className += ' ' + animatingClassName[0];
}
});
};
QueueAnim.prototype.enterComplete = function enterComplete(key, elements) {
var _this5 = this;
if (this.keysAnimating.indexOf(key) >= 0) {
this.keysAnimating.splice(this.keysAnimating.indexOf(key), 1);
}
elements.forEach(function (elem) {
elem.className = elem.className.replace(_this5.props.animatingClassName[0], '').trim();
});
this.props.onEnd({ key: key, type: 'enter' });
};
QueueAnim.prototype.leaveBegin = function leaveBegin(key, elements) {
var _this6 = this;
elements.forEach(function (elem) {
var animatingClassName = _this6.props.animatingClassName;
elem.className = elem.className.replace(animatingClassName[0], '');
if (elem.className.indexOf(animatingClassName[1]) === -1) {
elem.className += ' ' + animatingClassName[1];
}
});
};
QueueAnim.prototype.leaveComplete = function leaveComplete(key, elements) {
var _this7 = this;
if (this.keysAnimating.indexOf(key) < 0) {
return;
}
this.keysAnimating.splice(this.keysAnimating.indexOf(key), 1);
var childrenShow = this.state.childrenShow;
childrenShow[key] = false;
if (this.keysToLeave.indexOf(key) >= 0) {
this.keysToLeave.splice(this.keysToLeave.indexOf(key), 1);
}
var needLeave = this.keysToLeave.some(function (c) {
return childrenShow[c];
});
if (!needLeave) {
var currentChildren = (0, _utils.toArrayChildren)((0, _utils.getChildrenFromProps)(this.props));
this.setState({
children: currentChildren,
childrenShow: childrenShow
});
}
elements.forEach(function (elem) {
elem.className = elem.className.replace(_this7.props.animatingClassName[1], '').trim();
});
this.props.onEnd({ key: key, type: 'leave' });
};
QueueAnim.prototype.render = function render() {
var _this8 = this;
var childrenToRender = (0, _utils.toArrayChildren)(this.state.children).map(function (child) {
if (!child || !child.key) {
return child;
}
return _this8.state.childrenShow[child.key] ? (0, _react.cloneElement)(child, {
ref: child.key,
key: child.key
}) : (0, _react.createElement)('div', {
ref: placeholderKeyPrefix + child.key,
key: placeholderKeyPrefix + child.key
});
});
var tagProps = _objectWithoutProperties(this.props, []);
['component', 'interval', 'duration', 'delay', 'type', 'animConfig', 'ease', 'leaveReverse', 'animatingClassName', 'enterForcedRePlay', 'onEnd'].forEach(function (key) {
return delete tagProps[key];
});
return (0, _react.createElement)(this.props.component, _extends({}, tagProps), childrenToRender);
};
return QueueAnim;
}(_react2["default"].Component);
// NOTE(review): React.PropTypes was removed from the react package in
// React 16; this build targets an older React. When upgrading, switch to
// the standalone prop-types package.
var numberOrArray = _react2["default"].PropTypes.oneOfType([_react2["default"].PropTypes.number, _react2["default"].PropTypes.array]);
var stringOrArray = _react2["default"].PropTypes.oneOfType([_react2["default"].PropTypes.string, _react2["default"].PropTypes.array]);
var objectOrArray = _react2["default"].PropTypes.oneOfType([_react2["default"].PropTypes.object, _react2["default"].PropTypes.array]);
var funcOrString = _react2["default"].PropTypes.oneOfType([_react2["default"].PropTypes.func, _react2["default"].PropTypes.string]);
var funcOrStringOrArray = _react2["default"].PropTypes.oneOfType([_react2["default"].PropTypes.func, stringOrArray]);
var funcOrObjectOrArray = _react2["default"].PropTypes.oneOfType([_react2["default"].PropTypes.func, objectOrArray]);
var funcOrNumberOrArray = _react2["default"].PropTypes.oneOfType([_react2["default"].PropTypes.func, numberOrArray]);
QueueAnim.propTypes = {
  component: funcOrString,
  interval: numberOrArray,
  duration: funcOrNumberOrArray,
  delay: funcOrNumberOrArray,
  type: funcOrStringOrArray,
  animConfig: funcOrObjectOrArray,
  ease: funcOrStringOrArray,
  leaveReverse: _react2["default"].PropTypes.bool,
  enterForcedRePlay: _react2["default"].PropTypes.bool,
  animatingClassName: _react2["default"].PropTypes.array,
  onEnd: _react2["default"].PropTypes.func
};
// Defaults: 100ms stagger between children, 450ms per animation, slide in
// from the right, natural (non-reversed) leave order.
QueueAnim.defaultProps = {
  component: 'div',
  interval: 100,
  duration: 450,
  delay: 0,
  type: 'right',
  animConfig: null,
  ease: 'easeOutQuart',
  leaveReverse: false,
  enterForcedRePlay: false,
  animatingClassName: ['queue-anim-entering', 'queue-anim-leaving'],
  onEnd: noop
};
exports["default"] = QueueAnim;
module.exports = exports['default'];
Falumpaset/handson-ml2 | backend/application/src/main/java/de/immomio/beans/landlord/application/ApplicationSearchBean.java | package de.immomio.beans.landlord.application;
import de.immomio.controller.paging.CustomPageable;
import de.immomio.data.base.type.application.ApplicationStatus;
import de.immomio.data.base.type.user.profile.PropertySearcherUserProfileType;
import lombok.Getter;
import lombok.Setter;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/**
 * Paging search criteria used to filter landlord applications.
 *
 * @author <NAME>
 */
@Getter
@Setter
public class ApplicationSearchBean extends CustomPageable implements Serializable {
    private static final long serialVersionUID = 2139647026440371395L;
    // Property the applications belong to.
    private Long propertyId;
    // Application statuses to include in the search.
    private List<ApplicationStatus> statuses = new ArrayList<>();
    // Optional boolean criteria; a null value presumably leaves the
    // criterion unused — confirm against the query implementation.
    private Boolean wbs;
    private Boolean processed;
    // Filter based on custom-question answers.
    private CustomQuestionFilterType customQuestionFilter;
    // Profile types of the applying users to match.
    private List<PropertySearcherUserProfileType> profileTypes = new ArrayList<>();
}
|
seguemodev/Meducated-Ninja | Android/app/src/main/java/com/seguetech/zippy/activities/MedicineActivity.java | package com.seguetech.zippy.activities;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.ActionBar;
import android.view.Menu;
import android.view.MenuItem;
import com.avast.android.dialogs.fragment.SimpleDialogFragment;
import com.avast.android.dialogs.iface.IPositiveButtonDialogListener;
import com.seguetech.zippy.R;
import com.seguetech.zippy.data.model.openfda.Result;
import com.seguetech.zippy.services.MedicineManagerService;
import timber.log.Timber;
/**
 * Shows a single medicine from a medicine cabinet and offers a menu action
 * to delete it after user confirmation.
 */
public class MedicineActivity extends BaseActivity implements IPositiveButtonDialogListener {
    // Cabinet name and medicine to display, both supplied via intent extras.
    String cabinet = null;
    Result medicine = null;
    // Dialog request code used to recognise the delete confirmation callback.
    private static final int DELETE_CODE = 4;
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_medicine);
        ActionBar actionBar = getSupportActionBar();
        if (actionBar != null) {
            actionBar.setDisplayHomeAsUpEnabled(true);
        }
        // Pull the cabinet name and the medicine out of the launching intent.
        if (getIntent() != null && getIntent().getExtras() != null) {
            Bundle extras = getIntent().getExtras();
            if (extras.containsKey("cabinet")) {
                cabinet = extras.getString("cabinet","unknown cabinet");
            }
            if (extras.containsKey("medicine")) {
                medicine = extras.getParcelable("medicine");
            }
        }
    }
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // The delete menu only makes sense when both extras were supplied.
        if (cabinet != null && medicine != null) {
            getMenuInflater().inflate(R.menu.menu_medicine, menu);
            return true;
        }
        return false;
    }
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        //noinspection SimplifiableIfStatement
        if (id == android.R.id.home) {
            finish();
            return true;
        }
        if (id == R.id.delete_medicine && cabinet != null && medicine != null) {
            // Ask for confirmation first; the actual deletion happens in
            // onPositiveButtonClicked once the user accepts.
            SimpleDialogFragment.createBuilder(this,getSupportFragmentManager())
                    .setNegativeButtonText(android.R.string.cancel)
                    .setPositiveButtonText(android.R.string.ok).setRequestCode(DELETE_CODE).setTitle(R.string.confirm_delete_title).setMessage(String.format(getString(R.string.confirm_delete_message),cabinet)).show();
            Timber.w("Deleting medication from cabinet: " + cabinet);
            return true;
        }
        return super.onOptionsItemSelected(item);
    }
    @Override
    public void onPositiveButtonClicked(int i) {
        // Confirmation accepted: hand the deletion off to the background
        // service with the cabinet and medicine identifying the entry.
        if (i == DELETE_CODE) {
            Intent deleteIntent = new Intent(this, MedicineManagerService.class);
            deleteIntent.putExtra(MedicineManagerService.ACTION_KEY, MedicineManagerService.ACTION_DELETE_MEDICINE);
            deleteIntent.putExtra(MedicineManagerService.CABINET_KEY, cabinet);
            deleteIntent.putExtra(MedicineManagerService.MEDICINE_KEY, medicine);
            startService(deleteIntent);
        }
    }
}
|
anishLearnsToCode/python-training-1 | solution-bank/pattern/solution_4.py | <reponame>anishLearnsToCode/python-training-1
# Print a shifted triangle: row i is indented by i spaces and holds
# (rows - i) stars, each followed by a single space.
rows = int(input())
for row in range(rows):
    line = ' ' * row + '* ' * (rows - row)
    print(line)
|
johandoornenbal/TurnToinez-Customer-System | fixture/src/main/java/domainapp/fixture/dom/tti/BestellingenFixtures.java | package domainapp.fixture.dom.tti;
import java.math.BigDecimal;
import javax.inject.Inject;
import domainapp.dom.bestellingen.Bestelling;
import domainapp.dom.bestellingen.Postuur;
import domainapp.dom.klanten.Klant;
import domainapp.dom.klanten.KlantRepository;
import domainapp.dom.medewerkers.Medewerker;
/**
 * Demo fixture: installs example orders (bestellingen) in several lifecycle
 * states (new, placed, paid, finished) for known test customers.
 */
public class BestellingenFixtures extends BestellingAbstract {
    @Override protected void execute(final ExecutionContext executionContext) {
        // Order for the first test customer, advanced to "placed".
        Klant klantJohan = klantRepository.findUnique("<EMAIL>");
        Bestelling bestellingJohan = createBestelling(
                klantJohan,
                "Lieuwkje",
                "14 jaar",
                "164",
                "134 (vorige maand nog gemeten)",
                Postuur.TENGER,
                "<NAME>",
                new BigDecimal("45.50"),
                "moet lekker zitten",
                "Broekje velours zwart",
                new BigDecimal("10.00"),
                null,
                executionContext
        );
        bestellingJohan.besteld();
        // Order for the second test customer, placed and paid.
        Klant klantMietje = klantRepository.findUnique("<EMAIL>");
        Bestelling bestellingMietje = createBestelling(
                klantMietje,
                "Annechien",
                "net 9",
                "122",
                "101 cm",
                Postuur.STEVIG,
                "P<NAME>",
                new BigDecimal("35.40"),
                "Moet niet te klein hoor",
                "Broekje velours zwart",
                new BigDecimal("10"),
                "Graag de aanbieding",
                executionContext
        );
        bestellingMietje.besteld();
        bestellingMietje.betaald(false);
        // Order without a customer, left in its initial state.
        Bestelling legeBestelling = createBestelling(
                null,
                "Lisa",
                "6",
                "geen idee",
                null,
                null,
                "Pakje Frozen Elsa",
                new BigDecimal("65.00"),
                null, "Wokkel", new BigDecimal("5.25"),null,
                executionContext
        );
        // Second order for the first customer, taken through the full
        // lifecycle: placed, paid and finished.
        Bestelling bestellingJohan2 = createBestelling(
                klantJohan,
                "Alberdientje",
                "8 jaar",
                "116",
                "99 (vorige jaar dan)",
                Postuur.STEVIG,
                "<NAME>",
                new BigDecimal("15.50"),
                "G<NAME>",
                "Broekje velours zwart",
                new BigDecimal("10.00"),
                null,
                executionContext
        );
        bestellingJohan2.besteld();
        bestellingJohan2.betaald(false);
        bestellingJohan2.klaar(Medewerker.INEZ, null, null, null, null, null);
    }
    @Inject KlantRepository klantRepository;
}
|
seanwestfall/purescript_cowboy | client/output/Web.DOM.Node/index.js | // Generated by purs version 0.13.2
"use strict";
var $foreign = require("./foreign.js");
var Data_Enum = require("../Data.Enum/index.js");
var Data_Functor = require("../Data.Functor/index.js");
var Data_Maybe = require("../Data.Maybe/index.js");
var Data_Nullable = require("../Data.Nullable/index.js");
var Effect = require("../Effect/index.js");
var Unsafe_Coerce = require("../Unsafe.Coerce/index.js");
var Web_DOM_NodeType = require("../Web.DOM.NodeType/index.js");
var Web_Internal_FFI = require("../Web.Internal.FFI/index.js");
var toEventTarget = Unsafe_Coerce.unsafeCoerce;
var previousSibling = (function () {
var $1 = Data_Functor.map(Effect.functorEffect)(Data_Nullable.toMaybe);
return function ($2) {
return $1($foreign["_previousSibling"]($2));
};
})();
var parentNode = (function () {
var $3 = Data_Functor.map(Effect.functorEffect)(Data_Nullable.toMaybe);
return function ($4) {
return $3($foreign["_parentNode"]($4));
};
})();
var parentElement = (function () {
var $5 = Data_Functor.map(Effect.functorEffect)(Data_Nullable.toMaybe);
return function ($6) {
return $5($foreign["_parentElement"]($6));
};
})();
var ownerDocument = (function () {
var $7 = Data_Functor.map(Effect.functorEffect)(Data_Nullable.toMaybe);
return function ($8) {
return $7($foreign["_ownerDocument"]($8));
};
})();
var nodeValue = (function () {
var $9 = Data_Functor.map(Effect.functorEffect)(Data_Nullable.toMaybe);
return function ($10) {
return $9($foreign["_nodeValue"]($10));
};
})();
// Read a DOM node's type as a Web.DOM.NodeType value. The Partial
// dictionary witnesses the assumption that the numeric index maps onto a
// known NodeType constructor; fromJust crashes on an unknown index.
var nodeType = function (dictPartial) {
    var $11 = Data_Maybe.fromJust(dictPartial);
    var $12 = Data_Enum.toEnum(Web_DOM_NodeType.boundedEnumNodeType);
    return function ($13) {
        return $11($12($foreign.nodeTypeIndex($13)));
    };
};
var nextSibling = (function () {
var $14 = Data_Functor.map(Effect.functorEffect)(Data_Nullable.toMaybe);
return function ($15) {
return $14($foreign["_nextSibling"]($15));
};
})();
var lookupPrefix = function (p) {
var $16 = Data_Functor.map(Effect.functorEffect)(Data_Nullable.toMaybe);
var $17 = $foreign["_lookupPrefix"](p);
return function ($18) {
return $16($17($18));
};
};
var lookupNamespaceURI = function (ns) {
var $19 = Data_Functor.map(Effect.functorEffect)(Data_Nullable.toMaybe);
var $20 = $foreign["_lookupNamespaceURI"](ns);
return function ($21) {
return $19($20($21));
};
};
var lastChild = (function () {
var $22 = Data_Functor.map(Effect.functorEffect)(Data_Nullable.toMaybe);
return function ($23) {
return $22($foreign["_lastChild"]($23));
};
})();
var fromEventTarget = Web_Internal_FFI.unsafeReadProtoTagged("Node");
var firstChild = (function () {
var $24 = Data_Functor.map(Effect.functorEffect)(Data_Nullable.toMaybe);
return function ($25) {
return $24($foreign["_firstChild"]($25));
};
})();
module.exports = {
fromEventTarget: fromEventTarget,
toEventTarget: toEventTarget,
nodeType: nodeType,
ownerDocument: ownerDocument,
parentNode: parentNode,
parentElement: parentElement,
firstChild: firstChild,
lastChild: lastChild,
previousSibling: previousSibling,
nextSibling: nextSibling,
nodeValue: nodeValue,
lookupPrefix: lookupPrefix,
lookupNamespaceURI: lookupNamespaceURI,
nodeTypeIndex: $foreign.nodeTypeIndex,
nodeName: $foreign.nodeName,
baseURI: $foreign.baseURI,
hasChildNodes: $foreign.hasChildNodes,
childNodes: $foreign.childNodes,
setNodeValue: $foreign.setNodeValue,
textContent: $foreign.textContent,
setTextContent: $foreign.setTextContent,
normalize: $foreign.normalize,
clone: $foreign.clone,
deepClone: $foreign.deepClone,
isEqualNode: $foreign.isEqualNode,
compareDocumentPositionBits: $foreign.compareDocumentPositionBits,
contains: $foreign.contains,
isDefaultNamespace: $foreign.isDefaultNamespace,
insertBefore: $foreign.insertBefore,
appendChild: $foreign.appendChild,
replaceChild: $foreign.replaceChild,
removeChild: $foreign.removeChild
};
|
alphya/nyaruga_util | doc/html/structnyaruga_1_1util_1_1get__function__argument__type_3_01_ret_07_c_1_1_5_08_07_args_8_8_8_08_00_01_n_01_4.js | var structnyaruga_1_1util_1_1get__function__argument__type_3_01_ret_07_c_1_1_5_08_07_args_8_8_8_08_00_01_n_01_4 =
[
[ "type", "structnyaruga_1_1util_1_1get__function__argument__type_3_01_ret_07_c_1_1_5_08_07_args_8_8_8_08_00_01_n_01_4.html#ade9f10e17cbe5247e65aec0f6910aa43", null ]
]; |
GoldyMark/EChartSummoner | src/cn/nh121/echarts/series/markline/EMarkLineType.java | <gh_stars>1-10
package cn.nh121.echarts.series.markline;
/**
 * Special markLine values for an ECharts series: mark the minimum, maximum
 * or average of the series data.
 */
public enum EMarkLineType
{
    MIN, MAX, AVERAGE;
}
|
TheStanfordDaily/loris-archives | loris/loris/wellcome_loris.py | <filename>loris/loris/wellcome_loris.py
# -*- encoding: utf-8 -*-
"""This file contains the code specific to the Wellcome Loris deployment.
Code in this file will usually be highly specialised, and is unlikely to
be of interest to other users -- anything generic should be submitted as
an upstream patch.
"""
from loris.resolver import TemplateHTTPResolver
from requests.exceptions import RequestException
from tenacity import retry, retry_if_exception_type, stop_after_attempt
class WellcomeTemplateHTTPResolver(TemplateHTTPResolver):
    """Template resolver that retries a failed HTTP fetch once.

    Wraps ``copy_to_cache`` with a single tenacity retry on any
    requests-level error; see the comment below for the background.
    """

    # We currently store the old Miro images in an S3 bucket, and request
    # them over HTTP. Occasionally we've seen the HTTP connections flake out,
    # which causes a user-facing 500 -- and reloading the page fixes it.
    #
    # This causes us to retry the request once, if it's some sort of
    # HTTP error. We've been running this in prod for months, and that sort
    # of 500 essentially vanished.
    @retry(stop=stop_after_attempt(2), retry=retry_if_exception_type(RequestException))
    def copy_to_cache(self, ident):
        """Fetch ``ident`` into the local cache, retrying once on any
        ``RequestException`` before letting the error propagate."""
        return super().copy_to_cache(ident)
|
radzikpwnz/twc | source/twc_design_old/mainwnd.h | #ifndef MAINWND_H
#define MAINWND_H
#include <windows.h>
LRESULT CALLBACK MainWndProc( HWND, UINT, WPARAM, LPARAM);
void actGenerateCode();
void actNewProject();
void actOpenProject();
void actSaveProject();
void actPreview();
#endif |
tommasodotNET/awesome-patterns | workerpool/main.go | package main
import (
"fmt"
"sync"
"time"
"github.com/labstack/gommon/log"
)
// Task encapsulates a work item that should go in a work
// pool.
type Task struct {
	// Err holds an error that occurred during a task. Its
	// result is only meaningful after Run has been called
	// for the pool that holds it.
	Err error

	// f is the unit of work; Run executes it exactly once and
	// records its result in Err.
	f func() error
}
// NewTask initializes a new task based on a given work
// function.
func NewTask(f func() error) *Task {
	task := new(Task)
	task.f = f
	return task
}
// Run runs a Task and does appropriate accounting via a
// given sync.WorkGroup.
func (t *Task) Run(wg *sync.WaitGroup) {
	// Store the work function's result so the pool's owner can
	// inspect it after Pool.Run returns.
	t.Err = t.f()
	wg.Done()
}
// Pool runs a collection of Tasks at a fixed level of concurrency.
type Pool struct {
	// Tasks is the full set of work items; check each Task.Err
	// after Run returns.
	Tasks []*Task

	// concurrency is the number of worker goroutines Run starts.
	concurrency int
	// tasksChan feeds tasks to the workers; Run closes it once
	// every task has been sent.
	tasksChan chan *Task
	// wg counts outstanding tasks so Run can block until all finish.
	wg sync.WaitGroup
}
// NewPool builds a Pool that will execute the given tasks using the
// requested number of concurrent workers.
func NewPool(tasks []*Task, concurrency int) *Pool {
	p := Pool{Tasks: tasks, concurrency: concurrency}
	p.tasksChan = make(chan *Task)
	return &p
}
// Run runs all work within the pool and blocks until it's
// finished.
func (p *Pool) Run() {
	// Start the fixed set of workers first; each blocks receiving
	// from tasksChan until tasks are sent below.
	for i := 0; i < p.concurrency; i++ {
		go p.work()
	}
	// Register every task with the WaitGroup BEFORE any task is sent,
	// so Wait cannot observe a zero count and return early.
	p.wg.Add(len(p.Tasks))
	for _, task := range p.Tasks {
		p.tasksChan <- task
	}
	// all workers return
	close(p.tasksChan)
	p.wg.Wait()
}
// The work loop for any single goroutine.
func (p *Pool) work() {
	// Ranging over the channel exits cleanly once Run closes it.
	for task := range p.tasksChan {
		task.Run(&p.wg)
	}
}
// main demonstrates the worker pool: four tasks that log start/finish
// and sleep to simulate work, run with four concurrent workers, then
// an error sweep over the completed tasks.
func main() {
	// makeTask builds a demo task that prints "<name> Ran", sleeps for
	// d, then prints "<name> finished". Extracted to remove the four
	// near-identical copy-pasted closures.
	makeTask := func(name string, d time.Duration) *Task {
		return NewTask(func() error {
			fmt.Println(name + " Ran")
			time.Sleep(d)
			fmt.Println(name + " finished")
			return nil
		})
	}

	tasks := []*Task{
		makeTask("F1", 1*time.Second),
		makeTask("F2", 2*time.Second),
		makeTask("F3", 1*time.Second),
		makeTask("F4", 2*time.Second),
	}

	p := NewPool(tasks, 4)
	p.Run()

	// Report task errors, bailing out if too many accumulate.
	var numErrors int
	for _, task := range p.Tasks {
		if task.Err != nil {
			log.Error(task.Err)
			numErrors++
		}
		if numErrors >= 10 {
			log.Error("Too many errors.")
			break
		}
	}
}
|
bk1411389/autopsy | ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableDB.java | /*
* Autopsy Forensic Browser
*
* Copyright 2013-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.imagegallery.datamodel;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import static java.util.Objects.isNull;
import static java.util.Objects.nonNull;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.logging.Level;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import javax.annotation.concurrent.GuardedBy;
import javax.swing.SortOrder;
import static org.apache.commons.lang3.ObjectUtils.notEqual;
import org.apache.commons.lang3.StringUtils;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.datamodel.DhsImageCategory;
import org.sleuthkit.autopsy.imagegallery.FileTypeUtils;
import org.sleuthkit.autopsy.imagegallery.ImageGalleryController;
import org.sleuthkit.autopsy.imagegallery.ImageGalleryModule;
import org.sleuthkit.autopsy.imagegallery.datamodel.grouping.GroupKey;
import org.sleuthkit.autopsy.imagegallery.datamodel.grouping.GroupManager;
import org.sleuthkit.autopsy.imagegallery.datamodel.grouping.GroupSortBy;
import static org.sleuthkit.autopsy.imagegallery.datamodel.grouping.GroupSortBy.GROUP_BY_VALUE;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.CaseDbAccessManager.CaseDbAccessQueryCallback;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;
import org.sleuthkit.datamodel.TagName;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData.DbType;
import org.sleuthkit.datamodel.TskDataException;
import org.sleuthkit.datamodel.VersionNumber;
import org.sqlite.SQLiteJDBCLoader;
/**
* Provides access to the drawables database and selected tables in the case
* database.
*/
public final class DrawableDB {
private static final Logger logger = Logger.getLogger(DrawableDB.class.getName());

//column name constants//////////////////////
private static final String ANALYZED = "analyzed"; //NON-NLS
private static final String OBJ_ID = "obj_id"; //NON-NLS
private static final String HASH_SET_NAME = "hash_set_name"; //NON-NLS

// Image Gallery tables that live in the *case* database (not the drawables DB)
private static final String GROUPS_TABLENAME = "image_gallery_groups"; //NON-NLS
private static final String GROUPS_SEEN_TABLENAME = "image_gallery_groups_seen"; //NON-NLS

// key/value table holding the Image Gallery schema version; created in both DBs
private static final String IG_DB_INFO_TABLE = "image_gallery_db_info";
private static final String IG_SCHEMA_MAJOR_VERSION_KEY = "IG_SCHEMA_MAJOR_VERSION";
private static final String IG_SCHEMA_MINOR_VERSION_KEY = "IG_SCHEMA_MINOR_VERSION";
private static final String IG_CREATION_SCHEMA_MAJOR_VERSION_KEY = "IG_CREATION_SCHEMA_MAJOR_VERSION";
private static final String IG_CREATION_SCHEMA_MINOR_VERSION_KEY = "IG_CREATION_SCHEMA_MINOR_VERSION";

private static final VersionNumber IG_STARTING_SCHEMA_VERSION = new VersionNumber(1, 0, 0); // IG Schema Starting version
private static final VersionNumber IG_SCHEMA_VERSION = new VersionNumber(1, 1, 0); // IG Schema Current version

// Reusable prepared statements; created once in prepareStatements().
private PreparedStatement insertHashSetStmt;
private List<PreparedStatement> preparedStatements = new ArrayList<>();
private PreparedStatement removeFileStmt;
private PreparedStatement selectHashSetStmt;
private PreparedStatement selectHashSetNamesStmt;
private PreparedStatement insertHashHitStmt;
private PreparedStatement removeHashHitStmt;
private PreparedStatement updateDataSourceStmt;
private PreparedStatement updateFileStmt;
private PreparedStatement insertFileStmt;
private PreparedStatement pathGroupStmt;
private PreparedStatement nameGroupStmt;
private PreparedStatement created_timeGroupStmt;
private PreparedStatement modified_timeGroupStmt;
private PreparedStatement makeGroupStmt;
private PreparedStatement modelGroupStmt;
private PreparedStatement analyzedGroupStmt;
private PreparedStatement hashSetGroupStmt;
private PreparedStatement pathGroupFilterByDataSrcStmt;

/**
 * map from {@link DrawableAttribute} to the {@link PreparedStatement} that
 * is used to select groups for that attribute
 */
private final Map<DrawableAttribute<?>, PreparedStatement> groupStatementMap = new HashMap<>();
// same mapping, but for the data-source-filtered variants of the group queries
private final Map<DrawableAttribute<?>, PreparedStatement> groupStatementFilterByDataSrcMap = new HashMap<>();

private final GroupManager groupManager;
private final Path dbPath;

@GuardedBy("DBLock")
private Connection con;

private final ReentrantReadWriteLock rwLock = new ReentrantReadWriteLock(true); //use fairness policy
private final Lock DBLock = rwLock.writeLock(); // Currently serializing everything with one database connection

// caches to make inserts / updates faster
private Cache<String, Boolean> groupCache = CacheBuilder.newBuilder().expireAfterWrite(5, TimeUnit.MINUTES).build();
private final Cache<GroupKey<?>, Boolean> groupSeenCache = CacheBuilder.newBuilder().expireAfterWrite(30, TimeUnit.SECONDS).build();

private final Object cacheLock = new Object(); // protects access to the below cache-related objects
private boolean areCachesLoaded = false; // if true, the below caches contain valid data
private Set<Long> hasTagCache = new HashSet<>(); // contains obj id of files with tags
private Set<Long> hasHashCache = new HashSet<>(); // obj id of files with hash set hits
private Set<Long> hasExifCache = new HashSet<>(); // obj id of files with EXIF (make/model)
private int cacheBuildCount = 0; // number of tasks that requested the caches be built

static {//make sure sqlite driver is loaded // possibly redundant
    try {
        Class.forName("org.sqlite.JDBC");
    } catch (ClassNotFoundException ex) {
        logger.log(Level.SEVERE, "Failed to load sqlite JDBC driver", ex); //NON-NLS
    }
}

private final SleuthkitCase tskCase;
private final ImageGalleryController controller;

/**
 * Enum to track Image gallery db rebuild status for a data source
 *
 * DO NOT add in the middle.
 */
public enum DrawableDbBuildStatusEnum {
    UNKNOWN, /// no known status - not yet analyzed
    IN_PROGRESS, /// ingest or db rebuild is in progress
    COMPLETE, /// At least one file in the data source had a MIME type. Ingest filters may have been applied.
    REBUILT_STALE; /// data source was rebuilt, but MIME types were missing during rebuild
}
/** Acquires the (exclusive) drawables database lock; pair with dbWriteUnlock(). */
private void dbWriteLock() {
    DBLock.lock();
}
/** Releases the drawables database lock acquired by dbWriteLock(). */
private void dbWriteUnlock() {
    DBLock.unlock();
}
/**
 * Constructs an object that provides access to the drawables database and
 * selected tables in the case database. If the specified drawables database
 * does not already exist, it is created.
 *
 * @param dbPath     The path to the drawables database file.
 * @param controller The controller for the Image Gallery tool.
 *
 * @throws IOException      The database directory could not be created.
 * @throws SQLException     The drawables database could not be created or
 *                          opened.
 * @throws TskCoreException The drawables database or the case database
 *                          could not be correctly initialized for Image
 *                          Gallery use.
 */
private DrawableDB(Path dbPath, ImageGalleryController controller) throws IOException, SQLException, TskCoreException {
    this.dbPath = dbPath;
    this.controller = controller;
    tskCase = this.controller.getCaseDatabase();
    groupManager = this.controller.getGroupManager();
    // ensure the directory for the SQLite file exists before connecting
    Files.createDirectories(this.dbPath.getParent());
    dbWriteLock();
    try {
        con = DriverManager.getConnection("jdbc:sqlite:" + dbPath.toString()); //NON-NLS
        // every initialization step must succeed; otherwise close and fail
        if (!initializeDBSchema() || !upgradeDBSchema() || !prepareStatements() || !initializeStandardGroups() || !initializeImageList()) {
            close();
            throw new TskCoreException("Failed to initialize drawables database for Image Gallery use"); //NON-NLS
        }
    } finally {
        dbWriteUnlock();
    }
}
/**
 * Prepares all of the reusable SQL statements against the drawables
 * database, registering the group queries in the attribute-to-statement
 * maps as a side effect.
 *
 * @return true if every statement was prepared, false on any failure
 */
private boolean prepareStatements() {
    try {
        updateFileStmt = prepareStatement(
                "INSERT OR REPLACE INTO drawable_files (obj_id, data_source_obj_id, path, name, created_time, modified_time, make, model, analyzed) " //NON-NLS
                + "VALUES (?,?,?,?,?,?,?,?,?)"); //NON-NLS
        insertFileStmt = prepareStatement(
                "INSERT OR IGNORE INTO drawable_files (obj_id, data_source_obj_id, path, name, created_time, modified_time, make, model, analyzed) " //NON-NLS
                + "VALUES (?,?,?,?,?,?,?,?,?)"); //NON-NLS
        updateDataSourceStmt = prepareStatement(
                "INSERT OR REPLACE INTO datasources (ds_obj_id, drawable_db_build_status) " //NON-NLS
                + " VALUES (?,?)"); //NON-NLS
        removeFileStmt = prepareStatement("DELETE FROM drawable_files WHERE obj_id = ?"); //NON-NLS
        // group-by queries, one per groupable attribute; registered in groupStatementMap
        pathGroupStmt = prepareStatement("SELECT obj_id , analyzed FROM drawable_files WHERE path = ? ", DrawableAttribute.PATH); //NON-NLS
        nameGroupStmt = prepareStatement("SELECT obj_id , analyzed FROM drawable_files WHERE name = ? ", DrawableAttribute.NAME); //NON-NLS
        created_timeGroupStmt = prepareStatement("SELECT obj_id , analyzed FROM drawable_files WHERE created_time = ? ", DrawableAttribute.CREATED_TIME); //NON-NLS
        modified_timeGroupStmt = prepareStatement("SELECT obj_id , analyzed FROM drawable_files WHERE modified_time = ? ", DrawableAttribute.MODIFIED_TIME); //NON-NLS
        makeGroupStmt = prepareStatement("SELECT obj_id , analyzed FROM drawable_files WHERE make = ? ", DrawableAttribute.MAKE); //NON-NLS
        modelGroupStmt = prepareStatement("SELECT obj_id , analyzed FROM drawable_files WHERE model = ? ", DrawableAttribute.MODEL); //NON-NLS
        analyzedGroupStmt = prepareStatement("SELECT obj_id , analyzed FROM drawable_files WHERE analyzed = ?", DrawableAttribute.ANALYZED); //NON-NLS
        hashSetGroupStmt = prepareStatement("SELECT drawable_files.obj_id AS obj_id, analyzed FROM drawable_files , hash_sets , hash_set_hits WHERE drawable_files.obj_id = hash_set_hits.obj_id AND hash_sets.hash_set_id = hash_set_hits.hash_set_id AND hash_sets.hash_set_name = ?", DrawableAttribute.HASHSET); //NON-NLS
        // data-source-filtered variant; registered in groupStatementFilterByDataSrcMap
        pathGroupFilterByDataSrcStmt = prepareFilterByDataSrcStatement("SELECT obj_id , analyzed FROM drawable_files WHERE path = ? AND data_source_obj_id = ?", DrawableAttribute.PATH);
        selectHashSetNamesStmt = prepareStatement("SELECT DISTINCT hash_set_name FROM hash_sets"); //NON-NLS
        insertHashSetStmt = prepareStatement("INSERT OR IGNORE INTO hash_sets (hash_set_name) VALUES (?)"); //NON-NLS
        selectHashSetStmt = prepareStatement("SELECT hash_set_id FROM hash_sets WHERE hash_set_name = ?"); //NON-NLS
        insertHashHitStmt = prepareStatement("INSERT OR IGNORE INTO hash_set_hits (hash_set_id, obj_id) VALUES (?,?)"); //NON-NLS
        removeHashHitStmt = prepareStatement("DELETE FROM hash_set_hits WHERE obj_id = ?"); //NON-NLS
        return true;
    } catch (TskCoreException | SQLException ex) {
        logger.log(Level.SEVERE, "Failed to prepare all statements", ex); //NON-NLS
        return false;
    }
}
/**
 * Inserts one group per DHS image category into the case database, all in a
 * single transaction that is rolled back on failure.
 *
 * @return true on success, false otherwise
 */
private boolean initializeStandardGroups() {
    CaseDbTransaction caseDbTransaction = null;
    try {
        caseDbTransaction = tskCase.beginTransaction();
        for (DhsImageCategory cat : DhsImageCategory.values()) {
            insertGroup(cat.getDisplayName(), DrawableAttribute.CATEGORY, caseDbTransaction);
        }
        caseDbTransaction.commit();
        return true;
    } catch (TskCoreException ex) {
        logger.log(Level.SEVERE, "Failed to insert standard groups", ex); //NON-NLS
        if (null != caseDbTransaction) {
            try {
                caseDbTransaction.rollback();
            } catch (TskCoreException ex2) {
                logger.log(Level.SEVERE, "Failed to roll back case DB transaction", ex2);
            }
        }
        return false;
    }
}
/**
 * create PreparedStatement with the supplied string, and add the new
 * statement to the list of PreparedStatements used in {@link DrawableDB#closeStatements()
 *
 * @param stmtString the string representation of the sqlite statement to
 *                   prepare
 *
 * @return the prepared statement
 *
 * @throws SQLException if unable to prepare the statement
 */
private PreparedStatement prepareStatement(String stmtString) throws TskCoreException, SQLException {
    dbWriteLock();
    try {
        if (isClosed()) {
            throw new TskCoreException("The drawables database is closed");
        }
        PreparedStatement statement = con.prepareStatement(stmtString);
        preparedStatements.add(statement);
        return statement;
    } catch (SQLException ex) {
        // BUG FIX: ex was previously passed as an extra (ignored) argument to
        // String.format, so the original cause and stack trace were lost.
        // Pass it as the SQLException cause instead.
        throw new SQLException(String.format("Error preparing statement %s", stmtString), ex);
    } finally {
        dbWriteUnlock();
    }
}
/**
 * calls {@link DrawableDB#prepareStatement(java.lang.String)},
 * and then adds the statement to the groupStatementMap used to look up
 * statements by the attribute/column they group on
 *
 * @param stmtString the string representation of the sqlite statement to
 *                   prepare
 * @param attr       the {@link DrawableAttribute} this query groups by
 *
 * @return the prepared statement
 *
 * @throws SQLException if unable to prepare the statement
 */
private PreparedStatement prepareStatement(String stmtString, DrawableAttribute<?> attr) throws TskCoreException, SQLException {
    PreparedStatement statement = prepareStatement(stmtString);
    if (attr != null) {
        groupStatementMap.put(attr, statement);
    }
    return statement;
}
/**
 * calls {@link DrawableDB#prepareStatement(java.lang.String)},
 * and then adds the statement to the groupStatementFilterByDataSrcMap map
 * used to look up statements by the attribute/column they group on
 *
 * @param stmtString the string representation of the sqlite statement to
 *                   prepare
 * @param attr       the {@link DrawableAttribute} this query groups by
 *
 * @return the prepared statement
 *
 * @throws SQLException if unable to prepare the statement
 */
private PreparedStatement prepareFilterByDataSrcStatement(String stmtString, DrawableAttribute<?> attr) throws TskCoreException, SQLException {
    PreparedStatement statement = prepareStatement(stmtString);
    if (attr != null) {
        groupStatementFilterByDataSrcMap.put(attr, statement);
    }
    return statement;
}
/**
 * Binds a group key's value to a prepared group query, plus the data source
 * object id when the key is a PATH group scoped to a data source (only the
 * PATH queries have a second parameter — see prepareStatements()).
 */
private void setQueryParams(PreparedStatement statement, GroupKey<?> groupKey) throws SQLException {
    statement.setObject(1, groupKey.getValue());
    if (groupKey.getDataSource().isPresent()
            && (groupKey.getAttribute() == DrawableAttribute.PATH)) {
        statement.setObject(2, groupKey.getDataSourceObjId());
    }
}
/**
 * Public factory method. Creates and opens a connection to a new database
 * at the given path. If there is already a db at the path, it is checked
 * for compatibility, and deleted if it is incompatible, before a connection
 * is opened.
 *
 * @param controller
 *
 * @return A DrawableDB for the given controller.
 *
 * @throws org.sleuthkit.datamodel.TskCoreException
 */
public static DrawableDB getDrawableDB(ImageGalleryController controller) throws TskCoreException {
    Path dbPath = ImageGalleryModule.getModuleOutputDir(controller.getCase()).resolve("drawable.db");
    // remove a pre-multi-data-source database before opening; each failure
    // mode is wrapped with a distinct, descriptive message
    try {
        deleteDatabaseIfOlderVersion(dbPath);
    } catch (SQLException ex) {
        throw new TskCoreException("Failed to check for obsolete drawables database schema", ex); //NON-NLS
    } catch (IOException ex) {
        throw new TskCoreException("Failed to delete obsolete drawables database", ex); //NON-NLS
    }
    try {
        return new DrawableDB(dbPath, controller);
    } catch (IOException ex) {
        throw new TskCoreException("Failed to create drawables database directory", ex); //NON-NLS
    } catch (SQLException ex) {
        throw new TskCoreException("Failed to create/open the drawables database", ex); //NON-NLS
    }
}
/**
 * Checks if the specified table exists in Drawable DB
 *
 * @param tableName table to check (compared case-insensitively)
 *
 * @return true if the table exists in the database
 *
 * @throws SQLException
 */
private boolean doesTableExist(String tableName) throws SQLException {
    // try-with-resources closes both the statement and its result set;
    // previously the result set was closed manually in a finally block even
    // though closing the statement already closes it.
    try (Statement stmt = con.createStatement();
            ResultSet tableQueryResults = stmt.executeQuery("SELECT name FROM sqlite_master WHERE type='table'")) { //NON-NLS
        while (tableQueryResults.next()) {
            if (tableQueryResults.getString("name").equalsIgnoreCase(tableName)) {
                return true;
            }
        }
    }
    return false;
}
/**
 * Deletes an existing drawables database file if it predates the
 * multi-data-source schema, i.e. if it lacks the drawable_files table or
 * that table lacks the data_source_obj_id column.
 *
 * @param dbPath path to the candidate drawables database file
 *
 * @throws SQLException if the database cannot be inspected
 * @throws IOException  if the obsolete file cannot be deleted
 */
private static void deleteDatabaseIfOlderVersion(Path dbPath) throws SQLException, IOException {
    if (!Files.exists(dbPath)) {
        return;
    }
    boolean hasDrawableFilesTable = false;
    boolean hasDataSourceIdColumn = false;
    // the Statement is now in try-with-resources alongside the Connection
    // (it was previously left to be closed implicitly by the connection)
    try (Connection con = DriverManager.getConnection("jdbc:sqlite:" + dbPath.toString()); //NON-NLS
            Statement stmt = con.createStatement()) {
        try (ResultSet tableQueryResults = stmt.executeQuery("SELECT name FROM sqlite_master WHERE type='table'")) { //NON-NLS
            while (tableQueryResults.next()) {
                if ("drawable_files".equals(tableQueryResults.getString("name"))) {
                    hasDrawableFilesTable = true;
                    break;
                }
            }
        }
        if (hasDrawableFilesTable) {
            try (ResultSet results = stmt.executeQuery("PRAGMA table_info('drawable_files')")) {
                while (results.next()) {
                    if ("data_source_obj_id".equals(results.getString("name"))) {
                        hasDataSourceIdColumn = true;
                        break;
                    }
                }
            }
        }
    }
    if (!hasDrawableFilesTable || !hasDataSourceIdColumn) {
        Files.delete(dbPath);
    }
}
/**
 * Applies the SQLite pragmas used by this database (performance-oriented;
 * intended to match the Sleuthkit database setup) and logs the sqlite-jdbc
 * version/mode.
 *
 * @throws SQLException if the database is closed or a pragma fails
 */
private void setPragmas() throws SQLException {
    dbWriteLock();
    try {
        if (isClosed()) {
            throw new SQLException("The drawables database is closed");
        }
        //this should match Sleuthkit db setup
        try (Statement statement = con.createStatement()) {
            //reduce i/o operations, we have no OS crash recovery anyway
            statement.execute("PRAGMA synchronous = OFF;"); //NON-NLS
            //allow querying while in a transaction - no read locks needed
            statement.execute("PRAGMA read_uncommitted = True;"); //NON-NLS
            //TODO: do we need this?
            statement.execute("PRAGMA foreign_keys = ON"); //NON-NLS
            //TODO: test this
            statement.execute("PRAGMA journal_mode = MEMORY"); //NON-NLS
            //we don't use this feature, so turn it off for minimal speed up on queries
            //this is deprecated and not recommended
            statement.execute("PRAGMA count_changes = OFF;"); //NON-NLS
            //this made a big difference to query speed
            statement.execute("PRAGMA temp_store = MEMORY"); //NON-NLS
            //this made a modest improvement in query speeds
            statement.execute("PRAGMA cache_size = 50000"); //NON-NLS
            //we never delete anything so...
            statement.execute("PRAGMA auto_vacuum = 0"); //NON-NLS
        }
        try {
            logger.log(Level.INFO, String.format("sqlite-jdbc version %s loaded in %s mode", //NON-NLS
                    SQLiteJDBCLoader.getVersion(), SQLiteJDBCLoader.isNativeMode()
                    ? "native" : "pure-java")); //NON-NLS
        } catch (Exception exception) {
            logger.log(Level.SEVERE, "exception while checking sqlite-jdbc version and mode", exception); //NON-NLS
        }
    } finally {
        dbWriteUnlock();
    }
}
/**
 * Creates the tables and indices, in both the drawables database and the
 * case database, if they don't already exist. Also backfills/initializes
 * the Image Gallery schema-version bookkeeping table in each database.
 *
 * @return true if the schema was created or verified successfully, false
 *         otherwise (NOTE: the previous javadoc claimed a row count was
 *         returned; the method returns a boolean)
 */
private boolean initializeDBSchema() {
    dbWriteLock();
    try {
        boolean drawableDbTablesExist = true;
        if (isClosed()) {
            logger.log(Level.SEVERE, "The drawables database is closed"); //NON-NLS
            return false;
        }
        try {
            setPragmas();
        } catch (SQLException ex) {
            logger.log(Level.SEVERE, "Failed to set pragmas", ex); //NON-NLS
            return false;
        }
        /*
         * Create tables in the drawables database.
         */
        try (Statement stmt = con.createStatement()) {
            // Check if the database is new or an existing database
            drawableDbTablesExist = doesTableExist("drawable_files");
            if (false == doesTableExist(IG_DB_INFO_TABLE)) {
                try {
                    // a pre-existing DB that lacks the info table must date
                    // from the starting schema version
                    VersionNumber ig_creation_schema_version = drawableDbTablesExist
                            ? IG_STARTING_SCHEMA_VERSION
                            : IG_SCHEMA_VERSION;
                    stmt.execute("CREATE TABLE IF NOT EXISTS " + IG_DB_INFO_TABLE + " (name TEXT PRIMARY KEY, value TEXT NOT NULL)");
                    // backfill creation schema ver
                    stmt.execute(String.format("INSERT INTO %s (name, value) VALUES ('%s', '%s')", IG_DB_INFO_TABLE, IG_CREATION_SCHEMA_MAJOR_VERSION_KEY, ig_creation_schema_version.getMajor() ));
                    stmt.execute(String.format("INSERT INTO %s (name, value) VALUES ('%s', '%s')", IG_DB_INFO_TABLE, IG_CREATION_SCHEMA_MINOR_VERSION_KEY, ig_creation_schema_version.getMinor() ));
                    // set current schema ver: at DB initialization - current version is same as starting version
                    stmt.execute(String.format("INSERT INTO %s (name, value) VALUES ('%s', '%s')", IG_DB_INFO_TABLE, IG_SCHEMA_MAJOR_VERSION_KEY, ig_creation_schema_version.getMajor() ));
                    stmt.execute(String.format("INSERT INTO %s (name, value) VALUES ('%s', '%s')", IG_DB_INFO_TABLE, IG_SCHEMA_MINOR_VERSION_KEY, ig_creation_schema_version.getMinor() ));
                } catch (SQLException ex) {
                    logger.log(Level.SEVERE, "Failed to create ig_db_info table", ex); //NON-NLS
                    return false;
                }
            }
            try {
                String sql = "CREATE TABLE IF NOT EXISTS datasources " //NON-NLS
                        + "( id INTEGER PRIMARY KEY, " //NON-NLS
                        + " ds_obj_id BIGINT UNIQUE NOT NULL, "
                        + " drawable_db_build_status VARCHAR(128) )"; //NON-NLS
                stmt.execute(sql);
            } catch (SQLException ex) {
                logger.log(Level.SEVERE, "Failed to create datasources table", ex); //NON-NLS
                return false;
            }
            try {
                String sql = "CREATE TABLE if not exists drawable_files " //NON-NLS
                        + "( obj_id BIGINT PRIMARY KEY, " //NON-NLS
                        + " data_source_obj_id BIGINT NOT NULL, "
                        + " path TEXT, " //NON-NLS
                        + " name TEXT, " //NON-NLS
                        + " created_time integer, " //NON-NLS
                        + " modified_time integer, " //NON-NLS
                        + " make TEXT DEFAULT NULL, " //NON-NLS
                        + " model TEXT DEFAULT NULL, " //NON-NLS
                        + " analyzed integer DEFAULT 0)"; //NON-NLS
                stmt.execute(sql);
            } catch (SQLException ex) {
                logger.log(Level.SEVERE, "Failed to create drawable_files table", ex); //NON-NLS
                return false;
            }
            try {
                String sql = "CREATE TABLE if not exists hash_sets " //NON-NLS
                        + "( hash_set_id INTEGER primary key," //NON-NLS
                        + " hash_set_name TEXT UNIQUE NOT NULL)"; //NON-NLS
                stmt.execute(sql);
            } catch (SQLException ex) {
                logger.log(Level.SEVERE, "Failed to create hash_sets table", ex); //NON-NLS
                return false;
            }
            try {
                String sql = "CREATE TABLE if not exists hash_set_hits " //NON-NLS
                        + "(hash_set_id INTEGER REFERENCES hash_sets(hash_set_id) not null, " //NON-NLS
                        + " obj_id BIGINT REFERENCES drawable_files(obj_id) not null, " //NON-NLS
                        + " PRIMARY KEY (hash_set_id, obj_id))"; //NON-NLS
                stmt.execute(sql);
            } catch (SQLException ex) {
                logger.log(Level.SEVERE, "Failed to create hash_set_hits table", ex); //NON-NLS
                return false;
            }
            // index creation failures are only warnings: the DB still works,
            // just more slowly
            try {
                String sql = "CREATE INDEX if not exists path_idx ON drawable_files(path)"; //NON-NLS
                stmt.execute(sql);
            } catch (SQLException ex) {
                logger.log(Level.WARNING, "Failed to create path_idx", ex); //NON-NLS
            }
            try {
                String sql = "CREATE INDEX if not exists name_idx ON drawable_files(name)"; //NON-NLS
                stmt.execute(sql);
            } catch (SQLException ex) {
                logger.log(Level.WARNING, "Failed to create name_idx", ex); //NON-NLS
            }
            try {
                String sql = "CREATE INDEX if not exists make_idx ON drawable_files(make)"; //NON-NLS
                stmt.execute(sql);
            } catch (SQLException ex) {
                logger.log(Level.WARNING, "Failed to create make_idx", ex); //NON-NLS
            }
            try {
                String sql = "CREATE INDEX if not exists model_idx ON drawable_files(model)"; //NON-NLS
                stmt.execute(sql);
            } catch (SQLException ex) {
                logger.log(Level.WARNING, "Failed to create model_idx", ex); //NON-NLS
            }
            try {
                String sql = "CREATE INDEX if not exists analyzed_idx ON drawable_files(analyzed)"; //NON-NLS
                stmt.execute(sql);
            } catch (SQLException ex) {
                logger.log(Level.WARNING, "Failed to create analyzed_idx", ex); //NON-NLS
            }
        } catch (SQLException ex) {
            logger.log(Level.SEVERE, "Failed to create statement", ex); //NON-NLS
            return false;
        }
        /*
         * Create tables in the case database.
         */
        String autogenKeyType = (DbType.POSTGRESQL == tskCase.getDatabaseType()) ? "BIGSERIAL" : "INTEGER";
        try {
            boolean caseDbTablesExist = tskCase.getCaseDbAccessManager().tableExists(GROUPS_TABLENAME);
            VersionNumber ig_creation_schema_version = caseDbTablesExist
                    ? IG_STARTING_SCHEMA_VERSION
                    : IG_SCHEMA_VERSION;
            String tableSchema = "( id " + autogenKeyType + " PRIMARY KEY, "
                    + " name TEXT UNIQUE NOT NULL,"
                    + " value TEXT NOT NULL )";
            tskCase.getCaseDbAccessManager().createTable(IG_DB_INFO_TABLE, tableSchema);
            // backfill creation version
            String creationMajorVerSQL = String.format(" (name, value) VALUES ('%s', '%s')", IG_CREATION_SCHEMA_MAJOR_VERSION_KEY, ig_creation_schema_version.getMajor());
            String creationMinorVerSQL = String.format(" (name, value) VALUES ('%s', '%s')", IG_CREATION_SCHEMA_MINOR_VERSION_KEY, ig_creation_schema_version.getMinor());
            // set current version - at the onset, current version is same as creation version
            String currentMajorVerSQL = String.format(" (name, value) VALUES ('%s', '%s')", IG_SCHEMA_MAJOR_VERSION_KEY, ig_creation_schema_version.getMajor());
            String currentMinorVerSQL = String.format(" (name, value) VALUES ('%s', '%s')", IG_SCHEMA_MINOR_VERSION_KEY, ig_creation_schema_version.getMinor());
            if (DbType.POSTGRESQL == tskCase.getDatabaseType()) {
                creationMajorVerSQL += " ON CONFLICT DO NOTHING ";
                creationMinorVerSQL += " ON CONFLICT DO NOTHING ";
                currentMajorVerSQL += " ON CONFLICT DO NOTHING ";
                currentMinorVerSQL += " ON CONFLICT DO NOTHING ";
            }
            tskCase.getCaseDbAccessManager().insert(IG_DB_INFO_TABLE, creationMajorVerSQL);
            tskCase.getCaseDbAccessManager().insert(IG_DB_INFO_TABLE, creationMinorVerSQL);
            tskCase.getCaseDbAccessManager().insert(IG_DB_INFO_TABLE, currentMajorVerSQL);
            tskCase.getCaseDbAccessManager().insert(IG_DB_INFO_TABLE, currentMinorVerSQL);
        } catch (TskCoreException ex) {
            logger.log(Level.SEVERE, "Failed to create ig_db_info table in Case database", ex); //NON-NLS
            return false;
        }
        try {
            String tableSchema
                    = "( group_id " + autogenKeyType + " PRIMARY KEY, " //NON-NLS
                    + " data_source_obj_id BIGINT DEFAULT 0, "
                    + " value TEXT not null, " //NON-NLS
                    + " attribute TEXT not null, " //NON-NLS
                    + " is_analyzed integer DEFAULT 0, "
                    + " UNIQUE(data_source_obj_id, value, attribute) )"; //NON-NLS
            tskCase.getCaseDbAccessManager().createTable(GROUPS_TABLENAME, tableSchema);
        } catch (TskCoreException ex) {
            logger.log(Level.SEVERE, String.format("Failed to create %s table in case database", GROUPS_TABLENAME), ex); //NON-NLS
            return false;
        }
        try {
            String tableSchema
                    = "( id " + autogenKeyType + " PRIMARY KEY, " //NON-NLS
                    + " group_id integer not null, " //NON-NLS
                    + " examiner_id integer not null, " //NON-NLS
                    + " seen integer DEFAULT 0, " //NON-NLS
                    + " UNIQUE(group_id, examiner_id),"
                    + " FOREIGN KEY(group_id) REFERENCES " + GROUPS_TABLENAME + "(group_id),"
                    + " FOREIGN KEY(examiner_id) REFERENCES tsk_examiners(examiner_id)"
                    + " )"; //NON-NLS
            tskCase.getCaseDbAccessManager().createTable(GROUPS_SEEN_TABLENAME, tableSchema);
        } catch (TskCoreException ex) {
            logger.log(Level.SEVERE, String.format("Failed to create %s table in case database", GROUPS_SEEN_TABLENAME), ex); //NON-NLS
            return false;
        }
        return true;
    } finally {
        dbWriteUnlock();
    }
}
/**
 * Gets the Schema version from DrawableDB
 *
 * @return image gallery schema version in DrawableDB
 *
 * @throws SQLException
 * @throws TskCoreException
 */
private VersionNumber getDrawableDbIgSchemaVersion() throws SQLException, TskCoreException {
    // The major/minor reads were duplicated inline with manual close() calls
    // in a finally block (including a dead null-check on the statement);
    // factored into a helper that uses try-with-resources.
    int majorVersion = readDrawableDbSchemaVersionPart(IG_SCHEMA_MAJOR_VERSION_KEY, "major");
    int minorVersion = readDrawableDbSchemaVersionPart(IG_SCHEMA_MINOR_VERSION_KEY, "minor");
    return new VersionNumber(majorVersion, minorVersion, 0);
}

/**
 * Reads one schema version component (major or minor) from the drawables
 * database ig_db_info table.
 *
 * @param key   name of the ig_db_info row to read
 * @param label "major" or "minor", used in error messages
 *
 * @return the parsed version component
 *
 * @throws SQLException     on a database error
 * @throws TskCoreException if the row is missing or not an integer
 */
private int readDrawableDbSchemaVersionPart(String key, String label) throws SQLException, TskCoreException {
    try (Statement statement = con.createStatement();
            ResultSet resultSet = statement.executeQuery(
                    String.format("SELECT value FROM %s WHERE name='%s'", IG_DB_INFO_TABLE, key))) {
        if (!resultSet.next()) {
            throw new TskCoreException("Failed to read schema " + label + " version from ig_db_info table");
        }
        String versionStr = resultSet.getString("value");
        try {
            return Integer.parseInt(versionStr);
        } catch (NumberFormatException ex) {
            throw new TskCoreException("Bad value for schema " + label + " version = " + versionStr, ex);
        }
    }
}
/**
 * Gets the ImageGallery schema version from CaseDB
 *
 * @return image gallery schema version in CaseDB; either component is -1 if
 *         it could not be read or parsed (errors are logged, not thrown)
 *
 * @throws TskCoreException
 */
private VersionNumber getCaseDbIgSchemaVersion() throws TskCoreException {
    // Callback to process result of get version query
    class GetSchemaVersionQueryResultProcessor implements CaseDbAccessQueryCallback {

        // stays -1 if the query returns no row or a non-integer value
        private int version = -1;

        int getVersion() {
            return version;
        }

        @Override
        public void process(ResultSet resultSet) {
            try {
                if (resultSet.next()) {
                    String versionStr = resultSet.getString("value");
                    try {
                        version = Integer.parseInt(versionStr);
                    } catch (NumberFormatException ex) {
                        logger.log(Level.SEVERE, "Bad value for version = " + versionStr, ex);
                    }
                } else {
                    logger.log(Level.SEVERE, "Failed to get version");
                }
            }
            catch (SQLException ex) {
                logger.log(Level.SEVERE, "Failed to get version", ex); //NON-NLS
            }
        }
    }

    // run one SELECT per version component through the case DB access manager
    GetSchemaVersionQueryResultProcessor majorVersionResultProcessor = new GetSchemaVersionQueryResultProcessor();
    GetSchemaVersionQueryResultProcessor minorVersionResultProcessor = new GetSchemaVersionQueryResultProcessor();

    String versionQueryTemplate = "value FROM %s WHERE name = \'%s\' ";
    tskCase.getCaseDbAccessManager().select(String.format(versionQueryTemplate, IG_DB_INFO_TABLE, IG_SCHEMA_MAJOR_VERSION_KEY), majorVersionResultProcessor);
    tskCase.getCaseDbAccessManager().select(String.format(versionQueryTemplate, IG_DB_INFO_TABLE, IG_SCHEMA_MINOR_VERSION_KEY), minorVersionResultProcessor);

    return new VersionNumber(majorVersionResultProcessor.getVersion(), minorVersionResultProcessor.getVersion(), 0);
}
/**
 * Updates the IG schema version in the Drawable DB.
 *
 * @param version     new version number
 * @param transaction transaction under which the update happens; must not
 *                    be null
 *
 * @throws SQLException     if an update statement fails
 * @throws TskCoreException if no transaction is supplied
 */
private void updateDrawableDbIgSchemaVersion(VersionNumber version, DrawableTransaction transaction) throws SQLException, TskCoreException {
    if (transaction == null) {
        throw new TskCoreException("Schema version update must be done in a transaction");
    }
    dbWriteLock();
    // try-with-resources: the original leaked the Statement if the first
    // execute() threw before statement.close() was reached
    try (Statement statement = con.createStatement()) {
        // update schema version
        statement.execute(String.format("UPDATE %s SET value = '%s' WHERE name = '%s'", IG_DB_INFO_TABLE, version.getMajor(), IG_SCHEMA_MAJOR_VERSION_KEY ));
        statement.execute(String.format("UPDATE %s SET value = '%s' WHERE name = '%s'", IG_DB_INFO_TABLE, version.getMinor(), IG_SCHEMA_MINOR_VERSION_KEY ));
    }
    finally {
        dbWriteUnlock();
    }
}
/**
 * Updates the IG schema version stored in CaseDB.
 *
 * @param version           new version number
 * @param caseDbTransaction transaction to use to update the CaseDB
 *
 * @throws TskCoreException if a CaseDB update fails
 */
private void updateCaseDbIgSchemaVersion(VersionNumber version, CaseDbTransaction caseDbTransaction) throws TskCoreException {
    String updateSQLTemplate = " SET value = %s WHERE name = '%s' ";
    // build both clauses up front, then issue the two updates
    String majorVersionClause = String.format(updateSQLTemplate, version.getMajor(), IG_SCHEMA_MAJOR_VERSION_KEY);
    String minorVersionClause = String.format(updateSQLTemplate, version.getMinor(), IG_SCHEMA_MINOR_VERSION_KEY);
    tskCase.getCaseDbAccessManager().update(IG_DB_INFO_TABLE, majorVersionClause, caseDbTransaction);
    tskCase.getCaseDbAccessManager().update(IG_DB_INFO_TABLE, minorVersionClause, caseDbTransaction);
}
/**
 * Upgrades the DB schema.
 *
 * Reads the current IG schema versions from both CaseDB and DrawableDB,
 * applies the applicable upgrades inside one transaction per database,
 * records the new version numbers, and commits. If any step fails, every
 * still-pending transaction is rolled back and the exception is rethrown.
 *
 * @return true if the upgrade is successful
 *
 * @throws SQLException
 * @throws TskCoreException
 */
private boolean upgradeDBSchema() throws TskCoreException, SQLException {
    // Read current version from the DBs
    VersionNumber drawableDbIgSchemaVersion = getDrawableDbIgSchemaVersion();
    VersionNumber caseDbIgSchemaVersion = getCaseDbIgSchemaVersion();
    // Upgrade Schema in both DrawableDB and CaseDB
    CaseDbTransaction caseDbTransaction = tskCase.beginTransaction();
    DrawableTransaction transaction = beginTransaction();
    try {
        caseDbIgSchemaVersion = upgradeCaseDbIgSchema1dot0TO1dot1(caseDbIgSchemaVersion, caseDbTransaction);
        drawableDbIgSchemaVersion = upgradeDrawableDbIgSchema1dot0TO1dot1(drawableDbIgSchemaVersion, transaction);
        // update the versions in the tables
        updateCaseDbIgSchemaVersion(caseDbIgSchemaVersion, caseDbTransaction );
        updateDrawableDbIgSchemaVersion(drawableDbIgSchemaVersion, transaction);
        // null out each transaction reference as it commits, so the catch
        // block below only rolls back the ones that are still pending
        caseDbTransaction.commit();
        caseDbTransaction = null;
        commitTransaction(transaction, false);
        transaction = null;
    }
    catch (TskCoreException | SQLException ex) {
        if (null != caseDbTransaction) {
            try {
                caseDbTransaction.rollback();
            } catch (TskCoreException ex2) {
                logger.log(Level.SEVERE, String.format("Failed to roll back case db transaction after error: %s", ex.getMessage()), ex2); //NON-NLS
            }
        }
        if (null != transaction) {
            try {
                rollbackTransaction(transaction);
            } catch (SQLException ex2) {
                logger.log(Level.SEVERE, String.format("Failed to roll back drawables db transaction after error: %s", ex.getMessage()), ex2); //NON-NLS
            }
        }
        // rethrow the original failure after best-effort rollback
        throw ex;
    }
    return true;
}
/**
 * Upgrades IG tables in CaseDB from 1.0 to 1.1.
 *
 * Does nothing if the incoming version is neither 1.0 nor 1.1. Version 1.1
 * is also accepted because a versioning bug allowed some databases to be
 * stamped 1.1 without the actual upgrade having run; this lets such
 * databases be repaired.
 *
 * @param currVersion       version to upgrade from
 * @param caseDbTransaction transaction to use for all updates
 *
 * @return new version number
 *
 * @throws TskCoreException
 */
private VersionNumber upgradeCaseDbIgSchema1dot0TO1dot1(VersionNumber currVersion, CaseDbTransaction caseDbTransaction ) throws TskCoreException {
    boolean isUpgradeableVersion = (currVersion.getMajor() == 1)
            && (currVersion.getMinor() == 0 || currVersion.getMinor() == 1);
    if (!isUpgradeableVersion) {
        return currVersion;
    }
    // Add an 'is_analyzed' column to the groups table in CaseDB, unless a
    // previous (partial) upgrade already created it
    String alterSQL = " ADD COLUMN is_analyzed integer DEFAULT 1 "; //NON-NLS
    if (!tskCase.getCaseDbAccessManager().columnExists(GROUPS_TABLENAME, "is_analyzed", caseDbTransaction)) {
        tskCase.getCaseDbAccessManager().alterTable(GROUPS_TABLENAME, alterSQL, caseDbTransaction);
    }
    return new VersionNumber(1, 1, 0);
}
/**
 * Upgrades IG tables in DrawableDB from 1.0 to 1.1.
 * Does nothing if the incoming version is not 1.0.
 *
 * @param currVersion version to upgrade from
 * @param transaction transaction to use for all updates
 *
 * @return new version number
 *
 * @throws TskCoreException
 */
private VersionNumber upgradeDrawableDbIgSchema1dot0TO1dot1(VersionNumber currVersion, DrawableTransaction transaction ) throws TskCoreException {
    boolean isVersion1dot0 = (currVersion.getMajor() == 1) && (currVersion.getMinor() == 0);
    if (!isVersion1dot0) {
        return currVersion;
    }
    // No DrawableDB schema changes between 1.0 and 1.1; only the recorded
    // version number moves forward.
    return new VersionNumber(1, 1, 0);
}
@Override
protected void finalize() throws Throwable {
    /*
     * This finalizer is a safety net for freeing this resource. See
     * "Effective Java" by Joshua Bloch, Item #7.
     */
    dbWriteLock();
    try {
        if (!isClosed()) {
            // reaching this point means close() was never called explicitly
            logger.log(Level.SEVERE, "Closing drawable.db in finalizer, this should never be necessary"); //NON-NLS
            try {
                close();
            } finally {
                // chain to Object.finalize() even if close() throws
                super.finalize();
            }
        }
        // NOTE(review): super.finalize() is only invoked when the db was
        // still open — confirm skipping it on the already-closed path is
        // intentional.
    } finally {
        dbWriteUnlock();
    }
}
/**
 * Closes the drawable.db: all prepared statements first, then the
 * connection itself. Safe to call on an already-closed database. The
 * connection reference is always nulled out afterwards.
 */
public void close() {
    dbWriteLock();
    try {
        if (isClosed()) {
            return;
        }
        logger.log(Level.INFO, "Closing the drawable.db"); //NON-NLS
        for (PreparedStatement pStmt : preparedStatements) {
            try {
                pStmt.close();
            } catch (SQLException ex) {
                // keep going: one failed statement must not block the rest
                logger.log(Level.SEVERE, String.format("Failed to close prepared statement %s for drawable.db", pStmt.toString()), ex); //NON-NLS
            }
        }
        try {
            con.close();
        } catch (SQLException ex) {
            logger.log(Level.SEVERE, "Failed to close connection to drawable.db", ex); //NON-NLS
        }
    } finally {
        con = null;
        dbWriteUnlock();
    }
}
/**
 * Reports whether the drawable.db connection is closed.
 *
 * @return true if there is no connection or it is closed; false if the
 *         connection is open or its state cannot be determined
 */
private boolean isClosed() {
    dbWriteLock();
    try {
        if (con == null) {
            return true;
        }
        return con.isClosed();
    } catch (SQLException unused) {
        // can't determine the state; treat as still open
        return false;
    } finally {
        dbWriteUnlock();
    }
}
/**
 * get the names of the hashsets that the given fileID belongs to
 *
 * @param fileID the fileID to get all the Hashset names for
 *
 * @return a set of hash set names, each of which the given file belongs to
 *
 * @throws TskCoreException
 *
 *
 * //TODO: this is mostly a cut and paste from *
 * AbstractContent.getHashSetNames, is there away to dedupe?
 */
Set<String> getHashSetsForFile(long fileID) throws TskCoreException {
    Set<String> hashNames = new HashSet<>();
    // a hash set hit artifact's TSK_SET_NAME attribute carries the set name
    ArrayList<BlackboardArtifact> artifacts = tskCase.getBlackboardArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT, fileID);
    for (BlackboardArtifact a : artifacts) {
        BlackboardAttribute attribute = a.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME));
        if (attribute != null) {
            hashNames.add(attribute.getValueString());
        }
    }
    // read-only view; callers must not modify the result
    return Collections.unmodifiableSet(hashNames);
}
/**
 * get all the hash set names used in the db
 *
 * @return a set of the names of all the hash sets that have hash set hits;
 *         empty on query failure
 */
public Set<String> getHashSetNames() {
    Set<String> names = new HashSet<>();
    dbWriteLock();
    // backed by "SELECT DISTINCT hash_set_name FROM hash_sets"
    try (ResultSet rs = selectHashSetNamesStmt.executeQuery()) {
        while (rs.next()) {
            names.add(rs.getString(HASH_SET_NAME));
        }
    } catch (SQLException ex) {
        logger.log(Level.WARNING, "failed to get hash set names", ex); //NON-NLS
    } finally {
        dbWriteUnlock();
    }
    return names;
}
/**
 * Builds the sub-query that looks up a group's id from its
 * attribute/value pair (and, for PATH groups, the data source object id).
 *
 * @param groupKey key identifying the group
 *
 * @return a SELECT statement yielding the group_id column
 */
static private String getGroupIdQuery(GroupKey<?> groupKey) {
    String escapedAttribute = SleuthkitCase.escapeSingleQuotes(groupKey.getAttribute().attrName.toString());
    String escapedValue = SleuthkitCase.escapeSingleQuotes(groupKey.getValueDisplayName());
    // only PATH groups are scoped to a data source; everything else uses 0
    long dataSourceObjId = (groupKey.getAttribute() == DrawableAttribute.PATH) ? groupKey.getDataSourceObjId() : 0;
    return String.format(" SELECT group_id FROM " + GROUPS_TABLENAME
            + " WHERE attribute = \'%s\' AND value = \'%s\' AND data_source_obj_id = %d",
            escapedAttribute, escapedValue, dataSourceObjId);
}
/**
 * Returns true if the specified group has been seen by any examiner.
 *
 * @param groupKey key identifying the group
 *
 * @return true if at least one examiner has marked this group as seen
 */
public boolean isGroupSeen(GroupKey<?> groupKey) {
    // examinerId of -1 disables the per-examiner filter in the query
    return isGroupSeenByExaminer(groupKey, -1);
}
/**
 * Returns true if the specified group has been seen by the specified
 * examiner.
 *
 * @param groupKey   key to identify the group
 * @param examinerId examiner id; a value <= 0 checks whether ANY examiner
 *                   has seen the group
 *
 * @return true if the examiner has seen this group, false otherwise (also
 *         false if the query fails or is interrupted)
 */
public boolean isGroupSeenByExaminer(GroupKey<?> groupKey, long examinerId) {
    // Callback to process result of seen query; completes the future with
    // the boolean answer (or the SQL failure)
    class GroupSeenQueryResultProcessor extends CompletableFuture<Boolean> implements CaseDbAccessQueryCallback {
        @Override
        public void process(ResultSet resultSet) {
            try {
                if (resultSet != null) {
                    while (resultSet.next()) {
                        complete(resultSet.getInt("count") > 0); //NON-NLS;
                        return;
                    }
                }
            } catch (SQLException ex) {
                completeExceptionally(ex);
            }
        }
    }
    GroupSeenQueryResultProcessor queryResultProcessor = new GroupSeenQueryResultProcessor();
    try {
        // count rows in groups_seen that are marked seen for this group,
        // optionally restricted to one examiner
        String groupSeenQueryStmt = "COUNT(*) as count FROM " + GROUPS_SEEN_TABLENAME
                + " WHERE seen = 1 "
                + " AND group_id in ( " + getGroupIdQuery(groupKey) + ")"
                + (examinerId > 0 ? " AND examiner_id = " + examinerId : "");// query to find the group id from attribute/value
        tskCase.getCaseDbAccessManager().select(groupSeenQueryStmt, queryResultProcessor);
        return queryResultProcessor.get();
    } catch (InterruptedException ex) {
        // restore the interrupt status so callers up the stack can see it;
        // the original code silently swallowed the interrupt
        Thread.currentThread().interrupt();
        String msg = String.format("Failed to get is group seen for group key %s", groupKey.getValueDisplayName()); //NON-NLS
        logger.log(Level.SEVERE, msg, ex);
    } catch (ExecutionException | TskCoreException ex) {
        String msg = String.format("Failed to get is group seen for group key %s", groupKey.getValueDisplayName()); //NON-NLS
        logger.log(Level.SEVERE, msg, ex);
    }
    return false;
}
/**
 * Record in the DB that the group with the given key is seen
 * by given examiner id.
 *
 * @param groupKey   key identifying the group.
 * @param examinerID examiner id.
 *
 * @throws TskCoreException
 */
public void markGroupSeen(GroupKey<?> groupKey, long examinerID) throws TskCoreException {
    /*
     * Check the groupSeenCache to see if the seen status for this group was set recently.
     * If recently set to seen, there's no need to update it
     */
    Boolean cachedValue = groupSeenCache.getIfPresent(groupKey);
    if (cachedValue != null && cachedValue == true) {
        return;
    }
    // query to find the group id from attribute/value
    String innerQuery = String.format("( SELECT group_id FROM " + GROUPS_TABLENAME //NON-NLS
            + " WHERE attribute = \'%s\' AND value = \'%s\' and data_source_obj_id = %d )", //NON-NLS
            SleuthkitCase.escapeSingleQuotes(groupKey.getAttribute().attrName.toString()),
            SleuthkitCase.escapeSingleQuotes(groupKey.getValueDisplayName()),
            groupKey.getAttribute() == DrawableAttribute.PATH ? groupKey.getDataSourceObjId() : 0);
    // insert a (group, examiner, seen=1) row; on PostgreSQL an existing row
    // for this (group, examiner) pair is updated in place instead
    String insertSQL = String.format(" (group_id, examiner_id, seen) VALUES (%s, %d, %d)", innerQuery, examinerID, 1); //NON-NLS
    if (DbType.POSTGRESQL == tskCase.getDatabaseType()) {
        insertSQL += String.format(" ON CONFLICT (group_id, examiner_id) DO UPDATE SET seen = %d", 1); //NON-NLS
    }
    tskCase.getCaseDbAccessManager().insertOrUpdate(GROUPS_SEEN_TABLENAME, insertSQL);
    // remember the new state so immediately-repeated calls skip the DB write
    groupSeenCache.put(groupKey, true);
}
/**
 * Record in the DB that given group is unseen.
 * The group is marked unseen for ALL examiners that have seen the group.
 *
 * @param groupKey key identifying the group.
 *
 * @throws TskCoreException
 */
public void markGroupUnseen(GroupKey<?> groupKey) throws TskCoreException {
    // skip the DB round trip if the cache says this group was marked
    // unseen recently
    Boolean cachedValue = groupSeenCache.getIfPresent(groupKey);
    if (Boolean.FALSE.equals(cachedValue)) {
        return;
    }
    String updateSQL = String.format(" SET seen = 0 WHERE group_id in ( " + getGroupIdQuery(groupKey) + ")" ); //NON-NLS
    tskCase.getCaseDbAccessManager().update(GROUPS_SEEN_TABLENAME, updateSQL);
    // remember the new state so repeated calls can skip the DB write
    groupSeenCache.put(groupKey, false);
}
/**
 * Sets the is_analyzed flag in the groups table for the given group to 1.
 *
 * @param groupKey group key.
 *
 * @throws TskCoreException
 */
public void markGroupAnalyzed(GroupKey<?> groupKey) throws TskCoreException {
    String escapedAttribute = SleuthkitCase.escapeSingleQuotes(groupKey.getAttribute().attrName.toString());
    String escapedValue = SleuthkitCase.escapeSingleQuotes(groupKey.getValueDisplayName());
    // only PATH groups are scoped to a data source; everything else uses 0
    long dataSourceObjId = groupKey.getAttribute() == DrawableAttribute.PATH ? groupKey.getDataSourceObjId() : 0;
    String updateSQL = String.format(" SET is_analyzed = %d "
            + " WHERE attribute = \'%s\' AND value = \'%s\' and data_source_obj_id = %d ",
            1, escapedAttribute, escapedValue, dataSourceObjId);
    tskCase.getCaseDbAccessManager().update(GROUPS_TABLENAME, updateSQL);
}
/**
 * Removes a file from the drawables database, in its own transaction.
 *
 * @param id The object id of the file.
 *
 * @throws TskCoreException
 * @throws SQLException
 */
public void removeFile(long id) throws TskCoreException, SQLException {
    DrawableTransaction trans = null;
    try {
        trans = beginTransaction();
        removeFile(id, trans);
        commitTransaction(trans, true);
    } catch (TskCoreException | SQLException ex) {
        // best-effort rollback (only if the transaction was started), then
        // rethrow the original failure
        if (null != trans) {
            try {
                rollbackTransaction(trans);
            } catch (SQLException ex2) {
                logger.log(Level.SEVERE, String.format("Failed to roll back drawables db transaction after error: %s", ex.getMessage()), ex2); //NON-NLS
            }
        }
        throw ex;
    }
}
/**
 * Updates the image file, writing to both the drawables database and
 * CaseDB in a pair of transactions. On failure both transactions are
 * rolled back and the exception is rethrown.
 *
 * @param f file to update.
 *
 * @throws TskCoreException
 * @throws SQLException
 */
public void updateFile(DrawableFile f) throws TskCoreException, SQLException {
    DrawableTransaction trans = null;
    CaseDbTransaction caseDbTransaction = null;
    try {
        trans = beginTransaction();
        caseDbTransaction = tskCase.beginTransaction();
        updateFile(f, trans, caseDbTransaction);
        caseDbTransaction.commit();
        commitTransaction(trans, true);
    } catch (TskCoreException | SQLException ex) {
        // roll back whichever transactions were successfully started
        if (null != caseDbTransaction) {
            try {
                caseDbTransaction.rollback();
            } catch (TskCoreException ex2) {
                logger.log(Level.SEVERE, String.format("Failed to roll back case db transaction after error: %s", ex.getMessage()), ex2); //NON-NLS
            }
        }
        if (null != trans) {
            try {
                rollbackTransaction(trans);
            } catch (SQLException ex2) {
                logger.log(Level.SEVERE, String.format("Failed to roll back drawables db transaction after error: %s", ex.getMessage()), ex2); //NON-NLS
            }
        }
        throw ex;
    }
}
/**
 * Update an existing entry (or make a new one) into the DB that includes
 * group information. Called when a file has been analyzed or during a bulk
 * rebuild
 *
 * @param f                 file to update
 * @param tr                drawables db transaction to use; must not be null
 * @param caseDbTransaction case db transaction used for group inserts
 */
public void updateFile(DrawableFile f, DrawableTransaction tr, CaseDbTransaction caseDbTransaction) {
    // addGroups=true: also insert groups for the file's groupable attributes
    insertOrUpdateFile(f, tr, caseDbTransaction, true);
}
/**
 * Populate caches based on current state of Case DB.
 *
 * Loads the object ids of files that have tags, hash set hits, and EXIF
 * artifacts into the corresponding caches. Increments the builder count so
 * nested build/free calls are balanced; if the caches are already loaded
 * only the count is bumped.
 */
public void buildFileMetaDataCache() {
    synchronized (cacheLock) {
        cacheBuildCount++;
        if (areCachesLoaded == true) {
            return;
        }
        // each cache holds the object ids of files with that metadata type;
        // the three loads previously duplicated the same query/loop code
        loadObjIdsIntoCache("SELECT obj_id FROM content_tags", //NON-NLS
                hasTagCache, "tags"); //NON-NLS
        loadObjIdsIntoCache("SELECT obj_id FROM blackboard_artifacts WHERE artifact_type_id = " + BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID(), //NON-NLS
                hasHashCache, "hashsets"); //NON-NLS
        loadObjIdsIntoCache("SELECT obj_id FROM blackboard_artifacts WHERE artifact_type_id = " + BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF.getTypeID(), //NON-NLS
                hasExifCache, "EXIF"); //NON-NLS
        areCachesLoaded = true;
    }
}
/**
 * Runs a CaseDB query whose result set has an obj_id column and adds every
 * returned id to the given cache. Errors are logged, not propagated, so a
 * failed load leaves that cache partially filled (matching the original
 * per-cache behavior).
 *
 * @param query       CaseDB query returning an obj_id column
 * @param cache       cache to add the ids to
 * @param description short metadata-type name used in error messages
 */
private void loadObjIdsIntoCache(String query, Collection<Long> cache, String description) {
    try {
        try (SleuthkitCase.CaseDbQuery dbQuery = tskCase.executeQuery(query)) {
            ResultSet rs = dbQuery.getResultSet();
            while (rs.next()) {
                long id = rs.getLong("obj_id");
                cache.add(id);
            }
        } catch (SQLException ex) {
            logger.log(Level.SEVERE, "Error getting " + description + " from DB", ex); //NON-NLS
        }
    } catch (TskCoreException ex) {
        logger.log(Level.SEVERE, "Error executing query to get " + description, ex); //NON-NLS
    }
}
/**
 * Add a file to cache of files that have EXIF data
 *
 * @param objectID ObjId of file with EXIF
 */
public void addExifCache(long objectID) {
    synchronized (cacheLock) {
        // only record ids while the caches are being maintained
        if (cacheBuildCount != 0) {
            hasExifCache.add(objectID);
        }
    }
}
/**
 * Add a file to cache of files that have hash set hits
 *
 * @param objectID ObjId of file with hash set
 */
public void addHashSetCache(long objectID) {
    synchronized (cacheLock) {
        // only record ids while the caches are being maintained
        if (cacheBuildCount != 0) {
            hasHashCache.add(objectID);
        }
    }
}
/**
 * Add a file to cache of files that have tags
 *
 * @param objectID ObjId of file with tags
 */
public void addTagCache(long objectID) {
    synchronized (cacheLock) {
        // only record ids while the caches are being maintained
        if (cacheBuildCount != 0) {
            hasTagCache.add(objectID);
        }
    }
}
/**
 * Free the cached case DB data.
 *
 * Decrements the builder count and clears the caches only when the last
 * user releases them. Unbalanced calls (free without a matching build) no
 * longer drive the count negative, which previously made the addXCache
 * guards misbehave.
 */
public void freeFileMetaDataCache() {
    synchronized (cacheLock) {
        // don't free these if there is another task still using them;
        // clamp at zero so extra free calls can't make the count negative
        if (cacheBuildCount > 0 && --cacheBuildCount > 0) {
            return;
        }
        areCachesLoaded = false;
        hasTagCache.clear();
        hasHashCache.clear();
        hasExifCache.clear();
    }
}
/**
 * Update (or insert) a file in(to) the drawable db. Whether this is an
 * insert or an update depends on the prepared statement selected by
 * addGroups. This method also inserts hash set hits and groups into their
 * respective tables for the given file.
 *
 * //TODO: this is a kinda weird design, is their a better way? //TODO:
 * implement batch version -jm
 *
 * @param f                 The file to insert.
 * @param tr                a transaction to use, must not be null
 * @param caseDbTransaction case db transaction used for group inserts
 * @param addGroups         True if groups for file should be inserted into
 *                          db too
 */
private void insertOrUpdateFile(DrawableFile f, @Nonnull DrawableTransaction tr, @Nonnull CaseDbTransaction caseDbTransaction, boolean addGroups) {
    PreparedStatement stmt;
    if (tr.isCompleted()) {
        throw new IllegalArgumentException("can't update database with closed transaction");
    }
    // assume that we are doing an update if we are adding groups - i.e. not pre-populating
    if (addGroups) {
        stmt = updateFileStmt;
    } else {
        stmt = insertFileStmt;
    }
    // get data from caches. Default to true and force the DB lookup if we don't have caches
    boolean hasExif = true;
    boolean hasHashSet = true;
    boolean hasTag = true;
    synchronized (cacheLock) {
        if (areCachesLoaded) {
            hasExif = hasExifCache.contains(f.getId());
            hasHashSet = hasHashCache.contains(f.getId());
            hasTag = hasTagCache.contains(f.getId());
        }
    }
    // if we are going to just add basic data, then mark flags that we do not have metadata to prevent lookups
    if (addGroups == false) {
        hasExif = false;
        hasHashSet = false;
        hasTag = false;
    }
    dbWriteLock();
    try {
        // "INSERT OR IGNORE/ INTO drawable_files (obj_id, data_source_obj_id, path, name, created_time, modified_time, make, model, analyzed)"
        stmt.setLong(1, f.getId());
        stmt.setLong(2, f.getAbstractFile().getDataSourceObjectId());
        stmt.setString(3, f.getDrawablePath());
        stmt.setString(4, f.getName());
        stmt.setLong(5, f.getCrtime());
        stmt.setLong(6, f.getMtime());
        if (hasExif) {
            stmt.setString(7, f.getMake());
            stmt.setString(8, f.getModel());
        } else {
            // no EXIF metadata known: store empty make/model rather than looking them up
            stmt.setString(7, "");
            stmt.setString(8, "");
        }
        stmt.setBoolean(9, f.isAnalyzed());
        stmt.executeUpdate();
        // Update the list of file IDs in memory
        addImageFileToList(f.getId());
        // update the groups if we are not doing pre-populating
        if (addGroups) {
            // Update the hash set tables
            if (hasHashSet) {
                try {
                    for (String name : f.getHashSetNames()) {
                        // "insert or ignore into hash_sets (hash_set_name) values (?)"
                        insertHashSetStmt.setString(1, name);
                        insertHashSetStmt.executeUpdate();
                        //TODO: use nested select to get hash_set_id rather than seperate statement/query
                        //"select hash_set_id from hash_sets where hash_set_name = ?"
                        selectHashSetStmt.setString(1, name);
                        try (ResultSet rs = selectHashSetStmt.executeQuery()) {
                            while (rs.next()) {
                                int hashsetID = rs.getInt("hash_set_id"); //NON-NLS
                                //"insert or ignore into hash_set_hits (hash_set_id, obj_id) values (?,?)";
                                insertHashHitStmt.setInt(1, hashsetID);
                                insertHashHitStmt.setLong(2, f.getId());
                                insertHashHitStmt.executeUpdate();
                                break;
                            }
                        }
                    }
                } catch (TskCoreException ex) {
                    logger.log(Level.SEVERE, "failed to insert/update hash hits for file" + f.getContentPathSafe(), ex); //NON-NLS
                }
            }
            //and update all groups this file is in
            for (DrawableAttribute<?> attr : DrawableAttribute.getGroupableAttrs()) {
                // skip attributes that we do not have data for
                if ((attr == DrawableAttribute.TAGS) && (hasTag == false)) {
                    continue;
                } else if ((attr == DrawableAttribute.MAKE || attr == DrawableAttribute.MODEL) && (hasExif == false)) {
                    continue;
                }
                Collection<? extends Comparable<?>> vals = attr.getValue(f);
                for (Comparable<?> val : vals) {
                    if ((null != val) && (val.toString().isEmpty() == false)) {
                        // PATH groups are scoped per data source; all others are global
                        if (attr == DrawableAttribute.PATH) {
                            insertGroup(f.getAbstractFile().getDataSource().getId(), val.toString(), attr, caseDbTransaction);
                        } else {
                            insertGroup(val.toString(), attr, caseDbTransaction);
                        }
                    }
                }
            }
        }
        // @@@ Consider storing more than ID so that we do not need to requery each file during commit
        tr.addUpdatedFile(f.getId());
    } catch (SQLException | NullPointerException | TskCoreException ex) {
        /*
         * This is one of the places where we get an error if the case is
         * closed during processing, which doesn't need to be reported here.
         */
        if (Case.isCaseOpen()) {
            logger.log(Level.SEVERE, "failed to insert/update file" + f.getContentPathSafe(), ex); //NON-NLS
        }
    } finally {
        dbWriteUnlock();
    }
}
/**
 * Gets all data source object ids from datasources table, and their
 * DrawableDbBuildStatusEnum
 *
 * @return map of known data source object ids, and their db status
 *
 * @throws org.sleuthkit.datamodel.TskCoreException
 */
public Map<Long, DrawableDbBuildStatusEnum> getDataSourceDbBuildStatus() throws TskCoreException {
    Map<Long, DrawableDbBuildStatusEnum> map = new HashMap<>();
    dbWriteLock();
    try {
        if (isClosed()) {
            throw new TskCoreException("The drawables database is closed");
        }
        // try-with-resources closes the statement and result set on every
        // path, replacing the manual close in a finally block
        try (Statement statement = con.createStatement();
                ResultSet rs = statement.executeQuery("SELECT ds_obj_id, drawable_db_build_status FROM datasources ")) { //NON-NLS
            while (rs.next()) {
                map.put(rs.getLong("ds_obj_id"), DrawableDbBuildStatusEnum.valueOf(rs.getString("drawable_db_build_status")));
            }
        } catch (SQLException e) {
            throw new TskCoreException("SQLException while getting data source object ids", e);
        }
    } finally {
        dbWriteUnlock();
    }
    return map;
}
/**
 * Get the build status for the given data source.
 * Will return UNKNOWN if the data source is not yet in the database.
 *
 * @param dataSourceId object id of the data source
 *
 * @return The status of the data source or UNKNOWN if it is not found.
 *
 * @throws TskCoreException
 */
public DrawableDbBuildStatusEnum getDataSourceDbBuildStatus(Long dataSourceId) throws TskCoreException {
    // single lookup via getOrDefault instead of containsKey + get
    return getDataSourceDbBuildStatus().getOrDefault(dataSourceId, DrawableDbBuildStatusEnum.UNKNOWN);
}
/**
 * Insert/update given data source object id and it's DB rebuild status in
 * the datasources table.
 *
 * If the object id exists in the table already, it updates the status.
 * Failures are logged, not propagated.
 *
 * @param dsObjectId data source object id to insert
 * @param status     The db build status for the datasource.
 */
public void insertOrUpdateDataSource(long dsObjectId, DrawableDbBuildStatusEnum status) {
    dbWriteLock();
    try {
        // "INSERT OR REPLACE INTO datasources (ds_obj_id, drawable_db_build_status) " //NON-NLS
        updateDataSourceStmt.setLong(1, dsObjectId);
        updateDataSourceStmt.setString(2, status.name());
        updateDataSourceStmt.executeUpdate();
    } catch (SQLException | NullPointerException ex) {
        // NPE can occur here if the db has been closed and the statement nulled out
        logger.log(Level.SEVERE, "failed to insert/update datasources table", ex); //NON-NLS
    } finally {
        dbWriteUnlock();
    }
}
/**
 * Begins a new transaction on the drawables database.
 *
 * @return the newly started transaction
 *
 * @throws TskCoreException
 * @throws SQLException
 */
public DrawableTransaction beginTransaction() throws TskCoreException, SQLException {
    return new DrawableTransaction();
}
/**
 * Commits the given drawables db transaction.
 *
 * @param tr       transaction to commit; must not already be completed
 * @param notifyGM If true, notify GroupManager about the changes.
 *
 * @throws SQLException             if the commit fails
 * @throws IllegalArgumentException if the transaction was already
 *                                  committed or rolled back
 */
public void commitTransaction(DrawableTransaction tr, Boolean notifyGM) throws SQLException {
    if (tr.isCompleted()) {
        throw new IllegalArgumentException("Attempt to commit completed transaction");
    }
    tr.commit(notifyGM);
}
/**
 * Rolls back the given drawables db transaction.
 *
 * @param tr transaction to roll back; must not already be completed
 *
 * @throws SQLException             if the rollback fails
 * @throws IllegalArgumentException if the transaction was already
 *                                  committed or rolled back
 */
public void rollbackTransaction(DrawableTransaction tr) throws SQLException {
    if (tr.isCompleted()) {
        throw new IllegalArgumentException("Attempt to roll back completed transaction");
    }
    tr.rollback();
}
/**
 * Checks whether every file in the given collection has been analyzed.
 *
 * @param fileIds object ids of the files to check
 *
 * @return true if the count of analyzed files among the given ids equals
 *         the number of ids
 *
 * @throws SQLException if the database is closed or the query fails
 */
public Boolean areFilesAnalyzed(Collection<Long> fileIds) throws SQLException {
    dbWriteLock();
    try {
        if (isClosed()) {
            throw new SQLException("The drawables database is closed");
        }
        try (Statement stmt = con.createStatement()) {
            //Can't make this a prepared statement because of the IN ( ... )
            ResultSet analyzedQuery = stmt.executeQuery("SELECT COUNT(analyzed) AS analyzed FROM drawable_files WHERE analyzed = 1 AND obj_id IN (" + StringUtils.join(fileIds, ", ") + ")"); //NON-NLS
            while (analyzedQuery.next()) {
                return analyzedQuery.getInt(ANALYZED) == fileIds.size();
            }
            // COUNT query returned no row at all
            return false;
        }
    } finally {
        dbWriteUnlock();
    }
}
/**
 * Returns whether or not the given group is analyzed and ready to be viewed.
 *
 * @param groupKey group key.
 *
 * @return true if the group is analyzed; false if it is not, if the group
 *         row is missing, or if the query fails.
 *
 * @throws SQLException
 * @throws TskCoreException
 */
public Boolean isGroupAnalyzed(GroupKey<?> groupKey) throws SQLException, TskCoreException {
    // Callback to process result of isAnalyzed query
    class IsGroupAnalyzedQueryResultProcessor implements CaseDbAccessQueryCallback {
        // stays false if the group row is missing or the query fails
        private boolean isAnalyzed = false;
        boolean getIsAnalyzed() {
            return isAnalyzed;
        }
        @Override
        public void process(ResultSet resultSet) {
            try {
                if (resultSet.next()) {
                    // simplified from the redundant "== 1 ? true : false"
                    isAnalyzed = (resultSet.getInt("is_analyzed") == 1);
                }
            } catch (SQLException ex) {
                logger.log(Level.SEVERE, "Failed to get group is_analyzed", ex); //NON-NLS
            }
        }
    }
    IsGroupAnalyzedQueryResultProcessor queryResultProcessor = new IsGroupAnalyzedQueryResultProcessor();
    try {
        // NOTE(review): the query omits the leading "SELECT " — it appears
        // CaseDbAccessManager.select() supplies it; confirm against its API.
        String groupAnalyzedQueryStmt = String.format("is_analyzed FROM " + GROUPS_TABLENAME
                + " WHERE attribute = \'%s\' AND value = \'%s\' and data_source_obj_id = %d ",
                SleuthkitCase.escapeSingleQuotes(groupKey.getAttribute().attrName.toString()),
                SleuthkitCase.escapeSingleQuotes(groupKey.getValueDisplayName()),
                groupKey.getAttribute() == DrawableAttribute.PATH ? groupKey.getDataSourceObjId() : 0);
        tskCase.getCaseDbAccessManager().select(groupAnalyzedQueryStmt, queryResultProcessor);
        return queryResultProcessor.getIsAnalyzed();
    } catch (TskCoreException ex) {
        String msg = String.format("Failed to get group is_analyzed for group key %s", groupKey.getValueDisplayName()); //NON-NLS
        logger.log(Level.SEVERE, msg, ex);
    }
    return false;
}
/**
 * Find and return list of all ids of files matching the specific Where
 * clause
 *
 * @param sqlWhereClause a SQL where clause appropriate for the desired
 *                       files (do not begin the WHERE clause with the word
 *                       WHERE!)
 *
 * @return a list of file ids each of which satisfy the given WHERE clause
 *
 * @throws TskCoreException
 */
public Set<Long> findAllFileIdsWhere(String sqlWhereClause) throws TskCoreException {
    Set<Long> matchingIds = new HashSet<>();
    dbWriteLock();
    try {
        if (isClosed()) {
            throw new TskCoreException("The drawables database is closed");
        }
        try (Statement statement = con.createStatement();
                ResultSet rs = statement.executeQuery("SELECT obj_id FROM drawable_files WHERE " + sqlWhereClause)) {
            while (rs.next()) {
                matchingIds.add(rs.getLong(1));
            }
        } catch (SQLException ex) {
            throw new TskCoreException(String.format("Failed to query file id for WHERE clause %s", sqlWhereClause), ex);
        }
    } finally {
        dbWriteUnlock();
    }
    return matchingIds;
}
/**
 * Return the number of files matching the given clause.
 *
 * @param sqlWhereClause a SQL where clause appropriate for the desired
 *                       files (do not begin the WHERE clause with the word
 *                       WHERE!)
 *
 * @return Number of files matching the given where clause
 *
 * @throws TskCoreException
 */
public long countFilesWhere(String sqlWhereClause) throws TskCoreException {
    dbWriteLock();
    try {
        if (isClosed()) {
            throw new TskCoreException("The drawables database is closed");
        }
        try (Statement statement = con.createStatement();
                ResultSet rs = statement.executeQuery("SELECT COUNT(*) AS COUNT FROM drawable_files WHERE " + sqlWhereClause)) {
            // BUG FIX: the cursor starts before the first row; rs.next()
            // must be called before reading the COUNT column
            if (rs.next()) {
                return rs.getLong("COUNT");
            }
            throw new TskCoreException("No result returned for 'DrawableDB.countFilesWhere(): " + sqlWhereClause);
        } catch (SQLException e) {
            throw new TskCoreException("SQLException thrown when calling 'DrawableDB.countFilesWhere(): " + sqlWhereClause, e);
        }
    } finally {
        dbWriteUnlock();
    }
}
/**
 * Get all the values that are in db for the given attribute.
 *
 *
 * @param <A>        The type of values for the given attribute.
 * @param groupBy    The attribute to get the values for.
 * @param sortBy     The way to sort the results. Only GROUP_BY_VAL and
 *                   FILE_COUNT are supported.
 * @param sortOrder  Sort ascending or descending.
 * @param dataSource data source to restrict the results to; null for all
 *                   data sources
 *
 * @return Map of data source (or null of group by attribute ignores data
 *         sources) to list of unique group values
 *
 * @throws org.sleuthkit.datamodel.TskCoreException
 */
@SuppressWarnings("unchecked")
public <A extends Comparable<A>> Multimap<DataSource, A> findValuesForAttribute(DrawableAttribute<A> groupBy, GroupSortBy sortBy, SortOrder sortOrder, DataSource dataSource) throws TskCoreException {
    switch (groupBy.attrName) {
        case ANALYZED:
        case CATEGORY:
        case HASHSET:
            //these are somewhat special cases for now as they have fixed values, or live in the main autopsy database
            //they should have special handling at a higher level of the stack.
            throw new UnsupportedOperationException();
        default:
            dbWriteLock();
            try {
                if (isClosed()) {
                    throw new TskCoreException("The drawables database is closed");
                }
                //TODO: convert this to prepared statement
                StringBuilder query = new StringBuilder("SELECT data_source_obj_id, " + groupBy.attrName.toString() + ", COUNT(*) FROM drawable_files "); //NON-NLS
                // skip any null/blank values
                query.append("WHERE LENGTH(" + groupBy.attrName.toString() + ") > 0 ");
                if (dataSource != null) {
                    query.append(" AND data_source_obj_id = ").append(dataSource.getId());
                }
                query.append(" GROUP BY data_source_obj_id, ").append(groupBy.attrName.toString());
                String orderByClause = "";
                if (sortBy == GROUP_BY_VALUE) {
                    orderByClause = " ORDER BY " + groupBy.attrName.toString();
                } else if (sortBy == GroupSortBy.FILE_COUNT) {
                    orderByClause = " ORDER BY COUNT(*)";
                }
                query.append(orderByClause);
                if (orderByClause.isEmpty() == false) {
                    String sortOrderClause = "";
                    switch (sortOrder) {
                        case DESCENDING:
                            sortOrderClause = " DESC"; //NON-NLS
                            break;
                        case ASCENDING:
                            sortOrderClause = " ASC"; //NON-NLS
                            break;
                        default:
                            // NOTE(review): this assignment is dead — the
                            // ORDER BY was already appended above, so an
                            // unexpected sortOrder just omits the direction
                            orderByClause = "";
                    }
                    query.append(sortOrderClause);
                }
                try (Statement stmt = con.createStatement()) {
                    ResultSet results = stmt.executeQuery(query.toString());
                    Multimap<DataSource, A> values = HashMultimap.create();
                    while (results.next()) {
                        /*
                         * I don't like that we have to do this cast to A
                         * here, but can't think of a better alternative at
                         * the momment unless something has gone seriously
                         * wrong, we know this should be of type A even if
                         * JAVA doesn't
                         */
                        values.put(tskCase.getDataSource(results.getLong("data_source_obj_id")),
                                (A) results.getObject(groupBy.attrName.toString()));
                    }
                    return values;
                } catch (SQLException | TskDataException ex) {
                    throw new TskCoreException("Unable to get values for attribute", ex); //NON-NLS
                }
            } finally {
                dbWriteUnlock();
            }
    }
}
    /**
     * Insert new group into DB. Convenience overload that records the group
     * with no associated data source (data source object id 0).
     *
     * @param value Value of the group (unique to the type)
     * @param groupBy Type of the grouping (CATEGORY, MAKE, etc.)
     * @param caseDbTransaction transaction to use for CaseDB insert/updates
     *
     * @throws TskCoreException
     */
    private void insertGroup(final String value, DrawableAttribute<?> groupBy, CaseDbTransaction caseDbTransaction) throws TskCoreException {
        insertGroup(0, value, groupBy, caseDbTransaction);
    }
    /**
     * Insert new group into DB
     *
     * @param ds_obj_id data source object id
     * @param value Value of the group (unique to the type)
     * @param groupBy Type of the grouping (CATEGORY, MAKE, etc.)
     * @param caseDbTransaction transaction to use for CaseDB insert/updates
     *
     * @throws TskCoreException if the insert into the case database fails
     */
    private void insertGroup(long ds_obj_id, final String value, DrawableAttribute<?> groupBy, CaseDbTransaction caseDbTransaction) throws TskCoreException {
        /*
         * Check the groups cache to see if the group has already been added to
         * the case database.
         */
        // Cache key combines data source id, group value and attribute display
        // name so the same value under a different attribute is still inserted.
        String cacheKey = Long.toString(ds_obj_id) + "_" + value + "_" + groupBy.getDisplayName();
        if (groupCache.getIfPresent(cacheKey) != null) {
            return;
        }
        // PATH groups start un-analyzed; every other grouping is immediately analyzed.
        int isAnalyzed = (groupBy == DrawableAttribute.PATH) ? 0 : 1;
        String insertSQL = String.format(" (data_source_obj_id, value, attribute, is_analyzed) VALUES (%d, \'%s\', \'%s\', %d)",
                ds_obj_id, SleuthkitCase.escapeSingleQuotes(value), SleuthkitCase.escapeSingleQuotes(groupBy.attrName.toString()), isAnalyzed);
        if (DbType.POSTGRESQL == tskCase.getDatabaseType()) {
            // PostgreSQL would otherwise raise a unique-constraint error on a
            // duplicate group row.
            insertSQL += " ON CONFLICT DO NOTHING";
        }
        tskCase.getCaseDbAccessManager().insert(GROUPS_TABLENAME, insertSQL, caseDbTransaction);
        // Only cache after a successful insert so a failure can be retried.
        groupCache.put(cacheKey, Boolean.TRUE);
    }
/**
* @param id the obj_id of the file to return
*
* @return a DrawableFile for the given obj_id
*
* @throws TskCoreException if unable to get a file from the currently open
* {@link SleuthkitCase}
*/
public DrawableFile getFileFromID(Long id) throws TskCoreException {
AbstractFile f = tskCase.getAbstractFileById(id);
try {
return DrawableFile.create(f, areFilesAnalyzed(Collections.singleton(id)), isVideoFile(f));
} catch (SQLException ex) {
throw new TskCoreException(String.format("Failed to get file (id=%d)", id), ex);
}
}
    /**
     * Get the obj_ids of all the files that belong to the given group.
     *
     * MIME type, category and tag groups are not stored as drawable_files
     * columns, so they are delegated to the GroupManager; everything else is
     * answered with a per-attribute prepared statement against this database.
     *
     * @param groupKey the group to look up
     *
     * @return the (possibly empty) set of file ids in the group. On a SQL
     *         error the problem is logged and the partial result is returned.
     *
     * @throws TskCoreException
     */
    public Set<Long> getFileIDsInGroup(GroupKey<?> groupKey) throws TskCoreException {
        if (groupKey.getAttribute().isDBColumn == false) {
            switch (groupKey.getAttribute().attrName) {
                case MIME_TYPE:
                    return groupManager.getFileIDsWithMimeType((String) groupKey.getValue());
                case CATEGORY:
                    return groupManager.getFileIDsWithCategory((DhsImageCategory) groupKey.getValue());
                case TAGS:
                    return groupManager.getFileIDsWithTag((TagName) groupKey.getValue());
            }
        }
        Set<Long> files = new HashSet<>();
        dbWriteLock();
        try {
            // The shared prepared statements are only safe to parameterize and
            // execute while the db lock is held.
            PreparedStatement statement = getGroupStatment(groupKey);
            setQueryParams(statement, groupKey);
            try (ResultSet valsResults = statement.executeQuery()) {
                while (valsResults.next()) {
                    files.add(valsResults.getLong(OBJ_ID));
                }
            }
        } catch (SQLException ex) {
            logger.log(Level.WARNING, "failed to get file for group:" + groupKey.getAttribute() + " == " + groupKey.getValue(), ex); //NON-NLS
        } finally {
            dbWriteUnlock();
        }
        return files;
    }
private PreparedStatement getGroupStatment(GroupKey<?> groupKey) {
DrawableAttribute<?> groupBy = groupKey.getAttribute();
if ((groupBy == DrawableAttribute.PATH) && groupKey.getDataSource().isPresent()) {
return this.groupStatementFilterByDataSrcMap.get(groupBy);
}
return groupStatementMap.get(groupBy);
}
    /**
     * Count every row in drawable_files, by delegating to countFilesWhere with
     * the always-true WHERE clause " 1 ".
     *
     * @return the total number of files in the drawables database
     *
     * @throws TskCoreException
     */
    public long countAllFiles() throws TskCoreException {
        return countFilesWhere(" 1 ");
    }
    /**
     * delete the row with obj_id = id.
     *
     * Removes the id from the in-memory file list, then deletes the file's
     * hash-set hits and its drawable_files row, and records the removal on the
     * transaction so the GroupManager can be notified at commit time.
     *
     * @param id the obj_id of the row to be deleted
     * @param tr open transaction to record the removal on
     *
     * @throws IllegalArgumentException if the transaction is already completed
     */
    public void removeFile(long id, DrawableTransaction tr) {
        if (tr.isCompleted()) {
            throw new IllegalArgumentException("Attempt to use a completed transaction");
        }
        dbWriteLock();
        try {
            // Update the list of file IDs in memory
            removeImageFileFromList(id);
            //"delete from hash_set_hits where (obj_id = " + id + ")"
            removeHashHitStmt.setLong(1, id);
            removeHashHitStmt.executeUpdate();
            //"delete from drawable_files where (obj_id = " + id + ")"
            removeFileStmt.setLong(1, id);
            removeFileStmt.executeUpdate();
            tr.addRemovedFile(id);
        } catch (SQLException ex) {
            // Best-effort: a failed delete is logged, not rethrown.
            logger.log(Level.WARNING, "failed to delete row for obj_id = " + id, ex); //NON-NLS
        } finally {
            dbWriteUnlock();
        }
    }
public class MultipleTransactionException extends IllegalStateException {
public MultipleTransactionException() {
super("cannot have more than one open transaction");//NON-NLS
}
}
/**
* For performance reasons, keep a list of all file IDs currently in the
* drawable database. Otherwise the database is queried many times to
* retrieve the same data.
*/
@GuardedBy("fileIDlist")
private final Set<Long> fileIDsInDB = new HashSet<>();
public boolean isInDB(Long id) {
synchronized (fileIDsInDB) {
return fileIDsInDB.contains(id);
}
}
    /**
     * Add a file id to the in-memory cache of ids in the drawables database.
     */
    private void addImageFileToList(Long id) {
        synchronized (fileIDsInDB) {
            fileIDsInDB.add(id);
        }
    }
    /**
     * Remove a file id from the in-memory cache of ids in the drawables
     * database.
     */
    private void removeImageFileFromList(Long id) {
        synchronized (fileIDsInDB) {
            fileIDsInDB.remove(id);
        }
    }
    /**
     * @return the number of file ids currently in the in-memory cache, i.e.
     *         the number of files in the drawables database.
     */
    public int getNumberOfImageFilesInList() {
        synchronized (fileIDsInDB) {
            return fileIDsInDB.size();
        }
    }
    /**
     * Populate the in-memory file id cache from the drawable_files table.
     *
     * @return true on success, false if the database is closed or the query
     *         fails (both conditions are logged).
     */
    private boolean initializeImageList() {
        dbWriteLock();
        try {
            if (isClosed()) {
                logger.log(Level.SEVERE, "The drawables database is closed"); //NON-NLS
                return false;
            }
            // The ResultSet is released when the Statement is closed by
            // try-with-resources.
            try (Statement stmt = con.createStatement()) {
                ResultSet analyzedQuery = stmt.executeQuery("select obj_id from drawable_files");
                while (analyzedQuery.next()) {
                    addImageFileToList(analyzedQuery.getLong(OBJ_ID));
                }
                return true;
            } catch (SQLException ex) {
                logger.log(Level.SEVERE, "Failed to add image file object ids in drawables database to cache", ex); //NON-NLS
                return false;
            }
        } finally {
            dbWriteUnlock();
        }
    }
/**
* For performance reasons, keep the file type in memory
*/
private final Map<Long, Boolean> videoFileMap = new ConcurrentHashMap<>();
/**
* is this File a video file?
*
* @param f check if this file is a video. will return false for null file.
*
* @return returns true if this file is a video as determined by {@link ImageGalleryModule#isVideoFile(org.sleuthkit.datamodel.AbstractFile)
* } but caches the result. returns false if passed a null AbstractFile
*/
public boolean isVideoFile(AbstractFile f) {
return isNull(f) ? false
: videoFileMap.computeIfAbsent(f.getId(), id -> FileTypeUtils.hasVideoMIMEType(f));
}
    /**
     * get the number of files with the given category.
     *
     * NOTE: although the category data is stored in autopsy as Tags, this
     * method is provided on DrawableDb to provide a single point of access for
     * ImageGallery data.
     *
     * //TODO: think about moving this and similar methods that don't actually
     * get their data form the drawabledb to a layer wrapping the drawable db:
     * something like ImageGalleryCaseData?
     *
     * @param cat the category to count the number of files for
     *
     * @return the number of the with the given category, or -1 if the count
     *         could not be determined (case closed or query failure).
     */
    public long getCategoryCount(DhsImageCategory cat) {
        try {
            TagName tagName = controller.getTagsManager().getTagName(cat);
            if (nonNull(tagName)) {
                // Count only tagged files that are also in this drawables db.
                return tskCase.getContentTagsByTagName(tagName).stream()
                        .map(ContentTag::getContent)
                        .map(Content::getId)
                        .filter(this::isInDB)
                        .count();
            }
        } catch (IllegalStateException ex) {
            logger.log(Level.WARNING, "Case closed while getting files"); //NON-NLS
        } catch (TskCoreException ex1) {
            logger.log(Level.SEVERE, "Failed to get content tags by tag name.", ex1); //NON-NLS
        }
        // -1 is the error sentinel; callers must not treat it as a real count.
        return -1;
    }
/**
* get the number of files in the given set that are uncategorized(Cat-0).
*
* NOTE: although the category data is stored in autopsy as Tags, this
* method is provided on DrawableDb to provide a single point of access for
* ImageGallery data.
*
* //TODO: think about moving this and similar methods that don't actually
* get their data form the drawabledb to a layer wrapping the drawable db:
* something like ImageGalleryCaseData?
*
* @param fileIDs the the files ids to count within
*
* @return the number of files in the given set with Cat-0
*/
public long getUncategorizedCount(Collection<Long> fileIDs) throws TskCoreException {
// if the fileset is empty, return count as 0
if (fileIDs.isEmpty()) {
return 0;
}
// get a comma seperated list of TagName ids for non zero categories
DrawableTagsManager tagsManager = controller.getTagsManager();
String catTagNameIDs = tagsManager.getCategoryTagNames().stream()
.filter(tagName -> notEqual(tagName.getDisplayName(), DhsImageCategory.ZERO.getDisplayName()))
.map(TagName::getId)
.map(Object::toString)
.collect(Collectors.joining(",", "(", ")"));
String fileIdsList = "(" + StringUtils.join(fileIDs, ",") + " )";
//count the file ids that are in the given list and don't have a non-zero category assigned to them.
String name
= "SELECT COUNT(obj_id) as obj_count FROM tsk_files where obj_id IN " + fileIdsList //NON-NLS
+ " AND obj_id NOT IN (SELECT obj_id FROM content_tags WHERE content_tags.tag_name_id IN " + catTagNameIDs + ")"; //NON-NLS
try (SleuthkitCase.CaseDbQuery executeQuery = tskCase.executeQuery(name);
ResultSet resultSet = executeQuery.getResultSet();) {
while (resultSet.next()) {
return resultSet.getLong("obj_count"); //NON-NLS
}
} catch (SQLException ex) {
throw new TskCoreException("Error getting category count.", ex); //NON-NLS
}
return -1;
}
/**
* Encapsulates a drawables database transaction that uses the enclosing
* DrawableDB object's single JDBC connection. The transaction is begun when
* the DrawableTransaction object is created; clients MUST call either
* commit or rollback.
*
* IMPORTANT: This transaction must be thread-confined. It acquires and
* release a lock specific to a single thread.
*/
public class DrawableTransaction {
// The files are processed ORDERED BY parent path
// We want to preserve that order here, so that we can detect a
// change in path, and thus mark the path group as analyzed
// Hence we use a LinkedHashSet here.
private final Set<Long> updatedFiles = new LinkedHashSet<>();
private final Set<Long> removedFiles = new LinkedHashSet<>();
private boolean completed;
private DrawableTransaction() throws TskCoreException, SQLException {
dbWriteLock(); // Normally released when commit or rollback is called.
if (DrawableDB.this.isClosed()) {
dbWriteUnlock();
throw new TskCoreException("The drawables database is closed");
}
try {
con.setAutoCommit(false);
completed = false;
} catch (SQLException ex) {
completed = true;
dbWriteUnlock();
throw new SQLException("Failed to begin transaction", ex);
}
}
synchronized public void rollback() throws SQLException {
if (!completed) {
try {
updatedFiles.clear();
con.rollback();
} finally {
complete();
}
}
}
/**
* Commit changes that happened during this transaction
*
* @param notifyGM If true, notify GroupManager about the changes.
*/
synchronized public void commit(Boolean notifyGM) throws SQLException {
if (!completed) {
try {
con.commit();
/*
* Need to close the transaction before notifying the Group
* Manager, so that the lock is released.
*/
complete();
if (notifyGM) {
if (groupManager != null) {
groupManager.handleFileUpdate(updatedFiles);
groupManager.handleFileRemoved(removedFiles);
}
}
} catch (SQLException ex) {
logger.log(Level.SEVERE, "Failed to commit transaction, will attempt rollback", ex); //NON-NLS
rollback();
}
}
}
synchronized private void complete() {
if (!completed) {
try {
con.setAutoCommit(true);
} catch (SQLException ex) {
logger.log(Level.SEVERE, "Failed to set auto-commit to false", ex); //NON-NLS
} finally {
completed = true;
dbWriteUnlock();
}
}
}
synchronized private Boolean isCompleted() {
return completed;
}
synchronized private void addUpdatedFile(Long f) {
updatedFiles.add(f);
}
synchronized private void addRemovedFile(long id) {
removedFiles.add(id);
}
}
}
|
Sygmei/ObEngine | src/Core/Bindings/obe/Graphics/Exceptions/Exceptions.cpp | #include <Bindings/obe/Graphics/Exceptions/Exceptions.hpp>
#include <Graphics/Exceptions.hpp>
#include <Bindings/Config.hpp>
// Generated sol2 Lua bindings for the obe::Graphics::Exceptions types: each
// LoadClass* function registers one exception usertype (constructor plus base
// classes) on the obe.Graphics.Exceptions Lua table.
namespace obe::Graphics::Exceptions::Bindings
{
    // Binds CanvasElementAlreadyExists(id, type, existingType, debug info).
    void LoadClassCanvasElementAlreadyExists(sol::state_view state)
    {
        sol::table ExceptionsNamespace = state["obe"]["Graphics"]["Exceptions"].get<sol::table>();
        sol::usertype<obe::Graphics::Exceptions::CanvasElementAlreadyExists>
            bindCanvasElementAlreadyExists
            = ExceptionsNamespace
                  .new_usertype<obe::Graphics::Exceptions::CanvasElementAlreadyExists>(
                      "CanvasElementAlreadyExists", sol::call_constructor,
                      sol::constructors<obe::Graphics::Exceptions::CanvasElementAlreadyExists(
                          std::string_view, std::string_view, std::string_view, obe::DebugInfo)>(),
                      sol::base_classes,
                      sol::bases<obe::Exception<CanvasElementAlreadyExists>, obe::BaseException>());
    }
    // Binds ImageFileNotFound(path, debug info).
    void LoadClassImageFileNotFound(sol::state_view state)
    {
        sol::table ExceptionsNamespace = state["obe"]["Graphics"]["Exceptions"].get<sol::table>();
        sol::usertype<obe::Graphics::Exceptions::ImageFileNotFound> bindImageFileNotFound
            = ExceptionsNamespace.new_usertype<obe::Graphics::Exceptions::ImageFileNotFound>(
                "ImageFileNotFound", sol::call_constructor,
                sol::constructors<obe::Graphics::Exceptions::ImageFileNotFound(
                    std::string_view, obe::DebugInfo)>(),
                sol::base_classes,
                sol::bases<obe::Exception<ImageFileNotFound>, obe::BaseException>());
    }
    // Binds InvalidColorName(name, debug info).
    void LoadClassInvalidColorName(sol::state_view state)
    {
        sol::table ExceptionsNamespace = state["obe"]["Graphics"]["Exceptions"].get<sol::table>();
        sol::usertype<obe::Graphics::Exceptions::InvalidColorName> bindInvalidColorName
            = ExceptionsNamespace.new_usertype<obe::Graphics::Exceptions::InvalidColorName>(
                "InvalidColorName", sol::call_constructor,
                sol::constructors<obe::Graphics::Exceptions::InvalidColorName(
                    std::string_view, obe::DebugInfo)>(),
                sol::base_classes,
                sol::bases<obe::Exception<InvalidColorName>, obe::BaseException>());
    }
    // Binds InvalidHexFormat(hex string, debug info).
    void LoadClassInvalidHexFormat(sol::state_view state)
    {
        sol::table ExceptionsNamespace = state["obe"]["Graphics"]["Exceptions"].get<sol::table>();
        sol::usertype<obe::Graphics::Exceptions::InvalidHexFormat> bindInvalidHexFormat
            = ExceptionsNamespace.new_usertype<obe::Graphics::Exceptions::InvalidHexFormat>(
                "InvalidHexFormat", sol::call_constructor,
                sol::constructors<obe::Graphics::Exceptions::InvalidHexFormat(
                    std::string_view, obe::DebugInfo)>(),
                sol::base_classes,
                sol::bases<obe::Exception<InvalidHexFormat>, obe::BaseException>());
    }
    // Binds InvalidHsvFormat(h, s, v, debug info).
    void LoadClassInvalidHsvFormat(sol::state_view state)
    {
        sol::table ExceptionsNamespace = state["obe"]["Graphics"]["Exceptions"].get<sol::table>();
        sol::usertype<obe::Graphics::Exceptions::InvalidHsvFormat> bindInvalidHsvFormat
            = ExceptionsNamespace.new_usertype<obe::Graphics::Exceptions::InvalidHsvFormat>(
                "InvalidHsvFormat", sol::call_constructor,
                sol::constructors<obe::Graphics::Exceptions::InvalidHsvFormat(
                    const int, const double, const double, obe::DebugInfo)>(),
                sol::base_classes,
                sol::bases<obe::Exception<InvalidHsvFormat>, obe::BaseException>());
    }
    // Binds InvalidRgbFormat(r, g, b, a, debug info).
    void LoadClassInvalidRgbFormat(sol::state_view state)
    {
        sol::table ExceptionsNamespace = state["obe"]["Graphics"]["Exceptions"].get<sol::table>();
        sol::usertype<obe::Graphics::Exceptions::InvalidRgbFormat> bindInvalidRgbFormat
            = ExceptionsNamespace.new_usertype<obe::Graphics::Exceptions::InvalidRgbFormat>(
                "InvalidRgbFormat", sol::call_constructor,
                sol::constructors<obe::Graphics::Exceptions::InvalidRgbFormat(
                    const double, const double, const double, const double, obe::DebugInfo)>(),
                sol::base_classes,
                sol::bases<obe::Exception<InvalidRgbFormat>, obe::BaseException>());
    }
    // Binds InvalidSpriteColorType(type, value, debug info).
    void LoadClassInvalidSpriteColorType(sol::state_view state)
    {
        sol::table ExceptionsNamespace = state["obe"]["Graphics"]["Exceptions"].get<sol::table>();
        sol::usertype<obe::Graphics::Exceptions::InvalidSpriteColorType> bindInvalidSpriteColorType
            = ExceptionsNamespace.new_usertype<obe::Graphics::Exceptions::InvalidSpriteColorType>(
                "InvalidSpriteColorType", sol::call_constructor,
                sol::constructors<obe::Graphics::Exceptions::InvalidSpriteColorType(
                    std::string_view, std::string_view, obe::DebugInfo)>(),
                sol::base_classes,
                sol::bases<obe::Exception<InvalidSpriteColorType>, obe::BaseException>());
    }
    // Binds ReadOnlyTexture(operation, debug info).
    void LoadClassReadOnlyTexture(sol::state_view state)
    {
        sol::table ExceptionsNamespace = state["obe"]["Graphics"]["Exceptions"].get<sol::table>();
        sol::usertype<obe::Graphics::Exceptions::ReadOnlyTexture> bindReadOnlyTexture
            = ExceptionsNamespace.new_usertype<obe::Graphics::Exceptions::ReadOnlyTexture>(
                "ReadOnlyTexture", sol::call_constructor,
                sol::constructors<obe::Graphics::Exceptions::ReadOnlyTexture(
                    std::string_view, obe::DebugInfo)>(),
                sol::base_classes,
                sol::bases<obe::Exception<ReadOnlyTexture>, obe::BaseException>());
    }
};
mtaghiza/tinyos-main-1 | support/sdk/c/coap/tests/test_uri.c | <reponame>mtaghiza/tinyos-main-1<filename>support/sdk/c/coap/tests/test_uri.c
/* libcoap unit tests
*
* Copyright (C) 2012 <NAME> <<EMAIL>>
*
* This file is part of the CoAP library libcoap. Please see
* README for terms of use.
*/
#include <stdio.h>
#include <coap.h>
#include "test_uri.h"
/* IPv6 literal host, default port, well-known path, no query. */
void
t_parse_uri1(void) {
  char teststr[] = "coap://[::1]/.well-known/core";
  coap_uri_t uri;
  int result;

  result = coap_split_uri((unsigned char *)teststr, strlen(teststr), &uri);
  if (result != 0) {
    CU_FAIL("uri parser error");
    return;
  }
  CU_ASSERT(uri.host.length == 3);
  CU_ASSERT_NSTRING_EQUAL(uri.host.s, "::1", 3);
  CU_ASSERT(uri.port == COAP_DEFAULT_PORT);
  CU_ASSERT(uri.path.length == 16);
  CU_ASSERT_NSTRING_EQUAL(uri.path.s, ".well-known/core", 16);
  CU_ASSERT(uri.query.length == 0);
  CU_ASSERT(uri.query.s == NULL);
}
/* IPv6 literal host with an explicit port. */
void
t_parse_uri2(void) {
  char teststr[] = "coap://[::1]:8000/.well-known/core";
  coap_uri_t uri;
  int result;

  result = coap_split_uri((unsigned char *)teststr, strlen(teststr), &uri);
  if (result != 0) {
    CU_FAIL("uri parser error");
    return;
  }
  CU_ASSERT(uri.host.length == 3);
  CU_ASSERT_NSTRING_EQUAL(uri.host.s, "::1", 3);
  CU_ASSERT(uri.port == 8000);
  CU_ASSERT(uri.path.length == 16);
  CU_ASSERT_NSTRING_EQUAL(uri.path.s, ".well-known/core", 16);
  CU_ASSERT(uri.query.length == 0);
  CU_ASSERT(uri.query.s == NULL);
}
/* Empty path with a non-empty query component. */
void
t_parse_uri3(void) {
  char teststr[] = "coap://localhost/?foo&bla=fasel";
  coap_uri_t uri;
  int result;

  result = coap_split_uri((unsigned char *)teststr, strlen(teststr), &uri);
  if (result != 0) {
    CU_FAIL("uri parser error");
    return;
  }
  CU_ASSERT(uri.host.length == 9);
  CU_ASSERT_NSTRING_EQUAL(uri.host.s, "localhost", 9);
  CU_ASSERT(uri.port == COAP_DEFAULT_PORT);
  CU_ASSERT(uri.path.length == 0);
  CU_ASSERT(uri.query.length == 13);
  CU_ASSERT_NSTRING_EQUAL(uri.query.s, "foo&bla=fasel", 13);
}
/* A port number above 65535 (with an empty host) must be rejected. */
void
t_parse_uri4(void) {
  char teststr[] = "coap://:100000";
  coap_uri_t uri;

  CU_ASSERT(coap_split_uri((unsigned char *)teststr, strlen(teststr), &uri) < 0);
}
/* A port number above 65535 with a valid host must still be rejected. */
void
t_parse_uri5(void) {
  char teststr[] = "coap://foo:100000";
  coap_uri_t uri;
  int result;

  result = coap_split_uri((unsigned char *)teststr, strlen(teststr), &uri);
  if (result != 0) {
    CU_PASS("detected invalid port");
    return;
  }
  /* Parsing succeeded when it should not have: record what was parsed,
     then fail. */
  CU_ASSERT(uri.host.length == 3);
  CU_ASSERT_NSTRING_EQUAL(uri.host.s, "foo", 3);
  CU_ASSERT(uri.path.length == 0);
  CU_ASSERT(uri.path.s == NULL);
  CU_ASSERT(uri.query.length == 0);
  CU_ASSERT(uri.query.s == NULL);
  CU_FAIL("invalid port not detected");
}
/* IPv4 dotted-quad host, default port. */
void
t_parse_uri6(void) {
  char teststr[] = "coap://192.168.3.11/.well-known/core";
  int result;
  coap_uri_t uri;

  result = coap_split_uri((unsigned char *)teststr, strlen(teststr), &uri);
  if (result == 0) {
    /* "192.168.3.11" is 12 characters; the previous expected length of 13
       was off by one and the string comparison read one byte past the host
       component (compare t_parse_uri12, which uses 12 for the 12-character
       "198.51.100.1"). */
    CU_ASSERT(uri.host.length == 12);
    CU_ASSERT_NSTRING_EQUAL(uri.host.s, "192.168.3.11", 12);
    CU_ASSERT(uri.port == COAP_DEFAULT_PORT);
    CU_ASSERT(uri.path.length == 16);
    CU_ASSERT_NSTRING_EQUAL(uri.path.s, ".well-known/core", 16);
    CU_ASSERT(uri.query.length == 0);
    CU_ASSERT(uri.query.s == NULL);
  } else {
    CU_FAIL("uri parser error");
  }
}
/* Multi-segment path: parse, then split the path into CoAP options and
   compare against the expected option-encoded byte sequence. */
void
t_parse_uri7(void) {
  char teststr[] = "coap://foo.bar:5683/some_resource/with/multiple/segments";
  int result;
  coap_uri_t uri;
  unsigned char buf[40];
  size_t buflen = sizeof(buf);
  /* The list of path segments to check against. Each segment is
     preceded by a dummy option indicating that holds the (dummy)
     delta value 0 and the actual segment length. */
  const unsigned char checkbuf[] = {
    0x0d, 0x00, 's', 'o', 'm', 'e', '_', 'r', 'e', 's', 'o', 'u', 'r', 'c', 'e',
    0x04, 'w', 'i', 't', 'h',
    0x08, 'm', 'u', 'l', 't', 'i', 'p', 'l', 'e',
    0x08, 's', 'e', 'g', 'm', 'e', 'n', 't', 's'
  };

  result = coap_split_uri((unsigned char *)teststr, strlen(teststr), &uri);
  if (result == 0) {
    CU_ASSERT(uri.host.length == 7);
    CU_ASSERT_NSTRING_EQUAL(uri.host.s, "foo.bar", 7);
    CU_ASSERT(uri.port == 5683);
    CU_ASSERT(uri.path.length == 36);
    CU_ASSERT_NSTRING_EQUAL(uri.path.s, "some_resource/with/multiple/segments", 36);
    CU_ASSERT(uri.query.length == 0);
    CU_ASSERT(uri.query.s == NULL);
    /* check path segments */
    result = coap_split_path(uri.path.s, uri.path.length, buf, &buflen);
    CU_ASSERT(result == 4);
    CU_ASSERT(buflen == sizeof(checkbuf));
    CU_ASSERT_NSTRING_EQUAL(buf, checkbuf, buflen);
  } else {
    CU_FAIL("uri parser error");
  }
}
/* An http URI is not a coap URI and must be rejected. */
void
t_parse_uri8(void) {
  char teststr[] = "http://example.com/%7E%AB%13";
  coap_uri_t uri;

  if (coap_split_uri((unsigned char *)teststr, strlen(teststr), &uri) < 0) {
    CU_PASS("detected non-coap URI");
  } else {
    CU_FAIL("non-coap URI not recognized");
  }
}
/* Malformed percent-encoding in a non-coap URI must be rejected. */
void
t_parse_uri9(void) {
  char teststr[] = "http://example.com/%x";
  coap_uri_t uri;

  if (coap_split_uri((unsigned char *)teststr, strlen(teststr), &uri) < 0) {
    CU_PASS("detected non-coap URI");
  } else {
    CU_FAIL("non-coap URI not recognized");
  }
}
/* A bare absolute path: no scheme, empty host, default port. */
void
t_parse_uri10(void) {
  char teststr[] = "/absolute/path";
  coap_uri_t uri;
  int result;

  result = coap_split_uri((unsigned char *)teststr, strlen(teststr), &uri);
  if (result != 0) {
    CU_FAIL("uri parser error");
    return;
  }
  CU_ASSERT(uri.host.length == 0);
  CU_ASSERT(uri.host.s == NULL);
  CU_ASSERT(uri.port == COAP_DEFAULT_PORT);
  CU_ASSERT(uri.path.length == 13);
  CU_ASSERT_NSTRING_EQUAL(uri.path.s, "absolute/path", 13);
  CU_ASSERT(uri.query.length == 0);
  CU_ASSERT(uri.query.s == NULL);
}
/* Percent-encoded UTF-8 path: the percent-encoding is decoded when the path
   is split into options, so the check buffer holds raw UTF-8 bytes. */
void
t_parse_uri11(void) {
  char teststr[] =
    "coap://xn--18j4d.example/%E3%81%93%E3%82%93%E3%81%AB%E3%81%A1%E3%81%AF";
  int result;
  coap_uri_t uri;
  unsigned char buf[40];
  size_t buflen = sizeof(buf);
  /* The list of path segments to check against. Each segment is
     preceded by a dummy option indicating that holds the (dummy)
     delta value 0 and the actual segment length. */
  const unsigned char checkbuf[] = {
    0x0d, 0x02, 0xE3, 0x81, 0x93, 0xE3, 0x82, 0x93,
    0xE3, 0x81, 0xAB, 0xE3, 0x81, 0xA1, 0xE3, 0x81,
    0xAF
  };

  result = coap_split_uri((unsigned char *)teststr, strlen(teststr), &uri);
  if (result == 0) {
    CU_ASSERT(uri.host.length == 17);
    CU_ASSERT_NSTRING_EQUAL(uri.host.s, "xn--18j4d.example", 17);
    CU_ASSERT(uri.port == COAP_DEFAULT_PORT);
    CU_ASSERT(uri.path.length == 45);
    CU_ASSERT_NSTRING_EQUAL(uri.path.s,
      "%E3%81%93%E3%82%93%E3%81%AB%E3%81%A1%E3%81%AF", 45);
    CU_ASSERT(uri.query.length == 0);
    CU_ASSERT(uri.query.s == NULL);
    /* check path segments */
    result = coap_split_path(uri.path.s, uri.path.length, buf, &buflen);
    CU_ASSERT(result == 1);
    CU_ASSERT(buflen == sizeof(checkbuf));
    CU_ASSERT_NSTRING_EQUAL(buf, checkbuf, buflen);
  } else {
    CU_FAIL("uri parser error");
  }
}
/* Edge cases: empty path segments, percent-encoded '/' and '&' in path and
   query; both the split path options and the split query options are checked
   against expected option-encoded byte sequences. */
void
t_parse_uri12(void) {
  char teststr[] = "coap://198.51.100.1:61616//%2F//?%2F%2F&?%26";
  int result;
  coap_uri_t uri;
  unsigned char buf[40];
  size_t buflen = sizeof(buf);
  /* The list of path segments to check against. Each segment is
     preceded by a dummy option indicating that holds the (dummy)
     delta value 0 and the actual segment length. */
  const unsigned char uricheckbuf[] = { 0x00, 0x01, 0x2f, 0x00, 0x00 };
  const unsigned char querycheckbuf[] = { 0x02, 0x2f, 0x2f, 0x02, 0x3f, 0x26 };

  result = coap_split_uri((unsigned char *)teststr, strlen(teststr), &uri);
  if (result == 0) {
    CU_ASSERT(uri.host.length == 12);
    CU_ASSERT_NSTRING_EQUAL(uri.host.s, "198.51.100.1", 12);
    CU_ASSERT(uri.port == 61616);
    CU_ASSERT(uri.path.length == 6);
    CU_ASSERT_NSTRING_EQUAL(uri.path.s, "/%2F//", 6);
    CU_ASSERT(uri.query.length == 11);
    CU_ASSERT_NSTRING_EQUAL(uri.query.s, "%2F%2F&?%26", 11);
    /* check path segments */
    result = coap_split_path(uri.path.s, uri.path.length, buf, &buflen);
    CU_ASSERT(result == 4);
    CU_ASSERT(buflen == sizeof(uricheckbuf));
    CU_ASSERT_NSTRING_EQUAL(buf, uricheckbuf, buflen);
    /* check query segments */
    buflen = sizeof(buf);
    result = coap_split_query(uri.query.s, uri.query.length, buf, &buflen);
    CU_ASSERT(result == 2);
    CU_ASSERT(buflen == sizeof(querycheckbuf));
    CU_ASSERT_NSTRING_EQUAL(buf, querycheckbuf, buflen);
  } else {
    CU_FAIL("uri parser error");
  }
}
/* Hash a raw PDU containing Uri-Path options for /.well-known/core and check
   it matches the precomputed well-known-core hash key. The buffer holds a
   CoAP header followed by encoded options; it is 8-byte aligned because it is
   reinterpreted as a coap_hdr_t. */
void
t_parse_uri13(void) {
  char teststr[] __attribute__ ((aligned (8))) = {
    0x00, 0x00, 0x00, 0x00, 0x80, 0x03, 'f', 'o',
    'o', 0x3b, '.', 'w', 'e', 'l', 'l', '-',
    'k', 'n', 'o', 'w', 'n', 0x04, 'c', 'o',
    'r', 'e'
  };
  coap_pdu_t pdu = {
    .max_size = sizeof(teststr),
    .hdr = (coap_hdr_t *)teststr,
    .length = sizeof(teststr)
  };
  coap_key_t key;

  coap_hash_request_uri(&pdu, key);

  CU_ASSERT(sizeof(key) == sizeof(COAP_DEFAULT_WKC_HASHKEY) - 1);
  CU_ASSERT_NSTRING_EQUAL(key, COAP_DEFAULT_WKC_HASHKEY, sizeof(key));
}
/* Register the uri parser test suite with CUnit and add all test cases.
   Returns NULL if the suite itself cannot be created; individual test
   registration failures are only warned about. */
CU_pSuite
t_init_uri_tests(void) {
  CU_pSuite suite;

  suite = CU_add_suite("uri parser", NULL, NULL);
  if (!suite) { /* signal error */
    fprintf(stderr, "W: cannot add uri parser test suite (%s)\n",
      CU_get_error_msg());
    return NULL;
  }

/* Helper macro: add one test to the suite, warning (not failing) on error. */
#define URI_TEST(s,t) \
  if (!CU_ADD_TEST(s,t)) { \
    fprintf(stderr, "W: cannot add uri parser test (%s)\n", \
      CU_get_error_msg()); \
  }

  URI_TEST(suite, t_parse_uri1);
  URI_TEST(suite, t_parse_uri2);
  URI_TEST(suite, t_parse_uri3);
  URI_TEST(suite, t_parse_uri4);
  URI_TEST(suite, t_parse_uri5);
  URI_TEST(suite, t_parse_uri6);
  URI_TEST(suite, t_parse_uri7);
  URI_TEST(suite, t_parse_uri8);
  URI_TEST(suite, t_parse_uri9);
  URI_TEST(suite, t_parse_uri10);
  URI_TEST(suite, t_parse_uri11);
  URI_TEST(suite, t_parse_uri12);
  URI_TEST(suite, t_parse_uri13);

  return suite;
}
|
Tokey2018/Client_ios | tokeys-iOS/Extension/UIView+XYView.h | <gh_stars>0
//
// UIView+XYView.h
// tokeys-iOS
//
// Created by 杨卢银 on 2019/1/9.
// Copyright © 2019 杨卢银. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "UIImageView+TKImageView.h"
/**
 * Convenience category on UIView: shorthand frame-geometry accessors, a badge
 * helper, and rounded-corner / border / shadow styling helpers.
 */
@interface UIView (XYView)
/// The view's frame size.
@property (nonatomic, assign) CGSize size;
/// The view's frame origin.
@property (nonatomic, assign) CGPoint origin;
/**
 * View width
 */
@property (nonatomic,assign) CGFloat width;
/**
 * View height
 */
@property (nonatomic,assign) CGFloat height;
/**
 * View X coordinate
 */
@property (nonatomic,assign) CGFloat x;
/**
 * View Y coordinate
 */
@property (nonatomic,assign) CGFloat y;
/**
 * View X coordinate (maximum, i.e. right edge)
 */
@property (nonatomic,assign) CGFloat maxX;
/**
 * View Y coordinate (maximum, i.e. bottom edge)
 */
@property (nonatomic,assign) CGFloat maxY;
/**
 * Badge ("red dot") text
 */
@property (nonatomic,copy) NSString *badgeString;
/**
 * Make the view circular
 */
-(void)setRoundView;
/**
 * Round the view's corners with the given corner radius <angle = radius>
 */
-(void)setRoundViewByAngle:(float)angle;
/// Round only the given corners with the given corner radius.
-(void)setRoundViewByAngle:(float)angle byRoundingCorners:(UIRectCorner)corners;
//-(void)setShadow:(float)width color:(UIColor*)color;
//-(void)setShadowBorderWidth:(CGFloat)width borderColor:(UIColor*)color;
/**
 * Set the view's border
 */
-(void)setBorderWidth:(CGFloat)width borderColor:(UIColor*)color;
/**
 Set the view's shadow

 @param shadowRadius shadow corner radius
 @param opacity shadow opacity / line weight
 @param offset shadow offset
 @param color shadow color
 */
-(void)setShadowRadius:(CGFloat)shadowRadius opacity:(CGFloat)opacity offset:(CGSize)offset color:(UIColor*)color;
@end
|
CanadaHealthInfoway/message-builder | chi-maven-plugin/src/main/java/ca/infoway/messagebuilder/generator/lang/FieldDefinition.java | <filename>chi-maven-plugin/src/main/java/ca/infoway/messagebuilder/generator/lang/FieldDefinition.java
/**
* Copyright 2013 Canada Health Infoway, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Author: $LastChangedBy: tmcgrady $
* Last modified: $LastChangedDate: 2013-01-02 17:05:34 -0500 (Wed, 02 Jan 2013) $
* Revision: $LastChangedRevision: 6471 $
*/
package ca.infoway.messagebuilder.generator.lang;
import ca.infoway.messagebuilder.generator.util.ProgrammingLanguage;
/**
 * Describes a generated field of a message-part class: its names, the types
 * used internally versus in the public accessors, how its getter/setter bodies
 * are rendered, and how it maps onto the XML representation.
 */
interface FieldDefinition {

	boolean isWritable();
	String getFieldName();
	String getCapitalizedPropertyName();

	/**
	 * <p>Get the field type. The field type is used in internal field definition.
	 */
	String getFieldType();
	String getFieldImplementationType();

	/**
	 * <p>Get the property type. The property type is used in the public getter and setter.
	 */
	String getPropertyType();

	/**
	 * <p>Get the collection coded property element type. The collection coded property element type is used in the body of the public getter and setter.
	 */
	String getCollectionOfCodedPropertyElementType();
	String[] getXmlPathName();
	GetterBodyStyle getGetterBodyStyle();
	SetterBodyStyle getSetterBodyStyle();
	GetterBodyStyle getDerivedChoiceHasBodyStyle();
	String getInitializationArguments();

	/**
	 * <p>Get the implementation type of individual field elements.
	 *
	 * <p>If the field is a collapsed relationship, then the field type is a list, and
	 * this type is the implementation type of elements in the list. If the field
	 * is not a collapsed relationship, then the field type and the field element type are
	 * the same thing.
	 */
	String getFieldElementImplementationType();

	/**
	 * <p>If the field is a collapsed relationship, then the field type is a list, and
	 * this type is the type of elements in the list. If the field
	 * is not a collapsed relationship, then the field type and the field element type are
	 * the same thing. */
	String getFieldElementType();

	/**
	 * <p>Get the implementation type of individual field elements.
	 *
	 * <p>If the field is a collapsed relationship, then the field type is a list, and
	 * this type is the implementation type of elements in the list. If the field
	 * is not a collapsed relationship, then the field type and the field element type are
	 * the same thing.
	 */
	String getPropertyElementImplementationType();
	boolean isDerivedChoice();
	boolean isInitializedAtConstructionTime();
	void initializeContext(ClassNameManager manager, BaseRelationshipNameResolver resolver);
	void resetContext();
	BaseRelationship getBaseRelationship();
	ProgrammingLanguage getProgrammingLanguage();
}
|
Tsroc/esep-core-java | exercises/exercises/solutions/interfaces_solution/src/main/java/domain/Broker.java | package domain;
/**
 * Domain entity representing a broker.
 *
 * NOTE(review): currently an empty placeholder — presumably state and behavior
 * are to be added; confirm it is still needed.
 */
public class Broker {
}
|
Bradinz/Vanjaro.Platform | DesktopModules/Vanjaro/UXManager/Extensions/Block/SearchInput/Views/Setting/SearchInput.js | <filename>DesktopModules/Vanjaro/UXManager/Extensions/Block/SearchInput/Views/Setting/SearchInput.js
// AngularJS controller for the SearchInput block's settings panel inside the
// UXManager modal. Reads/writes block attributes on the currently selected
// element in the parent frame's VjEditor and re-renders the block on change.
app.controller('setting_searchinput', function ($scope, $attrs, $http, CommonSvc, SweetAlert) {
    var common = CommonSvc.getData($scope);
    $scope.loaded = false;
    // NOTE(review): this statement has no effect — presumably intended as a
    // declaration; CurrentSearch is actually assigned in onInit.
    $scope.CurrentSearch;
    $scope.onInit = function () {
        // '[L:SettingTitle]' is a localization token resolved server-side.
        $(".uxmanager-modal .modal-title", parent.document).html('[L:SettingTitle]');
        $scope.CurrentSearch = window.parent.VjEditor.getSelected();
        if ($scope.CurrentSearch != undefined) {
            // "data-block-global" defaults to true unless explicitly "false".
            $scope.ui.data.Global.Value = $scope.CurrentSearch.attributes.attributes["data-block-global"] == "false" ? false : true;
            if ($scope.ui.data.Global.Value) {
                // Global blocks take their template from the shared config.
                $scope.ui.data.Template.Value = $scope.ui.data.GlobalConfigs.Options["data-block-template"];
            }
            else {
                $scope.ui.data.Template.Value = $scope.CurrentSearch.attributes.attributes["data-block-template"];
            }
        }
        $scope.ui.data.IsAdmin.Value = ($scope.ui.data.IsAdmin.Value != '' && $scope.ui.data.IsAdmin.Value == 'true') ? true : false;
        $scope.loaded = true;
    };
    // Persist attributes (globally if the block is global) and re-render.
    $scope.ApplyChanges = function (searchinput) {
        if ($scope.ui.data.Global.Value) {
            common.webApi.post('search/update', '', searchinput.attributes.attributes).then(function () {
                window.parent.RenderBlock(searchinput);
            });
        }
        else
            window.parent.RenderBlock(searchinput);
    };
    // Re-apply when the chosen template changes (skip the initial undefined).
    $scope.$watch('ui.data.Template.Value', function (newValue, oldValue) {
        if (newValue != undefined && oldValue != undefined) {
            var searchinput = window.parent.VjEditor.getSelected();
            searchinput.addAttributes({ 'data-block-template': newValue });
            $scope.ApplyChanges(searchinput);
        }
    });
    // Re-apply when the global flag is toggled (stored as a string attribute).
    $scope.$watch('ui.data.Global.Value', function (newValue, oldValue) {
        if (newValue != undefined && oldValue != undefined) {
            var searchinput = window.parent.VjEditor.getSelected();
            if (newValue)
                searchinput.addAttributes({ 'data-block-global': 'true' });
            else
                searchinput.addAttributes({ 'data-block-global': 'false' });
            $scope.ApplyChanges(searchinput);
        }
    });
});
Juppi88/mgui | Skin/SkinSimple.h | <reponame>Juppi88/mgui
/**********************************************************************
*
* PROJECT: Mylly GUI
* FILE: SkinSimple.h
* LICENCE: See Licence.txt
* PURPOSE: An implementation of a basic, textureless skin.
*
* (c) <NAME> 2012-13
*
**********************************************************************/
#pragma once
#ifndef __MYLLY_GUI_SKINSIMPLE_H
#define __MYLLY_GUI_SKINSIMPLE_H
#include "Skin.h"
/* Build and return the basic, textureless skin.
 * NOTE(review): ownership/lifetime of the returned MGuiSkin is not visible
 * from this header -- confirm in the implementation before freeing. */
MGuiSkin* mgui_setup_skin_simple( void );
#endif /* __MYLLY_GUI_SKINSIMPLE_H */
|
msansone73/spring-cloud-mystocks | stock-db-service/src/main/java/br/com/msansone/api/stockdbservice/repository/TransactionRepository.java | <filename>stock-db-service/src/main/java/br/com/msansone/api/stockdbservice/repository/TransactionRepository.java
package br.com.msansone.api.stockdbservice.repository;
import org.springframework.data.jpa.repository.JpaRepository;
import br.com.msansone.api.stockdbservice.model.Transaction;
/**
 * Spring Data JPA repository for {@code Transaction} entities keyed by
 * {@code Long}. All CRUD and paging operations are inherited from
 * {@code JpaRepository}; no custom query methods are declared.
 */
public interface TransactionRepository extends JpaRepository<Transaction, Long>{
}
|
lyric-com/idol | test/src/lib/idol/js/codegen/tests/abs/three/SideImport2.js | <filename>test/src/lib/idol/js/codegen/tests/abs/three/SideImport2.js
// DO NOT EDIT
// This file was generated by idol_js, any changes will be overwritten when idol_js is run again.
import { Primitive, Struct } from "../../../__idol__";
// NOTE(review): this file is generated by idol_js (see header) -- any manual
// change will be overwritten; edit the idol schema instead.
export class TestsAbsThreeSideImport2 {
  constructor(val) {
    // Wraps the raw payload; accessors below view/convert it on demand.
    this._original = val;
  }
  // These methods are implemented via the runtime, stubs exist here for reference.
  static validate(val) {}
  static isValid(val) {
    return true;
  }
  static expand(val) {
    return val;
  }
  static unwrap(val) {
    return val;
  }
  static wrap(val) {
    return null;
  }
  // snake_case accessor backed by the payload's "side_import2" int field.
  get side_import2() {
    return Primitive.of("int").wrap(this._original["side_import2"]);
  }
  set side_import2(val) {
    this._original["side_import2"] = Primitive.of("int").unwrap(val);
  }
  // camelCase alias for side_import2.
  get sideImport2() {
    return this.side_import2;
  }
  set sideImport2(val) {
    this.side_import2 = val;
  }
}
// Registers the struct's field metadata with the idol runtime.
Struct(TestsAbsThreeSideImport2, [
  { fieldName: "side_import2", type: Primitive.of("int"), optional: false }
]);
|
jiangshide/zd112_android | app/src/main/java/com/android/zd112/data/CircleListData.java | package com.android.zd112.data;
/**
* Created by etongdai on 2018/3/20.
*/
/**
 * Plain data holder for one entry of the "circle" list.
 *
 * <p>Public mutable fields are kept as-is: callers elsewhere access them
 * directly (common with Gson/JSON binding), so encapsulating them would
 * break the existing interface.
 */
public class CircleListData {
    public String name;  // display name of the entry
    public String url;   // link/content URL -- exact semantics not visible here, TODO confirm
    public String thumb; // thumbnail image URL -- TODO confirm against usage
}
|
simplymichael/express-user-manager | __tests__/databases/database.test.js | const chai = require('chai');
const spies = require('chai-spies');
const env = require('../../src/dotenv');
let users = require('./_test-users.json');
const { getValidUserId } = require('../_utils');
const database = require('../../src/databases');
const chaiAsPromised = require('chai-as-promised');
const userModule = require('../../src/user-module');
const { expect } = chai;
const usersBackup = users.slice();
const db = database.getAdapter(env.DB_ADAPTER);
// Return one element of `array`, chosen uniformly at random.
// For an empty array this yields `undefined` (index 0 of []), matching the
// original behavior.
function getRandomData(array) {
  const randomIndex = Math.floor(Math.random() * array.length);
  return array[randomIndex];
}
// Insert every fixture in `users` into the database, copying the generated
// `id` and `signupDate` back onto the in-memory fixture objects, then invoke
// `cb` exactly once after the last insert resolves.
// NOTE(review): the async forEach callbacks run concurrently; completion is
// detected by counting, not by order.
function createTestUsers(cb) {
  let counter = 0;
  users.forEach(async userData => {
    const user = await db.createUser(userData);
    // Locate the same fixture object and stamp the DB-assigned fields on it.
    const targetUser = users.find(curruser => curruser === userData);
    targetUser.id = user.id;
    targetUser.signupDate = user.signupDate;
    counter++;
    if(counter === users.length) {
      cb();
    }
  });
}
// Delete every fixture user by the id stamped on it by createTestUsers,
// invoking `cb` exactly once after the final delete resolves. Deletions run
// concurrently; a counter detects completion.
function deleteTestUsers(cb) {
  let counter = 0;
  users.forEach(async user => {
    await db.deleteUser(user.id);
    counter++;
    if(counter === users.length) {
      cb();
    }
  });
}
// Register the chai plugins used throughout this suite.
chai.use(spies);
chai.use(chaiAsPromised);
let connection = null;
// Open one shared DB connection for the whole suite, configured from env.
before(async function() {
  connection = await db.connect({
    host: env.DB_HOST,
    port: env.DB_PORT,
    user: env.DB_USERNAME,
    pass: env.DB_PASSWORD,
    engine: env.DB_ENGINE,
    dbName: env.DB_DBNAME,
    storagePath: env.DB_STORAGE_PATH,
    debug: env.DB_DEBUG,
    exitOnFail: env.EXIT_ON_DB_CONNECT_FAIL,
  });
});
// Tear the shared connection down after the entire suite has run.
after(async function() {
  await db.disconnect();
  connection = null;
});
describe('Users', () => {
describe('Create User', () => {
it('should reject with a VALIDATION_ERROR if "firstname" is missing', () => {
const userData = { ...getRandomData(users) };
delete userData.firstname;
return expect(db.createUser(userData)).to.eventually
.be.rejected
.and.to.be.an.instanceOf(Object)
.and.to.have.property('type', 'VALIDATION_ERROR');
});
it('should reject with a VALIDATION_ERROR if "lastname" is missing', () => {
const userData = { ...getRandomData(users) };
delete userData.lastname;
return expect(db.createUser(userData)).to.eventually
.be.rejected
.and.to.be.an.instanceOf(Object)
.and.to.have.property('type', 'VALIDATION_ERROR');
});
it('should reject with a VALIDATION_ERROR if "username" is missing', () => {
const userData = { ...getRandomData(users) };
delete userData.username;
return expect(db.createUser(userData)).to.eventually
.be.rejected
.and.to.be.an.instanceOf(Object)
.and.to.have.property('type', 'VALIDATION_ERROR');
});
it('should reject with a VALIDATION_ERROR if "email" is missing', () => {
const userData = { ...getRandomData(users) };
delete userData.email;
return expect(db.createUser(userData)).to.eventually
.be.rejected
.and.to.be.an.instanceOf(Object)
.and.to.have.property('type', 'VALIDATION_ERROR');
});
it('should reject with a VALIDATION_ERROR if "password" is missing', () => {
const userData = { ...getRandomData(users) };
delete userData.password;
return expect(db.createUser(userData)).to.eventually
.be.rejected
.and.to.be.an.instanceOf(Object)
.and.to.have.property('type', 'VALIDATION_ERROR');
});
it('should create a user and return an object when every value is supplied', async () => {
const userData = getRandomData(users);
const user = await db.createUser(userData);
expect(user).to.be.an('object');
expect(user).to.have.property('id');
expect(user).to.have.property('firstname').to.equal(userData.firstname);
expect(user).to.have.property('lastname').to.equal(userData.lastname);
expect(user).to.have.property('username').to.equal(userData.username);
expect(user).to.have.property('email').to.equal(userData.email);
expect(user).to.have.property('password');
expect(user).to.have.property('signupDate').to.be.instanceOf(Date);
await db.deleteUser(user.id);
});
it('userModule should emit a "createUser" event on user creation success', async () => {
const userData = getRandomData(users);
const spy = chai.spy.on(userModule, 'emit');
const user = await db.createUser(userData);
expect(user).to.be.an('object');
expect(spy).to.have.been.called.with('createUser');
chai.spy.restore();
await db.deleteUser(user.id);
});
it('should reject with an error if user with email already exists', async () => {
const userData = { ...getRandomData(users) };
userData.username = userData.username.split('').reverse().join('');
const spy = chai.spy.on(userModule, 'emit');
const user = await db.createUser(userData);
expect(spy).to.have.been.called.with('createUser');
expect(user).to.be.an('object');
expect(user).to.have.property('id');
expect(user).to.have.property('firstname').to.equal(userData.firstname);
expect(user).to.have.property('lastname').to.equal(userData.lastname);
expect(user).to.have.property('username').to.equal(userData.username);
expect(user).to.have.property('email').to.equal(userData.email);
expect(user).to.have.property('password');
expect(user).to.have.property('signupDate').to.be.instanceOf(Date);
try {
await db.createUser(userData);
} catch(err) {
expect(spy).to.not.have.been.called;
expect(err).to.be.an.an('object').to.have.property('type');
expect(['USER_EXISTS_ERROR', 'VALIDATION_ERROR']).to.include(err.type);
expect(err).to.have.property('error');
}
chai.spy.restore();
await db.deleteUser(user.id);
});
it('should reject with an error on duplicate username', async () => {
const userData = { ...getRandomData(users) };
userData.email = '<EMAIL>';
const spy = chai.spy.on(userModule, 'emit');
const user = await db.createUser(userData);
expect(spy).to.have.been.called.with('createUser');
expect(user).to.be.an('object');
expect(user).to.have.property('id');
expect(user).to.have.property('firstname').to.equal(userData.firstname);
expect(user).to.have.property('lastname').to.equal(userData.lastname);
expect(user).to.have.property('username').to.equal(userData.username);
expect(user).to.have.property('email').to.equal(userData.email);
expect(user).to.have.property('password');
expect(user).to.have.property('signupDate').to.be.instanceOf(Date);
try {
await db.createUser(userData);
} catch(err) {
expect(spy).to.not.have.been.called;
expect(err).to.be.an.an('object').to.have.property('type');
expect(['USER_EXISTS_ERROR', 'VALIDATION_ERROR']).to.include(err.type);
expect(err).to.have.property('error');
}
chai.spy.restore();
await db.deleteUser(user.id);
});
});
describe('Get Users', () => {
beforeEach(function(done) {
createTestUsers(done);
});
afterEach(function(done) {
deleteTestUsers(function() {
users = usersBackup;
done();
});
});
it('should return every user if no filters are specified', async () => {
const firstnames = usersBackup.map(user => user.firstname);
const lastnames = usersBackup.map(user => user.lastname);
const usernames = usersBackup.map(user => user.username);
const emails = usersBackup.map(user => user.email);
const result = await db.getUsers();
expect(result).to.be.an('object');
expect(result).to.have.property('total').to.equal(users.length);
expect(result).to.have.property('length').to.equal(users.length);
expect(result).to.have.property('users').to.be.an('array');
const fetchedUsers = result.users;
expect(fetchedUsers.length).to.equal(users.length);
fetchedUsers.forEach(user => {
expect(user).to.be.an('object');
expect(user).to.have.property('id');
expect(user).to.have.property('firstname').to.be.a('string');
expect(firstnames).to.include(user.firstname);
expect(user).to.have.property('lastname').to.be.a('string');
expect(lastnames).to.include(user.lastname);
expect(user).to.have.property('username').to.be.a('string');
expect(usernames).to.include(user.username);
expect(user).to.have.property('email').to.be.a('string');
expect(emails).to.include(user.email);
expect(user).to.have.property('signupDate').to.be.instanceOf(Date);
});
});
it('should return users with the specified firstname filter', async () => {
const user = getRandomData(users);
const firstname = user.firstname;
const matches = users.filter(user => user.firstname === firstname);
const result = await db.getUsers({ firstname });
expect(result).to.be.an('object');
expect(result).to.have.property('total').to.equal(matches.length);
expect(result).to.have.property('length').to.equal(matches.length);
expect(result).to.have.property('users').to.be.an('array');
const fetchedUsers = result.users;
expect(fetchedUsers).to.be.an('array');
expect(fetchedUsers.length).to.equal(matches.length);
fetchedUsers.forEach(user => {
expect(user).to.be.an('object');
expect(user).to.have.property('id');
expect(user).to.have.property('firstname').to.be.a('string');
expect(user.firstname).to.equal(firstname);
expect(user).to.have.property('lastname').to.be.a('string');
expect(user).to.have.property('username').to.be.a('string');
expect(user).to.have.property('email').to.be.a('string');
expect(user).to.have.property('signupDate').to.be.instanceOf(Date);
});
});
it('should return users with the specified lastname filter', async () => {
const user = getRandomData(users);
const lastname = user.lastname;
const matches = users.filter(user => user.lastname === lastname);
const result =await db.getUsers({ lastname });
expect(result).to.be.an('object');
expect(result).to.have.property('total').to.equal(matches.length);
expect(result).to.have.property('length').to.equal(matches.length);
expect(result).to.have.property('users').to.be.an('array');
const fetchedUsers = result.users;
expect(fetchedUsers).to.be.an('array');
expect(fetchedUsers.length).to.equal(matches.length);
fetchedUsers.forEach(user => {
expect(user).to.be.an('object');
expect(user).to.have.property('id');
expect(user).to.have.property('firstname').to.be.a('string');
expect(user).to.have.property('lastname').to.be.a('string');
expect(user.lastname).to.equal(lastname);
expect(user).to.have.property('username').to.be.a('string');
expect(user).to.have.property('email').to.be.a('string');
expect(user).to.have.property('signupDate').to.be.instanceOf(Date);
});
});
});
describe('Search Users', () => {
beforeEach(function(done) {
createTestUsers(done);
});
afterEach(function(done) {
deleteTestUsers(function() {
users = usersBackup;
done();
});
});
// Shared shape assertion: a fetched user record must expose every account
// field, plus a Date-typed signupDate.
function assertOnUser(user) {
  expect(user).to.be.an('object');
  const requiredProps = ['id', 'firstname', 'lastname', 'username', 'email', 'password'];
  requiredProps.forEach(function (prop) {
    expect(user).to.have.property(prop);
  });
  expect(user).to.have.property('signupDate').to.be.instanceOf(Date);
}
it('should reject with an error if the "query" parameter is missing', () => {
return expect(db.searchUsers()).to.eventually
.be.rejectedWith(/Please specify the search term/);
});
it('should return no results if search by non-existent user details', async () => {
const users = await db.searchUsers({ query: 'here and there' });
expect(users).to.be.an('object');
expect(users).to.have.property('total').to.equal(0);
expect(users).to.have.property('length').to.equal(0);
expect(users).to.have.property('users').to.be.an('array')
.and.to.have.lengthOf(0);
});
it('should return matching results if "query" parameter matches registered users', async () => {
const searchTerm = 'Lanister';
const matchingUsers = users.filter(user => {
return (
user.firstname.indexOf(searchTerm) > -1 ||
user.lastname.indexOf(searchTerm) > -1 ||
user.username.indexOf(searchTerm) > -1 ||
user.email.indexOf(searchTerm) > -1
);
});
const matchingUsersLength = matchingUsers.length;
const result = await db.searchUsers({ query: searchTerm });
expect(result).to.be.an('object');
expect(result).to.have.property('total').to.equal(matchingUsersLength);
expect(result).to.have.property('length').to.equal(matchingUsersLength);
expect(result).to.have.property('users').to.be.an('array')
.and.to.have.lengthOf(matchingUsersLength);
result.users.forEach(user => {
assertOnUser(user);
expect([ user.firstname, user.lastname, user.username, user.email])
.to.include(searchTerm);
});
});
it('should search case-insensitively', async () => {
const searchTerm = 'Lan<PASSWORD>';
const matchingUsers = users.filter(user => {
return (
user.firstname.indexOf(searchTerm) > -1 ||
user.lastname.indexOf(searchTerm) > -1 ||
user.username.indexOf(searchTerm) > -1 ||
user.email.indexOf(searchTerm) > -1
);
});
const matchingUsersLength = matchingUsers.length;
const result = await db.searchUsers({
query: searchTerm.toLowerCase(),
});
expect(result).to.be.an('object');
expect(result).to.have.property('total').to.equal(matchingUsersLength);
expect(result).to.have.property('length').to.equal(matchingUsersLength);
expect(result).to.have.property('users').to.be.an('array')
.and.to.have.lengthOf(matchingUsersLength);
result.users.forEach(user => {
assertOnUser(user);
expect([ user.firstname, user.lastname, user.username, user.email])
.to.include(searchTerm);
});
});
it('should return no results if search by non-existent firstname', async () => {
const searchTerm = 'Lan<PASSWORD>';
const result = await db.searchUsers({
query: searchTerm,
by: 'firstname',
});
expect(result).to.be.an('object');
expect(result).to.have.property('total').to.equal(0);
expect(result).to.have.property('length').to.equal(0);
expect(result).to.have.property('users').to.be.an('array')
.and.to.have.lengthOf(0);
});
it('should return matching success data if search by existing firstname', async () => {
const searchTerm = 'Jamie';
const matchingUsers = users.filter(user => {
return user.firstname.indexOf(searchTerm) > -1;
});
const matchingUsersLength = matchingUsers.length;
const result = await db.searchUsers({
query: searchTerm,
by: 'firstname'
});
expect(result).to.be.an('object');
expect(result).to.have.property('total').to.equal(matchingUsersLength);
expect(result).to.have.property('length').to.equal(matchingUsersLength);
expect(result).to.have.property('users').to.be.an('array')
.and.to.have.lengthOf(matchingUsersLength);
result.users.forEach(user => {
assertOnUser(user);
expect([ user.firstname ]).to.include(searchTerm);
});
});
it('should return no results if search by non-existent lastname', async () => {
const searchTerm = 'Jamie';
const result = await db.searchUsers({
query: searchTerm,
by: 'lastname',
});
expect(result).to.be.an('object');
expect(result).to.have.property('total').to.equal(0);
expect(result).to.have.property('length').to.equal(0);
expect(result).to.have.property('users').to.be.an('array')
.and.to.have.lengthOf(0);
});
it('should return matching success data if search by existing lastname', async () => {
const searchTerm = 'Lanister';
const matchingUsers = users.filter(user => {
return user.lastname.indexOf(searchTerm) > -1;
});
const matchingUsersLength = matchingUsers.length;
const result = await db.searchUsers({
query: searchTerm,
by: 'lastname'
});
expect(result).to.be.an('object');
expect(result).to.have.property('total').to.equal(matchingUsersLength);
expect(result).to.have.property('length').to.equal(matchingUsersLength);
expect(result).to.have.property('users').to.be.an('array')
.and.to.have.lengthOf(matchingUsersLength);
result.users.forEach(user => {
assertOnUser(user);
expect([ user.lastname ]).to.include(searchTerm);
});
});
it('should return no results if search by non-existent username', async () => {
const searchTerm = 'Lanister';
const result = await db.searchUsers({
query: searchTerm,
by: 'username',
});
expect(result).to.be.an('object');
expect(result).to.have.property('total').to.equal(0);
expect(result).to.have.property('length').to.equal(0);
expect(result).to.have.property('users').to.be.an('array')
.and.to.have.lengthOf(0);
});
it('should return matching success data if search by existing username', async () => {
const searchTerm = 'kingslayer';
const matchingUsers = users.filter(user => {
return user.username.indexOf(searchTerm) > -1;
});
const matchingUsersLength = matchingUsers.length;
const result = await db.searchUsers({
query: searchTerm,
by: 'username'
});
expect(result).to.be.an('object');
expect(result).to.have.property('total').to.equal(matchingUsersLength);
expect(result).to.have.property('length').to.equal(matchingUsersLength);
expect(result).to.have.property('users').to.be.an('array')
.and.to.have.lengthOf(matchingUsersLength);
result.users.forEach(user => {
assertOnUser(user);
expect([ user.username ]).to.include(searchTerm);
});
});
// Fixed misleading test description: this case searches by the *email*
// field, but the title previously said "firstname" (copy-paste from the
// earlier firstname case).
it('should return no results if search by non-existent email', async () => {
  const searchTerm = 'k<PASSWORD>';
  const result = await db.searchUsers({
    query: searchTerm,
    by: 'email',
  });
  expect(result).to.be.an('object');
  expect(result).to.have.property('total').to.equal(0);
  expect(result).to.have.property('length').to.equal(0);
  expect(result).to.have.property('users').to.be.an('array')
    .and.to.have.lengthOf(0);
});
it('should return matching success data if search by existing email', async () => {
const searchTerm = 'arya';
const matchingUsers = users.filter(user => {
return user.email.indexOf(searchTerm) > -1;
});
const matchingUsersLength = matchingUsers.length;
const result = await db.searchUsers({
query: searchTerm,
by: 'email'
});
expect(result).to.be.an('object');
expect(result).to.have.property('total').to.equal(matchingUsersLength);
expect(result).to.have.property('length').to.equal(matchingUsersLength);
expect(result).to.have.property('users').to.be.an('array')
.and.to.have.lengthOf(matchingUsersLength);
result.users.forEach(user => {
assertOnUser(user);
expect(user.email).to.match(new RegExp(searchTerm, 'i'));
});
});
it('should return matching success data if search by more than one criteria, at least one matching', async () => {
const searchTerm = 'Lanister';
const matchingUsers = users.filter(user => {
return (
user.firstname.indexOf(searchTerm) > -1 ||
user.lastname.indexOf(searchTerm) > -1
);
});
const matchingUsersLength = matchingUsers.length;
const result = await db.searchUsers({
query: searchTerm,
by: 'firstname:lastname',
});
expect(result).to.be.an('object');
expect(result).to.have.property('total').to.equal(matchingUsersLength);
expect(result).to.have.property('length').to.equal(matchingUsersLength);
expect(result).to.have.property('users').to.be.an('array')
.and.to.have.lengthOf(matchingUsersLength);
result.users.forEach(user => {
assertOnUser(user);
expect([ user.firstname, user.lastname ]).to.include(searchTerm);
});
});
it('should return no results if search by more than one criteria, none matching', async () => {
const searchTerm = 'Johnson';
const result = await db.searchUsers({
query: searchTerm,
by: 'firstname:lastname:username:email',
});
expect(result).to.be.an('object');
expect(result).to.have.property('total').to.equal(0);
expect(result).to.have.property('length').to.equal(0);
expect(result).to.have.property('users').to.be.an('array')
.and.to.have.lengthOf(0);
});
it('should return a maximum of LIMIT users when "limit" is specified', async () => {
const LIMIT = 1;
const searchTerm = 'Lan<PASSWORD>';
const matchingUsers = users.filter(user => {
return (
user.firstname.indexOf(searchTerm) > -1 ||
user.lastname.indexOf(searchTerm) > -1 ||
user.username.indexOf(searchTerm) > -1 ||
user.email.indexOf(searchTerm) > -1
);
});
const matchingUsersLength = matchingUsers.length;
const result = await db.searchUsers({
query: searchTerm.toLowerCase(),
limit: LIMIT,
});
expect(result).to.be.an('object');
expect(result).to.have.property('total').to.equal(matchingUsersLength);
expect(result).to.have.property('length').to.equal(LIMIT);
expect(result).to.have.property('users').to.be.an('array')
.and.to.have.lengthOf(LIMIT);
result.users.forEach(user => {
assertOnUser(user);
expect([ user.firstname, user.lastname, user.username, user.email])
.to.include(searchTerm);
});
});
/*it('should sort by DESC signupDate if no sort order is specified', async () => {
const searchTerm = 'Lanister';
const matchingUsers = users.filter(user => {
return (
user.firstname.indexOf(searchTerm) > -1 ||
user.lastname.indexOf(searchTerm) > -1 ||
user.username.indexOf(searchTerm) > -1 ||
user.email.indexOf(searchTerm) > -1
);
});
const sortedUsers = matchingUsers.sort((a, b) => {
return new Date(b.signupDate) - new Date(a.signupDate);
});
const matchingUsersLength = matchingUsers.length;
const result = await db.searchUsers({ query: searchTerm });
expect(result).to.be.an('object');
expect(result).to.have.property('total').to.equal(matchingUsersLength);
expect(result).to.have.property('length').to.equal(matchingUsersLength);
expect(result).to.have.property('users').to.be.an('array')
.and.to.have.lengthOf(matchingUsersLength);
const fetchedUsers = result.users;
fetchedUsers.forEach(user => {
assertOnUser(user);
expect([ user.firstname, user.lastname, user.username, user.email])
.to.include(searchTerm);
});
for(let i = 0; i < fetchedUsers.length; i++) {
expect(getValidUserId(fetchedUsers[i].id)).to.equal(
getValidUserId(sortedUsers[i].id));
}
});
it('should sort by ASC signupDate if specified', async () => {
const searchTerm = 'Lanister';
const matchingUsers = users.filter(user => {
return (
user.firstname.indexOf(searchTerm) > -1 ||
user.lastname.indexOf(searchTerm) > -1 ||
user.username.indexOf(searchTerm) > -1 ||
user.email.indexOf(searchTerm) > -1
);
});
const sortedUsers = matchingUsers.sort((a, b) => {
return new Date(a.signupDate) - new Date(b.signupDate);
});
const matchingUsersLength = matchingUsers.length;
const result = await db.searchUsers({
query: searchTerm,
sort: 'signupDate:asc',
});
expect(result).to.be.an('object');
expect(result).to.have.property('total').to.equal(matchingUsersLength);
expect(result).to.have.property('length').to.equal(matchingUsersLength);
expect(result).to.have.property('users').to.be.an('array')
.and.to.have.lengthOf(matchingUsersLength);
const fetchedUsers = result.users;
fetchedUsers.forEach(user => {
assertOnUser(user);
expect([ user.firstname, user.lastname, user.username, user.email])
.to.include(searchTerm);
});
for(let i = 0; i < fetchedUsers.length; i++) {
expect(getValidUserId(fetchedUsers[i].id)).to.equal(
getValidUserId(sortedUsers[i].id));
}
});
it('should sort by DESC creationDate if invalid creationDate sort order is specified', async () => {
const searchTerm = '<PASSWORD>';
const matchingUsers = users.filter(post => {
return user.firstname.indexOf(searchTerm) > -1
});
const result = await db.searchUsers({
query: searchTerm,
orderBy: {
creationDate: 'order-can-only-be-ASC-or-DESC',
}
});
const sortedUsers = matchingUsers.sort((a, b) => {
return new Date(b.signupDate) - new Date(a.signupDate);
});
const matchingUsersLength = matchingUsers.length;
expect(result).to.be.an('object');
expect(result).to.have.property('total').to.equal(
matchingUsersLength);
expect(result).to.have.property('length').to.equal(
matchingUsersLength);
expect(result).to.have.property('users').to.be.an('array')
.and.to.have.lengthOf(matchingUsersLength);
const fetchedUsers = result.users;
retrievedPosts.forEach(post => {
});
for(let i = 0; i < fetchedUsers.length; i++) {
expect(fetchedUsers[i].id.toString()).to.equal(sortedUsers[i].id.toString());
}
});*/
});
describe('Get User By Id', () => {
beforeEach(function(done) {
createTestUsers(done);
});
afterEach(function(done) {
deleteTestUsers(function() {
users = usersBackup;
done();
});
});
it('should return no result for a non-existent user', async () => {
const user = getRandomData(users);
const invalidUserId = getValidUserId(user.id).split('').reverse().join('');
const nullUser = await db.findById(invalidUserId);
expect(nullUser).to.satisfy(function(user) {
return user === undefined || user === null;
});
});
it('should return a registered user by their id', async () => {
const user = getRandomData(users);
const foundUser = await db.findById(user.id);
expect(foundUser).to.be.an('object');
expect(foundUser).to.have.property('id');
expect(getValidUserId(foundUser.id)).to.equal(getValidUserId(user.id));
expect(foundUser).to.have.property('firstname').to.equal(user.firstname);
expect(foundUser).to.have.property('lastname').to.equal(user.lastname);
expect(foundUser).to.have.property('username').to.equal(user.username);
expect(foundUser).to.have.property('email').to.equal(user.email);
expect(foundUser).to.have.property('signupDate').to.be.instanceOf(Date);
expect(foundUser.signupDate.toString()).to.equal(user.signupDate.toString());
});
});
describe('Update User By Id', () => {
beforeEach(function(done) {
createTestUsers(done);
});
afterEach(function(done) {
deleteTestUsers(function() {
users = usersBackup;
done();
});
});
it('should update a registered user by their id', async () => {
const user = getRandomData(users);
let foundUser = null;
foundUser = await db.findById(user.id);
expect(foundUser).to.be.an('object');
expect(foundUser).to.have.property('id');
expect(getValidUserId(foundUser.id)).to.equal(getValidUserId(user.id));
expect(foundUser).to.have.property('firstname').to.equal(user.firstname);
expect(foundUser).to.have.property('lastname').to.equal(user.lastname);
expect(foundUser).to.have.property('username').to.equal(user.username);
expect(foundUser).to.have.property('email').to.equal(user.email);
expect(foundUser).to.have.property('signupDate').to.be.instanceOf(Date);
expect(foundUser.signupDate.toString()).to.equal(user.signupDate.toString());
const updateData = {
firstname: 'updatedFirstname',
lastname: 'updatedLastname',
email: '<EMAIL>',
username: 'updatedUsername',
};
await db.updateUser(user.id, updateData);
foundUser = await db.findById(user.id);
expect(foundUser).to.be.an('object');
expect(foundUser).to.have.property('id');
expect(getValidUserId(foundUser.id)).to.equal(getValidUserId(user.id));
expect(foundUser).to.have.property('firstname').to.equal(updateData.firstname);
expect(foundUser).to.have.property('lastname').to.equal(updateData.lastname);
expect(foundUser).to.have.property('username').to.equal(updateData.username);
expect(foundUser).to.have.property('email').to.equal(updateData.email);
expect(foundUser).to.have.property('signupDate').to.be.instanceOf(Date);
expect(foundUser.signupDate.toString()).to.equal(user.signupDate.toString());
});
});
describe('Delete User By Id', () => {
beforeEach(function(done) {
createTestUsers(done);
});
afterEach(function(done) {
deleteTestUsers(function() {
users = usersBackup;
done();
});
});
it('should delete a registered user by their id', async () => {
const user = getRandomData(users);
let foundUser = null;
foundUser = await db.findById(user.id);
expect(foundUser).to.be.an('object');
expect(foundUser).to.have.property('id');
expect(getValidUserId(foundUser.id)).to.equal(getValidUserId(user.id));
expect(foundUser).to.have.property('firstname').to.equal(user.firstname);
expect(foundUser).to.have.property('lastname').to.equal(user.lastname);
expect(foundUser).to.have.property('username').to.equal(user.username);
expect(foundUser).to.have.property('email').to.equal(user.email);
expect(foundUser).to.have.property('signupDate').to.be.instanceOf(Date);
expect(foundUser.signupDate.toString()).to.equal(user.signupDate.toString());
await db.deleteUser(user.id);
foundUser = await db.findById(user.id);
expect(foundUser).to.satisfy(function(user) {
return user === undefined || user === null;
});
});
});
});
|
tycoer/rfvision-1 | rfvision/core/bbox/match_costs/builder.py | from rflib.utils import Registry, build_from_cfg
MATCH_COST = Registry('Match Cost')
def build_match_cost(cfg, default_args=None):
    """Build a match-cost object from a config dict.

    Looks up ``cfg['type']`` in the ``MATCH_COST`` registry and instantiates
    it.  (The previous docstring, "Builder of IoU calculator", was a
    copy-paste error from a sibling builder.)

    Args:
        cfg (dict): Config with a ``type`` key naming a registered match cost.
        default_args (dict, optional): Default kwargs merged into ``cfg``.

    Returns:
        The instantiated match-cost object.
    """
    return build_from_cfg(cfg, MATCH_COST, default_args)
|
instantlinux/apicrud-ui | src/ra-views/picture.js | <gh_stars>1-10
// created 2-feb-2020 by <NAME> <<EMAIL>>
import React from 'react';
import { AutocompleteInput, Create, Edit, ImageField, NumberInput,
ReferenceInput, SelectInput, SimpleForm,
TextInput } from 'react-admin';
import { privacyChoices } from '../lib/constants';
import { MediaInput } from '../lib/media';
import { validateNameShort, validate64String } from '../lib/validate';
// Creation form for a picture record: name, media upload, caption,
// privacy, storage path, category reference, and a fixed status field.
export const pictureCreate = props => (
  <Create {...props}>
    <SimpleForm>
      <TextInput source='name' validate={validateNameShort} />
      <MediaInput />
      <TextInput multiline source='caption' />
      {/* privacy defaults to the most restrictive choice */}
      <AutocompleteInput source='privacy' choices={privacyChoices}
        defaultValue='secret' />
      <TextInput source='path' validate={validate64String} />
      <ReferenceInput source='category_id' reference='category' >
        <SelectInput optionText='name' />
      </ReferenceInput>
      {/* status is server-managed; shown read-only here */}
      <TextInput disabled source='status' defaultValue='active' />
    </SimpleForm>
  </Create>
);
// Edit form for a picture record. Orientation/size/height/width/model are
// read-only metadata (presumably extracted from the image's EXIF data by
// the backend — confirm against the upload pipeline).
export const pictureEdit = props => (
  <Edit {...props}>
    <SimpleForm>
      <TextInput source='name' validate={validateNameShort} />
      <ImageField source='thumbnail50x50' label='Thumbnail' />
      <TextInput multiline source='caption' />
      <AutocompleteInput source='privacy' choices={privacyChoices} />
      <ReferenceInput source='category_id' reference='category' >
        <SelectInput optionText='name' />
      </ReferenceInput>
      {/* numeric ids match common EXIF orientation codes */}
      <SelectInput source='orientation' disabled choices={[
        { id: 1, name: 'normal' },
        { id: 3, name: 'inverted' },
        { id: 5, name: '90 clockwise' },
        { id: 7, name: '90 counterclockwise' },
      ]} />
      <NumberInput source='size' label='File size' disabled />
      <NumberInput source='height' disabled />
      <NumberInput source='width' disabled />
      <TextInput source='model' disabled />
      <MediaInput />
    </SimpleForm>
  </Edit>
);
// Default export is the create view.
export default pictureCreate;
|
dperl-sol/cctbx_project | boost_adaptbx/graph/connected_component_algorithm.py | <reponame>dperl-sol/cctbx_project
from __future__ import absolute_import, division, print_function
import boost_adaptbx.boost.python as bp
ext = bp.import_ext( "boost_adaptbx_graph_connected_component_algorithm_ext" )
def connected_components(graph):
    """Group vertex descriptors of ``graph`` by connected component.

    Returns a list of descriptor lists, one per component, in first-seen
    order of the component labels reported by the Boost extension.
    """
    by_component = {}
    for descriptor, component in ext.connected_components(graph=graph):
        bucket = by_component.get(component)
        if bucket is None:
            bucket = by_component[component] = []
        bucket.append(descriptor)
    return list(by_component.values())
|
manfredsteyer/2016-12-14 | src/app/app.constants.js | "use strict";
var core_1 = require("@angular/core");
exports.BASE_URL = new core_1.OpaqueToken("BASE_URL");
//# sourceMappingURL=app.constants.js.map |
iot-dsa-v2/sdk-dslink-cpp | src/broker/module/authorizer/permission_nodes.cc | <filename>src/broker/module/authorizer/permission_nodes.cc<gh_stars>1-10
#include "dsa_common.h"
#include "permission_nodes.h"
#include "module/logger.h"
#include "module/stream_acceptor.h"
#include "responder/node_state.h"
#include "responder/value_node_model.h"
#include "util/string.h"
#include "util/string_encode.h"
namespace dsa {
static const string_ DEFAULT_ROLE_NAME = "default";
// Root node holding one child PermissionRoleNode per role; role data is
// persisted in the strand's "roles" storage bucket.
PermissionRoleRootNode::PermissionRoleRootNode(const LinkStrandRef &strand)
    : NodeModel(strand),
      _storage(_strand->storage().get_strand_bucket("roles", _strand)) {}

void PermissionRoleRootNode::destroy_impl() {
  // release the storage bucket before tearing down the base node
  _storage.reset();
  NodeModel::destroy_impl();
}
// Load every persisted role from the "roles" bucket and create a child node
// per entry; the completion callback then guarantees a "default" role exists
// even when storage was empty.
void PermissionRoleRootNode::initialize() {
  NodeModel::initialize();
  _storage->read_all(  //
      [ this, keepref = get_ref() ](const string_ &key,
                                    std::vector<uint8_t> data,
                                    BucketReadStatus read_status) mutable {
        if (PathData::invalid_name(key)) {
          return;
        }
        Var map = Var::from_json(reinterpret_cast<const char *>(data.data()),
                                 data.size());
        if (map.is_map()) {
          // add a child dslink node
          ref_<PermissionRoleNode> child;
          if (key == DEFAULT_ROLE_NAME) {
            // default role gets no profile; it is specialized later in
            // init_default_role_node()
            child = make_ref_<PermissionRoleNode>(_strand, get_ref(), nullptr);
          } else {
            child = make_ref_<PermissionRoleNode>(
                _strand, get_ref(),
                _strand->stream_acceptor().get_profile("broker/permission-role",
                                                       true));
          }
          child->load(map.get_map());
          add_list_child(key, child->get_ref());
        }
      },
      [ this, keepref = get_ref() ]() {
        if (_list_children.count(DEFAULT_ROLE_NAME) > 0) {
          // check existing default role
          _default_role.reset(dynamic_cast<PermissionRoleNode *>(
              _list_children[DEFAULT_ROLE_NAME].get()));
        }
        if (_default_role == nullptr) {
          // no persisted default role: create one with level CONFIG
          _default_role =
              make_ref_<PermissionRoleNode>(_strand, get_ref(), nullptr);
          _default_role->set_value_lite(Var(PermissionName::CONFIG));
          _default_role->_default_level = PermissionLevel ::CONFIG;
          add_list_child(DEFAULT_ROLE_NAME, _default_role->get_ref());
        }
        init_default_role_node();
      });
}
// default role node doesn't use the Permission_Role profile, and needs to be
// handled separately
void PermissionRoleRootNode::init_default_role_node() {
  if (_default_role == nullptr) {
    return;
  }
  // expose the default level as an editable enum-of-strings value
  _default_role->update_property("$type", Var("string"));
  _default_role->update_property("$editor",
                                 Var("enum[,none,list,read,write,config]"));
  if (_default_role->_default_level == PermissionLevel::INVALID) {
    _default_role->_default_level = PermissionLevel::NONE;
    _default_role->set_value_lite(Var(PermissionName::NONE));
  }
  // copy a profile node into list children
  // since the profile node is already in state tree
  // its state will still be the original one under Pub node
  _default_role->add_list_child("add-rule",
                                _strand->stream_acceptor().get_profile(
                                    "broker/permission-role/add-rule", true));
  // remove fallback node — the default role terminates every fallback
  // chain, so it must not have a fallback of its own
  _default_role->remove_list_child("fallback");
}
// Role node: holds a default permission level, an ordered rule map, and an
// optional named fallback role consulted when no rule matches.
PermissionRoleNode::PermissionRoleNode(const LinkStrandRef &strand,
                                       ref_<PermissionRoleRootNode> &&parent,
                                       ref_<NodeModel> &&profile)
    : NodeModel(strand, std::move(profile), PermissionLevel::CONFIG),
      _parent(std::move(parent)) {
  // "fallback" child: a string value node naming the role to consult when
  // this role has no matching rule and no default level.
  _fallback_name_node.reset(new ValueNodeModel(
      _strand, "string",
      [ this, keepref = get_ref() ](const Var &v)->StatusDetail {
        if (v.is_string()) {
          if (v.get_string() != _fallback_name) {
            // BUG FIX: was `_fallback_name = _fallback_name;` (a
            // self-assignment), which silently discarded the new fallback
            // name while still invalidating the cache and saving.
            _fallback_name = v.get_string();
            _fallback_role_cache.reset();
            save_role();
          }
          return Status::DONE;
        }
        return Status::INVALID_PARAMETER;
      }));
  add_list_child("fallback", std::move(_fallback_name_node));
}
PermissionRoleNode::~PermissionRoleNode() = default;

// Resolve the permission level for `path`. Rules are scanned in reverse so
// that later (presumably more specific, lexicographically larger) prefixes
// win — assumes _rules is an ordered map; confirm its declaration.
// `loop_protect` bounds recursion through the fallback chain.
PermissionLevel PermissionRoleNode::get_permission(const string_ &path,
                                                   int loop_protect) {
  for (auto it = _rules.rbegin(); it != _rules.rend(); ++it) {
    if (str_starts_with(path, it->first)) {
      // match full path, or match as a folder prefix
      if (path.size() == it->first.size() || path[it->first.size()] == '/') {
        return it->second;
      }
    }
  }
  if (_default_level != PermissionLevel::INVALID) {
    return _default_level;
  }
  if (loop_protect <= 0) {
    LOG_ERROR(
        __FILENAME__,
        LOG << "find loop in permission role fallback chain, ends at role: "
            << _fallback_name);
    return PermissionLevel ::NONE;
  }
  return get_fallback_role()->get_permission(path, loop_protect - 1);
}
// Return the role consulted when this role cannot answer a query.
// Resolution order: named fallback role, then the root's default role.
// The result is cached until the cached node is destroyed.
ref_<PermissionRoleNode> &PermissionRoleNode::get_fallback_role() {
  if (_fallback_role_cache == nullptr || _fallback_role_cache->is_destroyed()) {
    _fallback_role_cache = nullptr;
    auto role = _parent->get_child(_fallback_name);
    if (role != nullptr) {
      _fallback_role_cache.reset(
          dynamic_cast<PermissionRoleNode *>(role.get()));
    }
    if (_fallback_role_cache == nullptr) {
      _fallback_role_cache = _parent->_default_role;
    }
    if (_fallback_role_cache == this) {
      // self-reference would loop; get_permission's loop_protect still
      // bounds the recursion, so only log here
      LOG_ERROR(__FILENAME__,
                LOG << "reference self as fallback role: " << _fallback_name);
    }
  }
  return _fallback_role_cache;
}

void PermissionRoleNode::destroy_impl() {
  // break reference cycles with parent/fallback before base teardown
  _parent.reset();
  _fallback_name_node.reset();
  _fallback_role_cache.reset();
  NodeModel::destroy_impl();
}
// Handles a set-value request on the role node itself: the value is the
// role's default permission level as a string (empty string clears it).
StatusDetail PermissionRoleNode::on_set_value(MessageValue &&value) {
  if (value.value.is_string()) {
    const string_ &str = value.value.get_string();
    auto level = PermissionName::parse(str);
    if (level != PermissionLevel::INVALID || str.empty()) {
      if (level != _default_level) {
        _default_level = level;
        auto result = NodeModel::on_set_value(std::move(value));
        // persist the new default level for this role
        save(*_parent->_storage, _state->get_path().node_name(), false, true);
        // return by value so NRVO/implicit move applies;
        // `return std::move(result)` would inhibit copy elision
        return result;
      }
      // NOTE(review): setting the same level falls through to
      // INVALID_PARAMETER below — presumably intentional (no-op rejected),
      // but worth confirming.
    }
  }
  return Status::INVALID_PARAMETER;
}
// Persist this role under its node name in the root's storage bucket.
void PermissionRoleNode::save_role() const {
  save(*_parent->_storage, _state->get_path().node_name(), false, true);
}

// Serialize the fallback name and the path->level rule map into `map`.
// Keys are prefixed with ':' (":fallback", ":rules") — presumably to avoid
// clashing with ordinary node properties.
void PermissionRoleNode::save_extra(VarMap &map) const {
  if (!_fallback_name.empty()) {
    map[":fallback"] = _fallback_name;
  }
  auto rules = make_ref_<VarMap>();
  for (auto it : _rules) {
    (*rules)[it.first] = Var(PermissionName::convert(it.second));
  }
  map[":rules"] = std::move(rules);
}
// Inverse of save_extra: restore default level, fallback name, and the rule
// map, creating one PermissionRuleNode child per valid rule entry.
void PermissionRoleNode::load_extra(VarMap &map) {
  if (_cached_value != nullptr) {
    // the node's own value carries the default level
    _default_level =
        PermissionName::parse(_cached_value->get_value().value.to_string());
  }
  if (map.count(":fallback") > 0 && map[":fallback"].is_string()) {
    _fallback_name = map[":fallback"].get_string();
  } else {
    _fallback_name = "";
  }
  if (map.count(":rules") > 0 && map[":rules"].is_map()) {
    auto &rules = map[":rules"].get_map();
    for (auto &it : rules) {
      if (it.second.is_string()) {
        auto &str = it.second.get_string();
        auto level = PermissionName::parse(str);
        if (level != PermissionLevel::INVALID) {
          _rules[it.first] = level;
          // create a rule node for it
          auto rule = make_ref_<PermissionRuleNode>(
              _strand, get_ref(),
              _strand->stream_acceptor().get_profile("broker/permission-rule",
                                                     true));
          rule->_path = it.first;
          rule->_level = level;
          rule->set_value_lite(Var(str));
          // node names must be URL-safe, so the raw path is encoded
          add_list_child(url_encode_node_name(it.first),
                         ref_<NodeModelBase>(rule));
        } else {
          LOG_ERROR(__FILENAME__,
                    LOG << "find invalid permission rule during loading: "
                        << it.first << " : " << str);
        }
      }
    }
  }
}
// A single path->level rule owned by a PermissionRoleNode.
PermissionRuleNode::PermissionRuleNode(const LinkStrandRef &strand,
                                       ref_<PermissionRoleNode> &&role,
                                       ref_<NodeModel> &&profile)
    : NodeModel(strand, std::move(profile)), _role(std::move(role)) {}

void PermissionRuleNode::destroy_impl() {
  // release the owning role before base teardown
  _role.reset();
  NodeModel::destroy_impl();
}

// Setting the rule's value changes its permission level and persists the
// owning role. NOTE(review): like the role node, setting the same level
// again falls through to INVALID_PARAMETER — confirm this is intended.
StatusDetail PermissionRuleNode::on_set_value(MessageValue &&value) {
  if (value.value.is_string()) {
    auto level = PermissionName::parse(value.value.get_string());
    if (level != PermissionLevel::INVALID) {
      if (level != _level) {
        _level = level;
        _role->_rules[_path] = _level;
        _role->save_role();
        return NodeModel::on_set_value(std::move(value));
      }
    }
  }
  return Status::INVALID_PARAMETER;
}
} // namespace dsa
|
dumitru-tap-at-endava/porta | config/abilities/switches.rb | # frozen_string_literal: true
# Grants or denies feature "switches" for the current user's account.
Ability.define do |user|
  if user && (account = user.account)
    provider = account.provider_account
    # Buyers inherit switch settings from their provider account;
    # providers use their own settings.
    settings = if account.buyer?
                 provider.settings
               else
                 account.settings
               end
    # :see means buyer can use it (see buyer_any.rb)
    # :admin means provider can see the upgrade notices (see provider_admin.rb)
    # :manage means provider can show and hide it
    # Switches include: account_plans service_plans finance require_cc_on_signup
    # multiple_services multiple_applications multiple_users skip_email_engagement_footer
    # groups branding web_hooks iam_tools
    settings.switches.each do |name, switch|
      if can?(:admin, name) && switch.allowed?
        # Master on-premises accounts never get the plan-related switches.
        if account.master_on_premises? && [:account_plans, :service_plans].include?(name)
          cannot %i[see admin manage], name
        else
          can :manage, name
        end
      end
    end
  end
end
|
janforp/spring | proxy/src/main/java/com/janita/proxy/aop2/JdkDynamicProxy2.java | package com.janita.proxy.aop2;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.Objects;
/**
* JdkDynamicProxy2
*
* @author zhucj
* @since 20210225
*/
/**
 * JDK dynamic-proxy invocation handler that logs each call's argument count
 * and return value while delegating to the wrapped target object.
 *
 * @author zhucj
 * @since 20210225
 */
public class JdkDynamicProxy2 implements InvocationHandler {

    /** The proxied (wrapped) target object; never null. */
    private final Object target;

    public JdkDynamicProxy2(Object target) {
        // Fail fast instead of deferring the NPE to getProxy()/invoke().
        this.target = Objects.requireNonNull(target, "target");
    }

    @Override
    public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
        // args is null (not an empty array) when the method takes no arguments.
        System.out.println("方法的参数个数为:" + (args != null ? args.length : 0));
        Object invoke = method.invoke(target, args);
        System.out.println("方法的返回值 :" + (invoke != null ? invoke : "空"));
        return invoke;
    }

    /**
     * Builds the proxy instance implementing every interface of the target;
     * all interface calls are routed through {@link #invoke}.
     *
     * @return the proxy object
     */
    public Object getProxy() {
        return Proxy.newProxyInstance(
                target.getClass().getClassLoader(),  // defining class loader
                target.getClass().getInterfaces(),   // interfaces the proxy implements
                this);                               // dispatch all calls to this handler
    }
}
|
ritabc/rails-decision-log | db/migrate/20190219233935_rename_role_type_column.rb | class RenameRoleTypeColumn < ActiveRecord::Migration[5.2]
def change
rename_column :roles, :type, :leader_type
end
end
|
tanishiking/dotty | tests/run-macros/tasty-eval/quoted_2.scala | <filename>tests/run-macros/tasty-eval/quoted_2.scala
import Macros.*
object Test {
  // `y` is a compile-time constant (final val), so the macro can inline it.
  final val y = 5
  def main(args: Array[String]): Unit = {
    println(foo(1)) // "Some(1)"
    println(foo(1 + 7)) // "Some(8)"
    println(foo(y)) // "Some(5)"
    println(foo(y + 1)) // presumably "Some(6)" — confirm against Macros.foo
    val x = 4
    // x is a plain val, not a constant, so the macro cannot evaluate it
    println(foo(x)) // "None"
  }
}
|
forma-exacta/replate | dist/Collection.js | 'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
// NOTE: generated (Babel-compiled) output — edit the ES source, not this file.
// The following are standard Babel runtime helpers inlined by the compiler.
var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
var _State2 = require('./State');
var _State3 = _interopRequireDefault(_State2);
var _v = require('uuid/v4');
var _v2 = _interopRequireDefault(_v);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } }
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
var Collection = function (_State) {
_inherits(Collection, _State);
function Collection(name, subState) {
_classCallCheck(this, Collection);
return _possibleConstructorReturn(this, (Collection.__proto__ || Object.getPrototypeOf(Collection)).call(this, name, {}, _extends({
byId: new _State3.default('byId', {}, {
upsert: function upsert(state, action) {
var newState = {};
if (Array.isArray(action.payload)) {
newState = action.payload.reduce(function (res, curr) {
curr._id = curr._id || (0, _v2.default)();
return _extends({}, res, _defineProperty({}, curr._id, curr));
}, {});
} else {
action.payload._id = action.payload._id || (0, _v2.default)();
newState = _defineProperty({}, action.payload._id, action.payload);
}
return _extends({}, state, newState);
},
remove: function remove(state, action) {
var newState = _extends({}, state);
delete newState[action.payload._id];
return newState;
}
}),
allIds: new _State3.default('allIds', [], {
upsert: function upsert(state, action) {
if (Array.isArray(action.payload)) {
state = [].concat(_toConsumableArray(state), _toConsumableArray(action.payload.map(function (val) {
return val._id;
})));
} else {
state.push(action.payload._id);
}
return state.filter(function (value, index, self) {
return self.indexOf(value) === index;
});
},
remove: function remove(state, action) {
var newState = state.slice();
newState.splice(newState.indexOf(action.payload._id), 1);
return newState;
}
})
}, subState)));
}
return Collection;
}(_State3.default);
exports.default = Collection;
//# sourceMappingURL=Collection.js.map |
jaydeetay/pxt | tests/errors-test/cases/missing-operator.py | a = 8
b = 15
c = a b # TS9553
|
ToraNova/flask-arch | flask_arch/cms/base.py | # base object for the content management system
# this system also handles user
import datetime
from ..utils import RequestParser
class Content:
    '''Ancestor of all content managed by a ContentManager.

    Subclasses must implement ``__init__`` (creation) and ``modify``
    (update). The before/after hooks are optional extension points invoked
    by the ContentManager around insert/update/delete commits.
    '''

    # primary identifier, set by the concrete subclass / storage layer
    id = None

    def view(self, rp, actor):
        # Can be overridden to only allow certain actors to view the content.
        return self

    def __init__(self, rp, actor):
        # Subclasses must define the creation behavior.
        raise NotImplementedError(f'__init__(self, rp, actor) on {self.__class__.__name__} not implemented.')

    def before_insert(self, rp, actor):
        # Called before commit: record who created the content and when.
        if isinstance(actor, Content):
            self.creator_id = actor.id
        self.created_on = datetime.datetime.now()

    def after_insert(self, rp, actor):
        # Called after commit.
        pass

    def modify(self, rp, actor):
        # Subclasses must define the modification behavior.
        # BUG FIX: the error message previously said "update(...)" although
        # the method is named "modify", which misled implementers.
        raise NotImplementedError(f'modify(self, rp, actor) on {self.__class__.__name__} not implemented.')

    def before_update(self, rp, actor):
        # Called before commit: record who modified the content and when.
        if isinstance(actor, Content):
            self.modifier_id = actor.id
        self.updated_on = datetime.datetime.now()

    def after_update(self, rp, actor):
        # Called after commit.
        pass

    def deinit(self, rp, actor):
        # Deinitialization behavior, run before deletion hooks.
        pass

    def before_delete(self, rp, actor):
        # Called before delete.
        pass

    def after_delete(self, rp, actor):
        # Called after commit.
        pass

    @classmethod
    def parse_id(cls, rp):
        # Extract the content id from the request's query args.
        return rp.args['id']

    @classmethod
    def parse_filename(cls, rp):
        # Extract the filename from the request's query args.
        return rp.args['filename']

    @classmethod
    def create_default_with_form(cls, **kwargs):
        # Create an instance acting as the built-in DEFAULT actor.
        from .default import DEFAULT
        defo = cls._create_with_form(DEFAULT, **kwargs)
        return defo

    @classmethod
    def _create_with_form(cls, actor, **kwargs):
        # Build a synthetic request carrying kwargs as form data.
        rp = RequestParser()
        rp.form = kwargs.copy()
        c = cls(rp, actor)
        return c
class ContentManager:
    '''Owns persistence for a single Content subclass.

    Concrete managers implement the select/insert/update/delete primitives
    and the commit/rollback persistence methods for their storage backend.
    '''

    def __init__(self, ContentClass):
        if not issubclass(ContentClass, Content):
            raise TypeError(f'{ContentClass} should be a subclass of {Content}.')
        self.Content = ContentClass

    def query(self, rp):
        # Resolve a single content item from the id carried by the request.
        content_id = self.Content.parse_id(rp)
        return self.select_one(content_id)

    # --- read queries -----------------------------------------------------

    def select(self, query):
        '''Run a backend-specific query.'''
        raise NotImplementedError(f'select method on {self.__class__.__name__} not implemented.')

    def select_all(self):
        '''List all contents.'''
        raise NotImplementedError(f'select_all method on {self.__class__.__name__} not implemented.')

    def select_one(self, id):
        '''Select a single content by id.'''
        raise NotImplementedError(f'select_one method on {self.__class__.__name__} not implemented.')

    # --- write queries ----------------------------------------------------

    def insert(self, nd):
        '''Insert a new content.'''
        raise NotImplementedError(f'insert method on {self.__class__.__name__} not implemented.')

    def update(self, nd):
        '''Update an existing content.'''
        raise NotImplementedError(f'update method on {self.__class__.__name__} not implemented.')

    def delete(self, nd):
        '''Delete a content.'''
        raise NotImplementedError(f'delete method on {self.__class__.__name__} not implemented.')

    # --- persistence ------------------------------------------------------

    def commit(self):
        '''Persist changes and synchronize.'''
        raise NotImplementedError(f'commit method on {self.__class__.__name__} not implemented.')

    def rollback(self):
        '''Roll back changes (after an exception).'''
        raise NotImplementedError(f'rollback method on {self.__class__.__name__} not implemented.')
|
curiousjgeorge/aws-sdk-cpp | aws-cpp-sdk-comprehend/source/model/EntitiesDetectionJobProperties.cpp | /*
* Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#include <aws/comprehend/model/EntitiesDetectionJobProperties.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
namespace Aws
{
namespace Comprehend
{
namespace Model
{
// NOTE: generated AWS SDK model code — regenerate rather than hand-edit.
// Default constructor: all optional members start unset; enum members get
// explicit NOT_SET sentinels.
EntitiesDetectionJobProperties::EntitiesDetectionJobProperties() : 
    m_jobIdHasBeenSet(false),
    m_jobNameHasBeenSet(false),
    m_jobStatus(JobStatus::NOT_SET),
    m_jobStatusHasBeenSet(false),
    m_messageHasBeenSet(false),
    m_submitTimeHasBeenSet(false),
    m_endTimeHasBeenSet(false),
    m_entityRecognizerArnHasBeenSet(false),
    m_inputDataConfigHasBeenSet(false),
    m_outputDataConfigHasBeenSet(false),
    m_languageCode(LanguageCode::NOT_SET),
    m_languageCodeHasBeenSet(false),
    m_dataAccessRoleArnHasBeenSet(false),
    m_volumeKmsKeyIdHasBeenSet(false)
{
}

// JSON constructor: initializes to the unset state, then delegates to
// operator= to populate members present in the JSON view.
EntitiesDetectionJobProperties::EntitiesDetectionJobProperties(JsonView jsonValue) : 
    m_jobIdHasBeenSet(false),
    m_jobNameHasBeenSet(false),
    m_jobStatus(JobStatus::NOT_SET),
    m_jobStatusHasBeenSet(false),
    m_messageHasBeenSet(false),
    m_submitTimeHasBeenSet(false),
    m_endTimeHasBeenSet(false),
    m_entityRecognizerArnHasBeenSet(false),
    m_inputDataConfigHasBeenSet(false),
    m_outputDataConfigHasBeenSet(false),
    m_languageCode(LanguageCode::NOT_SET),
    m_languageCodeHasBeenSet(false),
    m_dataAccessRoleArnHasBeenSet(false),
    m_volumeKmsKeyIdHasBeenSet(false)
{
  *this = jsonValue;
}
// Generated AWS SDK code: deserialize from JSON, setting each member and
// its HasBeenSet flag only when the key exists in the payload.
EntitiesDetectionJobProperties& EntitiesDetectionJobProperties::operator =(JsonView jsonValue)
{
  if(jsonValue.ValueExists("JobId"))
  {
    m_jobId = jsonValue.GetString("JobId");
    m_jobIdHasBeenSet = true;
  }
  if(jsonValue.ValueExists("JobName"))
  {
    m_jobName = jsonValue.GetString("JobName");
    m_jobNameHasBeenSet = true;
  }
  if(jsonValue.ValueExists("JobStatus"))
  {
    m_jobStatus = JobStatusMapper::GetJobStatusForName(jsonValue.GetString("JobStatus"));
    m_jobStatusHasBeenSet = true;
  }
  if(jsonValue.ValueExists("Message"))
  {
    m_message = jsonValue.GetString("Message");
    m_messageHasBeenSet = true;
  }
  if(jsonValue.ValueExists("SubmitTime"))
  {
    m_submitTime = jsonValue.GetDouble("SubmitTime");
    m_submitTimeHasBeenSet = true;
  }
  if(jsonValue.ValueExists("EndTime"))
  {
    m_endTime = jsonValue.GetDouble("EndTime");
    m_endTimeHasBeenSet = true;
  }
  if(jsonValue.ValueExists("EntityRecognizerArn"))
  {
    m_entityRecognizerArn = jsonValue.GetString("EntityRecognizerArn");
    m_entityRecognizerArnHasBeenSet = true;
  }
  if(jsonValue.ValueExists("InputDataConfig"))
  {
    m_inputDataConfig = jsonValue.GetObject("InputDataConfig");
    m_inputDataConfigHasBeenSet = true;
  }
  if(jsonValue.ValueExists("OutputDataConfig"))
  {
    m_outputDataConfig = jsonValue.GetObject("OutputDataConfig");
    m_outputDataConfigHasBeenSet = true;
  }
  if(jsonValue.ValueExists("LanguageCode"))
  {
    m_languageCode = LanguageCodeMapper::GetLanguageCodeForName(jsonValue.GetString("LanguageCode"));
    m_languageCodeHasBeenSet = true;
  }
  if(jsonValue.ValueExists("DataAccessRoleArn"))
  {
    m_dataAccessRoleArn = jsonValue.GetString("DataAccessRoleArn");
    m_dataAccessRoleArnHasBeenSet = true;
  }
  if(jsonValue.ValueExists("VolumeKmsKeyId"))
  {
    m_volumeKmsKeyId = jsonValue.GetString("VolumeKmsKeyId");
    m_volumeKmsKeyIdHasBeenSet = true;
  }
  return *this;
}
// Generated AWS SDK code: serialize to JSON, emitting only members whose
// HasBeenSet flag is true; timestamps serialize as epoch seconds.
JsonValue EntitiesDetectionJobProperties::Jsonize() const
{
  JsonValue payload;

  if(m_jobIdHasBeenSet)
  {
   payload.WithString("JobId", m_jobId);
  }
  if(m_jobNameHasBeenSet)
  {
   payload.WithString("JobName", m_jobName);
  }
  if(m_jobStatusHasBeenSet)
  {
   payload.WithString("JobStatus", JobStatusMapper::GetNameForJobStatus(m_jobStatus));
  }
  if(m_messageHasBeenSet)
  {
   payload.WithString("Message", m_message);
  }
  if(m_submitTimeHasBeenSet)
  {
   payload.WithDouble("SubmitTime", m_submitTime.SecondsWithMSPrecision());
  }
  if(m_endTimeHasBeenSet)
  {
   payload.WithDouble("EndTime", m_endTime.SecondsWithMSPrecision());
  }
  if(m_entityRecognizerArnHasBeenSet)
  {
   payload.WithString("EntityRecognizerArn", m_entityRecognizerArn);
  }
  if(m_inputDataConfigHasBeenSet)
  {
   payload.WithObject("InputDataConfig", m_inputDataConfig.Jsonize());
  }
  if(m_outputDataConfigHasBeenSet)
  {
   payload.WithObject("OutputDataConfig", m_outputDataConfig.Jsonize());
  }
  if(m_languageCodeHasBeenSet)
  {
   payload.WithString("LanguageCode", LanguageCodeMapper::GetNameForLanguageCode(m_languageCode));
  }
  if(m_dataAccessRoleArnHasBeenSet)
  {
   payload.WithString("DataAccessRoleArn", m_dataAccessRoleArn);
  }
  if(m_volumeKmsKeyIdHasBeenSet)
  {
   payload.WithString("VolumeKmsKeyId", m_volumeKmsKeyId);
  }
  return payload;
}
} // namespace Model
} // namespace Comprehend
} // namespace Aws
|
dwtester88/jitsi_master | src/org/jitsi/android/gui/util/event/EventListenerList.java | /*
* Jitsi, the OpenSource Java VoIP and Instant Messaging client.
*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jitsi.android.gui.util.event;
import java.util.*;
/**
* Utility class to that stores the list of {@link EventListener}s.
* Provides add/remove and notify all operations.
*
* @param <T> the event object class
*
* @author <NAME>
*/
/**
 * Utility class that stores the list of {@link EventListener}s.
 * Provides add/remove and notify-all operations.
 *
 * <p>Not synchronized: intended for single-threaded use (the original
 * implementation had no synchronization either).
 *
 * @param <T> the event object class
 *
 * @author <NAME>
 */
public class EventListenerList<T>
{
    /**
     * The registered {@link EventListener}s. Declared as the {@code List}
     * interface and kept {@code final}; never exposed to callers.
     */
    private final List<EventListener<T>> listeners = new ArrayList<>();

    /**
     * Adds the <tt>listener</tt> to the list if it is not already present.
     *
     * @param listener the {@link EventListener} that will
     *                 be added to the list
     */
    public void addEventListener(EventListener<T> listener)
    {
        if(!listeners.contains(listener))
            listeners.add(listener);
    }

    /**
     * Removes the <tt>listener</tt> from the list.
     *
     * @param listener the {@link EventListener} that will
     *                 be removed from the list
     */
    public void removeEventListener(EventListener<T> listener)
    {
        listeners.remove(listener);
    }

    /**
     * Runs the event change notification on the listeners list.
     *
     * @param eventObject the source object of the event
     */
    public void notifyEventListeners(T eventObject)
    {
        // Iterate over a snapshot so a listener may add/remove listeners
        // from its callback without triggering ConcurrentModificationException.
        for(EventListener<T> l : new ArrayList<>(listeners))
        {
            l.onChangeEvent(eventObject);
        }
    }

    /**
     * Clears the listeners list.
     */
    public void clear()
    {
        listeners.clear();
    }
}
|
alexhope61/bootstrap | atom/packages/atom-ide-ui/modules/atom-ide-ui/pkg/atom-ide-debugger/lib/ui/ThreadTreeNode.js | "use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
// NOTE: generated (Babel-compiled) output — edit the source module, not this
// file. Each function below is a compiler-emitted lazy require: the first
// call loads the module and replaces the function with a cached getter.
function _LoadingSpinner() {
  const data = require("../../../../../nuclide-commons-ui/LoadingSpinner");

  _LoadingSpinner = function () {
    return data;
  };

  return data;
}

function _Table() {
  const data = require("../../../../../nuclide-commons-ui/Table");

  _Table = function () {
    return data;
  };

  return data;
}

function _Tree() {
  const data = require("../../../../../nuclide-commons-ui/Tree");

  _Tree = function () {
    return data;
  };

  return data;
}

function _event() {
  const data = require("../../../../../nuclide-commons/event");

  _event = function () {
    return data;
  };

  return data;
}

var React = _interopRequireWildcard(require("react"));

var _RxMin = require("rxjs/bundles/Rx.min.js");

function _constants() {
  const data = require("../constants");

  _constants = function () {
    return data;
  };

  return data;
}

function _UniversalDisposable() {
  const data = _interopRequireDefault(require("../../../../../nuclide-commons/UniversalDisposable"));

  _UniversalDisposable = function () {
    return data;
  };

  return data;
}

function _expected() {
  const data = require("../../../../../nuclide-commons/expected");

  _expected = function () {
    return data;
  };

  return data;
}

function _classnames() {
  const data = _interopRequireDefault(require("classnames"));

  _classnames = function () {
    return data;
  };

  return data;
}

// Standard Babel interop helpers for default/namespace imports.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = Object.defineProperty && Object.getOwnPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : {}; if (desc.get || desc.set) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } } newObj.default = obj; return newObj; } }
/**
* Copyright (c) 2017-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*
* @format
*/
// Shared placeholder element rendered while call frames are being fetched.
const LOADING = React.createElement("div", {
  className: (0, _classnames().default)('debugger-expression-value-row', 'debugger-tree-no-frames')
}, React.createElement("span", {
  className: "debugger-expression-value-content"
}, React.createElement(_LoadingSpinner().LoadingSpinner, {
  size: "SMALL"
})));
// Generated (Babel-compiled) output — edit the source module, not this file.
// Tree node for one debugger thread: collapsible, lazily fetches its call
// stack and renders the frames as a selectable table.
class ThreadTreeNode extends React.Component {
  constructor(props) {
    super(props);

    // Toggle collapse; expanding triggers a (re)fetch via _selectTrigger.
    this.handleSelect = () => {
      if (!this.state.isCollapsed) {
        this.setState({
          isCollapsed: true
        });
      } else {
        this.setState({
          isCollapsed: false,
          childItems: _expected().Expect.pending()
        });

        this._selectTrigger.next();
      }
    };

    this._handleStackFrameClick = (clickedRow, callFrameIndex) => {
      this.props.service.focusStackFrame(clickedRow.frame, null, null, true);
    };

    this._selectTrigger = new _RxMin.Subject();
    this.state = this._getInitialState();
    this._disposables = new (_UniversalDisposable().default)();
  }

  // True when this node's thread is the debugger's focused thread.
  _computeIsFocused() {
    const {
      service,
      thread
    } = this.props;
    const focusedThread = service.viewModel.focusedThread;
    return focusedThread != null && thread.threadId === focusedThread.threadId;
  }

  _getInitialState() {
    return {
      isCollapsed: true,
      childItems: _expected().Expect.pending()
    };
  }

  // Returns an Observable of Expect-wrapped call frames; when `fetch` is
  // true (or the cached stack is empty) it refetches from the thread.
  _getFrames(fetch = false) {
    const {
      thread
    } = this.props;

    const getValue = () => _RxMin.Observable.of(_expected().Expect.value(thread.getCallStack()));

    if (fetch || !this.state.childItems.isPending && !this.state.childItems.isError && this.state.childItems.value.length === 0) {
      return _RxMin.Observable.of(_expected().Expect.pending()).concat(_RxMin.Observable.fromPromise((async () => {
        await thread.fetchCallStack();
        return _expected().Expect.value(thread.getCallStack());
      })()));
    }

    return getValue();
  }

  componentWillUnmount() {
    this._disposables.dispose();
  }

  componentDidMount() {
    const {
      service
    } = this.props;
    const model = service.getModel();
    const {
      viewModel
    } = service;

    // Three subscriptions: collapse on focus/mode changes, refetch on
    // expand, and refresh frames when the model's call stack changes.
    this._disposables.add(_RxMin.Observable.merge((0, _event().observableFromSubscribeFunction)(viewModel.onDidFocusStackFrame.bind(viewModel)), (0, _event().observableFromSubscribeFunction)(service.onDidChangeMode.bind(service))).subscribe(() => {
      const {
        isCollapsed
      } = this.state;
      const newIsCollapsed = isCollapsed && !this._computeIsFocused();
      this.setState({
        isCollapsed: newIsCollapsed
      });
    }), this._selectTrigger.asObservable().switchMap(() => this._getFrames(true)).subscribe(frames => {
      this.setState({
        childItems: frames
      });
    }), (0, _event().observableFromSubscribeFunction)(model.onDidChangeCallStack.bind(model)).debounceTime(100).startWith(null).switchMap(() => this._getFrames().switchMap(frames => {
      if (!this.state.isCollapsed && !frames.isPending && !frames.isError && frames.value.length === 0) {
        return this._getFrames(true);
      }

      return _RxMin.Observable.of(frames);
    })).subscribe(frames => {
      const {
        isCollapsed
      } = this.state;
      this.setState({
        childItems: frames,
        isCollapsed: isCollapsed && !this._computeIsFocused()
      });
    }));
  }

  // Render the fetched frames as a Name/Source/Line table; the focused
  // frame's row is highlighted and rows are selectable only when their
  // source is available.
  _generateTable(childItems) {
    const {
      service
    } = this.props;
    const rows = childItems.map((frame, frameIndex) => {
      const activeFrame = service.viewModel.focusedStackFrame;
      const isSelected = activeFrame != null ? frame === activeFrame : false;
      const cellData = {
        data: {
          name: frame.name,
          source: frame.source != null && frame.source.name != null ? `${frame.source.name}` : '',
          line: `${frame.range.end.row}`,
          frame,
          isSelected
        },
        className: isSelected ? 'debugger-callstack-item-selected' : undefined
      };
      return cellData;
    });
    const columns = [{
      title: 'Name',
      key: 'name',
      width: 0.5
    }, {
      title: 'Source',
      key: 'source',
      width: 0.35
    }, {
      title: 'Line',
      key: 'line',
      width: 0.15
    }];
    return React.createElement("div", {
      className: (0, _classnames().default)({
        'debugger-container-new-disabled': service.getDebuggerMode() === _constants().DebuggerMode.RUNNING
      })
    }, React.createElement("div", {
      className: "debugger-callstack-table-div"
    }, React.createElement(_Table().Table, {
      className: "debugger-callstack-table",
      columns: columns,
      rows: rows,
      selectable: cellData => cellData.frame.source.available,
      resizable: true,
      onSelect: this._handleStackFrameClick,
      sortable: false
    })));
  }

  render() {
    const {
      thread,
      service
    } = this.props;
    const {
      childItems
    } = this.state;

    const isFocused = this._computeIsFocused();

    // Clicking the title focuses the thread (only meaningful when stopped).
    const handleTitleClick = event => {
      if (thread.stopped) {
        service.focusStackFrame(null, thread, null, true);
      }

      event.stopPropagation();
    };

    const formattedTitle = React.createElement("span", {
      onClick: handleTitleClick,
      className: isFocused ? (0, _classnames().default)('debugger-tree-process-thread-selected') : '',
      title: 'Thread ID: ' + thread.threadId + ', Name: ' + thread.name
    }, thread.name + (thread.stoppedDetails == null ? ' (Running)' : ' (Paused)'));

    // No frames at all: render a flat (non-expandable) item.
    if (!childItems.isPending && !childItems.isError && childItems.value.length === 0) {
      return React.createElement(_Tree().TreeItem, {
        className: "debugger-tree-no-frames"
      }, formattedTitle);
    }

    const callFramesElements = childItems.isPending ? LOADING : childItems.isError ? React.createElement("span", {
      className: "debugger-tree-no-frames"
    }, "Error fetching stack frames ", childItems.error.toString()) : this._generateTable(childItems.value);
    return React.createElement(_Tree().NestedTreeItem, {
      title: formattedTitle,
      collapsed: this.state.isCollapsed,
      onSelect: this.handleSelect
    }, callFramesElements);
  }

}
exports.default = ThreadTreeNode; |
stlbucket/fbkt-login | index.js | module.exports = {
core: require('./appLibs/fbktLogin'),
graphsQl: require('./appLibs/fbktLogin/graphQl/Query/graphs'),
actions: require('./appLibs/fbktLogin/actions'),
};
|
pallxk/sqlpad | client/src/common/QueryResultContainer.js | <filename>client/src/common/QueryResultContainer.js
import React from 'react';
import ErrorBlock from './ErrorBlock';
import InfoBlock from './InfoBlock';
import QueryResultDataTable from './QueryResultDataTable';
import QueryResultRunning from './QueryResultRunning';
function QueryResultContainer({ isRunning, queryError, queryResult }) {
if (isRunning) {
return <QueryResultRunning />;
}
if (queryError) {
return <ErrorBlock>{queryError}</ErrorBlock>;
}
if (!queryResult || !queryResult.rows) {
return null;
}
if (queryResult.status === 'finished' && queryResult.rows.length === 0) {
return (
<InfoBlock>
Query finished
<br />
No rows returned
</InfoBlock>
);
}
return <QueryResultDataTable queryResult={queryResult} />;
}
export default QueryResultContainer;
|
qianfei11/zstack | sdk/src/main/java/org/zstack/sdk/DeleteBaremetalChassisResult.java | <gh_stars>100-1000
package org.zstack.sdk;
/**
 * Result type for the DeleteBaremetalChassis SDK call. It carries no
 * fields; success or failure is reported by the surrounding SDK call
 * mechanism rather than by this payload.
 */
public class DeleteBaremetalChassisResult {
}
|
nICEnnnnnnnLee/NinjaV4 | src/nicelee/test/junit/FileUploaderTest.java | package nicelee.test.junit;
import java.io.File;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import nicelee.function.cloud.upload.FileUploader;
import nicelee.global.GlobalConfig;
/**
 * Manual integration test for {@code FileUploader.update}.
 *
 * NOTE(review): this test uses a hard-coded absolute Windows path and the
 * live endpoint/token from {@code GlobalConfig}, so it only works on the
 * original author's machine. Consider guarding with a file-existence
 * assumption and asserting on {@code result} instead of printing it — as
 * written, JUnit reports success regardless of the upload outcome.
 */
public class FileUploaderTest {
	@Before
	public void setUp() throws Exception {
	}
	@After
	public void tearDown() throws Exception {
	}
	// Uploads a local image to the configured URL and prints whether the
	// upload succeeded (no assertion — see class note).
	@Test
	public void test() {
		File file = new File(
				"D:\\Workspace\\GitWorkspace\\0_GitHub\\nICEnnnnnnnLee.github.io\\sources\\pics\\bg-catoon.jpg");
		boolean result = FileUploader.update(GlobalConfig.url_onlineDevices, file, GlobalConfig.token);
		System.out.println(result);
	}
}
|
karlbaker02/verify-hub | hub/policy/src/integration-test/java/uk/gov/ida/integrationtest/hub/policy/apprule/EidasCycle3DataResourceTest.java | package uk.gov.ida.integrationtest.hub.policy.apprule;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.google.common.base.Optional;
import helpers.JerseyClientConfigurationBuilder;
import io.dropwizard.client.JerseyClientBuilder;
import io.dropwizard.client.JerseyClientConfiguration;
import io.dropwizard.util.Duration;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import uk.gov.ida.hub.policy.Urls;
import uk.gov.ida.hub.policy.builder.AttributeQueryContainerDtoBuilder;
import uk.gov.ida.hub.policy.builder.domain.SessionIdBuilder;
import uk.gov.ida.hub.policy.domain.Cycle3AttributeRequestData;
import uk.gov.ida.hub.policy.domain.Cycle3UserInput;
import uk.gov.ida.hub.policy.domain.MatchingProcessDto;
import uk.gov.ida.hub.policy.domain.SessionId;
import uk.gov.ida.hub.policy.domain.state.Cycle3DataInputCancelledState;
import uk.gov.ida.hub.policy.domain.state.EidasCycle3MatchRequestSentState;
import uk.gov.ida.integrationtest.hub.policy.apprule.support.ConfigStubRule;
import uk.gov.ida.integrationtest.hub.policy.apprule.support.EventSinkStubRule;
import uk.gov.ida.integrationtest.hub.policy.apprule.support.PolicyAppRule;
import uk.gov.ida.integrationtest.hub.policy.apprule.support.SamlEngineStubRule;
import uk.gov.ida.integrationtest.hub.policy.apprule.support.SamlSoapProxyProxyStubRule;
import uk.gov.ida.integrationtest.hub.policy.rest.EidasCycle3DTO;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import java.net.URI;
import static io.dropwizard.testing.ConfigOverride.config;
import static org.assertj.core.api.Assertions.assertThat;
import static uk.gov.ida.hub.policy.builder.domain.Cycle3AttributeRequestDataBuilder.aCycle3AttributeRequestData;
import static uk.gov.ida.integrationtest.hub.policy.apprule.support.TestSessionResource.EIDAS_AWAITING_CYCLE_3_DATA_STATE;
import static uk.gov.ida.integrationtest.hub.policy.apprule.support.TestSessionResource.GET_SESSION_STATE_NAME;
/**
 * Integration tests for Policy's eIDAS cycle-3 (additional matching
 * attribute) resource. Each test creates a session pre-seeded into the
 * EidasAwaitingCycle3Data state via the test-session resource, drives the
 * cycle-3 endpoints over HTTP, and checks the resulting session state or
 * response against stubbed config/saml-engine/saml-soap-proxy services.
 */
public class EidasCycle3DataResourceTest {
    // NOTE(review): mutable public static field — could be private static final.
    public static String TEST_SESSION_RESOURCE_PATH = Urls.PolicyUrls.POLICY_ROOT + "test";
    @ClassRule
    public static SamlEngineStubRule samlEngineStub = new SamlEngineStubRule();
    @ClassRule
    public static EventSinkStubRule eventSinkStub = new EventSinkStubRule();
    @ClassRule
    public static SamlSoapProxyProxyStubRule samlSoapProxyProxyStub = new SamlSoapProxyProxyStubRule();
    @ClassRule
    public static ConfigStubRule configStub = new ConfigStubRule();
    // Policy app under test, wired to the stub services above and with the
    // eIDAS feature flag enabled.
    @ClassRule
    public static PolicyAppRule policy = new PolicyAppRule(
        config("eventSinkUri", eventSinkStub.baseUri().build().toASCIIString()),
        config("configUri", configStub.baseUri().build().toASCIIString()),
        config("samlSoapProxyUri", samlSoapProxyProxyStub.baseUri().build().toASCIIString()),
        config("samlEngineUri", samlEngineStub.baseUri().build().toASCIIString()),
        config("eidas", "true"));
    private static Client client;
    @BeforeClass
    public static void beforeClass() {
        JerseyClientConfiguration jerseyClientConfiguration = JerseyClientConfigurationBuilder.aJerseyClientConfiguration().withTimeout(Duration.seconds(10)).build();
        client = new JerseyClientBuilder(policy.getEnvironment()).using(jerseyClientConfiguration).build(EidasCycle3DataResourceTest.class.getSimpleName());
    }
    @Before
    public void setUp() throws Exception {
        eventSinkStub.setupStubForLogging();
    }
    // GET on the cycle-3 request resource should return the attribute
    // request data configured for the requesting RP.
    @Test
    public void shouldGetCycle3AttributeRequestDataFromConfiguration() throws JsonProcessingException {
        final SessionId sessionId = SessionIdBuilder.aSessionId().build();
        final String rpEntityId = new EidasCycle3DTO(sessionId).getRequestIssuerEntityId();
        final Response sessionCreatedResponse = createSessionInEidasAwaitingCycle3DataState(sessionId);
        assertThat(sessionCreatedResponse.getStatus()).isEqualTo(Response.Status.OK.getStatusCode());
        final MatchingProcessDto cycle3Attribute = new MatchingProcessDto(Optional.of("TUFTY_CLUB_CARD"));
        configStub.setUpStubForEnteringAwaitingCycle3DataState(rpEntityId, cycle3Attribute);
        samlSoapProxyProxyStub.setUpStubForSendHubMatchingServiceRequest(sessionId);
        final Cycle3AttributeRequestData actualResponse = getCycle3Data(sessionId);
        final Cycle3AttributeRequestData expectedResponse = aCycle3AttributeRequestData()
            .withAttributeName(cycle3Attribute.getAttributeName().get())
            .withRequestIssuerId(rpEntityId)
            .build();
        assertThat(actualResponse).isEqualToComparingFieldByField(expectedResponse);
    }
    // Cancelling cycle-3 input should move the session into the
    // Cycle3DataInputCancelled state.
    @Test
    public void shouldUpdateSessionStateToCancelledCycle3InputStateWhenInputToCycle3IsCancelled() throws JsonProcessingException {
        final SessionId sessionId = SessionIdBuilder.aSessionId().build();
        final Response sessionCreatedResponse = createSessionInEidasAwaitingCycle3DataState(sessionId);
        assertThat(sessionCreatedResponse.getStatus()).isEqualTo(Response.Status.OK.getStatusCode());
        cancelCycle3Data(sessionId);
        assertThat(getSessionStateName(sessionId)).isEqualTo(Cycle3DataInputCancelledState.class.getName());
    }
    // Submitting cycle-3 data should trigger the matching request and move
    // the session into the EidasCycle3MatchRequestSent state.
    @Test
    public void shouldReturnSuccessWhenDataSubmitted() throws JsonProcessingException {
        final SessionId sessionId = SessionIdBuilder.aSessionId().build();
        final String rpEntityId = new EidasCycle3DTO(sessionId).getRequestIssuerEntityId();
        final String msaEntityId = new EidasCycle3DTO(sessionId).getMatchingServiceAdapterEntityId();
        final Response sessionCreatedResponse = createSessionInEidasAwaitingCycle3DataState(sessionId);
        assertThat(sessionCreatedResponse.getStatus()).isEqualTo(Response.Status.OK.getStatusCode());
        final Cycle3UserInput cycle3UserInput = new Cycle3UserInput("test-value", "principal-ip-address-seen-by-hub");
        samlEngineStub.setupStubForEidasAttributeQueryRequestGeneration(AttributeQueryContainerDtoBuilder.anAttributeQueryContainerDto().build());
        configStub.setUpStubForMatchingServiceRequest(rpEntityId, msaEntityId);
        final MatchingProcessDto cycle3Attribute = new MatchingProcessDto(Optional.of("TUFTY_CLUB_CARD"));
        configStub.setUpStubForEnteringAwaitingCycle3DataState(rpEntityId, cycle3Attribute);
        samlSoapProxyProxyStub.setUpStubForSendHubMatchingServiceRequest(sessionId);
        postCycle3Data(sessionId, cycle3UserInput);
        assertThat(getSessionStateName(sessionId)).isEqualTo(EidasCycle3MatchRequestSentState.class.getName());
    }
    // --- HTTP helpers against the Policy app under test ---
    private Cycle3AttributeRequestData getCycle3Data(final SessionId sessionId) {
        final URI uri = UriBuilder.fromPath(Urls.PolicyUrls.CYCLE_3_REQUEST_RESOURCE).build(sessionId);
        return client.target(policy.uri(uri.toASCIIString()))
            .request()
            .get()
            .readEntity(Cycle3AttributeRequestData.class);
    }
    private Response cancelCycle3Data(final SessionId sessionId) {
        final URI uri = UriBuilder.fromPath(Urls.PolicyUrls.CYCLE_3_CANCEL_RESOURCE).build(sessionId);
        return client.target(policy.uri(uri.toASCIIString()))
            .request()
            .post(null);
    }
    private Response postCycle3Data(final SessionId sessionId, final Cycle3UserInput data) {
        URI uri = UriBuilder.fromPath(Urls.PolicyUrls.CYCLE_3_SUBMIT_RESOURCE).build(sessionId);
        return client.target(policy.uri(uri.toASCIIString()))
            .request()
            .post(Entity.json(data));
    }
    private String getSessionStateName(final SessionId sessionId) {
        final URI uri = UriBuilder.fromPath(TEST_SESSION_RESOURCE_PATH + GET_SESSION_STATE_NAME).build(sessionId);
        return client.target(policy.uri(uri.toASCIIString()))
            .request(MediaType.APPLICATION_JSON_TYPE)
            .get()
            .readEntity(String.class);
    }
    // Seeds a session directly into the EidasAwaitingCycle3Data state via
    // the test-only session resource.
    private Response createSessionInEidasAwaitingCycle3DataState(final SessionId sessionId) {
        final URI uri = UriBuilder.fromPath(TEST_SESSION_RESOURCE_PATH + EIDAS_AWAITING_CYCLE_3_DATA_STATE).build();
        final EidasCycle3DTO dto = new EidasCycle3DTO(sessionId);
        return client.target(policy.uri(uri.toASCIIString()))
            .request()
            .post(Entity.json(dto));
    }
}
|
pavelkrolevets/openx-frontend | src/pages/Investor/InvestorContainer.js | import React, { Component } from "react";
import LayoutHoc from "../../hoc/Layout/Layout";
import InvestorComponent from "../../components/Investor/Investor";
import SubNavigationComponent from "../../components/General/SubNavigationComponent/SubNavigationComponent";
import MENU from "./constants";
class InvestorContainer extends Component {
render() {
return (
<LayoutHoc>
<div className="component-content Investor">
<SubNavigationComponent list={MENU} />
<InvestorComponent />
</div>
</LayoutHoc>
);
}
}
export default InvestorContainer;
|
adamrhunter/strapi | packages/strapi-connector-bookshelf/lib/queries.js | 'use strict';
/**
* Implementation of model queries for bookshelf
*/
const _ = require('lodash');
const pmap = require('p-map');
const { convertRestQueryParams, buildQuery, escapeQuery } = require('strapi-utils');
const { contentTypes: contentTypesUtils } = require('strapi-utils');
const { PUBLISHED_AT_ATTRIBUTE } = contentTypesUtils.constants;
module.exports = function createQueryBuilder({ model, strapi }) {
/* Utils */
// association key
const assocKeys = model.associations.map(ast => ast.alias);
// component keys
const componentKeys = Object.keys(model.attributes).filter(key => {
return ['dynamiczone', 'component'].includes(model.attributes[key].type);
});
const timestamps = _.get(model, ['options', 'timestamps'], []);
const hasDraftAndPublish = contentTypesUtils.hasDraftAndPublish(model);
// Returns an object with relation keys only to create relations in DB
const pickRelations = attributes => {
return _.pick(attributes, assocKeys);
};
// keys to exclude to get attribute keys
const excludedKeys = assocKeys.concat(componentKeys);
// Returns an object without relational keys to persist in DB
const selectAttributes = attributes => {
return _.pickBy(attributes, (value, key) => {
if (Array.isArray(timestamps) && timestamps.includes(key)) {
return false;
}
return !excludedKeys.includes(key) && _.has(model.allAttributes, key);
});
};
const wrapTransaction = (fn, { transacting } = {}) => {
const db = strapi.connections[model.connection];
if (transacting) return fn(transacting);
return db.transaction(trx => fn(trx));
};
/**
* Find one entry based on params
*/
async function findOne(params, populate, { transacting } = {}) {
const entries = await find({ ...params, _limit: 1 }, populate, { transacting });
return entries[0] || null;
}
/**
* Find multiple entries based on params
*/
function find(params, populate, { transacting } = {}) {
const filters = convertRestQueryParams(params);
const query = buildQuery({ model, filters });
return model
.query(query)
.fetchAll({
withRelated: populate,
transacting,
publicationState: filters.publicationState,
})
.then(results => results.toJSON());
}
/**
* Count entries based on filters
*/
function count(params = {}) {
const filters = convertRestQueryParams(_.omit(params, ['_sort', '_limit', '_start']));
return model
.query(buildQuery({ model, filters }))
.count()
.then(Number);
}
async function create(attributes, { transacting } = {}) {
const relations = pickRelations(attributes);
const data = { ...selectAttributes(attributes) };
if (hasDraftAndPublish) {
data[PUBLISHED_AT_ATTRIBUTE] = _.has(attributes, PUBLISHED_AT_ATTRIBUTE)
? attributes[PUBLISHED_AT_ATTRIBUTE]
: new Date();
}
const runCreate = async trx => {
// Create entry with no-relational data.
const entry = await model.forge(data).save(null, { transacting: trx });
const isDraft = contentTypesUtils.isDraft(entry.toJSON(), model);
await createComponents(entry, attributes, { transacting: trx, isDraft });
return model.updateRelations({ id: entry.id, values: relations }, { transacting: trx });
};
return wrapTransaction(runCreate, { transacting });
}
async function update(params, attributes, { transacting } = {}) {
const entry = await model.where(params).fetch({ transacting });
if (!entry) {
const err = new Error('entry.notFound');
err.status = 404;
throw err;
}
// Extract attributes related to relational data.
const relations = pickRelations(attributes);
const data = selectAttributes(attributes);
const runUpdate = async trx => {
const updatedEntry =
Object.keys(data).length > 0
? await entry.save(data, {
transacting: trx,
method: 'update',
patch: true,
})
: entry;
await updateComponents(updatedEntry, attributes, { transacting: trx });
if (Object.keys(relations).length > 0) {
return model.updateRelations({ id: entry.id, values: relations }, { transacting: trx });
}
return this.findOne(params, null, { transacting: trx });
};
return wrapTransaction(runUpdate, { transacting });
}
async function deleteOne(id, { transacting } = {}) {
const entry = await model.where({ [model.primaryKey]: id }).fetch({ transacting });
if (!entry) {
const err = new Error('entry.notFound');
err.status = 404;
throw err;
}
await model.deleteRelations(id, { transacting });
const runDelete = async trx => {
await deleteComponents(entry, { transacting: trx });
await model.where({ id: entry.id }).destroy({ transacting: trx, require: false });
return entry.toJSON();
};
return wrapTransaction(runDelete, { transacting });
}
async function deleteMany(params, { transacting } = {}) {
if (params[model.primaryKey]) {
const entries = await find({ ...params, _limit: 1 }, null, { transacting });
if (entries.length > 0) {
return deleteOne(entries[0][model.primaryKey], { transacting });
}
return null;
}
const paramsWithDefaults = _.defaults(params, { _limit: -1 });
const entries = await find(paramsWithDefaults, null, { transacting });
return pmap(entries, entry => deleteOne(entry.id, { transacting }), {
concurrency: 100,
stopOnError: true,
});
}
function search(params, populate) {
const filters = convertRestQueryParams(_.omit(params, '_q'));
return model
.query(qb => qb.where(buildSearchQuery({ model, params })))
.query(buildQuery({ model, filters }))
.fetchAll({ withRelated: populate })
.then(results => results.toJSON());
}
function countSearch(params) {
const filters = convertRestQueryParams(_.omit(params, ['_q', '_sort', '_limit', '_start']));
return model
.query(qb => qb.where(buildSearchQuery({ model, params })))
.query(buildQuery({ model, filters }))
.count()
.then(Number);
}
async function createComponents(entry, attributes, { transacting, isDraft }) {
if (componentKeys.length === 0) return;
const joinModel = model.componentsJoinModel;
const { foreignKey } = joinModel;
const createComponentAndLink = async ({ componentModel, value, key, order }) => {
return strapi
.query(componentModel.uid)
.create(value, { transacting })
.then(component => {
return joinModel.forge().save(
{
[foreignKey]: entry.id,
component_type: componentModel.collectionName,
component_id: component.id,
field: key,
order,
},
{ transacting }
);
});
};
for (let key of componentKeys) {
const attr = model.attributes[key];
const { type } = attr;
switch (type) {
case 'component': {
const { component, required = false, repeatable = false } = attr;
const componentModel = strapi.components[component];
if (!isDraft && required === true && !_.has(attributes, key)) {
const err = new Error(`Component ${key} is required`);
err.status = 400;
throw err;
}
if (!_.has(attributes, key)) continue;
const componentValue = attributes[key];
if (repeatable === true) {
await Promise.all(
componentValue.map((value, idx) =>
createComponentAndLink({
componentModel,
value,
key,
order: idx + 1,
})
)
);
} else {
if (componentValue === null) continue;
await createComponentAndLink({
componentModel,
key,
value: componentValue,
order: 1,
});
}
break;
}
case 'dynamiczone': {
const { required = false } = attr;
if (!isDraft && required === true && !_.has(attributes, key)) {
const err = new Error(`Dynamiczone ${key} is required`);
err.status = 400;
throw err;
}
if (!_.has(attributes, key)) continue;
const dynamiczoneValues = attributes[key];
await Promise.all(
dynamiczoneValues.map((value, idx) => {
const component = value.__component;
const componentModel = strapi.components[component];
return createComponentAndLink({
componentModel,
value: _.omit(value, ['__component']),
key,
order: idx + 1,
});
})
);
break;
}
}
}
}
async function updateComponents(entry, attributes, { transacting }) {
if (componentKeys.length === 0) return;
const joinModel = model.componentsJoinModel;
const { foreignKey } = joinModel;
const updateOrCreateComponentAndLink = async ({ componentModel, key, value, order }) => {
// check if value has an id then update else create
if (_.has(value, componentModel.primaryKey)) {
return strapi
.query(componentModel.uid)
.update(
{
[componentModel.primaryKey]: value[componentModel.primaryKey],
},
value,
{ transacting }
)
.then(component => {
return joinModel
.where({
[foreignKey]: entry.id,
component_type: componentModel.collectionName,
component_id: component.id,
field: key,
})
.save(
{
order,
},
{ transacting, patch: true, require: false }
);
});
}
// create
return strapi
.query(componentModel.uid)
.create(value, { transacting })
.then(component => {
return joinModel.forge().save(
{
[foreignKey]: entry.id,
component_type: componentModel.collectionName,
component_id: component.id,
field: key,
order,
},
{ transacting }
);
});
};
for (let key of componentKeys) {
// if key isn't present then don't change the current component data
if (!_.has(attributes, key)) continue;
const attr = model.attributes[key];
const { type } = attr;
switch (type) {
case 'component': {
const { component, repeatable = false } = attr;
const componentModel = strapi.components[component];
const componentValue = attributes[key];
if (repeatable === true) {
await deleteOldComponents(entry, componentValue, {
key,
joinModel,
componentModel,
transacting,
});
await Promise.all(
componentValue.map((value, idx) => {
return updateOrCreateComponentAndLink({
componentModel,
key,
value,
order: idx + 1,
});
})
);
} else {
await deleteOldComponents(entry, componentValue, {
key,
joinModel,
componentModel,
transacting,
});
if (componentValue === null) continue;
await updateOrCreateComponentAndLink({
componentModel,
key,
value: componentValue,
order: 1,
});
}
break;
}
case 'dynamiczone': {
const dynamiczoneValues = attributes[key];
await deleteDynamicZoneOldComponents(entry, dynamiczoneValues, {
key,
joinModel,
transacting,
});
await Promise.all(
dynamiczoneValues.map((value, idx) => {
const component = value.__component;
const componentModel = strapi.components[component];
return updateOrCreateComponentAndLink({
componentModel,
value: _.omit(value, ['__component']),
key,
order: idx + 1,
});
})
);
break;
}
}
}
return;
}
async function deleteDynamicZoneOldComponents(entry, values, { key, joinModel, transacting }) {
const idsToKeep = values.reduce((acc, value) => {
const component = value.__component;
const componentModel = strapi.components[component];
if (_.has(value, componentModel.primaryKey)) {
acc.push({
id: value[componentModel.primaryKey].toString(),
component: componentModel,
});
}
return acc;
}, []);
const allIds = await joinModel
.query(qb => {
qb.where(joinModel.foreignKey, entry.id).andWhere('field', key);
})
.fetchAll({ transacting })
.map(el => {
const componentKey = Object.keys(strapi.components).find(
key => strapi.components[key].collectionName === el.get('component_type')
);
return {
id: el.get('component_id').toString(),
component: strapi.components[componentKey],
};
});
// verify the provided ids are realted to this entity.
idsToKeep.forEach(({ id, component }) => {
if (!allIds.find(el => el.id === id && el.component.uid === component.uid)) {
const err = new Error(
`Some of the provided components in ${key} are not related to the entity`
);
err.status = 400;
throw err;
}
});
const idsToDelete = allIds.reduce((acc, { id, component }) => {
if (!idsToKeep.find(el => el.id === id && el.component.uid === component.uid)) {
acc.push({
id,
component,
});
}
return acc;
}, []);
if (idsToDelete.length > 0) {
await joinModel
.query(qb => {
qb.where('field', key);
qb.where(qb => {
idsToDelete.forEach(({ id, component }) => {
qb.orWhere(qb => {
qb.where('component_id', id).andWhere('component_type', component.collectionName);
});
});
});
})
.destroy({ transacting });
for (const idToDelete of idsToDelete) {
const { id, component } = idToDelete;
const model = strapi.query(component.uid);
await model.delete({ [model.primaryKey]: id }, { transacting });
}
}
}
async function deleteOldComponents(
entry,
componentValue,
{ key, joinModel, componentModel, transacting }
) {
const componentArr = Array.isArray(componentValue) ? componentValue : [componentValue];
const idsToKeep = componentArr
.filter(el => _.has(el, componentModel.primaryKey))
.map(el => el[componentModel.primaryKey].toString());
const allIds = await joinModel
.where({
[joinModel.foreignKey]: entry.id,
field: key,
})
.fetchAll({ transacting })
.map(el => el.get('component_id').toString());
// verify the provided ids are realted to this entity.
idsToKeep.forEach(id => {
if (!allIds.includes(id)) {
const err = new Error(
`Some of the provided components in ${key} are not related to the entity`
);
err.status = 400;
throw err;
}
});
const idsToDelete = _.difference(allIds, idsToKeep);
if (idsToDelete.length > 0) {
await joinModel
.query(qb => qb.whereIn('component_id', idsToDelete).andWhere('field', key))
.destroy({ transacting, require: false });
await strapi
.query(componentModel.uid)
.delete({ [`${componentModel.primaryKey}_in`]: idsToDelete }, { transacting });
}
}
async function deleteComponents(entry, { transacting }) {
if (componentKeys.length === 0) return;
const joinModel = model.componentsJoinModel;
const { foreignKey } = joinModel;
for (let key of componentKeys) {
const attr = model.attributes[key];
const { type } = attr;
switch (type) {
case 'component': {
const { component } = attr;
const componentModel = strapi.components[component];
const ids = await joinModel
.where({
[foreignKey]: entry.id,
field: key,
})
.fetchAll({ transacting })
.map(el => el.get('component_id'));
await strapi
.query(componentModel.uid)
.delete({ [`${componentModel.primaryKey}_in`]: ids }, { transacting });
await joinModel
.where({
[foreignKey]: entry.id,
field: key,
})
.destroy({ transacting, require: false });
break;
}
case 'dynamiczone': {
const { components } = attr;
const componentJoins = await joinModel
.where({
[foreignKey]: entry.id,
field: key,
})
.fetchAll({ transacting })
.map(el => ({
id: el.get('component_id'),
componentType: el.get('component_type'),
}));
for (const compo of components) {
const { uid, collectionName } = strapi.components[compo];
const model = strapi.query(uid);
const toDelete = componentJoins.filter(el => el.componentType === collectionName);
if (toDelete.length > 0) {
await model.delete(
{
[`${model.primaryKey}_in`]: toDelete.map(el => el.id),
},
{ transacting }
);
}
}
await joinModel
.where({
[foreignKey]: entry.id,
field: key,
})
.destroy({ transacting, require: false });
break;
}
}
}
}
  // Public query API for this model; `delete` maps to deleteMany (which
  // delegates to deleteOne for primary-key filters).
  return {
    findOne,
    find,
    create,
    update,
    delete: deleteMany,
    count,
    search,
    countSearch,
  };
};
/**
 * Builds a knex query callback implementing the `_q` full-text search:
 * a case-insensitive LIKE across all string-typed columns (plus numeric
 * columns when the query parses as a number, and the primary key when its
 * type is searchable), with per-dialect quoting and escaping.
 * @param {*} model
 * @param {*} params
 */
const buildSearchQuery = ({ model, params }) => qb => {
  const query = params._q;
  // Relational attributes are excluded — only scalar columns are searched.
  const associations = model.associations.map(x => x.alias);
  const stringTypes = ['string', 'text', 'uid', 'email', 'enumeration', 'richtext'];
  const numberTypes = ['biginteger', 'integer', 'decimal', 'float'];
  const searchColumns = Object.keys(model._attributes)
    .filter(attribute => !associations.includes(attribute))
    .filter(attribute => stringTypes.includes(model._attributes[attribute].type));
  // Numeric columns are only searchable when the query itself is numeric.
  if (!_.isNaN(_.toNumber(query))) {
    const numberColumns = Object.keys(model._attributes)
      .filter(attribute => !associations.includes(attribute))
      .filter(attribute => numberTypes.includes(model._attributes[attribute].type));
    searchColumns.push(...numberColumns);
  }
  if ([...numberTypes, ...stringTypes].includes(model.primaryKeyType)) {
    searchColumns.push(model.primaryKey);
  }
  // Per-dialect LIKE with escaped wildcards; Postgres casts to text so
  // non-string columns can be matched case-insensitively.
  switch (model.client) {
    case 'pg':
      searchColumns.forEach(attr =>
        qb.orWhereRaw(
          `"${model.collectionName}"."${attr}"::text ILIKE ?`,
          `%${escapeQuery(query, '*%\\')}%`
        )
      );
      break;
    case 'sqlite3':
      searchColumns.forEach(attr =>
        qb.orWhereRaw(
          `"${model.collectionName}"."${attr}" LIKE ? ESCAPE '\\'`,
          `%${escapeQuery(query, '*%\\')}%`
        )
      );
      break;
    case 'mysql':
      searchColumns.forEach(attr =>
        qb.orWhereRaw(
          `\`${model.collectionName}\`.\`${attr}\` LIKE ?`,
          `%${escapeQuery(query, '*%\\')}%`
        )
      );
      break;
  }
};
|
sixdouglas/quarkus | independent-projects/bootstrap/app-model/src/main/java/io/quarkus/maven/dependency/DependencyFlags.java | package io.quarkus.maven.dependency;
/**
 * Bit flags describing how a dependency participates in a Quarkus
 * application build. Flags are combined into a single {@code int} bitmask;
 * each constant below occupies a distinct bit. (Interface fields are
 * implicitly {@code public static final}; the modifiers are kept for
 * explicitness.)
 */
public interface DependencyFlags {
    /* @formatter:off */
    /** The dependency was declared as optional. */
    public static final int OPTIONAL                   = 0b0000001;
    /** The dependency is a direct (first-level) dependency of the project. */
    public static final int DIRECT                     = 0b0000010;
    /** The dependency is on the runtime classpath. */
    public static final int RUNTIME_CP                 = 0b0000100;
    /** The dependency is on the deployment (build-time) classpath. */
    public static final int DEPLOYMENT_CP              = 0b0001000;
    /** The artifact is the runtime artifact of a Quarkus extension. */
    public static final int RUNTIME_EXTENSION_ARTIFACT = 0b0010000;
    /** The dependency resolves to a module of the current workspace. */
    public static final int WORKSPACE_MODULE           = 0b0100000;
    /** The dependency can be hot-reloaded in dev mode. */
    public static final int RELOADABLE                 = 0b1000000;
    /* @formatter:on */
}
|
FishWannaFly/gmall | gmall-oms/src/main/java/com/atguigu/gmall/oms/listener/OmsListener.java | package com.atguigu.gmall.oms.listener;
import com.alibaba.fastjson.JSON;
import com.atguigu.gmall.oms.entity.OrderEntity;
import com.atguigu.gmall.oms.mapper.OrderMapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.rabbitmq.client.Channel;
import org.springframework.amqp.AmqpException;
import org.springframework.amqp.core.ExchangeTypes;
import org.springframework.amqp.core.Message;
import org.springframework.amqp.rabbit.annotation.Exchange;
import org.springframework.amqp.rabbit.annotation.Queue;
import org.springframework.amqp.rabbit.annotation.QueueBinding;
import org.springframework.amqp.rabbit.annotation.RabbitListener;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
import java.io.IOException;
import java.util.List;
/**
 * RabbitMQ listeners for order lifecycle messages.
 *
 * Bug fix: on failure, the original code called {@code basicReject} for a
 * redelivered message and then unconditionally called {@code basicNack} on
 * the same delivery tag — settling the delivery twice, which closes the
 * channel with a protocol error. The reject/nack paths are now exclusive.
 */
@Component
public class OmsListener {
    @Autowired
    RabbitTemplate rabbitTemplate;
    @Autowired
    OrderMapper orderMapper;

    /**
     * Handles order-timeout dead-letter messages: marks the order (by its
     * order_sn token) as closed (status 4) only if it is still unpaid
     * (status 0), then acks the message.
     */
    @RabbitListener(bindings = @QueueBinding(
            value = @Queue(value = "order-queue", durable = "true"),
            exchange = @Exchange(value = "wms-exchange", ignoreDeclarationExceptions = "true", type = ExchangeTypes.TOPIC),
            key = "order.update.dead"
    ))
    public void listener(String orderToken, Channel channel, Message message) throws IOException {
        final long deliveryTag = message.getMessageProperties().getDeliveryTag();
        try {
            OrderEntity orderEntity = new OrderEntity();
            orderEntity.setStatus(4);
            orderMapper.update(orderEntity, new QueryWrapper<OrderEntity>().eq("order_sn", orderToken).eq("status", 0));
            channel.basicAck(deliveryTag, false);
        } catch (Exception e) {
            if (message.getMessageProperties().getRedelivered()) {
                // Second failure: discard instead of looping forever.
                channel.basicReject(deliveryTag, false);
            } else {
                // First failure: requeue for one retry.
                channel.basicNack(deliveryTag, false, true);
            }
            e.printStackTrace();
        }
    }

    /**
     * Handles successful-payment messages: marks the still-unpaid order as
     * paid (status 1), triggers stock deduction via "stock.minus", then
     * acks the message. Blank tokens are acked without any update.
     */
    @RabbitListener(bindings = @QueueBinding(
            value = @Queue(value = "order-success-queue", durable = "true"),
            exchange = @Exchange(value = "wms-exchange", ignoreDeclarationExceptions = "true", type = ExchangeTypes.TOPIC),
            key = "order.success"
    ))
    public void listener2(String orderToken, Channel channel, Message message) throws IOException {
        final long deliveryTag = message.getMessageProperties().getDeliveryTag();
        try {
            if (!StringUtils.isEmpty(orderToken)) {
                OrderEntity orderEntity = new OrderEntity();
                orderEntity.setStatus(1);
                orderMapper.update(orderEntity, new QueryWrapper<OrderEntity>().eq("order_sn", orderToken).eq("status", 0));
                rabbitTemplate.convertAndSend("wms-exchange", "stock.minus", orderToken);
            }
            channel.basicAck(deliveryTag, false);
        } catch (Exception e) {
            if (message.getMessageProperties().getRedelivered()) {
                // Second failure: discard instead of looping forever.
                channel.basicReject(deliveryTag, false);
            } else {
                // First failure: requeue for one retry.
                channel.basicNack(deliveryTag, false, true);
            }
            e.printStackTrace();
        }
    }
}
|
chencang1980/mockcpp | tests/3rdparty/testngpp/include/testngpp/utils/OptionList.h |
#ifndef __TESTNGPP_OPTION_LIST_H
#define __TESTNGPP_OPTION_LIST_H
#include <list>
#include <string>
#include <testngpp/testngpp.h>
#include <testngpp/utils/StringList.h>
TESTNGPP_NS_START
// Parsed command-line arguments: getopt-style (flag, value) option pairs
// plus the remaining positional arguments.
struct OptionList
{
   typedef std::pair<std::string, std::string> Option;
   typedef std::list<Option> Options;
   typedef StringList Args;

   Options options;  // (flag, value) pairs, in parse order
   Args args;        // positional (non-option) arguments

   // Populates options/args from argv according to the option spec string.
   void parse(int argc, char** argv, const char* optstr);

   // True when the given flag appeared on the command line.
   bool hasOption(const std::string& flag);

   // Value of `option`, or `defaultValue` when it was not supplied.
   std::string getSingleOption
         ( const std::string& option
         , const std::string& defaultValue);

   // Unsigned variant of getSingleOption.
   unsigned int
   getSingleUnsignedOption
         ( const std::string& option
         , const unsigned int defaultValue);
};
TESTNGPP_NS_END
#endif
|
KisaragiEffective/OpenCommandBlock | build/proguard6.0.3/core/src/proguard/classfile/visitor/ParallelAllClassVisitor.java | <reponame>KisaragiEffective/OpenCommandBlock
/*
* ProGuard -- shrinking, optimization, obfuscation, and preverification
* of Java bytecode.
*
* Copyright (c) 2002-2018 GuardSquare NV
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package proguard.classfile.visitor;
import proguard.classfile.*;
import java.util.*;
import java.util.concurrent.*;
/**
* This ClassPoolVisitor will visit all Clazz objects of the class pool
* in a parallel way. For each thread, a separate ClassVisitor will be
* created using {@link ClassVisitorFactory#createClassVisitor()}.
* <p>
* The number of parallel threads is coupled to the number of available
* processors:
* <pre>
* parallel_threads = Runtime.getRuntime().availableProcessors() - 1;
* </pre>
* <p>
 * It is possible to override the number of threads by setting the
 * JVM system property {@code parallel.threads} to an integer > 0.
*
* @author <NAME>
*/
public class ParallelAllClassVisitor
implements   ClassPoolVisitor
{
    // Number of visitor threads, resolved once at class-load time.
    private static final int THREAD_COUNT;

    static {
        Integer threads = null;
        try {
            // Optional override via the "parallel.threads" JVM system property.
            String threadCountString = System.getProperty("parallel.threads");
            if (threadCountString != null)
            {
                threads = Integer.parseInt(threadCountString);
            }
        }
        catch (Exception ex) {}

        // Default: available processors minus one; an explicit override is
        // capped at the number of available processors.
        threads = threads == null ?
            Runtime.getRuntime().availableProcessors() - 1 :
            Math.min(threads.intValue(), Runtime.getRuntime().availableProcessors());

        THREAD_COUNT = threads.intValue();
    }

    /**
     * A factory for ClassVisitor objects.
     */
    public interface ClassVisitorFactory
    {
        /**
         * Creates a ClassVisitor that will be used during
         * parallel visiting of classes in a ClassPool.
         */
        ClassVisitor createClassVisitor();
    }

    private final ClassVisitorFactory classVisitorFactory;

    /**
     * Create a new ParallelAllClassVisitor that will use the given factory
     * to visit all classes in a ClassPool in a parallel way.
     */
    public ParallelAllClassVisitor(ClassVisitorFactory classVisitorFactory)
    {
        this.classVisitorFactory = classVisitorFactory;
    }

    // Implementations for ClassPoolVisitor.

    public void visitClassPool(ClassPool classPool)
    {
        if (THREAD_COUNT <= 1)
        {
            // Fallback to single thread execution if the thread count
            // was overridden by the system property.
            classPool.classesAccept(classVisitorFactory.createClassVisitor());
        }
        else
        {
            ExecutorService executor = Executors.newFixedThreadPool(THREAD_COUNT, new MyThreadFactory());
            // Each worker thread carries its own ClassVisitor instance
            // (see MyClassVisitorThread), so visitor state is never shared.
            MyThreadedClassVisitor classVisitor = new MyThreadedClassVisitor(executor);
            classPool.classesAccept(classVisitor);
            try
            {
                // Shutdown the executor service to release memory.
                executor.shutdown();
                // Rethrow any exception that was thrown in the executor threads.
                classVisitor.awaitTermination();
            }
            catch (InterruptedException e)
            {
                throw new RuntimeException("Parallel execution is taking too long", e);
            }
            catch (ExecutionException e)
            {
                throw new RuntimeException(e.getCause());
            }
        }
    }

    // Creates worker threads that each own a dedicated ClassVisitor.
    private class MyThreadFactory
    implements    ThreadFactory
    {
        private int threadCounter = 0;

        public Thread newThread(Runnable runnable)
        {
            return new MyClassVisitorThread(++threadCounter, runnable);
        }
    }

    // Worker thread holding a per-thread ClassVisitor.
    private class MyClassVisitorThread
    extends       Thread
    {
        private final ClassVisitor classVisitor = classVisitorFactory.createClassVisitor();

        public MyClassVisitorThread(int counter, Runnable runnable)
        {
            super(runnable, "Parallel Class Visitor " + counter);
        }
    }

    // Fans each visited class out to the executor; the submitted task fetches
    // the visitor belonging to whichever worker thread runs it.
    private static class MyThreadedClassVisitor
    implements           ClassVisitor
    {
        private final ExecutorService executorService;
        private final List<Future> futures = new ArrayList<Future>();

        public MyThreadedClassVisitor(ExecutorService executorService)
        {
            this.executorService = executorService;
        }

        // Blocks until all submitted tasks finish, propagating their failures.
        public void awaitTermination() throws ExecutionException, InterruptedException
        {
            for (Future future : futures)
            {
                future.get();
            }
        }

        // Implementations for ClassVisitor.

        public void visitLibraryClass(LibraryClass libraryClass)
        {
            submitClassToExecutorService(libraryClass);
        }

        public void visitProgramClass(ProgramClass programClass)
        {
            submitClassToExecutorService(programClass);
        }

        private void submitClassToExecutorService(final Clazz clazz)
        {
            futures.add(executorService.submit(new Runnable()
            {
                public void run()
                {
                    // Cast is safe: all pool threads come from MyThreadFactory.
                    MyClassVisitorThread thread = (MyClassVisitorThread)Thread.currentThread();
                    clazz.accept(thread.classVisitor);
                }
            }));
        }
    }
}
|
GorchakovIgor/sedgewick-algorithms-coursera | src/test/java/org/sedgewick/algorithms/part_one/week_four/question_three/TaxicabNumbersTest.java | package org.sedgewick.algorithms.part_one.week_four.question_three;
import org.junit.jupiter.api.Test;
import java.util.Arrays;
import java.util.List;
import static org.junit.jupiter.api.Assertions.assertEquals;
class TaxicabNumbersTest {
@Test
void test() {
List<List<Integer>> result = new TaxicabNumbers().find(13);
assertEquals(1, result.size());
assertEquals(Arrays.asList(9, 10, 1, 12), result.get(0));
}
@Test
void test2() {
List<List<Integer>> result = new TaxicabNumbers().find(17);
assertEquals(2, result.size());
assertEquals(Arrays.asList(9, 15, 2, 16), result.get(1));
}
} |
kevinvandervlist/rewrite | rewrite-gradle/src/main/java/org/openrewrite/gradle/UpgradePluginVersion.java | <gh_stars>100-1000
/*
* Copyright 2021 the original author or authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* https://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openrewrite.gradle;
import io.github.resilience4j.retry.Retry;
import io.github.resilience4j.retry.RetryConfig;
import io.github.resilience4j.retry.RetryRegistry;
import io.vavr.CheckedFunction1;
import lombok.EqualsAndHashCode;
import lombok.Value;
import okhttp3.ConnectionSpec;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
import org.intellij.lang.annotations.Language;
import org.openrewrite.*;
import org.openrewrite.groovy.GroovyVisitor;
import org.openrewrite.internal.ListUtils;
import org.openrewrite.internal.StringUtils;
import org.openrewrite.internal.lang.Nullable;
import org.openrewrite.java.MethodMatcher;
import org.openrewrite.java.tree.Expression;
import org.openrewrite.java.tree.J;
import org.openrewrite.semver.Semver;
import org.openrewrite.semver.VersionComparator;
import java.net.SocketTimeoutException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeoutException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@Value
@EqualsAndHashCode(callSuper = true)
public class UpgradePluginVersion extends Recipe {

    // Retry only transient network timeouts against the plugin portal.
    private static final RetryConfig retryConfig = RetryConfig.custom()
            .retryOnException(throwable -> throwable instanceof SocketTimeoutException ||
                    throwable instanceof TimeoutException)
            .build();

    private static final RetryRegistry retryRegistry = RetryRegistry.of(retryConfig);

    private static final OkHttpClient httpClient = new OkHttpClient.Builder()
            .connectionSpecs(Arrays.asList(ConnectionSpec.CLEARTEXT, ConnectionSpec.MODERN_TLS, ConnectionSpec.COMPATIBLE_TLS))
            .build();

    private static final Retry gradlePluginPortalRetry = retryRegistry.retry("GradlePluginPortal");

    // HTTP call wrapped in the retry policy defined above.
    private static final CheckedFunction1<Request, Response> sendRequest = Retry.decorateCheckedFunction(
            gradlePluginPortalRetry,
            request -> httpClient.newCall(request).execute());

    @Option(displayName = "Plugin id",
            description = "The `ID` part of `plugin { ID }`, as a glob expression.",
            example = "com.jfrog.bintray")
    String pluginIdPattern;

    @Option(displayName = "New version",
            description = "An exact version number, or node-style semver selector used to select the version number.",
            example = "29.X")
    String newVersion;

    @Option(displayName = "Version pattern",
            description = "Allows version selection to be extended beyond the original Node Semver semantics. So for example," +
                    "Setting 'version' to \"25-29\" can be paired with a metadata pattern of \"-jre\" to select Guava 29.0-jre",
            example = "-jre",
            required = false)
    @Nullable
    String versionPattern;

    @Override
    public String getDisplayName() {
        return "Update a Gradle plugin by id";
    }

    @Override
    public String getDescription() {
        return "Update a Gradle plugin by id to a later version.";
    }

    @Override
    public Validated validate() {
        Validated validated = super.validate();
        if (newVersion != null) {
            validated = validated.and(Semver.validate(newVersion, versionPattern));
        }
        return validated;
    }

    // Restrict the recipe to Gradle build files.
    @Override
    protected TreeVisitor<?, ExecutionContext> getSingleSourceApplicableTest() {
        return new IsBuildGradle<>();
    }

    @Override
    protected TreeVisitor<?, ExecutionContext> getVisitor() {
        VersionComparator versionComparator = Semver.validate(newVersion, versionPattern).getValue();
        assert versionComparator != null;

        // Matches the `id("...")` / `version("...")` calls of the Gradle
        // plugins DSL, e.g. plugins { id 'x' version '1.0' }.
        MethodMatcher pluginMatcher = new MethodMatcher("PluginSpec id(..)", false);
        MethodMatcher versionMatcher = new MethodMatcher("Plugin version(..)", false);

        return new GroovyVisitor<ExecutionContext>() {
            @Override
            public J visitMethodInvocation(J.MethodInvocation method, ExecutionContext executionContext) {
                // Only `id(<literal>).version(<literal>)` chains are rewritten.
                if (versionMatcher.matches(method) &&
                        method.getSelect() instanceof J.MethodInvocation &&
                        pluginMatcher.matches(method.getSelect())) {
                    List<Expression> pluginArgs = ((J.MethodInvocation) method.getSelect()).getArguments();
                    if (pluginArgs.get(0) instanceof J.Literal) {
                        String pluginId = (String) ((J.Literal) pluginArgs.get(0)).getValue();
                        assert pluginId != null;
                        if (StringUtils.matchesGlob(pluginId, pluginIdPattern)) {
                            List<Expression> versionArgs = method.getArguments();
                            if (versionArgs.get(0) instanceof J.Literal) {
                                String currentVersion = (String) ((J.Literal) versionArgs.get(0)).getValue();
                                if (currentVersion != null) {
                                    // Replace both the literal's value and its
                                    // printed source when a newer match exists.
                                    return versionComparator.upgrade(currentVersion, availablePluginVersions(pluginId))
                                            .map(upgradeVersion -> method.withArguments(ListUtils.map(versionArgs, v -> {
                                                J.Literal versionLiteral = (J.Literal) v;
                                                assert versionLiteral.getValueSource() != null;
                                                return versionLiteral
                                                        .withValue(upgradeVersion)
                                                        .withValueSource(versionLiteral.getValueSource().replace(currentVersion, upgradeVersion));
                                            })))
                                            .orElse(method);
                                }
                            }
                        }
                    }
                }
                return super.visitMethodInvocation(method, executionContext);
            }
        };
    }

    /**
     * Scrapes the Gradle plugin portal page for the given plugin id and
     * returns the version strings found there. Best effort: any network or
     * parse failure yields an empty list.
     */
    public static List<String> availablePluginVersions(String pluginId) {
        String uri = "https://plugins.gradle.org/plugin/" + pluginId;
        Request.Builder request = new Request.Builder().url(uri).get();
        try (Response response = sendRequest.apply(request.build())) {
            if (response.isSuccessful() && response.body() != null) {
                @SuppressWarnings("ConstantConditions")
                @Language("xml")
                String responseBody = response.body().string();
                List<String> versions = new ArrayList<>();
                // Older versions appear as /plugin/<id>/<version> links.
                Matcher matcher = Pattern.compile("href=\"/plugin/" + pluginId + "/([^\"]+)\"").matcher(responseBody);
                int lastFind = 0;
                while (matcher.find(lastFind)) {
                    versions.add(matcher.group(1));
                    lastFind = matcher.end();
                }
                // The newest version is rendered as "Version x.y (latest)".
                matcher = Pattern.compile("Version ([^\\s]+) \\(latest\\)").matcher(responseBody);
                if (matcher.find()) {
                    versions.add(matcher.group(1));
                }
                return versions;
            }
        } catch (Throwable throwable) {
            // NOTE(review): deliberately best-effort, but catching Throwable
            // also swallows Errors -- consider narrowing to Exception.
            return Collections.emptyList();
        }
        return Collections.emptyList();
    }
}
|
lukasHD/adventofcode2019 | Day10/day10.py | # --- Day 10: Monitoring Station ---
from math import gcd, sqrt, pow, atan, atan2, degrees
from itertools import groupby, cycle
def read_file(fname):
    """Return the file's lines with surrounding whitespace stripped."""
    with open(fname) as handle:
        return [line.strip() for line in handle.readlines()]
def read_file2(fname):
    """Return the entire file contents as a single string."""
    with open(fname) as handle:
        data = handle.read()
    return data
# 5x5 sample asteroid map from the puzzle description
# ('#' = asteroid, '.' = empty space).
inpA = """.#..#
.....
#####
....#
...##"""
def get_astroids(inStr):
    """Return the (x, y) coordinates of every '#' cell, in row-major order."""
    return [
        (col, row)
        for row, line in enumerate(inStr.split('\n'))
        for col, char in enumerate(line)
        if char == '#'
    ]
def get_station(inStr):
    """Return the (x, y) position of the 'X' marker, or None if absent."""
    for row, line in enumerate(inStr.split('\n')):
        for col, char in enumerate(line):
            if char == 'X':
                return (col, row)
def get_direction(p1, p2):
    """Reduced integer step vector from p1 toward p2, or None if p1 == p2."""
    dx = p2[0] - p1[0]
    dy = p2[1] - p1[1]
    divisor = gcd(dx, dy)
    if divisor == 0:
        # Both deltas are zero: no direction exists.
        return None
    return (int(dx / divisor), int(dy / divisor))
def get_dist(p1, p2):
    """Euclidean distance between p1 and p2."""
    run = p2[0] - p1[0]
    rise = p2[1] - p1[1]
    return sqrt(pow(run, 2) + pow(rise, 2))
def get_angle(p1, p2):
    """Bearing from p1 to p2 in degrees, clockwise, with 0 pointing straight up
    (negative y, i.e. screen coordinates)."""
    run = p2[0] - p1[0]
    rise = p2[1] - p1[1]
    return (180 - degrees(atan2(run, rise))) % 360
def get_number_visible(base, coordinates):
    """Count asteroids visible from base: one per unique direction (a closer
    asteroid blocks everything behind it on the same ray)."""
    seen = set()
    for target in coordinates:
        direction = get_direction(base, target)
        if direction is not None:
            seen.add(direction)
    return len(seen)
def get_direction_distance(base, coordinates):
    """Return [target, distance, angle] for every target except base itself."""
    entries = []
    for target in coordinates:
        distance = get_dist(base, target)
        if distance != 0:
            entries.append([target, distance, get_angle(base, target)])
    return entries
def find_best_station(coordinates):
    """Return (location, count) for the asteroid that sees the most others."""
    visibility = [
        [astroid, get_number_visible(astroid, coordinates)]
        for astroid in coordinates
    ]
    bestLocation, numVisible = max(visibility, key=lambda entry: entry[1])
    return bestLocation, numVisible
def run_small_test():
    """Smoke test for part 1 on the 5x5 sample map (prints only)."""
    print("small test")
    coordinates = get_astroids(inpA)
    # print(get_direction((0,0),(1,2)))
    # print(get_direction((0,0),(2,4)))
    # print(get_direction((1,2),(0,0)))
    # print(get_number_visible((0,1), coordinates))
    find_best_station(coordinates)
    return 0
def shoot(station, asteroids):
    """Vaporize asteroids clockwise from the station, skipping targets hidden
    behind one already shot this revolution; returns targets in firing order.

    Mutates a local working list; `asteroids` itself is not modified.
    """
    results = get_direction_distance(station, asteroids)
    results.sort(key= lambda x: (x[2], x[1]))
    # is sorted by angle, then by distance
    print(results)
    old_angle = -1   # angle of the previous shot; -1 means a fresh revolution
    counter = 1      # running shot number (logging only)
    idx = 0          # cursor into the shrinking results list
    shooting_list = []
    while len(results) > 0:
        print("pos {} of {} old_angle = {}".format(idx, len(results), old_angle))
        i = results[idx]
        if i[2] == old_angle:
            # Same angle as the previous kill: this target is occluded for the
            # rest of this revolution, so advance past it.
            print("charge Laser and continue")
            idx += 1
            if idx > (len(results)-1):
                print("wrap-around inplace")
                # wrap around and forget all charging
                if len(results) > 1:
                    idx = idx % (len(results)-1)
                else:
                    idx = 0
                old_angle = -1
            continue
        print("Shot #{} at {}".format(counter, i[0]))
        shooting_list.append(i[0])
        old_angle = i[2]
        counter += 1
        results.remove(i)
        if len(results) == 0:
            print("finished")
            break
        if idx > (len(results)-1):
            print("wrap-around iterator")
            # wrap around and forget all charging
            idx = idx % (len(results)-1)
            old_angle = -1
    return shooting_list
def run_small_test2():
    """Smoke test for part 2 on a sample map with a pre-marked 'X' station."""
    print("small test 2")
    print("************")
    inp = """.#....#####...#..
##...##.#####..##
##...#...#.#####.
..#.....X...###..
..#.#.....#....##"""
    print(inp)
    station = get_station(inp)
    asteroids = get_astroids(inp)
    print(station)
    print(asteroids)
    history_of_targets = shoot(station, asteroids)
    print(history_of_targets)
def runPartTwo():
    """Solve part 2: vaporize from the best station, report the 200th hit."""
    inp = read_file2('input')
    station, numVisible = find_best_station(get_astroids(inp))
    asteroids = get_astroids(inp)
    print(station)
    print(asteroids)
    history_of_targets = shoot(station, asteroids)
    print(history_of_targets)
    # Puzzle answer format: 100 * x + y of the 200th vaporized asteroid.
    twohundred = history_of_targets[200-1]
    solution = 100*twohundred[0] + twohundred[1]
    print(solution)
def runPartOne():
    """Solve part 1: print and return the best station's visibility count."""
    inp = read_file2('input')
    bestLocation, numVisible = find_best_station(get_astroids(inp))
    print(bestLocation)
    print(numVisible)
    return numVisible
if __name__ == '__main__':
    # Sample runs are kept commented out; uncomment to re-verify examples.
    #run_small_test()
    runPartOne()
    #run_small_test2()
    runPartTwo()
ESA-PhiLab/hypernet | python_research/experiments/multiple_feature_learning/train_multiple_fatures.py | <gh_stars>10-100
import numpy as np
import os.path
from keras.models import load_model
from keras.callbacks import ModelCheckpoint, EarlyStopping, CSVLogger
from python_research.experiments.utils import TimeHistory
from python_research.experiments.multiple_feature_learning.builders.keras_builders import build_multiple_features_model, build_settings_for_dataset
from python_research.experiments.utils import \
TrainTestIndices
from python_research.experiments.utils import Dataset
from python_research.experiments.utils import parse_multiple_features
def main():
    """Train the multiple-feature model and print test-set metrics.

    Loads the original band data plus four derived feature sets (area,
    stddev, diagonal, moment) that all share one train/val/test split,
    trains with early stopping and checkpointing, then evaluates the best
    saved model and dumps per-epoch timings.
    """
    args = parse_multiple_features()
    # The original data set defines the split; every derived feature set
    # below reuses exactly the same indices so samples stay aligned.
    original_data = Dataset(args.original_path,
                            args.gt_path,
                            args.nb_samples,
                            args.neighborhood)
    train_test_indices = TrainTestIndices(original_data.train_indices,
                                          original_data.val_indices,
                                          original_data.test_indices)
    area_data = Dataset(args.area_path,
                        args.gt_path,
                        args.nb_samples,
                        args.neighborhood,
                        train_test_indices=train_test_indices)
    stddev_data = Dataset(args.stddev_path,
                          args.gt_path,
                          args.nb_samples,
                          args.neighborhood,
                          train_test_indices=train_test_indices)
    diagonal_data = Dataset(args.diagonal_path,
                            args.gt_path,
                            args.nb_samples,
                            args.neighborhood,
                            train_test_indices=train_test_indices)
    moment_data = Dataset(args.moment_path,
                          args.gt_path,
                          args.nb_samples,
                          args.neighborhood,
                          train_test_indices=train_test_indices)
    # Stop when validation stops improving; keep only the best checkpoint.
    early = EarlyStopping(patience=args.patience)
    logger = CSVLogger(os.path.join(args.output_dir, args.output_name) + ".csv")
    checkpoint = ModelCheckpoint(os.path.join(args.output_dir, args.output_name) + "_model",
                                 save_best_only=True)
    timer = TimeHistory()
    settings = build_settings_for_dataset(args.neighborhood)
    # Class count: unique labels minus one (background label excluded).
    model = build_multiple_features_model(settings,
                                          len(np.unique(original_data.y)) - 1,
                                          original_data.x.shape[-1],
                                          area_data.x.shape[-1],
                                          stddev_data.x.shape[-1],
                                          diagonal_data.x.shape[-1],
                                          moment_data.x.shape[-1])
    model.fit(x=[original_data.x_train,
                 area_data.x_train,
                 stddev_data.x_train,
                 diagonal_data.x_train,
                 moment_data.x_train],
              y=original_data.y_train,
              validation_data=([original_data.x_val,
                                area_data.x_val,
                                stddev_data.x_val,
                                diagonal_data.x_val,
                                moment_data.x_val],
                               original_data.y_val),
              epochs=200,
              batch_size=args.batch_size,
              callbacks=[early,
                         logger,
                         checkpoint,
                         timer],
              verbose=args.verbosity)
    # Reload the best checkpoint (not the last epoch) before evaluating.
    model = load_model(os.path.join(args.output_dir, args.output_name) + "_model")
    print(model.evaluate([original_data.x_test,
                          area_data.x_test,
                          stddev_data.x_test,
                          diagonal_data.x_test,
                          moment_data.x_test], original_data.y_test))
    times = timer.times
    np.savetxt(os.path.join(args.output_dir, args.output_name) + "_times.csv", times, fmt="%1.4f")
if __name__ == "__main__":
    # Script entry point.
    main()
|
ma2ke/crawlab | backend/routes/system.go | package routes
import (
"crawlab/constants"
"crawlab/entity"
"crawlab/services"
"fmt"
"github.com/gin-gonic/gin"
"net/http"
"strings"
)
// GetLangList responds with the programming languages available on the node
// identified by the :id path parameter.
func GetLangList(c *gin.Context) {
	id := c.Param("id")
	c.JSON(http.StatusOK, Response{
		Status:  "ok",
		Message: "success",
		Data:    services.GetLangList(id),
	})
}
// GetDepList searches the given language's dependency registry for packages
// matching the dep_name query on the given node.
func GetDepList(c *gin.Context) {
	nodeId := c.Param("id")
	lang := c.Query("lang")
	depName := c.Query("dep_name")

	var depList []entity.Dependency
	switch lang {
	case constants.Python:
		list, err := services.GetPythonDepList(nodeId, depName)
		if err != nil {
			HandleError(http.StatusInternalServerError, c, err)
			return
		}
		depList = list
	case constants.Nodejs:
		list, err := services.GetNodejsDepList(nodeId, depName)
		if err != nil {
			HandleError(http.StatusInternalServerError, c, err)
			return
		}
		depList = list
	default:
		HandleErrorF(http.StatusBadRequest, c, fmt.Sprintf("%s is not implemented", lang))
		return
	}

	c.JSON(http.StatusOK, Response{
		Status:  "ok",
		Message: "success",
		Data:    depList,
	})
}
// GetInstalledDepList lists the dependencies already installed for the given
// language on the given node. Master-node queries run locally; worker nodes
// are queried remotely.
func GetInstalledDepList(c *gin.Context) {
	nodeId := c.Param("id")
	lang := c.Query("lang")
	var depList []entity.Dependency
	if lang == constants.Python {
		if services.IsMasterNode(nodeId) {
			list, err := services.GetPythonLocalInstalledDepList(nodeId)
			if err != nil {
				HandleError(http.StatusInternalServerError, c, err)
				return
			}
			depList = list
		} else {
			list, err := services.GetPythonRemoteInstalledDepList(nodeId)
			if err != nil {
				HandleError(http.StatusInternalServerError, c, err)
				return
			}
			depList = list
		}
	} else if lang == constants.Nodejs {
		if services.IsMasterNode(nodeId) {
			list, err := services.GetNodejsLocalInstalledDepList(nodeId)
			if err != nil {
				HandleError(http.StatusInternalServerError, c, err)
				return
			}
			depList = list
		} else {
			list, err := services.GetNodejsRemoteInstalledDepList(nodeId)
			if err != nil {
				HandleError(http.StatusInternalServerError, c, err)
				return
			}
			depList = list
		}
	} else {
		// Unsupported language.
		HandleErrorF(http.StatusBadRequest, c, fmt.Sprintf("%s is not implemented", lang))
		return
	}
	c.JSON(http.StatusOK, Response{
		Status:  "ok",
		Message: "success",
		Data:    depList,
	})
}
// GetAllDepList returns up to 10 dependency names for the given language whose
// names start with the dep_name prefix (case-insensitive).
func GetAllDepList(c *gin.Context) {
	lang := c.Param("lang")
	depName := c.Query("dep_name")
	// Fetch the full dependency name list for the language.
	var list []string
	if lang == constants.Python {
		_list, err := services.GetPythonDepListFromRedis()
		if err != nil {
			HandleError(http.StatusInternalServerError, c, err)
			return
		}
		list = _list
	} else {
		HandleErrorF(http.StatusBadRequest, c, fmt.Sprintf("%s is not implemented", lang))
		return
	}
	// Keep only names with the requested (case-insensitive) prefix.
	var depList []string
	for _, name := range list {
		if strings.HasPrefix(strings.ToLower(name), strings.ToLower(depName)) {
			depList = append(depList, name)
		}
	}
	// Return at most the first 10 matches.
	// NOTE(review): the original comment said "first 20" but the code caps
	// at 10 -- confirm which limit is intended.
	var returnList []string
	for i, name := range depList {
		if i >= 10 {
			break
		}
		returnList = append(returnList, name)
	}
	c.JSON(http.StatusOK, Response{
		Status:  "ok",
		Message: "success",
		Data:    returnList,
	})
}
// InstallDep installs a single dependency for the given language on the given
// node. Master-node requests run locally; worker nodes are invoked remotely.
func InstallDep(c *gin.Context) {
	type ReqBody struct {
		Lang    string `json:"lang"`
		DepName string `json:"dep_name"`
	}
	nodeId := c.Param("id")
	var reqBody ReqBody
	if err := c.ShouldBindJSON(&reqBody); err != nil {
		HandleError(http.StatusBadRequest, c, err)
		return
	}
	if reqBody.Lang == constants.Python {
		if services.IsMasterNode(nodeId) {
			_, err := services.InstallPythonLocalDep(reqBody.DepName)
			if err != nil {
				HandleError(http.StatusInternalServerError, c, err)
				return
			}
		} else {
			_, err := services.InstallPythonRemoteDep(nodeId, reqBody.DepName)
			if err != nil {
				HandleError(http.StatusInternalServerError, c, err)
				return
			}
		}
	} else if reqBody.Lang == constants.Nodejs {
		if services.IsMasterNode(nodeId) {
			_, err := services.InstallNodejsLocalDep(reqBody.DepName)
			if err != nil {
				HandleError(http.StatusInternalServerError, c, err)
				return
			}
		} else {
			_, err := services.InstallNodejsRemoteDep(nodeId, reqBody.DepName)
			if err != nil {
				HandleError(http.StatusInternalServerError, c, err)
				return
			}
		}
	} else {
		HandleErrorF(http.StatusBadRequest, c, fmt.Sprintf("%s is not implemented", reqBody.Lang))
		return
	}
	// TODO: check if install is successful
	c.JSON(http.StatusOK, Response{
		Status:  "ok",
		Message: "success",
	})
}
// UninstallDep removes a single dependency for the given language on the given
// node. Master-node requests run locally; worker nodes are invoked remotely.
func UninstallDep(c *gin.Context) {
	type ReqBody struct {
		Lang    string `json:"lang"`
		DepName string `json:"dep_name"`
	}
	nodeId := c.Param("id")
	var reqBody ReqBody
	if err := c.ShouldBindJSON(&reqBody); err != nil {
		HandleError(http.StatusBadRequest, c, err)
		// BUG FIX: previously execution fell through with a zero-valued
		// reqBody after a bind failure (InstallDep already returns here).
		return
	}
	if reqBody.Lang == constants.Python {
		if services.IsMasterNode(nodeId) {
			_, err := services.UninstallPythonLocalDep(reqBody.DepName)
			if err != nil {
				HandleError(http.StatusInternalServerError, c, err)
				return
			}
		} else {
			_, err := services.UninstallPythonRemoteDep(nodeId, reqBody.DepName)
			if err != nil {
				HandleError(http.StatusInternalServerError, c, err)
				return
			}
		}
	} else if reqBody.Lang == constants.Nodejs {
		if services.IsMasterNode(nodeId) {
			_, err := services.UninstallNodejsLocalDep(reqBody.DepName)
			if err != nil {
				HandleError(http.StatusInternalServerError, c, err)
				return
			}
		} else {
			_, err := services.UninstallNodejsRemoteDep(nodeId, reqBody.DepName)
			if err != nil {
				HandleError(http.StatusInternalServerError, c, err)
				return
			}
		}
	} else {
		HandleErrorF(http.StatusBadRequest, c, fmt.Sprintf("%s is not implemented", reqBody.Lang))
		return
	}
	// TODO: check if uninstall is successful
	c.JSON(http.StatusOK, Response{
		Status:  "ok",
		Message: "success",
	})
}
// GetDepJson returns registry metadata for a single dependency.
func GetDepJson(c *gin.Context) {
	depName := c.Param("dep_name")
	lang := c.Param("lang")
	var dep entity.Dependency
	if lang == constants.Python {
		_dep, err := services.FetchPythonDepInfo(depName)
		if err != nil {
			HandleError(http.StatusInternalServerError, c, err)
			return
		}
		dep = _dep
	} else {
		HandleErrorF(http.StatusBadRequest, c, fmt.Sprintf("%s is not implemented", lang))
		return
	}
	// Allow clients to cache package metadata for one day (86400 s).
	c.Header("Cache-Control", "max-age=86400")
	c.JSON(http.StatusOK, Response{
		Status:  "ok",
		Message: "success",
		Data:    dep,
	})
}
// InstallLang installs a language runtime on the given node. Only Node.js is
// currently supported; master-node requests run locally, worker nodes remotely.
func InstallLang(c *gin.Context) {
	type ReqBody struct {
		Lang string `json:"lang"`
	}
	nodeId := c.Param("id")
	var reqBody ReqBody
	if err := c.ShouldBindJSON(&reqBody); err != nil {
		HandleError(http.StatusBadRequest, c, err)
		return
	}
	if reqBody.Lang == constants.Nodejs {
		if services.IsMasterNode(nodeId) {
			_, err := services.InstallNodejsLocalLang()
			if err != nil {
				HandleError(http.StatusInternalServerError, c, err)
				return
			}
		} else {
			_, err := services.InstallNodejsRemoteLang(nodeId)
			if err != nil {
				HandleError(http.StatusInternalServerError, c, err)
				return
			}
		}
	} else {
		HandleErrorF(http.StatusBadRequest, c, fmt.Sprintf("%s is not implemented", reqBody.Lang))
		return
	}
	// TODO: check if install is successful
	c.JSON(http.StatusOK, Response{
		Status:  "ok",
		Message: "success",
	})
}
|
uvsq-versailles/Master_1 | Semestre_2/INF201/TD/src/main/java/fr/uvsq/pglp/dao/crud/DAO.java | <gh_stars>1-10
package fr.uvsq.pglp.dao.crud;
/**
 * Generic data-access object exposing the four CRUD operations for
 * entities of type {@code T}.
 *
 * @param <T> the persistent entity type
 */
public interface DAO<T> {
    /*
     * CRUD:
     * - Create
     * - Read
     * - Update
     * - Delete
     */

    /** Persists {@code obj}; returns the stored instance. */
    T create(T obj);

    /** Loads the entity identified by {@code id}; missing-id behavior is implementation-defined. */
    T read(String id);

    /** Updates {@code obj}; returns the updated instance. */
    T update(T obj);

    /** Removes {@code obj} from the store. */
    void delete(T obj);
}
|
forivall/tacoscript-incubator | specs/core/base-statement/try/finally/expected.t.js | try{}
finally{
cleanup(stuff);
}
|
seekdoor/Bangumi | src/screens/tinygrail/index/menu-item.js | /*
* @Author: czy0729
* @Date: 2019-09-15 10:54:09
* @Last Modified by: czy0729
* @Last Modified time: 2021-12-31 18:16:10
*/
import React from 'react'
import { Flex, Text, Touchable, Iconfont } from '@components'
import { _ } from '@stores'
import { obc } from '@utils/decorators'
import { t } from '@utils/fetch'
/**
 * A single navigation tile on the tinygrail index screen.
 *
 * Pressing the tile reports a tracking event and then pushes `pathname`
 * onto the navigation stack with `config` as route params.
 */
function MenuItem(
  { style, index, iconStyle, pathname, config, title, icon },
  { navigation }
) {
  const styles = memoStyles()
  // Tiles per row: 2 in portrait, 4 in landscape; the first tile of each
  // landscape row drops its left gutter (styles.left).
  const num = _.portrait(2, 4)
  return (
    <Touchable
      style={[styles.container, _.isLandscape && index % num === 0 && styles.left]}
      onPress={() => {
        t('小圣杯.跳转', {
          to: pathname,
          ...config
        })
        navigation.push(pathname, config)
      }}
    >
      <Flex style={[styles.block, style]}>
        <Text type='tinygrailPlain' size={18} bold>
          {title}
        </Text>
        <Iconfont
          style={iconStyle ? [styles.icon, iconStyle] : styles.icon}
          name={icon}
          size={46}
        />
      </Flex>
    </Touchable>
  )
}

export default obc(MenuItem)
const memoStyles = _.memoStyles(() => {
  // Tile width and gutter derive from the per-row tile count.
  const num = _.portrait(2, 4)
  const { width, marginLeft } = _.grid(num)
  return {
    container: {
      marginVertical: marginLeft / 2,
      marginLeft,
      borderRadius: _.radiusSm,
      overflow: 'hidden'
    },
    block: {
      width,
      // Fixed aspect ratio: height is 40% of the tile width.
      height: width * 0.4,
      paddingLeft: 20,
      backgroundColor: _.tSelect(_.colorTinygrailBorder, _.colorTinygrailBg)
    },
    left: {
      // First tile in a landscape row: no left gutter.
      marginLeft: 0
    },
    icon: {
      // Oversized, semi-transparent glyph bleeding off the tile's right edge.
      position: 'absolute',
      top: '50%',
      right: -10,
      marginTop: -24,
      color: _.colorTinygrailIcon,
      opacity: 0.24
    }
  }
})
|
timxor/leetcode-journal | solutions/LeetCode/Python3/20.py | <filename>solutions/LeetCode/Python3/20.py
__________________________________________________________________________________________________
32ms
class Solution:
    def isValid(self, s: 'str') -> 'bool':
        """Validate bracket nesting; characters other than brackets are ignored."""
        # closer -> opener lookup
        pairs = {']': '[', ')': '(', '}': '{'}
        openers = set(pairs.values())
        stack = []
        for ch in s:
            if ch in openers:
                stack.append(ch)
            elif ch in pairs:
                if not stack or stack[-1] != pairs[ch]:
                    return False
                stack.pop()
        return not stack
__________________________________________________________________________________________________
36ms
class Solution:
    def isValid(self, s: str) -> bool:
        """Validate bracket nesting by pushing the *expected* closer for each
        opener; any non-opener character must match the top of that stack."""
        if not s:
            return True
        expected = []
        closer_of = {'[': ']', '{': '}', '(': ')'}
        for ch in s:
            closer = closer_of.get(ch)
            if closer is not None:
                expected.append(closer)
            elif not expected or expected.pop() != ch:
                return False
        return expected == []
__________________________________________________________________________________________________
40ms
class Solution:
    def isValid(self, s: str) -> bool:
        """Validate bracket nesting; non-bracket characters are ignored."""
        opening = {'(': ')', '{': '}', '[': ']'}
        closing = {closer: opener for opener, closer in opening.items()}
        stack = []
        for ch in s:
            if ch in opening:
                stack.append(ch)
            elif ch in closing:
                if not stack:
                    return False
                if stack.pop() != closing[ch]:
                    return False
        return not stack
__________________________________________________________________________________________________
12128 kb
class Solution:
    def isValid(self, s: 'str') -> 'bool':
        """Validate brackets; every non-closer character is pushed, so stray
        letters make the input invalid (matches the original behavior)."""
        match = {')': '(', ']': '[', '}': '{'}
        pending = ''
        for ch in s:
            if ch in match:
                if pending == '' or pending[-1] != match[ch]:
                    return False
                pending = pending[:-1]
            else:
                pending += ch
        return pending == ''
__________________________________________________________________________________________________
12136 kb
class Solution:
    def isValid(self, s: 'str') -> 'bool':
        """Validate brackets; every non-closer character is pushed, so stray
        letters make the input invalid (matches the original behavior)."""
        partner = {')': '(', ']': '[', '}': '{'}
        stack = []
        for ch in s:
            opener = partner.get(ch)
            if opener is None:
                stack.append(ch)
            elif stack and stack[-1] == opener:
                stack.pop()
            else:
                return False
        return not stack
__________________________________________________________________________________________________
|
wildmaples/mutual-aid | spec/models/organization_spec.rb | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe Organization, type: :model do
  describe 'is_host validation' do
    # An organization flagged as host is only valid while no other host exists.
    subject(:organization) { build :organization, is_host: true }

    context 'with an existing instance owner' do
      before do
        # Reuse an already-present host organization, or create one.
        Organization.host_organization || create(:organization, is_host: true)
      end

      it { is_expected.to_not be_valid }
    end

    context 'without an existing instance owner' do
      before do
        # in case test db was seeded
        Organization.host_organization.update!(is_host: false) if Organization.host_organization
      end

      it { is_expected.to be_valid }
    end
  end
end
|
Ragnarson/people | spec/controllers/dashboard_controller_spec.rb | require 'spec_helper'
describe DashboardController do
  before(:each) do
    sign_in create(:user, first_name: "Marian")
  end

  describe "#index" do
    # One membership already over, one still running.
    let!(:old_membership) { create(:membership, ends_at: 1.hour.ago) }
    let!(:new_membership) { create(:membership, ends_at: 1.hour.from_now) }

    before { get :index }

    it "responds successfully with an HTTP 200 status code" do
      expect(response).to be_success
      expect(response.status).to eq(200)
    end

    describe "exposing unfinished memberships" do
      it "exposes unfinished memberships" do
        expect(controller.memberships).to include(new_membership)
      end

      # Typo fix in the example description: "memebrships" -> "memberships".
      it "doesn't expose finished memberships" do
        expect(controller.memberships).not_to include(old_membership)
      end
    end
  end
end
|
wh1t3lord/kotek | engine/kotek.core.containers.vector/include/kotek_std_alias_vector.h | <reponame>wh1t3lord/kotek<filename>engine/kotek.core.containers.vector/include/kotek_std_alias_vector.h
#pragma once
#include <kotek.core.defines.static.cpp/include/kotek_core_defines_static_cpp.h>
#include <kotek.core.memory.cpu/include/kotek_core_memory_cpu.h>
#ifdef KOTEK_USE_BOOST_LIBRARY
#include <boost/container/vector.hpp>
#else
#include <vector>
#endif
namespace Kotek
{
	namespace ktk
	{
		// ktk::vector<T>: project-wide vector alias backed by the mimalloc
		// STL allocator. Boost.Container's vector is used when
		// KOTEK_USE_BOOST_LIBRARY is defined, std::vector otherwise.
#ifdef KOTEK_USE_BOOST_LIBRARY
		template <typename Type>
		using vector = boost::container::vector<Type, mi_stl_allocator<Type>>;
#else
		template <typename Type>
		using vector = std::vector<Type, mi_stl_allocator<Type>>;
#endif
	} // namespace ktk
} // namespace Kotek
bxl295/m4extreme | include/External/stlib/packages/cuda/array.h | <filename>include/External/stlib/packages/cuda/array.h
// -*- C++ -*-
/*!
\file cuda/array.h
\brief Implementation of std::tr1::array for cuda.
*/
#if !defined(__cuda_array_h__)
#define __cuda_array_h__
//#include <iterator>
#include <cstddef>
namespace std {
namespace tr1 {

// Minimal re-implementation of std::tr1::array usable from CUDA device code:
// every member is annotated __device__ __host__ and nothing throws.
// NOTE(review): unlike the standard container, at() performs NO bounds
// checking here (exceptions are unavailable in device code) — it is an
// alias for operator[].
template<typename _T, std::size_t _N>
struct array {
typedef _T value_type;
typedef value_type& reference;
typedef const value_type& const_reference;
typedef value_type* iterator;
typedef const value_type* const_iterator;
typedef std::size_t size_type;
typedef std::ptrdiff_t difference_type;
#if 0
// CONTINUE: I can enable these if I write a CUDA replacement.
typedef std::reverse_iterator<iterator> reverse_iterator;
typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
#endif
// Public data member keeps the type an aggregate (brace-initializable).
// A zero-sized array still reserves one element (the "_N ? _N : 1" trick).
// Don't use the aligned attribute, which would cause 16 byte alignment.
//value_type _data[_N ? _N : 1] __attribute__((__aligned__));
value_type _data[_N ? _N : 1];
// No explicit construct/copy/destroy for aggregate type.

// Fill every element with x.
void
__device__
__host__
assign(const value_type& x) {
for (std::size_t i = 0; i != size(); ++i) {
_data[i] = x;
}
}

// Element-wise swap with another array of the same type and size.
void
__device__
__host__
swap(array& other) {
value_type tmp;
for (std::size_t i = 0; i != size(); ++i) {
tmp = _data[i];
_data[i] = other._data[i];
other._data[i] = tmp;
}
}

// Iterators (plain pointers into _data).
iterator
__device__
__host__
begin() {
return iterator(&_data[0]);
}
const_iterator
__device__
__host__
begin() const {
return const_iterator(&_data[0]);
}
iterator
__device__
__host__
end() {
return iterator(&_data[_N]);
}
const_iterator
__device__
__host__
end() const {
return const_iterator(&_data[_N]);
}
#if 0
reverse_iterator
__device__
__host__
rbegin() {
return reverse_iterator(end());
}
const_reverse_iterator
__device__
__host__
rbegin() const {
return const_reverse_iterator(end());
}
reverse_iterator
__device__
__host__
rend() {
return reverse_iterator(begin());
}
const_reverse_iterator
__device__
__host__
rend() const {
return const_reverse_iterator(begin());
}
#endif
// Capacity: fixed at the compile-time template parameter _N.
size_type
__device__
__host__
size() const {
return _N;
}
size_type
__device__
__host__
max_size() const {
return _N;
}
bool
__device__
__host__
empty() const {
return size() == 0;
}
// Element access. No bounds checking anywhere, including at().
reference
__device__
__host__
operator[](const size_type n) {
return _data[n];
}
const_reference
__device__
__host__
operator[](const size_type n) const {
return _data[n];
}
reference
__device__
__host__
at(const size_type n) {
return _data[n];
}
const_reference
__device__
__host__
at(const size_type n) const {
return _data[n];
}
reference
__device__
__host__
front() {
return *begin();
}
const_reference
__device__
__host__
front() const {
return *begin();
}
// back(): for _N == 0 this dereferences end(); same degenerate behavior
// as libstdc++'s tr1 implementation (calling back() on an empty array is
// undefined anyway).
reference
__device__
__host__
back() {
return _N ? *(end() - 1) : *end();
}
const_reference
__device__
__host__
back() const {
return _N ? *(end() - 1) : *end();
}
// Direct access to the underlying storage.
_T*
__device__
__host__
data() {
return &_data[0];
}
const _T*
__device__
__host__
data() const {
return &_data[0];
}
};
} // namespace tr1
} // namespace std
#endif
|
carolinezhao/React-learning | to-do-list/src/containers/VisibleTodoList.js | import { connect } from 'react-redux'
import { toggleTodo } from '../actions'
import TodoList from '../components/TodoList'
import { getVisibleTodos } from '../selectors'
// Container component.
// Reads a slice of the Redux state tree and supplies it to the
// presentational component through props.
// Generated with React Redux's connect().
// mapStateToProps specifies how the current state is mapped onto the
// presentational component's props.
// mapDispatchToProps receives dispatch() and returns the callback props
// to inject into the presentational component.
const mapStateToProps = state => ({
  todos: getVisibleTodos(state)
})
const mapDispatchToProps = dispatch => ({
  toggleTodo: id => dispatch(toggleTodo(id))
})
export default connect(
  mapStateToProps,
  mapDispatchToProps
)(TodoList)
trosenkranz/logic | src/logic/bitstream_iterator.cpp | /* Copyright 2018 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "logic/bitstream_iterator.hpp"
#include <cstdint>
using logic::bitstream_iterator;
// Bits per byte: used by operator-> to translate a bit index into a byte offset.
static constexpr bitstream_iterator::difference_type BITS = 8;

// Construct an iterator at bit 0 of the given bit buffer.
bitstream_iterator::bitstream_iterator(pointer bits) noexcept :
    m_bits{bits},
    m_index{0}
{ }

// Construct at an unsigned bit index (stored as a signed difference_type).
bitstream_iterator::bitstream_iterator(pointer bits,
        size_type index) noexcept :
    m_bits{bits},
    m_index{difference_type(index)}
{ }

// Construct at a signed bit index (may be negative during arithmetic).
bitstream_iterator::bitstream_iterator(pointer bits,
        difference_type index) noexcept :
    m_bits{bits},
    m_index{index}
{ }

// Pre-increment: advance one bit.
auto bitstream_iterator::operator++() noexcept -> bitstream_iterator& {
    ++m_index;
    return *this;
}

// Post-increment: return the previous position.
auto bitstream_iterator::operator++(int) noexcept -> const bitstream_iterator {
    bitstream_iterator tmp{*this};
    ++m_index;
    return tmp;
}

// Pre-decrement: step back one bit.
auto bitstream_iterator::operator--() noexcept -> bitstream_iterator& {
    --m_index;
    return *this;
}

// Post-decrement: return the previous position.
auto bitstream_iterator::operator--(int) noexcept -> const bitstream_iterator {
    bitstream_iterator tmp{*this};
    --m_index;
    return tmp;
}

// Random-access arithmetic over the bit index; the buffer pointer is shared.
auto bitstream_iterator::operator+(
        difference_type n) const noexcept -> bitstream_iterator {
    return {m_bits, m_index + n};
}

auto bitstream_iterator::operator+=(
        difference_type n) noexcept -> bitstream_iterator& {
    m_index += n;
    return *this;
}

auto bitstream_iterator::operator-(
        difference_type n) const noexcept -> bitstream_iterator {
    return {m_bits, m_index - n};
}

auto bitstream_iterator::operator-=(
        difference_type n) noexcept -> bitstream_iterator& {
    m_index -= n;
    return *this;
}

// Indexed access: negative effective indices are clamped to bit 0 rather
// than wrapping — callers never see a negative bit position.
auto bitstream_iterator::operator[](
        difference_type n) noexcept -> reference {
    auto index = m_index + n;
    return {m_bits, (index >= 0) ? size_type(index) : 0};
}

auto bitstream_iterator::operator[](
        difference_type n) const noexcept -> reference {
    auto index = m_index + n;
    return {m_bits, (index >= 0) ? size_type(index) : 0};
}

// Dereference: a proxy reference to the current bit (clamped at 0, as above).
auto bitstream_iterator::operator*() noexcept -> reference {
    return {m_bits, (m_index >= 0) ? size_type(m_index) : 0};
}

auto bitstream_iterator::operator*() const noexcept -> reference {
    return {m_bits, (m_index >= 0) ? size_type(m_index) : 0};
}

// Arrow: raw pointer to the byte that holds the current bit.
auto bitstream_iterator::operator->() noexcept -> pointer {
    return static_cast<std::uint8_t*>(m_bits) + (m_index / BITS);
}

auto bitstream_iterator::operator->() const noexcept -> pointer {
    return static_cast<std::uint8_t*>(m_bits) + (m_index / BITS);
}

// True when the iterator is bound to a buffer.
bitstream_iterator::operator bool() const noexcept {
    return (nullptr != m_bits);
}

// NOTE(review): all comparisons below compare only m_index and ignore
// m_bits, so iterators into *different* bitstreams at the same bit index
// compare equal. Comparing iterators from different containers is UB for
// standard iterators, so this is defensible — but worth confirming it is
// intentional.
bool bitstream_iterator::operator<(
        const bitstream_iterator& other) const noexcept {
    return (m_index < other.m_index);
}

bool bitstream_iterator::operator<=(
        const bitstream_iterator& other) const noexcept {
    return (m_index <= other.m_index);
}

bool bitstream_iterator::operator>(
        const bitstream_iterator& other) const noexcept {
    return (m_index > other.m_index);
}

bool bitstream_iterator::operator>=(
        const bitstream_iterator& other) const noexcept {
    return (m_index >= other.m_index);
}

bool bitstream_iterator::operator==(
        const bitstream_iterator& other) const noexcept {
    return (m_index == other.m_index);
}

bool bitstream_iterator::operator!=(
        const bitstream_iterator& other) const noexcept {
    return (m_index != other.m_index);
}
|
truthiswill/intellij-community | plugins/groovy/testdata/refactoring/changeSignatureForJava/AlreadyHandled_after.java | <gh_stars>1-10
// Expected "after" fixture for a change-signature refactoring test
// (presumably: both methods carry `throws Exception` after the refactoring
// propagated the new signature from foo() to its caller bar()).
class Test {
    void foo () throws Exception {
    }
    void bar () throws Exception {
        foo();
    }
}
|
dolphin57/spring-security-actual-code | spring-security-app/src/main/java/io/dolphin/security/app/processor/AppSocialAuthenticationFilterPostProcessor.java | package io.dolphin.security.app.processor;
import io.dolphin.security.core.social.SocialAuthenticationFilterPostProcessor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.web.authentication.AuthenticationSuccessHandler;
import org.springframework.social.security.SocialAuthenticationFilter;
import org.springframework.stereotype.Component;
/**
 * @Description: Installs the app-specific success handler on the Spring
 *               Social authentication filter.
 * @Author: dolphin
 * @Since: 2021-5-15 11:38
 */
@Component
public class AppSocialAuthenticationFilterPostProcessor implements SocialAuthenticationFilterPostProcessor {
    // Success handler that returns a token after authentication succeeds.
    @Autowired
    private AuthenticationSuccessHandler dolphinAuthenticationSuccessHandler;
    @Override
    public void process(SocialAuthenticationFilter socialAuthenticationFilter) {
        // Replace the filter's success handler with the token-issuing one so
        // app (non-browser) clients receive a token response.
        socialAuthenticationFilter.setAuthenticationSuccessHandler(dolphinAuthenticationSuccessHandler);
    }
}
|
jafar-rezaei/s_d | src/redux/reducers/playerReducer.js | import { getRandomColor, getRandomFace } from '../../constants/utilities';
import {
ADD_NEW_PLAYER,
MOVE_PLAYER,
CHANGE_PLAYER,
CHANGE_PLAYER_POSITION_IN_BOX,
SET_PLAYER_PERSISTENCE,
ADD_SNAKE_BITE,
RECORD_DICE_LOG,
ADD_LADDER_HIKE,
RESET_PLAYERS,
} from '../../constants/types';
// Color/face for the seed player are generated once at module load so the
// identical values appear in both `current` and `all[0]` below.
const firstPlayerColor = getRandomColor();
const firstPlayerFace = getRandomFace();

// Initial reducer state: a single player standing on square 1.
// NOTE(review): the player object is duplicated verbatim between `current`
// and `all[0]` (and again in _generateNewPlayer) — keep them in sync.
const initialState = {
  count: 1, // total number of players created so far
  persistence: 1, // set via SET_PLAYER_PERSISTENCE — confirm semantics with callers
  current: {
    id: 1,
    pos: 1, // board square (1-based)
    color: firstPlayerColor,
    path: [1], // history of squares visited
    diceLog: [], // history of dice rolls
    boxPosition: -1, //center
    snakeBites: 0,
    ladderHikes: 0,
    ...firstPlayerFace,
  },
  all: [
    {
      id: 1,
      pos: 1,
      color: firstPlayerColor,
      path: [1],
      diceLog: [],
      boxPosition: -1, //center
      snakeBites: 0,
      ladderHikes: 0,
      ...firstPlayerFace,
    },
  ],
};
// Reducer for the players slice. Every case returns a new state object;
// cases that touch the current player update the matching entry in `all`
// AND the `current` copy, since the two are kept as separate objects.
export function players(state = initialState, action) {
  switch (action.type) {
    // Append a freshly generated player and bump the player count.
    case ADD_NEW_PLAYER:
      const newPlayer = _generateNewPlayer(state.count);
      return {
        ...state,
        all: [...state.all, newPlayer],
        count: state.count + 1,
      };
    // Move the current player to action.newPos, reset its in-box position
    // to center, and record the square in its path history.
    case MOVE_PLAYER:
      return {
        ...state,
        all: state.all.map(p => {
          if (p.id === state.current.id) {
            return {
              ...p,
              pos: action.newPos,
              boxPosition: -1,
              path: [...p.path, action.newPos],
            };
          }
          return p;
        }),
        current: {
          ...state.current,
          pos: action.newPos,
          boxPosition: -1,
          path: [...state.current.path, action.newPos],
        },
      };
    case SET_PLAYER_PERSISTENCE:
      return {
        ...state,
        persistence: action.persistence,
      };
    // Append a dice result to the current player's roll history.
    case RECORD_DICE_LOG:
      return {
        ...state,
        all: state.all.map(p => {
          if (p.id === state.current.id) {
            return {
              ...p,
              diceLog: [...p.diceLog, action.diceResult],
            };
          }
          return p;
        }),
        current: {
          ...state.current,
          diceLog: [...state.current.diceLog, action.diceResult],
        },
      };
    // Advance the turn to the next player (wrapping after the last one).
    case CHANGE_PLAYER:
      const nextPlayer = _getNextPlayer(state);
      return {
        ...state,
        current: nextPlayer,
      };
    // Reposition a specific player inside its board square; this action
    // carries an explicit playerId, so `current` is only updated when it
    // is the targeted player.
    case CHANGE_PLAYER_POSITION_IN_BOX:
      let curPlayer =
        state.current.id === action.playerId
          ? {
              ...state.current,
              boxPosition: action.newBoxPosition,
            }
          : state.current;
      return {
        ...state,
        all: state.all.map(p => {
          if (p.id === action.playerId) {
            return {
              ...p,
              boxPosition: action.newBoxPosition,
            };
          }
          return p;
        }),
        current: curPlayer,
      };
    // Increment the current player's snake-bite counter.
    case ADD_SNAKE_BITE:
      const newSnakeBites = state.current.snakeBites + 1;
      return {
        ...state,
        all: state.all.map(p => {
          if (p.id === state.current.id) {
            return {
              ...p,
              snakeBites: newSnakeBites,
            };
          }
          return p;
        }),
        current: {
          ...state.current,
          snakeBites: newSnakeBites,
        },
      };
    // Increment the current player's ladder-climb counter.
    case ADD_LADDER_HIKE:
      const newLadderHikes = state.current.ladderHikes + 1;
      return {
        ...state,
        all: state.all.map(p => {
          if (p.id === state.current.id) {
            return {
              ...p,
              ladderHikes: newLadderHikes,
            };
          }
          return p;
        }),
        current: {
          ...state.current,
          ladderHikes: newLadderHikes,
        },
      };
    // Restore the module-load initial state (note: the same seed color/face
    // are reused, since initialState is captured once at module load).
    case RESET_PLAYERS:
      return {
        ...initialState,
      };
    default:
      return state;
  }
}
/*
* Private functions
*/
/*
 * Build the state object for a brand-new player. `curCount` is the number
 * of players that already exist; the new player's id is one past it.
 * Mirrors the shape of the seed player in initialState.
 */
function _generateNewPlayer(curCount) {
  const base = {
    id: curCount + 1,
    color: getRandomColor(),
    pos: 1,
    path: [1],
    diceLog: [],
    boxPosition: -1, // center
    snakeBites: 0,
    ladderHikes: 0,
  };
  // Merge the randomly chosen face fields last, exactly as the spread did.
  return Object.assign(base, getRandomFace());
}
function _getNextPlayer({ all, current, count }) {
return current.id === count
? all[0]
: all.filter(p => p.id === current.id + 1)[0];
}
|
xiaohaogong/spring-data-redis | src/main/java/org/springframework/data/redis/ClusterStateFailureException.java | /*
* Copyright 2015-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.redis;
import org.springframework.dao.DataAccessResourceFailureException;
/**
 * {@link DataAccessResourceFailureException} indicating that the current local snapshot of cluster state no longer
 * represents the actual remote state. This can happen when nodes are removed from the cluster, slots get migrated to
 * other nodes, and so on.
 *
 * @author <NAME>
 * @author <NAME>
 * @since 1.7
 */
public class ClusterStateFailureException extends DataAccessResourceFailureException {

	private static final long serialVersionUID = 333399051713240852L;

	/**
	 * Creates new {@link ClusterStateFailureException}.
	 *
	 * @param msg the detail message.
	 */
	public ClusterStateFailureException(String msg) {
		super(msg);
	}

	/**
	 * Creates new {@link ClusterStateFailureException}.
	 *
	 * @param msg the detail message.
	 * @param cause the nested exception.
	 */
	public ClusterStateFailureException(String msg, Throwable cause) {
		super(msg, cause);
	}
}
|
benbenwt/lisa_docker | new/libr/asm/arch/pyc/opcode_23.c | <reponame>benbenwt/lisa_docker
#include "opcode.h"
/* Build the opcode table for Python 2.3: start from the shared 2.x table,
 * then replace its argument formatters with the 2.3-specific set. */
pyc_opcodes *opcode_23(void) {
	pyc_opcodes *ops = opcode_2x ();
	if (!ops) {
		return NULL;
	}
	/* Record this builder as the table's version signature. */
	ops->version_sig = (void *(*)())opcode_23;
	/* Drop the inherited formatters and register EXTENDED_ARG's. */
	r_list_purge (ops->opcode_arg_fmt);
	add_arg_fmt (ops, "EXTENDED_ARG", format_extended_arg);
	return ops;
}
|
AnandBhatUpb/smlcs-final-submission | results/programs/fifth100/optional_data_creation_test04_true-valid-memsafety.c | <reponame>AnandBhatUpb/smlcs-final-submission<filename>results/programs/fifth100/optional_data_creation_test04_true-valid-memsafety.c
extern void __VERIFIER_error() __attribute__ ((__noreturn__));
#include <stdlib.h>
extern int __VERIFIER_nondet_int(void);
/*
This source code is licensed under the GPLv3 license.
Author: <NAME>.
*/
/* NOTE(review): this file looks like a software-verification benchmark
 * (SV-COMP style, per the __VERIFIER_* hooks and the *-valid-memsafety
 * name). The nondeterministic branches and absent malloc NULL checks are
 * part of the task being verified — do NOT "fix" them. */
struct data_struct {
    int number;
    int *array;
};
typedef struct data_struct *Data;

/* Singly-linked list node carrying optional payload. */
struct node_t {
    Data data;
    struct node_t *next;
};

/* Nondeterministically create a payload: NULL, a payload with a 20-element
 * array of nondet ints, or a payload with array == NULL. */
static Data create_data() {
    // Create optional data
    if(__VERIFIER_nondet_int()) {
        return NULL;
    }
    Data data = malloc(sizeof *data);
    if(__VERIFIER_nondet_int()) {
        data->array = (int*) malloc(20 * sizeof(data->array));
        int counter = 0;
        for(counter = 0; counter < 20; counter++) {
            data->array[counter] = __VERIFIER_nondet_int();
        }
    } else {
        data->array = NULL;
    }
    data->number = __VERIFIER_nondet_int();
    return data;
}

/* Release a payload, tolerating both NULL payload and NULL array. */
static void freeData(Data data) {
    if(data == NULL) {
        return;
    }
    if(data->array != NULL) {
        free(data->array);
    }
    free(data);
}

/* Prepend a new node (with optional payload) to *pointerToList. */
static void append(struct node_t **pointerToList) {
    struct node_t *node = malloc(sizeof *node);
    node->next = *pointerToList;
    node->data = create_data();
    *pointerToList = node;
}

int main() {
    struct node_t *list = NULL;
    /* Create a long singly-linked list with optional data.
     */
    int dataNotFinished = 0; /* NOTE(review): unused; kept as part of the benchmark */
    do {
        append(&list);
    } while(__VERIFIER_nondet_int());
    /*
     Do something with data.
     displayData();
     */
    // free list and data — the property under test is that every allocation
    // above is released exactly once here.
    while (list) {
        struct node_t *next = list->next;
        freeData(list->data);
        free(list);
        list = next;
    }
    return 0;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.