repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
lechium/tvOS135Headers | System/Library/PrivateFrameworks/UIKitCore.framework/UIKBKeyView.h | /*
* This header is generated by classdump-dyld 1.0
* on Sunday, June 7, 2020 at 11:45:32 AM Mountain Standard Time
* Operating System: Version 13.4.5 (Build 17L562)
* Image Source: /System/Library/PrivateFrameworks/UIKitCore.framework/UIKitCore
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>.
*/
#import <UIKitCore/UIKitCore-Structs.h>
#import <UIKitCore/UIView.h>
#import <UIKit/UIKBCacheableView.h>
@class NSString, UIKBTree, UIKBRenderConfig, UIKBRenderFactory, NSMutableDictionary, UIKeyboardMenuView;
@interface UIKBKeyView : UIView <UIKBCacheableView> {
UIKBTree* m_keyplane;
UIKBTree* m_key;
CGRect m_drawFrame;
CFBooleanRef m_allowsCaching;
UIKBRenderConfig* m_renderConfig;
UIKBRenderFactory* m_factory;
NSMutableDictionary* _keyLayers;
int _renderedKeyState;
NSString* _cachedTraitsHashString;
CGColorRef _activeBackgroundColor;
id _activeCompositingFilter;
BOOL _singleRerender;
double _cachedBackgroundOpacity;
BOOL _cachedControlKeyRenderingPreference;
BOOL _renderAsMask;
unsigned long long _cachedAnchorCorner;
unsigned long long _cachedShiftState;
long long _cachedSelector;
UIKeyboardMenuView* _popupMenu;
double _endingTransitionDuration;
}
@property (assign,nonatomic) BOOL renderAsMask; //@synthesize renderAsMask=_renderAsMask - In the implementation block
@property (readonly) long long cachedRenderFlags;
@property (nonatomic,readonly) UIEdgeInsets displayInsets;
@property (nonatomic,retain) NSString * cachedTraitsHashString; //@synthesize cachedTraitsHashString=_cachedTraitsHashString - In the implementation block
@property (assign,nonatomic) unsigned long long cachedAnchorCorner; //@synthesize cachedAnchorCorner=_cachedAnchorCorner - In the implementation block
@property (assign,nonatomic) unsigned long long cachedShiftState; //@synthesize cachedShiftState=_cachedShiftState - In the implementation block
@property (assign,nonatomic) long long cachedSelector; //@synthesize cachedSelector=_cachedSelector - In the implementation block
@property (assign,nonatomic) BOOL cachedControlKeyRenderingPreference; //@synthesize cachedControlKeyRenderingPreference=_cachedControlKeyRenderingPreference - In the implementation block
@property (nonatomic,readonly) UIKBTree * keyplane;
@property (nonatomic,readonly) UIKBTree * key;
@property (assign,nonatomic) CGRect drawFrame;
@property (nonatomic,retain) UIKBRenderConfig * renderConfig;
@property (nonatomic,retain) UIKBRenderFactory * factory;
@property (assign,nonatomic) UIKeyboardMenuView * popupMenu; //@synthesize popupMenu=_popupMenu - In the implementation block
@property (nonatomic,readonly) CGRect variantFrame;
@property (nonatomic,readonly) UIKBKeyView * contentsKeyView;
@property (nonatomic,readonly) BOOL hasRendered;
@property (assign,nonatomic) double endingTransitionDuration; //@synthesize endingTransitionDuration=_endingTransitionDuration - In the implementation block
@property (readonly) unsigned long long hash;
@property (readonly) Class superclass;
@property (copy,readonly) NSString * description;
@property (copy,readonly) NSString * debugDescription;
@property (nonatomic,readonly) NSString * cacheKey;
@property (nonatomic,readonly) BOOL cacheDeferable;
@property (nonatomic,readonly) double cachedWidth;
@property (nonatomic,readonly) BOOL keepNonPersistent;
@property (nonatomic,readonly) long long cacheDeferPriority;
-(void)dealloc;
-(UIKBTree *)key;
-(void)displayLayer:(id)arg1 ;
-(NSString *)cacheKey;
-(void)removeFromSuperview;
-(UIKBRenderFactory *)factory;
-(BOOL)_canDrawContent;
-(void)touchesEnded:(id)arg1 withEvent:(id)arg2 ;
-(void)prepareForDisplay;
-(void)setFactory:(UIKBRenderFactory *)arg1 ;
-(void)willDisplayModalActionView:(id)arg1 withSubTreeKeyView:(id)arg2 completion:(/*^block*/id)arg3 ;
-(void)setRenderConfig:(UIKBRenderConfig *)arg1 ;
-(UIKBRenderConfig *)renderConfig;
-(UIKBKeyView *)contentsKeyView;
-(void)setDrawFrame:(CGRect)arg1 ;
-(id)initWithFrame:(CGRect)arg1 keyplane:(id)arg2 key:(id)arg3 ;
-(int)textEffectsVisibilityLevel;
-(void)dimKeys:(id)arg1 ;
-(UIKBTree *)keyplane;
-(void)configureBackdropView:(id)arg1 forRenderConfig:(id)arg2 ;
-(BOOL)hasRendered;
-(void)updateForKeyplane:(id)arg1 key:(id)arg2 ;
-(unsigned long long)focusableVariantCount;
-(id)layerForRenderFlags:(long long)arg1 ;
-(BOOL)_viewShouldBeOpaque;
-(CGRect)drawFrame;
-(long long)cachedRenderFlags;
-(BOOL)renderAsMask;
-(UIEdgeInsets)displayInsets;
-(long long)cachedSelector;
-(unsigned long long)cachedAnchorCorner;
-(unsigned long long)cachedShiftState;
-(BOOL)cachedControlKeyRenderingPreference;
-(BOOL)allowBackgroundCachingForRenderFlags:(long long)arg1 ;
-(NSString *)cachedTraitsHashString;
-(id)renderFlagsForTraits:(id)arg1 ;
-(BOOL)requiresSublayers;
-(BOOL)_shouldUpdateLayers;
-(void)setCachedTraitsHashString:(NSString *)arg1 ;
-(void)setCachedAnchorCorner:(unsigned long long)arg1 ;
-(void)setCachedShiftState:(unsigned long long)arg1 ;
-(void)setCachedSelector:(long long)arg1 ;
-(void)setCachedControlKeyRenderingPreference:(BOOL)arg1 ;
-(void)_populateLayer:(id)arg1 withContents:(id)arg2 ;
-(UIKeyboardMenuView *)popupMenu;
-(void)setRenderAsMask:(BOOL)arg1 ;
-(id)_generateBackdropMaskImage;
-(id)cacheKeysForRenderFlags:(id)arg1 ;
-(BOOL)cacheDeferable;
-(double)cachedWidth;
-(BOOL)keepNonPersistent;
-(void)drawContentsOfRenderers:(id)arg1 ;
-(CGRect)variantFrame;
-(void)_applyAppearanceInvocations;
-(id)subTreeHitTest:(CGPoint)arg1 ;
-(long long)didInputSubTree:(id)arg1 ;
-(void)hideKeyCap:(BOOL)arg1 ;
-(long long)imageOrientationForLayer:(id)arg1 ;
-(void)changeBackgroundToEnabled;
-(void)changeBackgroundToActiveIfNecessary;
-(void)setPopupMenu:(UIKeyboardMenuView *)arg1 ;
-(double)endingTransitionDuration;
-(void)setEndingTransitionDuration:(double)arg1 ;
@end
|
bicepjai/mypuzzles | others/sorts/mergesort.java | <filename>others/sorts/mergesort.java
import java.util.Arrays;
public class mergesort {
/**
 * Merges two already-sorted int arrays into a single sorted array.
 *
 * Fix: removed the leftover debug println that ran on every comparison and
 * polluted stdout for all callers.
 *
 * @param a1 first sorted input (may be empty)
 * @param a2 second sorted input (may be empty)
 * @return a new array of length a1.length + a2.length in ascending order
 */
public static int[] merge(int[] a1, int[] a2) {
    int[] merged = new int[a1.length + a2.length];
    int i = 0, j = 0, k = 0;
    // Standard two-pointer merge: take the smaller head element each step.
    while (i < a1.length && j < a2.length) {
        if (a1[i] < a2[j]) {
            merged[k++] = a1[i++];
        } else {
            merged[k++] = a2[j++];
        }
    }
    // Copy whichever side still has elements left.
    while (i < a1.length) merged[k++] = a1[i++];
    while (j < a2.length) merged[k++] = a2[j++];
    return merged;
}
/** Demo entry point: merges two pre-sorted arrays and prints inputs and result. */
public static void main(String[] args) {
    int[] left = {4, 7, 14};
    int[] right = {1, 3, 9, 17};
    int[] merged = merge(left, right);
    System.out.println("a1=" + Arrays.toString(left) + " a2=" + Arrays.toString(right) + " merged" + Arrays.toString(merged));
}
} |
AutonomicPerfectionist/myrobotlab | src/org/myrobotlab/document/workflow/WorkflowWorker.java | package org.myrobotlab.document.workflow;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;
import org.myrobotlab.document.Document;
import org.myrobotlab.document.ProcessingStatus;
import org.myrobotlab.document.transformer.AbstractStage;
import org.myrobotlab.document.transformer.StageConfiguration;
import org.myrobotlab.document.transformer.WorkflowConfiguration;
import org.myrobotlab.logging.LoggerFactory;
import org.slf4j.Logger;
/**
*
* WorkflowWorker : this is a list of stages that will poll the workflow queue
* and process documents through that list of stages.
*/
public class WorkflowWorker extends Thread {
public final static Logger log = LoggerFactory.getLogger(WorkflowWorker.class);
boolean processing = false;
private ArrayList<AbstractStage> stages;
private final LinkedBlockingQueue<Document> queue;
// Builds a worker for one workflow: names the thread after the workflow and
// instantiates + starts each configured stage via reflection.
// Throws ClassNotFoundException when a configured stage class cannot be loaded;
// instantiation/access failures are only printed and the stage is skipped.
WorkflowWorker(WorkflowConfiguration workflowConfig, LinkedBlockingQueue<Document> queue) throws ClassNotFoundException {
// set the thread name
this.setName("WorkflowWorker-" + workflowConfig.getName());
this.queue = queue;
stages = new ArrayList<AbstractStage>();
for (StageConfiguration stageConf : workflowConfig.getStages()) {
String stageClass = stageConf.getStageClass().trim();
String stageName = stageConf.getStageName();
log.info("Starting stage: {} class: {}", stageName, stageClass);
// Load through Workflow's class loader so stages resolve against the same classpath.
Class<?> sc = Workflow.class.getClassLoader().loadClass(stageClass);
try {
AbstractStage stageInst = (AbstractStage) sc.newInstance();
stageInst.startStage(stageConf);
addStage(stageInst);
} catch (InstantiationException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IllegalAccessException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
// Worker loop: blocks on the shared queue and pushes each taken document
// through the full stage pipeline (starting at stage 0).
public void run() {
Document doc;
boolean running = true;
while (running) {
try {
doc = queue.take();
// when can this case happen
// NOTE(review): LinkedBlockingQueue.take() never returns null, so this
// branch looks unreachable -- a poison-pill shutdown may have been intended.
if (doc == null) {
log.info("Doc was null from workflow queue. setting running to false.");
running = false;
} else {
processing = true;
// process from the start of the workflow
processDocumentInternal(doc, 0);
processing = false;
}
} catch (InterruptedException e) {
// TODO: handle these properly
log.warn("Workflow Worker Died! {}", e.getMessage());
e.printStackTrace();
}
}
}
// True while a document is actively in the pipeline. Best-effort flag:
// the field is neither volatile nor synchronized.
public boolean isProcessing() {
return processing;
}
/**
 * Runs a document through this workflow's stages, starting at stageOffset.
 *
 * Fix: stageOffset was previously ignored (the loop always started from 0),
 * so child documents emitted by stage N were incorrectly re-run through
 * stages 0..N-1 before continuing. Children now only traverse the stages
 * that follow the stage which produced them.
 *
 * @param doc         the document to process
 * @param stageOffset index of the first stage to apply (0 = whole pipeline)
 */
public void processDocumentInternal(Document doc, int stageOffset) {
    int i = stageOffset;
    for (AbstractStage s : stages.subList(stageOffset, stages.size())) {
        List<Document> childDocs = s.processDocument(doc);
        i++;
        if (childDocs != null) {
            // Child documents continue down the REMAINDER of the pipeline only.
            for (Document childDoc : childDocs) {
                processDocumentInternal(childDoc, i);
            }
        }
        // A stage marked this document as dropped: stop processing it.
        if (doc.getStatus().equals(ProcessingStatus.DROP)) {
            break;
        }
    }
}
// Appends a stage to the end of the pipeline.
public void addStage(AbstractStage stage) {
stages.add(stage);
}
/** Invokes flush() on every stage, in pipeline order. */
public void flush() {
    stages.forEach(AbstractStage::flush);
}
}
|
HHAIE/firebase-test | install_app/install-tools.js | <filename>install_app/install-tools.js<gh_stars>10-100
/* install-tools.js
* Copyright (c) 2019-2021 by <NAME>, https://github.com/david-asher
*
* Helper functions for other install scripts.
*/
'use strict';
const fs = require('fs')
const os = require('os')
const path = require('path')
const { env } = require('process')
const { execSync } = require( 'child_process' )
function getInstallPaths()
{
// process.cwd() is root of electron-firebase folder in node_modules
// process.env.INIT_CWD is root of project folder
// __dirname is postinstall script folder
var moduleRoot, projectRoot
// moduleRoot is the source; projectRoot is the target
if ( undefined == process.env.INIT_CWD ) {
// for local testing, e.g. at project root, run:
// node ./node_modules/electron-firebase/install_app/postinstall.js
moduleRoot = path.dirname( __dirname )
projectRoot = `${process.cwd()}${path.sep}`
}
else {
// normal npm install case
moduleRoot = `${process.cwd()}${path.sep}`
projectRoot = `${process.env.INIT_CWD}${path.sep}`
}
return {
moduleRoot: moduleRoot,
projectRoot: projectRoot
}
}
function getModified( filePath )
{
try {
const fileStats = fs.statSync( filePath )
return new Date( fileStats.mtime )
}
catch (error) {
return null
}
}
// Sets both the access and modified times of a file; defaults to "now"
// when no timestamp is given.
function touchFile( filePath, timeStamp )
{
    const when = timeStamp || new Date()
    fs.utimesSync( filePath, when, when )
}
/*
 * Copies sourceFolder/filename to targetFolder/filename.
 * This function will not overwrite a file that has been modified more
 * recently than the lastUpdate; set lastUpdate to Date.now() to force overwrite.
 * After a successful copy the access and modified times will be set to timeStamp.
 */
function copyFile( filename, sourceFolder, targetFolder, timeStamp, lastUpdate )
{
try {
const sourceFile = path.join( sourceFolder, filename )
const targetFile = path.join( targetFolder, filename )
// check for user modified file and do not overwrite
// (getModified returns null for a missing target; +null > +lastUpdate is false)
const mTimeTarget = getModified( targetFile )
if ( +mTimeTarget > +lastUpdate ) return
// copy the file but we need to update the timestamps ourselves
fs.copyFileSync( sourceFile, targetFile )
touchFile( targetFile, timeStamp )
}
catch (error) {
// NOTE(review): copyFileSync overwrites by default, so EEXIST should only
// occur with COPYFILE_EXCL -- confirm whether this guard is still needed.
if ( error.code == 'EEXIST') return
throw( error )
}
}
// Copies every regular file (subfolders are skipped) from sourceFolder into
// targetFolder, honouring copyFile's do-not-overwrite rules.
function copyFolderFiles( sourceFolder, targetFolder, timeStamp, lastUpdate )
{
    const entries = fs.readdirSync( sourceFolder, { withFileTypes: true } )
    for ( const entry of entries ) {
        if ( entry.isFile() ) {
            copyFile( entry.name, sourceFolder, targetFolder, timeStamp, lastUpdate )
        }
    }
}
// Creates a folder (non-recursive). An already-existing folder is fine;
// any other failure is logged and swallowed (best effort).
function makeFolder( folderPath )
{
try {
fs.mkdirSync( folderPath )
}
catch( error ) {
if ( error && error.code == 'EEXIST' ) return
console.error( error )
}
}
function copyFolder( folderName, sourceParent, targetParent, timeStamp, lastUpdate )
{
const sourceFolder = path.join( sourceParent, folderName )
if ( !fs.statSync( sourceFolder ).isDirectory() ) {
console.error( "Source folder does not exist: ", sourceFolder )
return
}
const targetFolder = path.join( targetParent, folderName )
makeFolder( targetFolder )
if ( !fs.statSync( targetFolder ).isDirectory() ) {
console.error( "Failed to create target folder: ", targetFolder )
return
}
copyFolderFiles( sourceFolder, targetFolder, timeStamp, lastUpdate )
}
// True only for plain-object values ([object Object]); arrays, null,
// functions and primitives all return false.
function isObject( it )
{
    const tag = Object.prototype.toString.call( it )
    return tag === '[object Object]'
}
// Deep-merges oUpdate into oTarget in place and returns oTarget. Plain-object
// values merge recursively; any other value (arrays included) replaces the
// target value outright.
function omerge( oTarget, oUpdate )
{
    // Inlined plain-object check (same test as isObject elsewhere in this file).
    if ( Object.prototype.toString.call( oUpdate ) !== '[object Object]' ) {
        return oUpdate
    }
    for ( const key in oUpdate ) {
        oTarget[key] = omerge( oTarget[key], oUpdate[key] )
    }
    return oTarget
}
// Copies filePath to a sibling backup whose name has "old" inserted before
// the extension (e.g. config.json -> config.old.json).
// NOTE(review): a path with no '.' would become "old.<name>" -- confirm all
// callers pass filenames with extensions.
function backupFile( filePath )
{
var backupParts = filePath.split( '.' )
backupParts.splice( -1, 0, "old" )
const backupPath = backupParts.join( '.' )
fs.copyFileSync( filePath, backupPath )
}
// Deep-merges updateJson into the JSON file on disk (via omerge), keeping a
// ".old" backup of the original file, and writes it back pretty-printed.
// NOTE(review): require() caches modules, so updating the same file twice in
// one process would merge into the cached object -- confirm single-shot usage.
function updateJsonFile( jsonFile, updateJson )
{
const sourceJson = require( jsonFile )
backupFile( jsonFile )
fs.writeFileSync( jsonFile, JSON.stringify( omerge( sourceJson, updateJson ), null, 2 ) )
}
function checkCommand( commandString )
{
var exists = true
try {
// stdio to pipe because we don't want to see the output
execSync( `${commandString} --version`, {stdio : 'pipe' } )
}
catch (error) {
exists = false
}
return exists
}
// Installs an application when its CLI command is missing, then re-verifies.
// Throws when the command is still unavailable after the install attempt,
// because the build cannot proceed without it.
function installApp( commandString, appInstallString, bQuiet )
{
// check for command existence before installing
if ( !checkCommand( commandString ) ) {
execSync( appInstallString, { stdio: bQuiet ? 'pipe' : 'inherit' } )
}
// if this failed, stop, because we can't build
if ( !checkCommand( commandString ) ) {
throw( "Cannot find " + commandString + " and failed to install it. " )
}
}
function addToPath( newPath )
{
env.PATH = `${newPath}${path.delimiter}${env.PATH}`
}
// Creates a per-user npm global prefix folder (and its bin/), puts both on
// PATH for this process, and points npm's "prefix" config at it so global
// installs do not require elevated permissions.
function makeNpmGlobal( globalFolder )
{
const npmGlobal = path.join( os.homedir(), globalFolder )
makeFolder( npmGlobal )
addToPath( npmGlobal )
const npmGlobalBin = path.join( npmGlobal, "bin" )
makeFolder( npmGlobalBin )
addToPath( npmGlobalBin )
execSync( `npm config set prefix "${npmGlobal}"` )
}
module.exports = {
getInstallPaths: getInstallPaths,
getModified: getModified,
touchFile: touchFile,
copyFile: copyFile,
copyFolderFiles: copyFolderFiles,
makeFolder: makeFolder,
copyFolder: copyFolder,
isObject: isObject,
omerge: omerge,
backupFile: backupFile,
updateJsonFile: updateJsonFile,
checkCommand: checkCommand,
installApp: installApp,
addToPath: addToPath,
makeNpmGlobal: makeNpmGlobal
} |
Swampbots/FreightFrenzy | TeamCode/src/main/java/org/firstinspires/ftc/teamcode/robot/subsystems/CapGrip.java | package org.firstinspires.ftc.teamcode.robot.subsystems;
import com.disnodeteam.dogecommander.Subsystem;
import com.qualcomm.robotcore.hardware.HardwareMap;
import com.qualcomm.robotcore.hardware.Servo;
@Deprecated
public class CapGrip implements Subsystem {
private HardwareMap hardwareMap;
private Servo gripper;
public enum POSITION {
GRIP,
PLACE;
public double getPosition() { // TODO: Check values
switch (this) {
case GRIP:
return 0.0;
case PLACE:
return 1.0;
default:
return 0;
}
}
}
private POSITION targetPos = POSITION.GRIP;
public CapGrip(HardwareMap hardwareMap) {
this.hardwareMap = hardwareMap;
}
@Override
public void initHardware() {
gripper = hardwareMap.get(Servo.class, "cap_grip");
gripper.setPosition(targetPos.getPosition());
}
@Override
public void periodic() {
gripper.setPosition(targetPos.getPosition());
}
public void grip() {
targetPos = POSITION.GRIP;
}
public void place() {
targetPos = POSITION.PLACE;
}
public void toggle() {
targetPos = targetPos == POSITION.GRIP ? POSITION.PLACE : POSITION.GRIP;
}
public void setTargetPos(POSITION targetPos) {
this.targetPos = targetPos;
}
public POSITION getTargetPos() {
return targetPos;
}
}
|
zweimach/wiyata.c | src/open_kattis/fizzbuzz.h | #ifndef WIYATA_OPEN_KATTIS_FIZZBUZZ_H
#define WIYATA_OPEN_KATTIS_FIZZBUZZ_H
/* FizzBuzz variant (Open Kattis): classify n against the divisors x and y.
 * NOTE(review): exact return strings are defined in the implementation file --
 * confirm there before relying on specific output values. */
char const* fizzbuzz(int x, int y, int n);
#endif /* WIYATA_OPEN_KATTIS_FIZZBUZZ_H */
|
alicegraziosi/semaphore | node_modules/angularjs-color-picker/test/e2e/button-reset.protractor.js | <reponame>alicegraziosi/semaphore<filename>node_modules/angularjs-color-picker/test/e2e/button-reset.protractor.js
var Page = require('../page-object.js');
// E2E coverage for the color picker's reset button options: visibility,
// CSS class pass-through, and label text.
// Fix: the "Button Class" suite declared two specs with the identical title
// "Should update the button class again", which makes reports ambiguous; the
// second is renamed "a third time" to match the Button Label suite's naming.
describe('Options: ', () => {
  describe('Button Reset: ', () => {
    beforeAll(() => {
      Page.openPage();
      Page.waitTillPageLoaded();
    });

    it('Should not be visible by default', () => {
      Page.openColorPicker();
      expect(Page.button_reset.isDisplayed()).toEqual(false);
    });

    it('Should update to be visible', () => {
      Page.button_reset_show_field.$('[label="Yes"]').click();
      Page.openColorPicker();
      expect(Page.button_reset.isDisplayed()).toEqual(true);
    });

    it('Should set back to invisible', () => {
      Page.button_reset_show_field.$('[label="No"]').click();
      Page.openColorPicker();
      expect(Page.button_reset.isDisplayed()).toEqual(false);
    });
  });

  describe('Button Class: ', () => {
    beforeAll(() => {
      Page.openPage();
      Page.waitTillPageLoaded();
    });

    it('Should not have an button class by default', () => {
      expect(Page.button_reset.getAttribute('class')).toMatch('');
    });

    it('Should update the button class', () => {
      let button_class = 'qwerty';
      Page.button_reset_show_class.clear().sendKeys(button_class);
      expect(Page.button_reset.getAttribute('class')).toMatch(button_class);
    });

    it('Should not have an button class', () => {
      Page.button_reset_show_class.clear();
      expect(Page.button_reset.getAttribute('class')).toMatch('');
    });

    it('Should update the button class again', () => {
      let button_class = 'asdf';
      Page.button_reset_show_class.clear().sendKeys(button_class);
      expect(Page.button_reset.getAttribute('class')).toMatch(button_class);
    });

    it('Should update the button class a third time', () => {
      let button_class = 'zxcv';
      Page.button_reset_show_class.clear().sendKeys(button_class);
      expect(Page.button_reset.getAttribute('class')).toMatch(button_class);
    });
  });

  describe('Button Label: ', () => {
    beforeAll(() => {
      Page.openPage();
      Page.waitTillPageLoaded();
      // show the button
      Page.button_reset_show_field.$('[label="Yes"]').click();
    });

    it('Should have an button label by default', () => {
      expect(Page.button_reset.getAttribute('innerHTML')).toEqual('Reset');
    });

    it('Should update the button label', () => {
      let button_label = 'qwerty';
      Page.button_reset_show_label.clear().sendKeys(button_label);
      expect(Page.button_reset.getAttribute('innerHTML')).toEqual(button_label);
    });

    it('Should not have an button label', () => {
      Page.button_reset_show_label.clear();
      expect(Page.button_reset.getAttribute('innerHTML')).toEqual('');
    });

    it('Should update the button label again', () => {
      let button_label = 'asdf';
      Page.button_reset_show_label.clear().sendKeys(button_label);
      expect(Page.button_reset.getAttribute('innerHTML')).toEqual(button_label);
    });

    it('Should update the button label a third time', () => {
      let button_label = 'zxcv';
      Page.button_reset_show_label.clear().sendKeys(button_label);
      expect(Page.button_reset.getAttribute('innerHTML')).toEqual(button_label);
    });
  });
});
|
DanielGMesquita/StudyPath | PythonFIAP/5_2_SistemaOperacional/UsuarioData.py | import getpass
from datetime import datetime
# Print the current OS user, then the current date/time and its components.
# NOTE(review): datetime.now() is re-evaluated on every line, so around
# midnight the printed day/month/year could disagree with "Data completa".
print('Usuário: ', getpass.getuser())
print('Data completa: ', datetime.now())
print('Dia: ', datetime.now().day)
print('Mês: ', datetime.now().month)
print('Ano: ', datetime.now().year)
# hour, minute and second |
skullbaselab/aa-afterdark | google-maps-demo-master/config/routes.rb | <filename>google-maps-demo-master/config/routes.rb<gh_stars>1-10
Rails.application.routes.draw do
# Root renders the single static page that bootstraps the client app.
root to: 'static_pages#root'
# JSON API endpoints, served under /api.
namespace :api do
resources :listings, only: [:create, :destroy, :index]
# Listing search: GET /api/listings/search -> ListingsController#search.
get 'listings/search/', to: 'listings#search'
end
end
|
986510453/SpringLimiter | src/main/java/site/higgs/limiter/interceptor/LimitContextsValueWrapper.java | <gh_stars>1-10
package site.higgs.limiter.interceptor;
public class LimitContextsValueWrapper {
private boolean value;
private Object limiterFailResolveResult;
// Captures the limiter decision (pass/fail) together with the value produced
// by the configured fail-resolve handler.
public LimitContextsValueWrapper(boolean value, Object limiterFailResolveResult) {
this.value = value;
this.limiterFailResolveResult = limiterFailResolveResult;
}
// Whether the guarded call was allowed to proceed.
public boolean value() {
return value;
}
// Result produced by the fail-resolve strategy.
// NOTE(review): presumably only meaningful when value() is false -- confirm.
public Object getLimiterFailResolveResult() {
return limiterFailResolveResult;
}
} |
truemrwalker/mads-app | prediction/models.py | from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from django.db import models
from django.db.models.signals import post_delete
from django.dispatch import receiver
from django.urls import reverse
from private_storage.fields import PrivateFileField
from jsonfield import JSONField
import joblib
import numpy as np
from common.models import OwnedResourceModel
import os
import uuid
import logging
logger = logging.getLogger(__name__)
User = get_user_model()
def get_encoded_filepath(instance, filename):
    """Build the private-storage path for an uploaded model file.

    The file is stored under the owner's UUID, renamed to the instance's
    primary key while keeping the original extension:
    ``<owner.uuid>/<instance.id><ext>``.
    """
    _, extension = os.path.splitext(filename)
    return os.path.join(str(instance.owner.uuid), str(instance.id) + extension)
def delete_previous_file(function):
    """Decorator for PretrainedModel.save/delete that removes a superseded file.

    When the wrapped call replaces the stored file with a differently named
    one, the previously stored file is deleted from storage afterwards.

    :param function: main function
    :return: wrapper
    """
    def wrapper(*args, **kwargs):
        """Wrapper function.

        :param args: params (args[0] is the model instance)
        :param kwargs: keyword params
        :return: result
        """
        self = args[0]
        # get the previous filename
        result = PretrainedModel.objects.filter(pk=self.pk)
        previous = result[0] if len(result) else None
        # NOTE(review): this persists the row before running the wrapped call,
        # which itself calls super().save()/delete() again -- confirm the
        # double write is intended.
        super(PretrainedModel, self).save()
        # execution
        result = function(*args, **kwargs)
        # if the previous file exists, delete it.
        if previous and previous.file.name != self.file.name:
            previous.file.delete(False)
        return result
    return wrapper
class PretrainedModel(OwnedResourceModel):
    """A user-owned, optionally shared, serialized (joblib) prediction model."""

    # Users and groups this model is explicitly shared with.
    shared_users = models.ManyToManyField(
        'users.User', blank=True,
        related_name='pm_shared_users'
    )
    shared_groups = models.ManyToManyField(
        Group, blank=True, related_name='pm_shared_groups'
    )
    # Serialized model file, stored under <owner.uuid>/<id><ext>
    # (see get_encoded_filepath).
    file = PrivateFileField(upload_to=get_encoded_filepath,)
    componentInstance = models.ForeignKey(
        'analysis.ComponentInstance',
        on_delete=models.SET_NULL,
        blank=True, null=True
    )
    metadata = JSONField(blank=True, null=True)
    objects = models.Manager()

    def get_filename(self):
        # Basename only (strips the owner-uuid folder prefix).
        return os.path.basename(self.file.name)

    def get_absolute_url(self):
        return reverse('prediction:model-detail', kwargs={'id': self.id})

    def get_full_path(self):
        return self.file.full_path

    def get_owned_models(self, user):
        # All models owned by the given user (not limited to this instance).
        return PretrainedModel.objects.filter(owner=user)

    def get_public_models(self):
        return PretrainedModel.objects.filter(accessibility=PretrainedModel.ACCESSIBILITY_PUBLIC)

    def predict(self, inports):
        """Run the stored model on a dict of input-port values.

        Values are coerced to float and fed as a single sample; returns the
        first (only) prediction.
        """
        outport = {}
        inputs = []
        for key, value in inports.items():
            inputs.append(float(value))  # TODO: support different types: str, etc,
        logger.info(inputs)
        # NOTE(review): the model file is deserialized on every call --
        # confirm this is acceptable for request latency.
        model = joblib.load(self.file)
        out = model.predict([inputs])
        outport = out[0]
        return outport

    @delete_previous_file
    def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
        # Decorator removes the previously stored file when the name changes.
        super(PretrainedModel, self).save()

    @delete_previous_file
    def delete(self, using=None, keep_parents=False):
        super(PretrainedModel, self).delete()
# when deleting model the file is removed
@receiver(post_delete, sender=PretrainedModel)
def delete_file(sender, instance, **kwargs):
    # save=False: the row is already gone; only the stored file is removed.
    instance.file.delete(False)
|
jljacoblo/jalgorithmCPP | src/STLDuddle/Sort.h | <filename>src/STLDuddle/Sort.h
//
// Created by <NAME> on Jan 08, 2019.
//
// STL Sort implemented as Quick-Sort.
#pragma once
#include <algorithm>
#include <iostream>
#include <vector>
namespace STLDuddle {
/// default sort ( default comparator with increment )
/// Demonstrates std::sort with the default (ascending) comparator.
/// Fix: the original ended with a dangling `std::cout <<` statement that did
/// not compile; the sorted sequence is now printed space-separated.
void sortDefaultIncrement () {
    int arr[] = {32,71,12,45,26,80,53,33};
    std::vector<int> v( arr, arr+8);
    std::sort(v.begin(), v.end() );
    for (int value : v) {
        std::cout << value << ' ';
    }
    std::cout << '\n';
}
}; |
iomash/jbatch | src/cmd/parseJSON/parseJSON.js | define(['jquery'], function($) {
'use strict';
// Command implementation: parse args[1] as JSON and write the parsed value to
// the context; on a parse error write the exception and report failure.
return function(args, ctx) {
try {
ctx.write($.parseJSON(args[1]));
} catch (ex) {
ctx.write(ex);
return ctx.fail;
}
return ctx.done;
};
});
|
lemnisk8/framework7 | packages/vue/components/chip.js | import Utils from '../utils/utils';
import Mixins from '../utils/mixins';
import __vueComponentDispatchEvent from '../runtime-helpers/vue-component-dispatch-event.js';
import __vueComponentProps from '../runtime-helpers/vue-component-props.js';
export default {
name: 'f7-chip',
props: Object.assign({
id: [String, Number],
media: String,
text: [String, Number],
deleteable: Boolean,
mediaBgColor: String,
mediaTextColor: String,
outline: Boolean
}, Mixins.colorProps),
// Builds the chip vnode: an optional media element (icon/image with its own
// text/background colors), an optional text label, and a delete anchor when
// `deleteable` is set. Slots ('media', 'text') supplement the props.
render: function render() {
var _h = this.$createElement;
var self = this;
var props = self.props;
var media = props.media,
text = props.text,
deleteable = props.deleteable,
className = props.className,
id = props.id,
style = props.style,
mediaTextColor = props.mediaTextColor,
mediaBgColor = props.mediaBgColor,
outline = props.outline;
var mediaEl;
var labelEl;
var deleteEl;
// Media: rendered when the `media` prop or a media slot is present.
if (media || self.$slots && self.$slots.media) {
var mediaClasses = Utils.classNames('chip-media', mediaTextColor && "text-color-".concat(mediaTextColor), mediaBgColor && "bg-color-".concat(mediaBgColor));
mediaEl = _h('div', {
class: mediaClasses
}, [media || this.$slots['media']]);
}
// Label: rendered when the `text` prop or a text slot is present.
if (text || self.$slots && self.$slots.text) {
labelEl = _h('div', {
class: 'chip-label'
}, [text, this.$slots['text']]);
}
// Delete button: the ref is used by mounted/beforeDestroy to wire clicks.
if (deleteable) {
deleteEl = _h('a', {
ref: 'deleteEl',
class: 'chip-delete'
});
}
var classes = Utils.classNames(className, 'chip', {
'chip-outline': outline
}, Mixins.colorClasses(props));
return _h('div', {
ref: 'el',
style: style,
class: classes,
attrs: {
id: id
}
}, [mediaEl, labelEl, deleteEl]);
},
created: function created() {
// Bind handlers once so add/removeEventListener receive identical references.
Utils.bindMethods(this, ['onClick', 'onDeleteClick']);
},
mounted: function mounted() {
this.$refs.el.addEventListener('click', this.onClick);
// The delete ref only exists when `deleteable` was set at render time.
if (this.$refs.deleteEl) {
this.$refs.deleteEl.addEventListener('click', this.onDeleteClick);
}
},
beforeDestroy: function beforeDestroy() {
// Mirror mounted: detach exactly the listeners attached above.
this.$refs.el.removeEventListener('click', this.onClick);
if (this.$refs.deleteEl) {
this.$refs.deleteEl.removeEventListener('click', this.onDeleteClick);
}
},
methods: {
// Re-emit DOM clicks as component-level events.
onClick: function onClick(event) {
this.dispatchEvent('click', event);
},
onDeleteClick: function onDeleteClick(event) {
this.dispatchEvent('delete', event);
},
// Forwards an event name (plus any extra args) through the runtime dispatcher.
dispatchEvent: function dispatchEvent(events) {
for (var _len = arguments.length, args = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
args[_key - 1] = arguments[_key];
}
__vueComponentDispatchEvent.apply(void 0, [this, events].concat(args));
}
},
computed: {
// Resolved component props via the runtime helper.
props: function props() {
return __vueComponentProps(this);
}
}
}; |
TheSledgeHammer/2.11BSD | contrib/gnu/gcc/dist/gcc/config/rs6000/darwin64-biarch.h | /* Target definitions for PowerPC64 running Darwin (Mac OS X) for a 64b host
supporting a 32b multilib.
Copyright (C) 2006-2020 Free Software Foundation, Inc.
Contributed by Apple Computer Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 3, or (at your
option) any later version.
GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */
/* Default to a 64-bit PowerPC target configuration. */
#undef TARGET_DEFAULT
#define TARGET_DEFAULT (MASK_POWERPC64 | MASK_64BIT \
| MASK_MULTIPLE | MASK_PPC_GFXOPT)
/* -m32 selects the 32-bit ppc slice; otherwise build for ppc64. */
#undef DARWIN_ARCH_SPEC
#define DARWIN_ARCH_SPEC "%{m32:ppc;:ppc64}"
/* Actually, there's really only 970 as an active option. */
#undef DARWIN_SUBARCH_SPEC
#define DARWIN_SUBARCH_SPEC DARWIN_ARCH_SPEC
/* Extra spec strings; darwin_crt2 is deliberately empty here. */
#undef SUBTARGET_EXTRA_SPECS
#define SUBTARGET_EXTRA_SPECS \
DARWIN_EXTRA_SPECS \
{ "darwin_arch", DARWIN_ARCH_SPEC }, \
{ "darwin_crt2", "" }, \
{ "darwin_subarch", DARWIN_SUBARCH_SPEC },
|
jafc-stripe/sorbet | namer/configatron/configatron.cc | #include "yaml-cpp/yaml.h"
// has to go first as it violates our poisions
#include "absl/strings/match.h"
#include "common/FileOps.h"
#include "configatron.h"
#include <cctype>
#include <sys/types.h>
#include <utility>
using namespace std;
namespace sorbet::namer {
namespace {
// Kinds of scalar values inferable from a configatron YAML string.
enum class StringKind { String, Integer, Float, Symbol };

// Classifies a scalar string: ":foo" is a Symbol; digits/underscores only is
// an Integer; digits/underscores with exactly one '.' is a Float; anything
// else is a plain String.
StringKind classifyString(std::string_view str) {
    if (!str.empty() && str.front() == ':') {
        return StringKind::Symbol;
    }
    int dots = 0;
    for (const char ch : str) {
        if (ch == '.') {
            ++dots;
        } else if (ch != '_' && !isdigit(ch)) {
            // Any other non-numeric character makes it a plain string.
            return StringKind::String;
        }
    }
    if (dots == 0) {
        return StringKind::Integer;
    }
    return dots == 1 ? StringKind::Float : StringKind::String;
}
// Maps a YAML scalar node to the Sorbet type it should be declared as:
// "true"/"false" become Boolean, everything else is classified by
// classifyString into Integer/Float/String/Symbol.
core::TypePtr getType(core::GlobalState &gs, const YAML::Node &node) {
    ENFORCE(node.IsScalar());
    string value = node.as<string>();
    if (value == "true" || value == "false") {
        return core::Types::Boolean();
    }
    // The switch covers every StringKind, so no fall-through return is needed.
    switch (classifyString(value)) {
        case StringKind::Integer:
            return core::Types::Integer();
        case StringKind::Float:
            return core::Types::Float();
        case StringKind::String:
            return core::Types::String();
        case StringKind::Symbol:
            return core::Types::Symbol();
    }
}
// A trie over configatron selector paths (e.g. "a.b.c"), accumulating the
// union of scalar types observed at each node across all YAML files, and
// able to materialize itself as synthetic Sorbet symbols.
struct Path {
    Path *parent;
    string selector;
    core::TypePtr myType;
    Path(Path *parent, string selector) : parent(parent), selector(move(selector)){};

    // Dotted selector path from the root down to this node.
    string toString() {
        if (parent) {
            return parent->toString() + "." + selector;
        };
        return selector;
    }

    // NOTE(review): formats a description into `buf` but always returns ""
    // and never reads `buf` back -- looks like leftover debug code; confirm.
    string show(core::GlobalState &gs) {
        fmt::memory_buffer buf;
        if (myType) {
            fmt::format_to(buf, "{} -> {}", toString(), myType->toString(gs));
        }
        fmt::format_to(buf, "{}",
                       fmt::map_join(children, "", [&](const auto &child) -> string { return child->show(gs); }));
        return "";
    }
    vector<shared_ptr<Path>> children;

    // Finds or creates the child with the given selector. A leading ':' is
    // stripped first, so symbol keys and string keys share one node.
    shared_ptr<Path> getChild(string_view name) {
        if (!name.empty() && name[0] == ':') {
            string_view withoutColon(name.data() + 1, name.size() - 1);
            return getChild(withoutColon);
        }
        for (auto &child : children) {
            if (child->selector == name) {
                return child;
            }
        }
        return children.emplace_back(make_shared<Path>(this, string(name)));
    }

    // Records a scalar type at this node; repeated observations widen the
    // stored type to the union (T.any) of everything seen so far.
    void setType(core::GlobalState &gs, core::TypePtr tp) {
        if (myType) {
            myType = core::Types::any(core::MutableContext(gs, core::Symbols::root()), myType, tp);
        } else {
            myType = tp;
        }
    }

    // Materializes this subtree: a leaf becomes the result type of `parent`;
    // an interior node becomes a configatron Store subclass with one method
    // per child (the root subtree derives from Configatron::RootStore).
    void enter(core::GlobalState &gs, core::SymbolRef parent, core::SymbolRef owner) {
        if (children.empty()) {
            parent.data(gs)->resultType = myType;
        } else {
            auto classSym =
                gs.enterClassSymbol(core::Loc::none(), owner, gs.enterNameConstant("configatron" + this->toString()));
            classSym.data(gs)->setIsModule(false);
            if (this->parent == nullptr) {
                classSym.data(gs)->setSuperClass(core::Symbols::Configatron_RootStore());
            } else {
                classSym.data(gs)->setSuperClass(core::Symbols::Configatron_Store());
            }
            parent.data(gs)->resultType = core::make_type<core::ClassType>(classSym);
            // DO NOT ADD METHODS HERE. add them to Configatron::Store shim
            for (auto &child : children) {
                auto method = gs.enterMethodSymbol(core::Loc::none(), classSym, gs.enterNameUTF8(child->selector));
                child->enter(gs, method, owner);
                auto &blkArg = gs.enterMethodArgumentSymbol(core::Loc::none(), method, core::Names::blkArg());
                blkArg.flags.isBlock = true;
            }
            // cout << classSym.toStringWithTabs(gs, 1, 1);
        }
    }
};
// Walks a parsed YAML node, recording inferred types on the matching Path
// node. Scalars and null record their literal type; sequences record an
// Array of the union of element types; maps recurse into named children,
// with "<<" merge keys spliced into the current prefix.
void recurse(core::GlobalState &gs, const YAML::Node &node, shared_ptr<Path> prefix) {
    switch (node.Type()) {
        case YAML::NodeType::Null:
            prefix->setType(gs, core::Types::nilClass());
            break;
        case YAML::NodeType::Scalar:
            prefix->setType(gs, getType(gs, node));
            break;
        case YAML::NodeType::Sequence: {
            // Element type is the union over all elements; non-scalar
            // elements degrade to untyped.
            core::TypePtr elemType;
            for (const auto &child : node) {
                auto thisElemType = child.IsScalar() ? getType(gs, child) : core::Types::untypedUntracked();
                if (elemType) {
                    elemType =
                        core::Types::any(core::MutableContext(gs, core::Symbols::root()), elemType, thisElemType);
                } else {
                    elemType = thisElemType;
                }
            }
            if (!elemType) {
                // Empty sequence: element type is bottom.
                elemType = core::Types::bottom();
            }
            vector<core::TypePtr> elems{elemType};
            prefix->setType(gs, core::make_type<core::AppliedType>(core::Symbols::Array(), elems));
            break;
        }
        case YAML::NodeType::Map:
            for (const auto &child : node) {
                auto key = child.first.as<string>();
                if (key != "<<") {
                    recurse(gs, child.second, prefix->getChild(key));
                } else {
                    // YAML merge key: merge the referenced map into this prefix.
                    recurse(gs, child.second, prefix);
                }
            }
            break;
        case YAML::NodeType::Undefined:
            break;
    }
}
// Loads one YAML file and records its top-level entries under `rootNode`.
// Non-map top-level documents are silently ignored.
void handleFile(core::GlobalState &gs, const string &file, shared_ptr<Path> rootNode) {
    YAML::Node config = YAML::LoadFile(file);
    switch (config.Type()) {
        case YAML::NodeType::Map:
            for (const auto &child : config) {
                auto key = child.first.as<string>();
                // NOTE(review): `key` is computed but unused, so every
                // top-level value merges directly into rootNode. This looks
                // like it was meant to be
                // recurse(gs, child.second, rootNode->getChild(key)) —
                // confirm against configatron's lookup semantics before changing.
                recurse(gs, child.second, rootNode);
            }
            break;
        default:
            break;
    }
}
} // namespace
// Scans `folders` (recursively, for *.yaml files, one Path subtree per file)
// and standalone `files` (merged into the root), then synthesizes the
// Kernel#configatron entry point whose result type mirrors the YAML layout.
// Fix: the per-folder file list used to be named `files`, shadowing the
// `files` parameter; renamed to `yamlFiles` for clarity and safety.
void configatron::fillInFromFileSystem(core::GlobalState &gs, const vector<string> &folders,
                                       const vector<string> &files) {
    auto rootNode = make_shared<Path>(nullptr, "");
    for (auto &folder : folders) {
        auto yamlFiles = FileOps::listFilesInDir(folder, {".yaml"}, true, {}, {});
        const int prefixLen = folder.length() + 1;
        for (const auto &file : yamlFiles) {
            constexpr int extLen = 5; // strlen(".yaml");
            // Strip the trailing ".yaml" extension.
            string_view fileName(file.c_str(), file.size() - extLen);
            // Trim off folder + '/'
            fileName = fileName.substr(prefixLen);
            auto innerNode = rootNode->getChild(fileName);
            handleFile(gs, file, innerNode);
        }
    }
    for (auto &file : files) {
        handleFile(gs, file, rootNode);
    }
    // Kernel#configatron is the method users call; its result type is the
    // synthesized store hierarchy rooted at rootNode.
    core::SymbolRef configatron =
        gs.enterMethodSymbol(core::Loc::none(), core::Symbols::Kernel(), gs.enterNameUTF8("configatron"));
    rootNode->enter(gs, configatron, core::Symbols::root());
    auto &blkArg = gs.enterMethodArgumentSymbol(core::Loc::none(), configatron, core::Names::blkArg());
    blkArg.flags.isBlock = true;
}
} // namespace sorbet::namer
|
unisonteam/ignite-3 | modules/storage-page-memory/src/main/java/org/apache/ignite/internal/storage/pagememory/mv/RowVersionFreeList.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.storage.pagememory.mv;
import static org.apache.ignite.internal.pagememory.PageIdAllocator.FLAG_AUX;
import static org.apache.ignite.internal.pagememory.PageIdAllocator.INDEX_PARTITION;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.ignite.internal.pagememory.PageMemory;
import org.apache.ignite.internal.pagememory.evict.PageEvictionTracker;
import org.apache.ignite.internal.pagememory.freelist.AbstractFreeList;
import org.apache.ignite.internal.pagememory.io.PageIo;
import org.apache.ignite.internal.pagememory.metric.IoStatisticsHolder;
import org.apache.ignite.internal.pagememory.reuse.ReuseList;
import org.apache.ignite.internal.pagememory.util.PageHandler;
import org.apache.ignite.internal.pagememory.util.PageLockListener;
import org.apache.ignite.internal.storage.pagememory.mv.io.RowVersionDataIo;
import org.apache.ignite.internal.tx.Timestamp;
import org.apache.ignite.lang.IgniteInternalCheckedException;
import org.apache.ignite.lang.IgniteLogger;
import org.jetbrains.annotations.Nullable;
/**
 * {@link AbstractFreeList} for {@link RowVersion} instances.
 */
public class RowVersionFreeList extends AbstractFreeList<RowVersion> {
    private static final IgniteLogger LOG = IgniteLogger.forClass(RowVersionFreeList.class);

    /** Tracker notified on page writes so recently-touched pages are kept resident. */
    private final PageEvictionTracker evictionTracker;

    /** Statistics holder passed to every page operation on this list. */
    private final IoStatisticsHolder statHolder;

    /** Stateless handler for in-place timestamp updates; a single shared instance suffices. */
    private final UpdateTimestampHandler updateTimestampHandler = new UpdateTimestampHandler();

    /**
     * Constructor.
     *
     * @param grpId Group ID.
     * @param pageMem Page memory.
     * @param reuseList Reuse list to track pages that can be reused after they get completely empty (if {@code null},
     *      the free list itself will be used as a ReuseList).
     * @param lockLsnr Page lock listener.
     * @param metaPageId Metadata page ID.
     * @param initNew {@code True} if new metadata should be initialized.
     * @param pageListCacheLimit Page list cache limit.
     * @param evictionTracker Page eviction tracker.
     * @param statHolder Statistics holder to track IO operations.
     * @throws IgniteInternalCheckedException If failed.
     */
    public RowVersionFreeList(
            int grpId,
            PageMemory pageMem,
            @Nullable ReuseList reuseList,
            PageLockListener lockLsnr,
            long metaPageId,
            boolean initNew,
            @Nullable AtomicLong pageListCacheLimit,
            PageEvictionTracker evictionTracker,
            IoStatisticsHolder statHolder
    ) throws IgniteInternalCheckedException {
        super(
                grpId,
                "RowVersionFreeList_" + grpId,
                pageMem,
                reuseList,
                lockLsnr,
                FLAG_AUX,
                LOG,
                metaPageId,
                initNew,
                pageListCacheLimit,
                evictionTracker
        );

        this.evictionTracker = evictionTracker;
        this.statHolder = statHolder;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected long allocatePageNoReuse() throws IgniteInternalCheckedException {
        // Pages for this list are always allocated in the index partition.
        return pageMem.allocatePage(grpId, INDEX_PARTITION, defaultPageFlag);
    }

    /**
     * Inserts a row.
     *
     * @param row Row.
     * @throws IgniteInternalCheckedException If failed.
     */
    public void insertDataRow(RowVersion row) throws IgniteInternalCheckedException {
        super.insertDataRow(row, statHolder);
    }

    /**
     * Updates row version's timestamp.
     *
     * @param link link to the slot containing row version
     * @param newTimestamp timestamp to set
     * @throws IgniteInternalCheckedException if something fails
     */
    public void updateTimestamp(long link, Timestamp newTimestamp) throws IgniteInternalCheckedException {
        updateDataRow(link, updateTimestampHandler, newTimestamp, statHolder);
    }

    /**
     * Removes a row by link.
     *
     * @param link Row link.
     * @throws IgniteInternalCheckedException If failed.
     */
    public void removeDataRowByLink(long link) throws IgniteInternalCheckedException {
        super.removeDataRowByLink(link, statHolder);
    }

    /**
     * Page handler that overwrites the timestamp of a row version slot in place.
     */
    private class UpdateTimestampHandler implements PageHandler<Timestamp, Object> {
        /** {@inheritDoc} */
        @Override
        public Object run(
                int groupId,
                long pageId,
                long page,
                long pageAddr,
                PageIo io,
                Timestamp arg,
                int itemId,
                IoStatisticsHolder statHolder
        ) throws IgniteInternalCheckedException {
            RowVersionDataIo dataIo = (RowVersionDataIo) io;

            dataIo.updateTimestamp(pageAddr, itemId, pageSize(), arg);

            // Mark the page as recently used so the eviction policy keeps it resident.
            evictionTracker.touchPage(pageId);

            return true;
        }
    }
}
|
ninoseki/uzen | tests/apis/test_screenshots.py | import asyncio
import pytest
from fastapi.testclient import TestClient
from tests.helper import first_snapshot_id_sync
@pytest.mark.usefixtures("snapshots_setup")
def test_screenshots(client: TestClient, event_loop: asyncio.AbstractEventLoop):
    """The screenshots endpoint serves a PNG for an existing snapshot."""
    snapshot_id = first_snapshot_id_sync(event_loop)
    res = client.get(f"/api/screenshots/{snapshot_id}")
    assert res.status_code == 200
    assert res.headers.get("content-type") == "image/png"
# Disable this test because it's difficult to work the test along with arq
# async def mock_preview(hostname: str):
# return b""
#
#
# @pytest.mark.asyncio
# async def test_preview(
# client: TestClient, monkeypatch: MonkeyPatch, arq_worker: Worker
# ):
# monkeypatch.setattr(Browser, "preview", mock_preview)
#
# response = await client.get("/api/screenshots/preview/example.com")
# assert response.status_code in [200, 500]
# assert response.headers.get("content-type") == "image/png"
#
|
leonardt/magma | magma/display.py | <filename>magma/display.py
from magma.circuit import peek_definition_context_stack
from magma.t import Type
class _Time:
    # Sentinel marking a "$time" argument in a display statement.
    pass
def time():
    """Return a sentinel that renders as the Verilog ``$time`` function."""
    return _Time()
class _Event:
    """Base class for edge events wrapping a magma value."""

    def __init__(self, value):
        if not isinstance(value, Type):
            raise TypeError("Expected magma value for event")
        self.value = value
class _Posedge(_Event):
    # Rendered as "posedge <signal>" in the sensitivity list.
    verilog_str = "posedge"
class _Negedge(_Event):
    # Rendered as "negedge <signal>" in the sensitivity list.
    verilog_str = "negedge"
def posedge(value):
    """Wrap ``value`` as a positive-edge event for ``Display.when``."""
    return _Posedge(value)
def negedge(value):
    """Wrap ``value`` as a negative-edge event for ``Display.when``."""
    return _Negedge(value)
def _make_display_format_arg(value, format_args):
    """Register ``value`` in ``format_args`` and return its placeholder.

    The placeholder name is derived from ``id(value)`` so each display
    argument gets a unique key; ``_Time`` sentinels are lowered to the
    Verilog ``$time`` system function.  The returned string is wrapped in
    braces so the inline-verilog machinery interpolates it later.
    """
    key = f"_display_var_{id(value)}"
    format_args[key] = "$time" if isinstance(value, _Time) else value
    return f"{{{key}}}"
class Display:
    """A single Verilog ``$display``/``$fdisplay`` statement.

    Instances are created by :func:`display` and compiled to an inline
    ``always`` block by :meth:`get_inline_verilog`.
    """

    def __init__(self, display_str, args, file):
        # Encode to handle newlines, etc... properly
        self.display_str = display_str.encode('unicode_escape').decode()
        self.args = args
        # Sensitivity-list entries (events or plain signals); empty list
        # means the always block is sensitive to "*".
        self.events = []
        # Optional guard condition; set at most once via if_().
        self.cond = None
        # Optional File target; None emits $display instead of $fdisplay.
        self.file = file

    def if_(self, cond):
        """
        Method to set condition for display

            # Display if CE (enable) is high
            m.display("x=%d", x).when(m.posedge(io.CLK))\
                                .if_(io.CE)
        """
        if self.cond is not None:
            raise Exception("Can only invoke if_ once on display")
        self.cond = cond
        return self

    def when(self, event):
        """
        Allows chaining to set event for display, e.g.

            m.display("x=%d", x).when(m.posedge(io.CLK))\
                                .when(m.negedge(io.ASYNCRESET))
        """
        if not isinstance(event, (Type, _Event)):
            raise TypeError("Expected magma value or event for when argument")
        self.events.append(event)
        return self

    def _make_cond_str(self, format_args):
        # Returns the placeholder for the guard condition, or "" if unset.
        if self.cond is not None:
            return _make_display_format_arg(self.cond, format_args)
        return ""

    def get_inline_verilog(self):
        """Build the (template, format_args, {}) triple for inline verilog."""
        format_args = {}
        display_args = []
        # arguments to the display function are unique names that are
        # interpolated later by the inline_verilog syntax
        # e.g. $display("...", {_display_var_0}, {_display_var_1});
        for arg in self.args:
            display_args.append(_make_display_format_arg(arg, format_args))
        display_args_str = ""
        if display_args:
            display_args_str = ", " + ", ".join(display_args)
        # Default all events
        event_str = "*"
        if self.events:
            event_strs = []
            for event in self.events:
                value = event
                # Could be sensitive to plain signal
                if isinstance(event, _Event):
                    value = value.value
                var = _make_display_format_arg(value, format_args)
                # prepend event if not just plain signal
                if isinstance(event, _Event):
                    var = f"{event.verilog_str} {var}"
                event_strs.append(var)
            event_str = ", ".join(event_strs)
        cond_str = self._make_cond_str(format_args)
        if cond_str:
            cond_str = f"if ({cond_str}) "
        display_str = f"$display("
        if self.file is not None:
            # \_file_<name> is a Verilog escaped identifier for the file handle.
            display_str = f"$fdisplay(\\_file_{self.file.filename} , "  # noqa
        format_str = f"""\
always @({event_str}) begin
    {cond_str}{display_str}\"{self.display_str}\"{display_args_str});
end
"""
        return format_str, format_args, {}
def display(display_str, *args, file=None):
    """Register a $display statement on the active definition context.

    Returns the Display object so calls can be chained with .when()/.if_().
    """
    statement = Display(display_str, args, file)
    peek_definition_context_stack().get_child("display").add_display(statement)
    return statement
class File:
    """A Verilog file handle used as the target of $fdisplay statements.

    Constructing one registers it with the active definition context; the
    context-manager protocol is provided for `with` syntax and is a no-op.
    """

    def __init__(self, filename, mode):
        self.filename = filename
        self.mode = mode
        peek_definition_context_stack().get_child("display").add_file(self)

    def __enter__(self):
        return self

    def __exit__(self, *args):
        pass
|
TomMD/pinot | thirdeye/thirdeye-pinot/src/main/java/com/linkedin/thirdeye/rootcause/impl/ServiceEntity.java | package com.linkedin.thirdeye.rootcause.impl;
import com.linkedin.thirdeye.rootcause.Entity;
import com.linkedin.thirdeye.rootcause.util.EntityUtils;
import com.linkedin.thirdeye.rootcause.util.ParsedUrn;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * ServiceEntity represents a service associated with certain metrics or dimensions. It typically
 * serves as a connecting piece between observed discrepancies between current and baseline metrics
 * and root cause events such as code deployments. The URN namespace is defined as
 * 'thirdeye:service:{name}'.
 */
public class ServiceEntity extends Entity {
  public static final EntityType TYPE = new EntityType("thirdeye:service:");

  /** Service name, i.e. the {name} portion of the URN. */
  private final String name;

  protected ServiceEntity(String urn, double score, List<? extends Entity> related, String name) {
    super(urn, score, related);
    this.name = name;
  }

  public String getName() {
    return name;
  }

  /** Returns a copy of this entity with the given score; all other fields are preserved. */
  @Override
  public ServiceEntity withScore(double score) {
    return new ServiceEntity(this.getUrn(), score, this.getRelated(), this.name);
  }

  /** Returns a copy of this entity with the given related entities; all other fields are preserved. */
  @Override
  public ServiceEntity withRelated(List<? extends Entity> related) {
    return new ServiceEntity(this.getUrn(), this.getScore(), related, this.name);
  }

  /** Creates a ServiceEntity from a service name, formatting the URN from {@link #TYPE}. */
  public static ServiceEntity fromName(double score, String name) {
    String urn = TYPE.formatURN(name);
    return new ServiceEntity(urn, score, new ArrayList<Entity>(), name);
  }

  /**
   * Parses a 'thirdeye:service:{name}' URN; rejects URNs with filters (prefix-only) and
   * extracts the name from the third URN segment.
   */
  public static ServiceEntity fromURN(String urn, double score) {
    ParsedUrn parsedUrn = EntityUtils.parseUrnString(urn, TYPE);
    parsedUrn.assertPrefixOnly();
    String service = parsedUrn.getPrefixes().get(2);
    return new ServiceEntity(urn, score, Collections.<Entity>emptyList(), service);
  }
}
|
// Unwraps a standard API response envelope: on success invoke `success`
// with the payload; on failure invoke `error`, or fall back to a uni-app
// toast showing the server message.
// Fix: the failure branch read resp.data.message without guarding resp /
// resp.data, so an undefined or malformed response threw a TypeError;
// such responses are now a silent no-op.
export default function confirm(resp, success, error, showToast) {
	if (resp && resp.data && resp.data.success) {
		success && success(resp.data.data)
	} else if (resp && resp.data && resp.data.message) {
		error ? error() : uni.showToast({
			title: resp.data.message,
			icon: "error"
		})
	}
}
|
Robbbert/messui | src/devices/cpu/hpc/hpcdasm.cpp | // license:BSD-3-Clause
// copyright-holders:AJR
/***************************************************************************
National Semiconductor HPC disassembler
Note that though all 16-bit fields in instructions have the MSB first,
the HPC's memory organization is in fact little-endian (including
vector and JIDW tables). This is why r16 is always swapped here.
***************************************************************************/
#include "emu.h"
#include "hpcdasm.h"
#include <cctype>
// HPC16083 on-chip register names, indexed by (address - 0x00C0) >> 1.
// nullptr entries have no symbolic name and fall back to numeric display.
const char *const hpc16083_disassembler::s_regs[128] =
{
	"psw", nullptr, "SP", "PC", "A", "K", "B", "X",
	"enir", "irpd", "ircd", "sio", "porti", nullptr, "halten", nullptr,
	"porta", "portb", nullptr, "upic", nullptr, nullptr, nullptr, nullptr,
	"dira", "dirb", "bfun", nullptr, nullptr, nullptr, nullptr, nullptr,
	nullptr, nullptr, "portd", nullptr, nullptr, nullptr, nullptr, nullptr,
	nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr,
	"enu", "enui", "rbuf", "tbuf", "enur", nullptr, nullptr, nullptr,
	nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr,
	"t4", "r4", "t5", "r5", "t6", "r6", "t7", "r7",
	"pwmode", "portp", nullptr, nullptr, nullptr, nullptr, "eicon", "eicr",
	nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr,
	nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr,
	"i4cr", "i3cr", "i2cr", "r2", "t2", "r3", "t3", "divby",
	"tmmode", "t0con", "watchdog", nullptr, nullptr, nullptr, nullptr, nullptr,
	nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr,
	nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr
};
// HPC16164 on-chip register names (superset of the 16083 map: adds the A/D
// converter registers and romdump), indexed by (address - 0x00C0) >> 1.
const char *const hpc16164_disassembler::s_regs[128] =
{
	"psw", nullptr, "SP", "PC", "A", "K", "B", "X",
	"enir", "irpd", "ircd", "sio", "porti", nullptr, "halten", "romdump",
	"porta", "portb", nullptr, "upic", nullptr, nullptr, nullptr, nullptr,
	"dira", "dirb", "bfun", nullptr, nullptr, nullptr, nullptr, nullptr,
	"adcr1", "adcr2", "portd", "adcr3", nullptr, nullptr, nullptr, nullptr,
	"ad0", "ad1", "ad2", "ad3", "ad4", "ad5", "ad6", "ad7",
	"enu", "enui", "rbuf", "tbuf", "enur", nullptr, nullptr, nullptr,
	nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr,
	"t4", "r4", "t5", "r5", "t6", "r6", "t7", "r7",
	"pwmode", "portp", nullptr, nullptr, nullptr, nullptr, "eicon", "eicr",
	nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr,
	nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr,
	"i4cr", "i3cr", "i2cr", "r2", "t2", "r3", "t3", "divby",
	"tmmode", "t0con", "watchdog", nullptr, nullptr, nullptr, nullptr, nullptr,
	nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr,
	nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr
};
// Base constructor: subclasses supply their model-specific register-name table.
hpc_disassembler::hpc_disassembler(const char *const regs[])
	: util::disasm_interface()
	, m_regs(regs)
{
}
// Instructions are byte-aligned.
u32 hpc_disassembler::opcode_alignment() const
{
	return 1;
}
// Prints a register-file address symbolically when it falls in the on-chip
// register window (0x00C0-0x01BF) and the table has a name for it; the odd
// byte of a named 16-bit register is rendered as "name+1". Anything else is
// printed as a plain hex address.
void hpc_disassembler::format_register(std::ostream &stream, u16 reg) const
{
	if (reg >= 0x00c0 && reg < 0x01c0)
	{
		const char *name = m_regs[(reg - 0x00c0) >> 1];
		if (name != nullptr)
		{
			stream << name;
			if (BIT(reg, 0))
				stream << "+1";
			return;
		}
	}

	util::stream_format(stream, "0%X", reg);
}
// Prints an 8-bit immediate: '#' prefix, with an extra leading zero for
// values of 0x10 and above (matching the original assembler syntax).
void hpc_disassembler::format_immediate_byte(std::ostream &stream, u8 data) const
{
	stream << '#';
	util::stream_format(stream, data >= 0x10 ? "0%02X" : "%02X", data);
}
// Prints a 16-bit immediate: '#' prefix, with an extra leading zero for
// values of 0x1000 and above (matching the original assembler syntax).
void hpc_disassembler::format_immediate_word(std::ostream &stream, u16 data) const
{
	stream << '#';
	util::stream_format(stream, data >= 0x1000 ? "0%04X" : "%04X", data);
}
// Formats a two-operand instruction: the destination (accumulator A in
// indexed mode, otherwise `reg`), then either an immediate or a (possibly
// indirect) memory operand with a .b/.w size suffix.
void hpc_disassembler::disassemble_op(std::ostream &stream, const char *op, u16 reg, u16 src, bool imm, bool indir, bool idx, bool w) const
{
	util::stream_format(stream, "%-8s", op);
	if (idx)
		stream << "A";
	else
		format_register(stream, reg);
	stream << ",";
	if (imm)
	{
		if (w)
			format_immediate_word(stream, src);
		else
			format_immediate_byte(stream, src);
	}
	else
	{
		// Indexed mode prints the displacement in front of the [register].
		if (idx)
			util::stream_format(stream, "0%X", u16(reg));
		if (indir)
			stream << "[";
		format_register(stream, src);
		if (indir)
			stream << "]";
		if (w)
			stream << ".w";
		else
			stream << ".b";
	}
}
// Formats a single-operand instruction. The .w suffix is suppressed for
// direct accesses to 0x00C4-0x00CF (the SP/PC/A/K/B/X core registers per
// s_regs, which are inherently word-wide).
void hpc_disassembler::disassemble_unary_op(std::ostream &stream, const char *op, u16 offset, u16 src, bool indir, bool idx, bool w) const
{
	util::stream_format(stream, "%-8s", op);
	if (idx)
		util::stream_format(stream, "0%X", offset);
	if (indir)
		stream << "[";
	format_register(stream, src);
	if (indir)
		stream << "]";
	if (w)
	{
		if (src < 0x00c4 || src >= 0x00d0 || indir)
			stream << ".w";
	}
	else
		stream << ".b";
}
// Formats a bit-manipulation instruction. A direct access to the odd byte of
// a named 16-bit register is folded onto the even address with the bit number
// shifted up by 8, so e.g. bit 1 of "psw+1" prints as bit 9 of "psw".
void hpc_disassembler::disassemble_bit_op(std::ostream &stream, const char *op, u8 bit, u16 offset, u16 src, bool indir, bool idx) const
{
	if (src >= 0x00c0 && src < 0x01c0 && BIT(src, 0) && !indir && m_regs[(src - 0x00c0) >> 1] != nullptr)
	{
		src &= 0xfffe;
		bit += 8;
	}
	util::stream_format(stream, "%-8s%d,", op, bit);
	if (idx)
		util::stream_format(stream, "0%X", offset);
	if (indir)
		stream << "[";
	format_register(stream, src);
	if (indir)
		stream << "].b";
}
// Disassembles one instruction. Decoding is two-pass: the first switch
// interprets the leading byte as an addressing-mode/prefix (filling in
// reg/src/imm/indir/idx, computing jump targets, and possibly fetching a
// second byte as the "real" opcode); the second switch formats the
// operation itself. 16-bit fields are byte-swapped on read because the
// instruction stream stores them MSB first (see file header note).
offs_t hpc_disassembler::disassemble(std::ostream &stream, offs_t pc, const hpc_disassembler::data_buffer &opcodes, const hpc_disassembler::data_buffer &params)
{
	u8 opcode = opcodes.r8(pc);
	u16 reg = REGISTER_A;
	u16 src = REGISTER_B;
	bool imm = false;
	bool dmode = false;
	bool indir = true;
	bool idx = false;
	bool jmp = false;
	offs_t bytes = 1;

	// First pass: addressing mode / prefix decode.
	switch (opcode)
	{
	case 0x20: case 0x21: case 0x22: case 0x23:
	case 0x24: case 0x25: case 0x26: case 0x27:
	case 0x28: case 0x29: case 0x2a: case 0x2b:
	case 0x2c: case 0x2d: case 0x2e: case 0x2f:
		// JSRP: indirect through the vector table at 0xFFD0
		jmp = true;
		src = 0xffd0 + (opcode & 0x0f) * 2;
		bytes = 1;
		break;

	case 0x30: case 0x31: case 0x32: case 0x33:
		jmp = true;
		src = pc + 2 + ((opcode & 0x03) << 8 | opcodes.r8(pc + 1));
		bytes = 2;
		break;

	case 0x34: case 0x35: case 0x36: case 0x37:
		jmp = true;
		src = pc - ((opcode & 0x03) << 8 | opcodes.r8(pc + 1));
		bytes = 2;
		break;

	case 0x38: case 0x39: case 0x3a:
		reg = REGISTER_X;
		break;

	case 0x3f:
	case 0x89: case 0x8a:
	case 0xa9: case 0xaa: case 0xaf:
		indir = false;
		src = opcodes.r8(pc + 1);
		bytes = 2;
		break;

	case 0x40: case 0x41: case 0x42: case 0x43:
	case 0x44: case 0x45: case 0x46: case 0x47:
	case 0x48: case 0x49: case 0x4a: case 0x4b:
	case 0x4c: case 0x4d: case 0x4e: case 0x4f:
	case 0x50: case 0x51: case 0x52: case 0x53:
	case 0x54: case 0x55: case 0x56: case 0x57:
	case 0x58: case 0x59: case 0x5a: case 0x5b:
	case 0x5c: case 0x5d: case 0x5e: case 0x5f:
		// short forward JP (offset encoded in the opcode itself)
		jmp = true;
		src = pc + 1 + (opcode & 0x1f);
		break;

	case 0x60: case 0x61: case 0x62: case 0x63:
	case 0x64: case 0x65: case 0x66: case 0x67:
	case 0x68: case 0x69: case 0x6a: case 0x6b:
	case 0x6c: case 0x6d: case 0x6e: case 0x6f:
	case 0x70: case 0x71: case 0x72: case 0x73:
	case 0x74: case 0x75: case 0x76: case 0x77:
	case 0x78: case 0x79: case 0x7a: case 0x7b:
	case 0x7c: case 0x7d: case 0x7e: case 0x7f:
		// short backward JP
		jmp = true;
		src = pc - (opcode & 0x1f);
		break;

	case 0x80: case 0x82:
	case 0xa0:
		// double-register / immediate-to-memory prefixes: operands follow,
		// then the actual opcode byte
		dmode = true;
		indir = false;
		if (BIT(opcode, 1))
			imm = true;
		src = opcodes.r8(pc + 1);
		reg = opcodes.r8(pc + 2);
		opcode = opcodes.r8(pc + 3);
		bytes = 4;
		break;

	case 0x84: case 0x86:
	case 0xa4:
		dmode = true;
		indir = false;
		if (BIT(opcode, 1))
			imm = true;
		src = swapendian_int16(opcodes.r16(pc + 1));
		reg = opcodes.r8(pc + 3);
		opcode = opcodes.r8(pc + 4);
		bytes = 5;
		break;

	case 0x81: case 0x83:
	case 0xa1:
		dmode = true;
		indir = false;
		if (BIT(opcode, 1))
			imm = true;
		src = opcodes.r8(pc + 1);
		reg = swapendian_int16(opcodes.r16(pc + 2));
		opcode = opcodes.r8(pc + 4);
		bytes = 5;
		break;

	case 0xa2:
		// indexed with 8-bit displacement
		idx = true;
		reg = opcodes.r8(pc + 1);
		src = opcodes.r8(pc + 2);
		opcode = opcodes.r8(pc + 3);
		bytes = 4;
		break;

	case 0x85: case 0x87:
	case 0xa5:
		dmode = true;
		indir = false;
		if (BIT(opcode, 1))
			imm = true;
		src = swapendian_int16(opcodes.r16(pc + 1));
		reg = swapendian_int16(opcodes.r16(pc + 3));
		opcode = opcodes.r8(pc + 5);
		bytes = 6;
		break;

	case 0xa6:
		// indexed with 16-bit displacement
		idx = true;
		reg = swapendian_int16(opcodes.r16(pc + 1));
		src = opcodes.r8(pc + 3);
		opcode = opcodes.r8(pc + 4);
		bytes = 5;
		break;

	case 0x88: case 0x8b: case 0x8e:
	case 0xa8: case 0xab: case 0xae:
		indir = false;
		src = opcodes.r8(pc + 1);
		bytes = 2;
		break;

	case 0x8c:
	case 0xac:
		indir = false;
		src = opcodes.r8(pc + 1);
		reg = opcodes.r8(pc + 2);
		bytes = 3;
		break;

	case 0x8d:
		imm = true;
		src = opcodes.r8(pc + 1);
		reg = opcodes.r8(pc + 2);
		bytes = 3;
		break;

	case 0x8f:
		src = REGISTER_X;
		opcode = opcodes.r8(pc + 1);
		bytes = 2;
		break;

	case 0x90: case 0x91: case 0x92: case 0x93:
		// load immediate byte into one of A/K/B/X (0x00C8 | n*2)
		imm = true;
		reg = 0x00c8 | ((opcode & 0x03) << 1);
		src = opcodes.r8(pc + 1);
		bytes = 2;
		break;

	case 0x98: case 0x99: case 0x9a: case 0x9b:
	case 0x9c: case 0x9d: case 0x9e: case 0x9f:
		imm = true;
		src = opcodes.r8(pc + 1);
		bytes = 2;
		break;

	case 0x94:
		jmp = true;
		src = pc + 2 + opcodes.r8(pc + 1);
		bytes = 2;
		break;

	case 0x95:
		jmp = true;
		src = pc - opcodes.r8(pc + 1);
		bytes = 2;
		break;

	case 0x96:
		indir = false;
		src = opcodes.r8(pc + 1);
		opcode = opcodes.r8(pc + 2);
		bytes = 3;
		break;

	case 0x97:
		indir = false;
		imm = true;
		src = opcodes.r8(pc + 1);
		reg = opcodes.r8(pc + 2);
		bytes = 3;
		break;

	case 0xa7:
		imm = true;
		src = swapendian_int16(opcodes.r16(pc + 1));
		reg = swapendian_int16(opcodes.r16(pc + 3));
		bytes = 5;
		break;

	case 0xad:
		src = opcodes.r8(pc + 1);
		opcode = opcodes.r8(pc + 2);
		bytes = 3;
		break;

	case 0xb0: case 0xb1: case 0xb2: case 0xb3:
		// load immediate word into one of A/K/B/X
		imm = true;
		reg = 0x00c8 | ((opcode & 0x03) << 1);
		src = swapendian_int16(opcodes.r16(pc + 1));
		bytes = 3;
		break;

	case 0xb8: case 0xb9: case 0xba: case 0xbb:
	case 0xbc: case 0xbd: case 0xbe: case 0xbf:
		imm = true;
		src = swapendian_int16(opcodes.r16(pc + 1));
		bytes = 3;
		break;

	case 0xb4: case 0xb5:
		jmp = true;
		src = pc + 3 + swapendian_int16(opcodes.r16(pc + 1));
		bytes = 3;
		break;

	case 0xb6:
		indir = false;
		src = swapendian_int16(opcodes.r16(pc + 1));
		opcode = opcodes.r8(pc + 3);
		bytes = 4;
		break;

	case 0xb7:
		imm = true;
		src = swapendian_int16(opcodes.r16(pc + 1));
		reg = opcodes.r8(pc + 3);
		bytes = 4;
		break;

	case 0xd4: case 0xd5: case 0xd6:
	case 0xf4: case 0xf5: case 0xf6:
		src = REGISTER_X;
		break;
	}

	// Second pass: format the (possibly re-fetched) operation.
	switch (opcode)
	{
	case 0x00:
		util::stream_format(stream, "%-8sA", "clr");
		break;

	case 0x01:
		util::stream_format(stream, "%-8sA", "comp");
		break;

	case 0x02:
		stream << "sc";
		break;

	case 0x03:
		stream << "rc";
		break;

	case 0x04:
		util::stream_format(stream, "%-8sA", "inc");
		break;

	case 0x05:
		util::stream_format(stream, "%-8sA", "dec");
		break;

	case 0x06:
		stream << "ifnc";
		break;

	case 0x07:
		stream << "ifc";
		break;

	case 0x08: case 0x09: case 0x0a: case 0x0b:
	case 0x0c: case 0x0d: case 0x0e: case 0x0f:
		disassemble_bit_op(stream, "sbit", opcode & 0x07, reg, src, indir, idx);
		break;

	case 0x10: case 0x11: case 0x12: case 0x13:
	case 0x14: case 0x15: case 0x16: case 0x17:
		disassemble_bit_op(stream, "ifbit", opcode & 0x07, reg, src, indir, idx);
		break;

	case 0x18: case 0x19: case 0x1a: case 0x1b:
	case 0x1c: case 0x1d: case 0x1e: case 0x1f:
		disassemble_bit_op(stream, "rbit", opcode & 0x07, reg, src, indir, idx);
		break;

	case 0x20: case 0x21: case 0x22: case 0x23:
	case 0x24: case 0x25: case 0x26: case 0x27:
	case 0x28: case 0x29: case 0x2a: case 0x2b:
	case 0x2c: case 0x2d: case 0x2e: case 0x2f:
		util::stream_format(stream, "%-8s", "jsrp");
		if (jmp)
			util::stream_format(stream, "[0%X]", src);
		else
			stream << "???";
		bytes |= STEP_OVER;
		break;

	case 0x30: case 0x31: case 0x32: case 0x33:
	case 0x34: case 0x35: case 0x36: case 0x37:
		util::stream_format(stream, "%-8s", "jsr");
		if (jmp)
			util::stream_format(stream, "0%X", src);
		else
			stream << "???";
		bytes |= STEP_OVER;
		break;

	case 0x38:
		disassemble_op(stream, "rbit", reg, src, imm, indir, idx, false);
		break;

	case 0x39:
		disassemble_op(stream, "sbit", reg, src, imm, indir, idx, false);
		break;

	case 0x3a:
		disassemble_op(stream, "ifbit", reg, src, imm, indir, idx, false);
		break;

	case 0x3b:
		util::stream_format(stream, "%-8sA", "swap");
		break;

	case 0x3c:
		stream << "ret";
		bytes |= STEP_OUT;
		break;

	case 0x3d:
		stream << "retsk";
		bytes |= STEP_OUT;
		break;

	case 0x3e:
		stream << "reti";
		bytes |= STEP_OUT;
		break;

	case 0x3f:
		util::stream_format(stream, "%-8s", "pop");
		format_register(stream, src);
		break;

	case 0x40:
		stream << "nop";
		break;

	case 0x41: case 0x42: case 0x43:
	case 0x44: case 0x45: case 0x46: case 0x47:
	case 0x48: case 0x49: case 0x4a: case 0x4b:
	case 0x4c: case 0x4d: case 0x4e: case 0x4f:
	case 0x50: case 0x51: case 0x52: case 0x53:
	case 0x54: case 0x55: case 0x56: case 0x57:
	case 0x58: case 0x59: case 0x5a: case 0x5b:
	case 0x5c: case 0x5d: case 0x5e: case 0x5f:
	case 0x60: case 0x61: case 0x62: case 0x63:
	case 0x64: case 0x65: case 0x66: case 0x67:
	case 0x68: case 0x69: case 0x6a: case 0x6b:
	case 0x6c: case 0x6d: case 0x6e: case 0x6f:
	case 0x70: case 0x71: case 0x72: case 0x73:
	case 0x74: case 0x75: case 0x76: case 0x77:
	case 0x78: case 0x79: case 0x7a: case 0x7b:
	case 0x7c: case 0x7d: case 0x7e: case 0x7f:
		util::stream_format(stream, "%-8s", "jp");
		if (jmp)
			util::stream_format(stream, "0%X", src);
		else
			stream << "???";
		break;

	case 0x88: case 0x8c:
	case 0x90: case 0x91: case 0x92: case 0x93:
	case 0x97:
	case 0xa8: case 0xac:
	case 0xb0: case 0xb1: case 0xb2: case 0xb3:
	case 0xb7:
	case 0xc4:
	case 0xd4:
	case 0xe4:
	case 0xf4:
		// BIT 5 of the opcode selects word (.w) vs byte (.b) size throughout
		disassemble_op(stream, "ld", reg, src, imm, indir, idx, BIT(opcode, 5));
		break;

	case 0x89:
	case 0xa9:
		disassemble_unary_op(stream, "inc", reg, src, indir, idx, BIT(opcode, 5));
		break;

	case 0x8a:
	case 0xaa:
		// decsz conditionally skips the next instruction
		disassemble_unary_op(stream, "decsz", reg, src, indir, idx, BIT(opcode, 5));
		bytes |= STEP_OVER | (1 << OVERINSTSHIFT);
		break;

	case 0x8b:
	case 0xab:
	case 0xc6:
	case 0xd6:
	case 0xe6:
	case 0xf6:
		// in double-register mode the destination is memory, so this is "ld"
		disassemble_op(stream, dmode ? "ld" : "st", reg, src, imm, indir, idx, BIT(opcode, 5));
		break;

	case 0x8d:
		util::stream_format(stream, "%-8sBK,", "ld");
		format_immediate_byte(stream, src);
		stream << ",";
		format_immediate_byte(stream, reg);
		break;

	case 0x8e:
	case 0xae:
	case 0xc5:
	case 0xd5:
	case 0xe5:
	case 0xf5:
		disassemble_op(stream, "x", reg, src, imm, indir, idx, BIT(opcode, 5));
		break;

	case 0x94: case 0x95:
		util::stream_format(stream, "%-8s", "jp");
		if (jmp)
			util::stream_format(stream, "0%X", src);
		else
			stream << "???";
		break;

	case 0x98: case 0xb8: case 0xd8: case 0xf8:
		disassemble_op(stream, "add", reg, src, imm, indir, idx, BIT(opcode, 5));
		break;

	case 0x99: case 0xb9: case 0xd9: case 0xf9:
		disassemble_op(stream, "and", reg, src, imm, indir, idx, BIT(opcode, 5));
		break;

	case 0x9a: case 0xba: case 0xda: case 0xfa:
		disassemble_op(stream, "or", reg, src, imm, indir, idx, BIT(opcode, 5));
		break;

	case 0x9b: case 0xbb: case 0xdb: case 0xfb:
		disassemble_op(stream, "xor", reg, src, imm, indir, idx, BIT(opcode, 5));
		break;

	case 0x9c: case 0xbc: case 0xdc: case 0xfc:
		disassemble_op(stream, "ifeq", reg, src, imm, indir, idx, BIT(opcode, 5));
		break;

	case 0x9d: case 0xbd: case 0xdd: case 0xfd:
		disassemble_op(stream, "ifgt", reg, src, imm, indir, idx, BIT(opcode, 5));
		break;

	case 0x9e: case 0xbe: case 0xde: case 0xfe:
		disassemble_op(stream, "mult", reg, src, imm, indir, idx, BIT(opcode, 5));
		break;

	case 0x9f: case 0xbf: case 0xdf: case 0xff:
		disassemble_op(stream, "div", reg, src, imm, indir, idx, BIT(opcode, 5));
		break;

	case 0xa7:
		util::stream_format(stream, "%-8sBK,", "ld");
		format_immediate_word(stream, src);
		stream << ",";
		format_immediate_word(stream, reg);
		break;

	case 0xaf:
		util::stream_format(stream, "%-8s", "push");
		format_register(stream, src);
		break;

	case 0xb4:
		util::stream_format(stream, "%-8s", "jmpl");
		if (jmp)
			util::stream_format(stream, "0%X", src);
		else
			stream << "???";
		break;

	case 0xb5:
		util::stream_format(stream, "%-8s", "jsrl");
		if (jmp)
			util::stream_format(stream, "0%X", src);
		else
			stream << "???";
		bytes |= STEP_OVER;
		break;

	case 0xc0: case 0xc2:
	case 0xe0: case 0xe2:
		// load with B auto-increment/decrement and skip
		util::stream_format(stream, "%-8sA,[B%c].%c", "lds",
							BIT(opcode, 1) ? '-' : '+',
							BIT(opcode, 5) ? 'w' : 'b');
		bytes |= STEP_OVER | (1 << OVERINSTSHIFT);
		break;

	case 0xd0: case 0xd2:
	case 0xf0: case 0xf2:
		util::stream_format(stream, "%-8sA,[X%c].%c", "ld",
							BIT(opcode, 1) ? '-' : '+',
							BIT(opcode, 5) ? 'w' : 'b');
		break;

	case 0xc1: case 0xc3:
	case 0xe1: case 0xe3:
		// exchange with B auto-increment/decrement and skip
		util::stream_format(stream, "%-8sA,[B%c].%c", "xs",
							BIT(opcode, 1) ? '-' : '+',
							BIT(opcode, 5) ? 'w' : 'b');
		bytes |= STEP_OVER | (1 << OVERINSTSHIFT);
		break;

	case 0xd1: case 0xd3:
	case 0xf1: case 0xf3:
		util::stream_format(stream, "%-8sA,[X%c].%c", "x",
							BIT(opcode, 1) ? '-' : '+',
							BIT(opcode, 5) ? 'w' : 'b');
		break;

	case 0xc7: case 0xe7:
		util::stream_format(stream, "%-8sA", BIT(opcode, 5) ? "shl" : "shr");
		break;

	case 0xd7: case 0xf7:
		util::stream_format(stream, "%-8sA", BIT(opcode, 5) ? "rlc" : "rrc");
		break;

	case 0xc8: case 0xe8:
		disassemble_op(stream, "adc", reg, src, imm, indir, idx, BIT(opcode, 5));
		break;

	case 0xc9: case 0xe9:
		disassemble_op(stream, "dadc", reg, src, imm, indir, idx, BIT(opcode, 5));
		break;

	case 0xca: case 0xea:
		disassemble_op(stream, "dsubc", reg, src, imm, indir, idx, BIT(opcode, 5));
		break;

	case 0xcb: case 0xeb:
		disassemble_op(stream, "subc", reg, src, imm, indir, idx, BIT(opcode, 5));
		break;

	case 0xcc: case 0xec:
		stream << "jid";
		if (BIT(opcode, 5))
			stream << "w";
		break;

	case 0xcf: case 0xef:
		disassemble_op(stream, "divd", reg, src, imm, indir, idx, BIT(opcode, 5));
		break;

	default:
		stream << "???";
		break;
	}

	return bytes | SUPPORTED;
}
|
matthiasblaesing/COMTypelibraries | vbide5/src/main/java/eu/doppel_helix/jna/tlb/vbide5/VBComponent.java |
package eu.doppel_helix.jna.tlb.vbide5;
import com.sun.jna.platform.win32.COM.COMException;
import com.sun.jna.platform.win32.COM.util.IComEventCallbackCookie;
import com.sun.jna.platform.win32.COM.util.IComEventCallbackListener;
import com.sun.jna.platform.win32.COM.util.IConnectionPoint;
import com.sun.jna.platform.win32.COM.util.IUnknown;
import com.sun.jna.platform.win32.COM.util.annotation.ComObject;
import com.sun.jna.platform.win32.COM.util.IRawDispatchHandle;
/**
* <p>uuid({BE39F3DA-1B13-11D0-887F-00A0C90F2744})</p>
* <p>interface(_VBComponent)</p>
*/
// Binds the COM coclass GUID so the JNA COM utility layer can instantiate
// this interface; all members are inherited from _VBComponent.
@ComObject(clsId = "{BE39F3DA-1B13-11D0-887F-00A0C90F2744}")
public interface VBComponent extends IUnknown
,_VBComponent
{
} |
Webaholicson/automall | spec/controllers/attributes_controller_spec.rb | <filename>spec/controllers/attributes_controller_spec.rb
require 'rails_helper'
# Placeholder spec for AttributesController; no examples defined yet.
RSpec.describe AttributesController, type: :controller do
  # TODO: add examples covering the controller's actions and responses.
end
|
eiroca/freej2me | src/jme-api/src/main/java/javax/microedition/io/Datagram.java | /**
* This file is part of FreeJ2ME.
*
* FreeJ2ME is free software: you can redistribute it and/or modify it under the terms of the GNU
* General Public License as published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* FreeJ2ME is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with FreeJ2ME. If not,
* see http://www.gnu.org/licenses/
*
*/
package javax.microedition.io;
import java.io.DataInput;
import java.io.DataOutput;
/**
 * A datagram packet for the Generic Connection Framework: a payload buffer
 * plus the address it is sent to or received from. Extends
 * DataInput/DataOutput so the payload can be read and written with typed
 * stream operations.
 */
public interface Datagram extends DataInput, DataOutput {
	/** Returns the address associated with this datagram. */
	public String getAddress();
	/** Returns the backing payload buffer. */
	public byte[] getData();
	/** Returns the length in bytes of the usable payload. */
	public int getLength();
	/** Returns the offset into the buffer where the payload starts. */
	public int getOffset();
	/** Resets the datagram's read/write state. */
	public void reset();
	/** Copies the address from another datagram. */
	public void setAddress(Datagram reference);
	/** Sets the address from its string form. */
	public void setAddress(String addr);
	/** Sets the payload buffer, offset and length in one call. */
	public void setData(byte[] buffer, int offset, int len);
	/** Sets the usable payload length. */
	public void setLength(int len);
}
|
acgist/muses | boot-parent/boot-test/src/test/java/com/acgist/nosql/neo4j/PersonRelationship.java | <reponame>acgist/muses
package com.acgist.nosql.neo4j;
import org.springframework.data.neo4j.core.schema.RelationshipProperties;
import com.acgist.model.neo4j.BootRelationship;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.Setter;
// Lombok generates accessors and equals/hashCode (including superclass
// state via callSuper = true).
@Getter
@Setter
@EqualsAndHashCode(callSuper = true)
// Marks this class as a set of properties stored on a Neo4j relationship.
@RelationshipProperties
public class PersonRelationship extends BootRelationship<PersonNode> {

	// Serialization version marker.
	private static final long serialVersionUID = 1L;

	// Attribute persisted on the relationship (edge) itself.
	private String name;

}
|
josephwinston/hana | test/sandbox/repeat.cpp | <reponame>josephwinston/hana<filename>test/sandbox/repeat.cpp
/*
@copyright <NAME> 2015
Distributed under the Boost Software License, Version 1.0.
(See accompanying file LICENSE.md or copy at http://boost.org/LICENSE_1_0.txt)
*/
#include <boost/hana/assert.hpp>
#include <boost/hana/bool.hpp>
#include <boost/hana/core/models.hpp>
#include <boost/hana/functional.hpp>
#include <boost/hana/integral_constant.hpp>
#include <boost/hana/iterable.hpp>
#include <boost/hana/lazy.hpp>
#include <boost/hana/monad_plus.hpp>
#include <boost/hana/sequence.hpp>
#include <boost/hana/tuple.hpp>
using namespace boost::hana;
// Tag type for this toy infinite lazy-list data type.
struct LazyList;

// Lazy cons cell: an evaluated head value plus an unevaluated (lazy) tail.
template <typename X, typename Xs>
struct lazy_cons_type {
    X x;
    Xs xs;
    struct hana { using datatype = LazyList; };
};

// Builds a lazy_cons_type, deducing head/tail types from the arguments.
auto lazy_cons = [](auto x, auto xs) {
    return lazy_cons_type<decltype(x), decltype(xs)>{x, xs};
};

// Empty-list terminator for LazyList.
struct lazy_nil_type { struct hana { using datatype = LazyList; }; };

constexpr lazy_nil_type lazy_nil{};

// Infinite repetition of x, built with the fixpoint combinator; the tail
// is wrapped in hana::lazy so it is only expanded on demand.
auto repeat_ = fix([](auto repeat, auto x) {
    return lazy_cons(x, lazy(repeat)(x));
});
namespace boost { namespace hana {
    //////////////////////////////////////////////////////////////////////////
    // Iterable
    //////////////////////////////////////////////////////////////////////////
    // head of a lazy cons is the stored, already-evaluated value.
    template <>
    struct head_impl<LazyList> {
        template <typename Xs>
        static constexpr auto apply(Xs lcons)
        { return lcons.x; }
    };

    // tail forces (evaluates) the lazy tail.
    template <>
    struct tail_impl<LazyList> {
        template <typename Xs>
        static constexpr auto apply(Xs lcons)
        { return eval(lcons.xs); }
    };

    // A cons cell is never empty; the exact-match overload for
    // lazy_nil_type reports the empty list.
    template <>
    struct is_empty_impl<LazyList> {
        template <typename Xs>
        static constexpr auto apply(Xs lcons)
        { return false_; }

        static constexpr auto apply(lazy_nil_type)
        { return true_; }
    };

    //////////////////////////////////////////////////////////////////////////
    // MonadPlus
    //////////////////////////////////////////////////////////////////////////
    // prepend defers the existing list behind a lazy wrapper.
    template <>
    struct prepend_impl<LazyList> {
        template <typename X, typename Xs>
        static constexpr auto apply(X x, Xs xs)
        { return lazy_cons(x, lazy(xs)); }
    };

    template <>
    struct empty_impl<LazyList> {
        static constexpr auto apply()
        { return lazy_nil; }
    };

    //////////////////////////////////////////////////////////////////////////
    // Sequence
    //////////////////////////////////////////////////////////////////////////
    // Unconditionally opt LazyList into the Sequence concept.
    template <>
    struct models_impl<Sequence, LazyList>
        : decltype(true_)
    { };
}}
// Sanity checks: an infinite repeated list is non-empty, yields the
// repeated element at any position, and a finite prefix can be taken.
int main() {
    BOOST_HANA_CONSTANT_CHECK(!is_empty(repeat_(1)));
    BOOST_HANA_CONSTEXPR_CHECK(head(repeat_(1)) == 1);
    BOOST_HANA_CONSTEXPR_CHECK(at(size_t<10>, repeat_(1)) == 1);
    BOOST_HANA_CONSTEXPR_CHECK(take(size_t<2>, repeat_('x')) == make<Tuple>('x', 'x'));
}
|
arsenm/rocPRIM | test/rocprim/test_hc_tuple.cpp | <filename>test/rocprim/test_hc_tuple.cpp
// MIT License
//
// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
#include <iostream>
#include <type_traits>
#include <iterator>
#include <tuple>
// Google Test
#include <gtest/gtest.h>
// rocPRIM API
#include <rocprim/rocprim.hpp>
#include "test_utils.hpp"
// A deliberately non-trivial value wrapper: the user-provided default
// constructor, destructor and copy assignment keep the type from being
// trivially copyable, which exercises rocprim::tuple's handling of such
// element types. Otherwise it behaves as a thin arithmetic wrapper over T.
template<class T>
struct non_trivial
{
    non_trivial() : x(1) {}
    non_trivial(T value) : x(value) {}
    ~non_trivial() {}

    // Member-wise copy; defined by hand on purpose (non-triviality).
    non_trivial& operator=(const non_trivial& rhs)
    {
        x = rhs.x;
        return *this;
    }

    non_trivial operator+(const non_trivial& rhs) const
    {
        return non_trivial(x + rhs.x);
    }

    non_trivial operator-(const non_trivial& rhs) const
    {
        return non_trivial(x - rhs.x);
    }

    // Comparisons against another wrapper.
    bool operator==(const non_trivial& rhs) const
    {
        return x == rhs.x;
    }

    bool operator!=(const non_trivial& rhs) const
    {
        return x != rhs.x;
    }

    // Comparisons against a raw value of the wrapped type.
    bool operator==(const T& value) const
    {
        return x == value;
    }

    bool operator!=(const T& value) const
    {
        return x != value;
    }

    T x;
};

using non_trivial_int = non_trivial<int>;
// make_index_sequence / make_integer_sequence must expand to exactly the
// expected integer_sequence specializations (length and element type).
TEST(RocprimTupleTests, IntegerSequence)
{
    ASSERT_TRUE((
        std::is_same<
            rocprim::make_index_sequence<4>,
            rocprim::index_sequence<0, 1, 2, 3>
        >::value
    ));
    ASSERT_TRUE((
        !std::is_same<
            rocprim::make_index_sequence<4>,
            rocprim::index_sequence<0, 1, 2, 3, 4>
        >::value
    ));
    ASSERT_TRUE((
        !std::is_same<
            rocprim::make_index_sequence<3>,
            rocprim::integer_sequence<char, 0, 1, 2>
        >::value
    ));
    ASSERT_TRUE((
        std::is_same<
            rocprim::make_integer_sequence<char, 2>,
            rocprim::integer_sequence<char, 0, 1>
        >::value
    ));
}

// tuple_size reports the number of element types, including zero.
TEST(RocprimTupleTests, TupleSize)
{
    using t3 = rocprim::tuple<char, int, double>;
    ASSERT_EQ(rocprim::tuple_size<t3>::value, 3);
    using t1 = rocprim::tuple<char>;
    ASSERT_EQ(rocprim::tuple_size<t1>::value, 1);
    using t4 = rocprim::tuple<char, int, double, int>;
    ASSERT_EQ(rocprim::tuple_size<t4>::value, 4);
    using t0 = rocprim::tuple<>;
    ASSERT_EQ(rocprim::tuple_size<t0>::value, 0);
}

// tuple_element / tuple_element_t map an index to the element type.
TEST(RocprimTupleTests, TupleElement)
{
    using t0 = rocprim::tuple<char, int, double>;
    ASSERT_TRUE((
        std::is_same<
            rocprim::tuple_element<0, t0>::type,
            char
        >::value
    ));
    ASSERT_TRUE((
        std::is_same<
            rocprim::tuple_element<1, t0>::type,
            int
        >::value
    ));
    ASSERT_TRUE((
        std::is_same<
            rocprim::tuple_element_t<2, t0>,
            double
        >::value
    ));
}

// Construction smoke test: default, direct, copy, converting (narrowing
// and widening), from const lvalues, and with non-trivial element types.
// This test passes when it compiles and runs without error.
TEST(RocprimTupleTests, TupleConstructors)
{
    rocprim::tuple<> t1; (void) t1;
    rocprim::tuple<int, char, double>t2(1, 2, 3); (void) t2;
    rocprim::tuple<int, int, int> t3(4, 5, 6); (void) t3;
    auto t4(t3); (void) t4;
    rocprim::tuple<int, char, double> t5(2, 4, 1U); (void) t5;
    rocprim::tuple<double, float> t6(5, 2); (void) t6;
    rocprim::tuple<double, float> t7; (void) t7;
    const int i = 5;
    rocprim::tuple<double, float> t8(i, 2); (void) t8;
    rocprim::tuple<double, float> t9(i, i); (void) t9;
    rocprim::tuple<int, int> t10(i, i); (void) t10;
    const rocprim::tuple<float, unsigned int> ct1(1.0f, 2U);
    rocprim::tuple<double, unsigned int> ct2(ct1);
    rocprim::tuple<non_trivial_int> nt_ct1; (void) nt_ct1;
    rocprim::tuple<non_trivial_int> nt_ct2(non_trivial_int(1)); (void) nt_ct2;
}
// Empty tag types used to exercise the empty-element (EBO) code path.
struct empty1
{
};

struct empty2
{
};

// Tuples containing empty types must construct and copy cleanly.
// This test passes when it compiles and runs without error.
TEST(RocprimTupleTests, TupleConstructorsEBO)
{
    rocprim::tuple<empty1> t1; (void) t1;
    rocprim::tuple<empty1, int> t2(empty1(), 2.0); (void) t2;
    rocprim::tuple<empty1, empty2> t3; (void) t3;
    rocprim::tuple<empty1, int, empty2> t4(empty1(), 2, empty2()); (void) t4;
    auto t5(t4); (void) t5;
}

// rocprim::get<I> supports reading, writing, rvalue tuples and
// non-trivially-copyable element types.
TEST(RocprimTupleTests, TupleGetIndex)
{
    rocprim::tuple<int, char, double> t1(1, 2, 3U);
    rocprim::tuple<double, float, int> t2(4, 5, 6);
    // Read value
    ASSERT_EQ(rocprim::get<0>(t1), 1);
    ASSERT_EQ(rocprim::get<1>(t1), char(2));
    ASSERT_EQ(rocprim::get<2>(t1), 3);
    ASSERT_EQ(rocprim::get<0>(t2), 4);
    ASSERT_EQ(rocprim::get<1>(t2), 5);
    ASSERT_EQ(rocprim::get<2>(t2), 6);
    // Write value
    rocprim::get<0>(t2) = 7;
    rocprim::get<1>(t2) = 8;
    rocprim::get<2>(t2) = 9;
    ASSERT_EQ(rocprim::get<0>(t2), 7);
    ASSERT_EQ(rocprim::get<1>(t2), 8);
    ASSERT_EQ(rocprim::get<2>(t2), 9);
    // rvalue
    using type = rocprim::tuple<int, double>;
    ASSERT_EQ(rocprim::get<0>(type()), 0);
    ASSERT_EQ(rocprim::get<1>(type(1, 2)), 2);
    // non-trivial
    rocprim::tuple<non_trivial_int, non_trivial_int> nt1(
        non_trivial_int(1), non_trivial_int(2)
    );
    ASSERT_EQ(rocprim::get<0>(nt1), 1);
    ASSERT_EQ(rocprim::get<1>(nt1), 2);
}
// Copy/move assignment: same element types, converting assignment between
// different-but-compatible element types, empty tuples, assignment from a
// const reference, and non-trivial element types.
TEST(RocprimTupleTests, TupleAssignOperator)
{
    rocprim::tuple<int, char, double> t1(2, 4, 2U);
    rocprim::tuple<int, char, double> t2(3, 5, 1U);
    rocprim::tuple<double, float, int> t3(1, 2, 3);
    ASSERT_EQ(rocprim::get<0>(t2), 3);
    ASSERT_EQ(rocprim::get<1>(t2), char(5));
    ASSERT_EQ(rocprim::get<2>(t2), 1);
    ASSERT_EQ(rocprim::get<0>(t3), 1);
    ASSERT_EQ(rocprim::get<1>(t3), 2);
    ASSERT_EQ(rocprim::get<2>(t3), 3);
    // Same tuple types
    auto t4 = t2;
    ASSERT_EQ(rocprim::get<0>(t4), 3);
    ASSERT_EQ(rocprim::get<1>(t4), char(5));
    ASSERT_EQ(rocprim::get<2>(t4), 1);
    t2 = t1;
    ASSERT_EQ(rocprim::get<0>(t2), 2);
    ASSERT_EQ(rocprim::get<1>(t2), char(4));
    ASSERT_EQ(rocprim::get<2>(t2), 2);
    // Different tuple types (same number of elements)
    t3 = t2;
    ASSERT_EQ(rocprim::get<0>(t3), 2);
    ASSERT_EQ(rocprim::get<1>(t3), 4);
    ASSERT_EQ(rocprim::get<2>(t3), 2);
    // Move (same tuple types)
    t3 = decltype(t3)();
    ASSERT_EQ(rocprim::get<0>(t3), 0);
    ASSERT_EQ(rocprim::get<1>(t3), 0);
    ASSERT_EQ(rocprim::get<2>(t3), 0);
    // Move (different types)
    t3 = decltype(t2)(1, 1, 1);
    ASSERT_EQ(rocprim::get<0>(t3), 1);
    ASSERT_EQ(rocprim::get<1>(t3), 1);
    ASSERT_EQ(rocprim::get<2>(t3), 1);
    // Empty tuple
    rocprim::tuple<> t6;
    rocprim::tuple<> t7;
    t6 = decltype(t7)();
    t7 = t6;
    rocprim::tuple<int, int> t10(1, 2);
    const rocprim::tuple<int, int>& t10_ref = t10;
    rocprim::tuple<int, int> t11(10, 20);
    t11 = t10_ref;
    ASSERT_EQ(rocprim::get<0>(t11), 1);
    ASSERT_EQ(rocprim::get<1>(t11), 2);
    rocprim::tuple<int, int> t12(11, 12);
    t12 = std::move(t10);
    ASSERT_EQ(rocprim::get<0>(t12), 1);
    ASSERT_EQ(rocprim::get<1>(t12), 2);
    rocprim::tuple<float, double> t13(11, 12);
    t13 = std::move(t10);
    ASSERT_EQ(rocprim::get<0>(t13), float(1));
    ASSERT_EQ(rocprim::get<1>(t13), double(2));
    // non-trivial
    rocprim::tuple<non_trivial_int, non_trivial_int> nt1(
        non_trivial_int(1), non_trivial_int(2)
    );
    rocprim::tuple<non_trivial_int, non_trivial_int> nt2;
    nt2 = nt1;
    ASSERT_EQ(rocprim::get<0>(nt2), 1);
    ASSERT_EQ(rocprim::get<1>(nt2), 2);
}

// Relational operators, including cross-type comparisons: tuples compare
// lexicographically, element by element.
TEST(RocprimTupleTests, TupleComparisonOperators)
{
    rocprim::tuple<int, char, double> t1(2, 4, 2);
    rocprim::tuple<int, char, double> t2(3, 5, 1);
    rocprim::tuple<double, float, char> t3(3, 5, 1);
    rocprim::tuple<double, float, char> t4(2, 4, 2);
    // ==
    ASSERT_EQ(t1, t1);
    ASSERT_EQ(t2, t2);
    ASSERT_EQ(t3, t3);
    ASSERT_EQ(t2, t3);
    ASSERT_EQ(t1, t4);
    ASSERT_EQ(rocprim::tuple<>(), rocprim::tuple<>());
    // !=
    ASSERT_NE(t1, t2);
    ASSERT_NE(t3, t4);
    ASSERT_NE(t1, t3);
    ASSERT_NE(t2, t4);
    rocprim::tuple<float, float, float> t5(1, 2, 3);
    rocprim::tuple<float, float, float> t6(1, 2, 4);
    rocprim::tuple<int, int, int> t7(1, 4, 2);
    rocprim::tuple<int, int, int> t8(1, 4, 2);
    // <
    ASSERT_LT(t5, t6);
    ASSERT_LT(t5, t7);
    // <=
    ASSERT_LE(t5, t6);
    ASSERT_LE(t5, t7);
    ASSERT_LE(t5, t5);
    ASSERT_LE(t6, t6);
    ASSERT_LE(t7, t8);
    // >=
    ASSERT_GE(t5, t5);
    ASSERT_GE(t6, t6);
    ASSERT_GE(t7, t8);
    ASSERT_GE(t6, t5);
    ASSERT_GE(t7, t5);
    // >
    ASSERT_GT(t6, t5);
    ASSERT_GT(t7, t5);
}
// Member swap and the free rocprim::swap must exchange all elements.
TEST(RocprimTupleTests, TupleSwap)
{
    rocprim::tuple<int, char, double> t1(1, 2, 3);
    rocprim::tuple<int, char, double> t2(4, 5, 6);
    t1.swap(t2);
    ASSERT_EQ(rocprim::get<0>(t1), 4);
    ASSERT_EQ(rocprim::get<1>(t1), 5);
    ASSERT_EQ(rocprim::get<2>(t1), 6);
    ASSERT_EQ(rocprim::get<0>(t2), 1);
    ASSERT_EQ(rocprim::get<1>(t2), 2);
    ASSERT_EQ(rocprim::get<2>(t2), 3);
    rocprim::swap(t1, t2);
    ASSERT_EQ(rocprim::get<0>(t1), 1);
    ASSERT_EQ(rocprim::get<1>(t1), 2);
    ASSERT_EQ(rocprim::get<2>(t1), 3);
    ASSERT_EQ(rocprim::get<0>(t2), 4);
    ASSERT_EQ(rocprim::get<1>(t2), 5);
    ASSERT_EQ(rocprim::get<2>(t2), 6);
}

// make_tuple deduces element types (decayed) from its arguments.
TEST(RocprimTupleTests, TupleMakeTuple)
{
    rocprim::tuple<int, double> t1(1, 2);
    auto t2 = rocprim::make_tuple(1, 2.0);
    ASSERT_EQ(t1, t2);
    ASSERT_TRUE((
        std::is_same<
            rocprim::tuple<int, double>,
            decltype(t2)
        >::value
    ));
    ASSERT_TRUE((
        std::is_same<
            rocprim::tuple<int, int, double>,
            decltype(rocprim::make_tuple(1, 6, 1.0))
        >::value
    ));
}

// tie produces a tuple of references that can be assigned from a tuple;
// rocprim::ignore discards the corresponding element.
TEST(RocprimTupleTests, TupleMakeTie)
{
    int a = 0;
    double b = 0.0;
    rocprim::tuple<int, double> t1(1, 2);
    ASSERT_EQ(rocprim::get<0>(t1), 1);
    ASSERT_EQ(rocprim::get<1>(t1), 2);
    rocprim::tie(a, b) = t1;
    ASSERT_EQ(a, 1);
    ASSERT_EQ(b, 2);
    rocprim::tie(rocprim::ignore, a) = t1;
    ASSERT_EQ(a, 2);
    ASSERT_EQ(b, 2);
}

// rocprim::tuple's convertibility (reference-tuple -> value-tuple) must
// mirror std::tuple's behavior, since zip iterators rely on it.
TEST(RocprimTupleTests, Conversions)
{
    ASSERT_EQ(
        (std::is_convertible<
            std::tuple<int&, int&>,
            std::tuple<int, int>
        >::value),
        (std::is_convertible<
            rocprim::tuple<int&, int&>,
            rocprim::tuple<int, int>
        >::value)
    );
    using T1R = std::iterator_traits<std::vector<int>::iterator>::reference;
    using T1V = std::iterator_traits<std::vector<int>::iterator>::value_type;
    ASSERT_EQ(
        (std::is_convertible<
            std::tuple<T1R, T1R>,
            std::tuple<T1V, T1V>
        >::value),
        (std::is_convertible<
            rocprim::tuple<T1R, T1R>,
            rocprim::tuple<T1V, T1V>
        >::value)
    );
    int x = 1;
    std::tuple<int&, int&> tx = std::tie(x, x);
    std::tuple<int, int> ux(tx); (void) ux;
    rocprim::tuple<int, int> rtx = rocprim::tie(x, x);
    rocprim::tuple<int, int> rux(rtx); (void) rux;
} |
dwilt/the-resistance | react-redux-generator/reducer/selectors.js | <reponame>dwilt/the-resistance<gh_stars>1-10
// Helper utilities shared by all react-redux generator modules.
const {
    createFile,
    getImportStatement,
    capitalizeFirstChar,
    getFileNames,
    getExportAllString,
} = require(`../helpers.js`);

// Generator configuration (reducer name, initial state shape, output path,
// and whether the reducer is "simple", i.e. has no per-prop selectors).
const {
    reducerName,
    initialStateObject,
    selectorsFolderPath,
    simpleReducer
} = require(`./vars`);

// File name of the generated selectors module, e.g. `user.selectors.js`.
const selectorFilename = `${reducerName}.selectors.js`;

// Returns the `import { createSelector } from 'reselect'` statement.
const getReselectImport = () => {
    return getImportStatement([`createSelector`], `reselect`);
};

// Returns the source for the base selector that picks this reducer's
// slice out of the root state.
const getReducerSelector = () => {
    return `const ${reducerName}Selector = state => state.${reducerName};\n\n`;
};

// Returns one memoized reselect selector per key of the initial state.
const getPropSelectors = () => {
    return Object.keys(initialStateObject).reduce((currentString, prop) => {
        return currentString + `export const ${reducerName}${capitalizeFirstChar(prop)}Selector = createSelector(\n    ${reducerName}Selector,\n    ${reducerName} => ${reducerName}.${prop}\n);\n\n`;
    }, ``);
};

// Rewrites the selectors folder's index.js to re-export every selector
// module (skipping index.js itself and package.json).
const writeIndexFile = async () => {
    const files = await getFileNames(selectorsFolderPath);
    const selectors = files.filter(file => file !== `index.js` && file !== `package.json`);
    const content = selectors.reduce((current, file) => {
        return current + `${getExportAllString(file.split(`.js`)[0])}\n`;
    }, ``);
    return createFile(selectorsFolderPath, `index.js`, content);
};

// Writes the selectors module; simple reducers only get the base selector.
const createSelectorsFile = () => {
    let content = `${getReducerSelector()}`;
    if (!simpleReducer) {
        content = `${getReselectImport()}${content}${getPropSelectors()}`;
    }
    return createFile(selectorsFolderPath, selectorFilename, content);
};

// Entry point: generate the selectors file, then refresh the index.
const createSelector = async () => {
    await createSelectorsFile();
    await writeIndexFile();
};

module.exports = createSelector;
|
neuralm/Neuralm-Java-Client | src/main/java/net/neuralm/client/messages/requests/DisableTrainingRoomRequest.java | package net.neuralm.client.messages.requests;
/**
 * Request message asking the server to disable a training room.
 */
public class DisableTrainingRoomRequest extends Request {

    /** Identifier of the training room to disable. */
    public final int trainingRoomId;

    /** Identifier of the user issuing the request. */
    public final int userId;

    /**
     * Creates the request.
     *
     * @param trainingRoomId identifier of the training room to disable
     * @param userId         identifier of the requesting user
     */
    public DisableTrainingRoomRequest(int trainingRoomId, int userId) {
        this.userId = userId;
        this.trainingRoomId = trainingRoomId;
    }
}
|
Beiden/Intkr_SAAS_BEIDEN | com/intkr/saas/module/screen/admin/item/dialog/ItemPropertySelect.java | <filename>com/intkr/saas/module/screen/admin/item/dialog/ItemPropertySelect.java<gh_stars>0
package com.intkr.saas.module.screen.admin.item.dialog;
import com.intkr.saas.module.screen.admin.item.ItemPropertyMgr;
/**
*
* @author Beiden
* @date 2016-6-18 下午10:17:54
* @version 1.0
*/
public class ItemPropertySelect extends ItemPropertyMgr {
    // Intentionally empty: inherits all behavior from ItemPropertyMgr.
    // NOTE(review): presumably exists only so the framework can resolve a
    // dialog-specific screen name from the class — confirm.
}
|
AppSecAI-TEST/lightfish | multilight/lightfish-st/src/test/java/org/lightfish/business/MessageEndpoint.java | /*
*
*/
package org.lightfish.business;
import java.util.concurrent.CountDownLatch;
import javax.websocket.Endpoint;
import javax.websocket.EndpointConfig;
import javax.websocket.MessageHandler;
import javax.websocket.Session;
/**
*
* @author adam-bien.com
*/
/**
 * WebSocket client endpoint used by tests: records the last text message
 * received and counts down a latch so callers can block until the expected
 * number of messages has arrived.
 */
public class MessageEndpoint extends Endpoint {

    // Last text message received from the server.
    private String message;

    // Counted down once per received message.
    private CountDownLatch latch;

    public MessageEndpoint(CountDownLatch latch) {
        this.latch = latch;
    }

    @Override
    public void onOpen(Session session, EndpointConfig ec) {
        System.out.println("Opening session: " + session);
        // NOTE(review): the anonymous class is presumably used (instead of a
        // lambda) so the container can see the String type parameter of
        // MessageHandler.Whole at runtime — confirm before converting.
        session.addMessageHandler(new MessageHandler.Whole<String>() {

            public void onMessage(String message) {
                latch.countDown();
                setMessage(message);
            }
        });
    }

    /** Returns the most recently received message, or null if none yet. */
    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }
}
|
colin-zhou/mrfs | linux/access/acc.c | #include <stdio.h>
#include <unistd.h>
/*
 * Demonstrates access(2) with relative and absolute paths, then moves the
 * file with rename(2).
 */
int main()
{
    /* F_OK tests for mere existence; access() returns 0 on success, -1 on error. */
    if (access("./file.test", F_OK) != -1) {
        printf("relative find success\n");
    } else {
        printf("relative path can't find\n");
    }

    if (access("/home/colin/Git/reserve/linux_api/access/file.test", F_OK) != -1) {
        printf("ablsolute file exist\n");
    } else {
        printf("absolute file not exist\n");
    }

    /* rename(2) returns 0 on success and -1 on error; the original check
     * was inverted (it reported success when ret != 0). */
    int ret = rename("./file.test", "/home/colin/mytest.test");
    if (ret == 0) {
        printf("rename file success\n");
    } else {
        printf("rename file failed\n");
    }
    return 0;
}
|
EtashGuha/Etude | public/WebViewer/lib/ui/src/components/AnnotationPopup/AnnotationPopup.js | <gh_stars>0
import React from 'react';
import PropTypes from 'prop-types';
import { connect } from 'react-redux';
import ActionButton from 'components/ActionButton';
import AnnotationStylePopup from 'components/AnnotationStylePopup';
import core from 'core';
import { getAnnotationPopupPositionBasedOn } from 'helpers/getPopupPosition';
import getAnnotationStyle from 'helpers/getAnnotationStyle';
import getClassName from 'helpers/getClassName';
import actions from 'actions';
import selectors from 'selectors';
import './AnnotationPopup.scss';
/**
 * Popup rendered next to a selected annotation, offering comment, style
 * and delete actions. Tracks the selected annotation, whether it may be
 * modified, and the popup's on-screen position in local state; listens to
 * core annotation/mouse events to stay in sync with the document.
 */
class AnnotationPopup extends React.PureComponent {
  static propTypes = {
    isNotesPanelDisabled: PropTypes.bool,
    isDisabled: PropTypes.bool,
    isOpen: PropTypes.bool,
    isLeftPanelOpen: PropTypes.bool,
    isRightPanelOpen: PropTypes.bool,
    isAnnotationStylePopupDisabled: PropTypes.bool,
    openElement: PropTypes.func.isRequired,
    closeElement: PropTypes.func.isRequired,
    setIsNoteEditing: PropTypes.func.isRequired,
    setActiveLeftPanel: PropTypes.func.isRequired
  }

  constructor() {
    super();
    this.popup = React.createRef();
    // Kept on the instance so close() can reset the popup completely.
    this.initialState = {
      annotation: {},
      left: 0,
      top: 0,
      canModify: false,
      isStylePopupOpen: false,
      isMouseLeftDown: false,
    };
    this.state = this.initialState;
  }

  componentDidMount() {
    core.addEventListener('mouseLeftUp', this.onMouseLeftUp);
    core.addEventListener('mouseLeftDown', this.onMouseLeftDown);
    core.addEventListener('annotationSelected', this.onAnnotationSelected);
    core.addEventListener('annotationChanged', this.onAnnotationChanged);
    core.addEventListener('updateAnnotationPermission', this.onUpdateAnnotationPermission);
    core.addEventListener('documentUnloaded', this.onDocumentUnloaded);
    window.addEventListener('resize', this.handleWindowResize);
  }

  componentDidUpdate(prevProps, prevState) {
    const { isMouseLeftDown } = this.state;
    const isAnnotationSelected = Object.keys(this.state.annotation).length !== 0;
    const isClosingAnnotationPopup = this.props.isOpen === false && this.props.isOpen !== prevProps.isOpen;
    const isStylePopupOpen = !prevState.isStylePopupOpen && this.state.isStylePopupOpen;
    const isContainerShifted = prevProps.isLeftPanelOpen !== this.props.isLeftPanelOpen || prevProps.isRightPanelOpen !== this.props.isRightPanelOpen;
    // NOTE(review): '&&' binds tighter than '||', so opening the style popup
    // alone also triggers repositioning — appears intentional; confirm.
    if (isAnnotationSelected && !isMouseLeftDown && !isContainerShifted && !isClosingAnnotationPopup && !this.props.isDisabled || isStylePopupOpen) {
      this.positionAnnotationPopup();
      this.props.openElement('annotationPopup');
    }
    if (isContainerShifted) { //closing because we can't correctly reposition the popup on panel transition
      this.props.closeElement('annotationPopup');
    }
  }

  componentWillUnmount() {
    core.removeEventListener('mouseLeftUp', this.onMouseLeftUp);
    core.removeEventListener('mouseLeftDown', this.onMouseLeftDown);
    core.removeEventListener('annotationSelected', this.onAnnotationSelected);
    core.removeEventListener('annotationChanged', this.onAnnotationChanged);
    core.removeEventListener('updateAnnotationPermission', this.onUpdateAnnotationPermission);
    core.removeEventListener('documentUnloaded', this.onDocumentUnloaded);
    window.removeEventListener('resize', this.handleWindowResize);
  }

  // Hides the popup and resets all local state.
  close = () => {
    this.props.closeElement('annotationPopup');
    this.setState({ ...this.initialState });
  }

  onMouseLeftUp = () => {
    this.setState({ isMouseLeftDown:false });
  }

  onMouseLeftDown = () => {
    this.setState({ isMouseLeftDown:true });
  }

  onDocumentUnloaded = () => {
    this.close();
  }

  // Only track a single selected annotation; multi-select closes the popup.
  onAnnotationSelected = (e, annotations, action) => {
    if (action === 'selected' && annotations.length === 1) {
      const annotation = annotations[0];
      this.setState({
        annotation,
        canModify: core.canModify(annotation)
      });
    } else {
      this.close();
    }
  }

  onAnnotationChanged = (e, annotations, action) => {
    if (action === 'modify' && core.isAnnotationSelected(this.state.annotation)) {
      // Position change
      this.positionAnnotationPopup();
      this.props.openElement('annotationPopup');
      // Style change
      this.forceUpdate();
    }
  }

  onUpdateAnnotationPermission = () => {
    const canModify = this.state.annotation ? core.canModify(this.state.annotation) : false;
    this.setState({ canModify });
  }

  handleWindowResize = () => {
    this.props.closeElement('annotationPopup');
  }

  positionAnnotationPopup = () => {
    const { left, top } = getAnnotationPopupPositionBasedOn(this.state.annotation, this.popup);
    this.setState({ left, top });
  }

  commentOnAnnotation = () => {
    if (this.state.annotation instanceof Annotations.FreeTextAnnotation) {
      core.getAnnotationManager().trigger('annotationDoubleClicked', this.state.annotation);
    } else if (!this.props.isLeftPanelOpen) {
      this.props.openElement('notesPanel');
      // NOTE(review): the 400ms delay presumably waits for the notes panel
      // open transition before entering edit mode — confirm.
      setTimeout(() => {
        this.props.setIsNoteEditing(true);
      }, 400);
    } else {
      this.props.setActiveLeftPanel('notesPanel');
      this.props.setIsNoteEditing(true);
    }
    this.props.closeElement('annotationPopup');
  }

  openStylePopup = () => {
    this.setState({ isStylePopupOpen: true });
  }

  deleteAnnotation = () => {
    core.deleteAnnotations([this.state.annotation]);
    this.props.closeElement('annotationPopup');
  }

  render() {
    const { annotation, left, top, canModify, isStylePopupOpen } = this.state;
    const { isNotesPanelDisabled, isDisabled, isOpen, isAnnotationStylePopupDisabled } = this.props;
    const style = getAnnotationStyle(annotation);
    const hasStyle = Object.keys(style).length > 0;
    const className = getClassName(`Popup AnnotationPopup`, this.props);

    if (isDisabled) {
      return null;
    }

    // Click/mousedown are stopped so interacting with the popup does not
    // deselect the annotation underneath.
    return (
      <div className={className} ref={this.popup} data-element="annotationPopup" style={{ left, top }} onClick={e => e.stopPropagation()} onMouseDown={e => e.stopPropagation()}>
        {isStylePopupOpen
          ? <AnnotationStylePopup annotation={annotation} style={style} isOpen={isOpen} />
          : <React.Fragment>
            {!isNotesPanelDisabled &&
              <ActionButton dataElement="annotationCommentButton" title="action.comment" img="ic_comment_black_24px" onClick={this.commentOnAnnotation} />
            }
            {canModify && hasStyle && !isAnnotationStylePopupDisabled &&
              <ActionButton dataElement="annotationStyleEditButton" title="action.style" img="ic_palette_black_24px" onClick={this.openStylePopup} />
            }
            {canModify &&
              <ActionButton dataElement="annotationDeleteButton" title="action.delete" img="ic_delete_black_24px" onClick={this.deleteAnnotation} />
            }
          </React.Fragment>
        }
      </div>
    );
  }
}
// Maps UI element enabled/open flags from the store into props.
const mapStateToProps = state => ({
  isNotesPanelDisabled: selectors.isElementDisabled(state, 'notesPanel'),
  isDisabled: selectors.isElementDisabled(state, 'annotationPopup'),
  isAnnotationStylePopupDisabled: selectors.isElementDisabled(state, 'annotationStylePopup'),
  isOpen: selectors.isElementOpen(state, 'annotationPopup'),
  isLeftPanelOpen: selectors.isElementOpen(state, 'leftPanel'),
  isRightPanelOpen: selectors.isElementOpen(state, 'searchPanel'),
});

// Action creators exposed to the component as props.
const mapDispatchToProps = {
  openElement: actions.openElement,
  closeElement: actions.closeElement,
  setIsNoteEditing: actions.setIsNoteEditing,
  setActiveLeftPanel: actions.setActiveLeftPanel
};
export default connect(mapStateToProps, mapDispatchToProps)(AnnotationPopup); |
PageNotFoundx/tractor | oldversion/src/main/java/org/raniaia/minipika/framework/sql/xml/parser/MapperLabelParser.java | <reponame>PageNotFoundx/tractor
package org.jiakesiws.minipika.framework.sql.xml.parser;
/* ************************************************************************
*
* Copyright (C) 2020 2B键盘 All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* ************************************************************************/
/*
* Creates on 2019/12/17.
*/
import lombok.Getter;
import lombok.Setter;
import org.jdom2.Content;
import org.jdom2.Element;
import org.jiakesiws.minipika.framework.exception.ExpressionException;
import org.jiakesiws.minipika.framework.provide.ProvideVar;
import org.jiakesiws.minipika.framework.sql.xml.node.XMLNode;
import org.jiakesiws.minipika.framework.tools.StringUtils;
import java.util.List;
/**
* xml mapper下的标签解析
* @author 2B键盘
* @email <EMAIL>
*/
@SuppressWarnings("SpellCheckingInspection")
public class MapperLabelParser implements MapperLabelParserService {

    // XML parsing helper utilities.
    private XMLParserUtils util = new XMLParserUtils();

    // Detects grammatical/structural errors in the mapper labels.
    private GrammarCheck grammarCheck = new GrammarCheck();

    // Name of the builder currently being parsed (used in error messages).
    @Setter
    @Getter
    private String currentBuilder;

    // Name of the mapper currently being parsed (used in error messages).
    @Setter
    @Getter
    private String currentMapper;

    /**
     * Parses a plain text node. Currently unimplemented; always returns
     * null.
     */
    @Override
    public XMLNode text(Content content) {
        return null;
    }

    /**
     * Parses an if/else-style label into an XMLNode tree. For "if" labels
     * the mandatory "test" attribute is validated and stored as an
     * attribute; child text and nested condition elements become children,
     * each condition tagged with its ordinal position.
     */
    @Override
    public XMLNode ifOrEels(Element element) {
        String ieName = element.getName();
        XMLNode ieNode = new XMLNode(ieName);
        if (ProvideVar.IF.equals(ieName)) {
            String test = util.getIfLabelTestAttribute(element);
            if (StringUtils.isEmpty(test))
                throw new ExpressionException("tag: if label attribute test content cannot null. in mapper "
                        +currentBuilder + " : "+currentMapper);
            ieNode.addAttribute(ProvideVar.IF_TEST, test);
        }
        List<Content> conditions = element.getContent();
        int condCount = 0;
        for (Content condition : conditions) {
            // Text content between nested labels.
            if (condition.getCType() == Content.CType.Text) {
                String text = util.trim(condition.getValue());
                if (!StringUtils.isEmpty(text)) {
                    ieNode.addChild(new XMLNode(ProvideVar.TEXT, text));
                }
                continue;
            }
            // Nested condition element.
            if (condition.getCType() == Content.CType.Element) {
                Element condElement = ((Element) condition);
                XMLNode cond = new XMLNode(condElement.getName(), util.trim(condElement.getValue()));
                cond.addAttribute(ProvideVar.COND_ATTRIBUTE_KEY,String.valueOf(condCount));
                condCount++;
                ieNode.addChild(cond);
            }
        }
        return ieNode;
    }

    /**
     * Parses a "choose" label: validates its children, then delegates each
     * child to {@link #ifOrEels(Element)}.
     */
    @Override
    public XMLNode choose(Element element) {
        List<Element> chooseChildren = element.getChildren();
        grammarCheck.chooseCheck(chooseChildren, currentBuilder, currentMapper);
        String ename = element.getName();
        XMLNode chooseNode = new XMLNode(ename);
        for (Element child : chooseChildren) {
            chooseNode.addChild(ifOrEels(child));
        }
        return chooseNode;
    }

    /**
     * Parses a "foreach" label, copying its item/index/collections
     * attributes onto the node. NOTE(review): the element's child content
     * is not parsed here — confirm whether that is intentional.
     */
    @Override
    public XMLNode foreach(Element element) {
        XMLNode eachNode = new XMLNode(element.getName());
        eachNode.addAttribute(ProvideVar.ITEM,element.getAttributeValue(ProvideVar.ITEM));
        eachNode.addAttribute(ProvideVar.INDEX,element.getAttributeValue(ProvideVar.INDEX));
        eachNode.addAttribute(ProvideVar.COLLECTIONS,element.getAttributeValue(ProvideVar.COLLECTIONS));
        return eachNode;
    }
}
|
steve-at/graphium | model/src/main/java/at/srfg/graphium/model/IWayGraphMetadataFactory.java | /**
* Copyright © 2017 Salzburg Research Forschungsgesellschaft (<EMAIL>)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package at.srfg.graphium.model;
import java.util.Date;
import java.util.Map;
import java.util.Set;
import com.vividsolutions.jts.geom.Polygon;
/**
* factory for production of @see @link IGraphMetadata objects
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
*
*/
public interface IWayGraphMetadataFactory {

	/**
	 * create new empty metadata
	 * @return empty metadata object
	 */
	public IWayGraphVersionMetadata newWayGraphVersionMetadata();

	/**
	 * create new metadata with given values
	 *
	 * @param id id of graph version
	 * @param graphId graphId of graph version
	 * @param graphName name of the graph
	 * @param version version of the graph
	 * @param originGraphName name of the original graph (could be different from graphName in case of replication)
	 * @param originVersion version of the original graph (could be different from graphName in case of replication)
	 * @param state state of the graph version
	 * @param validFrom min value for validity
	 * @param validTo max value for validity
	 * @param coveredArea geographical area covered by graph version
	 * @param segmentsCount number of segments within graph version
	 * @param connectionsCount number of connections between segments within graph version
	 * @param accessTypes all kinds of access types within graph version
	 * @param tags optional tags
	 * @param source graph's source
	 * @param type type of graph (e.g. routing...)
	 * @param description optional description
	 * @param creationTimestamp timestamp of creating the graph version (mainly timestamp of beginning the import)
	 * @param storageTimestamp timestamp of end of storing the graph version
	 * @param creator creator of the graph version
	 * @param originUrl URL of graph's base data
	 * @return new metadata object populated with the given values
	 */
	public IWayGraphVersionMetadata newWayGraphVersionMetadata(long id, long graphId, String graphName, String version,
			String originGraphName, String originVersion, State state, Date validFrom, Date validTo, Polygon coveredArea,
			int segmentsCount, int connectionsCount, Set<Access> accessTypes,
			Map<String, String> tags, ISource source, String type,
			String description, Date creationTimestamp, Date storageTimestamp,
			String creator, String originUrl);

}
|
EvgenyKungurov/body_builder | examples/app/models/client_restriction.rb | class ClientRestriction < ActiveRecord::Base
def self.max_internet_clients_per_day(internet_day)
internet_clients = 0
Turn.select { |turn| turn.day == internet_day }.each do |turn|
turn.clients.each do |client|
internet_clients += 1 if client.symbol_name_turn.include? 'И'
end
end
internet_clients >= number_internet_clients_per_day.to_i
end
def self.max_clients_per_day(current_day)
clients = 0
Turn.select { |turn| turn.day == current_day }.each do |turn|
turn.clients.each do |client|
clients += 1 if client
end
end
clients >= number_clients_per_day.to_i
end
private
def self.number_clients_per_day
ClientRestriction.find_by(name: 'Общее количество приема в день').count
end
def self.number_internet_clients_per_day
ClientRestriction.find_by(name: 'Количество интернет записей в день').count
end
def self.alternate_people
ClientRestriction.find_by(name: 'Очередность человек').count
end
end
|
shakuzen/helidon | examples/webserver/demo-translator-frontend/src/test/java/io/helidon/webserver/examples/translator/TranslatorTest.java | <gh_stars>1-10
/*
* Copyright (c) 2017, 2018 Oracle and/or its affiliates. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.helidon.webserver.examples.translator;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.TimeUnit;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.Response;
import io.helidon.webserver.WebServer;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import static io.helidon.webserver.examples.translator.backend.Main.createBackendWebServer;
import static io.helidon.webserver.examples.translator.frontend.Main.createFrontendWebServer;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.AllOf.allOf;
import static org.hamcrest.core.StringContains.containsString;
/**
 * End-to-end test for the translator example: boots the backend and the
 * frontend web servers, then issues a real HTTP translation request.
 */
public class TranslatorTest {

    private static WebServer webServerFrontend;
    private static WebServer webServerBackend;
    private static Client client;
    private static WebTarget target;

    /**
     * Starts backend first (the frontend needs its port), then the frontend,
     * and prepares a JAX-RS client pointed at the frontend.
     */
    @BeforeAll
    public static void setUp() throws Exception {
        CompletionStage<WebServer> backendStage =
                createBackendWebServer(null).start();
        webServerBackend = backendStage.toCompletableFuture().get(10, TimeUnit.SECONDS);

        CompletionStage<WebServer> frontendStage =
                createFrontendWebServer(null, "localhost", webServerBackend.port()).start();
        webServerFrontend = frontendStage.toCompletableFuture().get(10, TimeUnit.SECONDS);

        client = ClientBuilder.newClient();
        target = client.target("http://localhost:" + webServerFrontend.port());
    }

    /**
     * Shuts everything down. Each field is null-checked: if setUp failed
     * partway through, an unguarded shutdown would throw a
     * NullPointerException here and mask the original failure.
     */
    @AfterAll
    public static void tearDown() throws Exception {
        if (webServerFrontend != null) {
            webServerFrontend.shutdown().toCompletableFuture().get(10, TimeUnit.SECONDS);
        }
        if (webServerBackend != null) {
            webServerBackend.shutdown().toCompletableFuture().get(10, TimeUnit.SECONDS);
        }
        if (client != null) {
            client.close();
        }
    }

    /**
     * Requests a translation of "cloud" and expects both the Slovak and the
     * Spanish translations in the response body.
     */
    @Test
    public void e2e() throws Exception {
        Response response = target.path("translator")
                .queryParam("q", "cloud")
                .request()
                .get();

        assertThat("Unexpected response! Status code: " + response.getStatus(),
                response.readEntity(String.class),
                allOf(containsString("oblak"),
                        containsString("nube")));
    }
}
|
Lamasusb000/CoinHuntWorld-QuestionComplete | src/components/ContactInfo.js | <reponame>Lamasusb000/CoinHuntWorld-QuestionComplete
import React from "react"
import ContactInfoJSON from "../../site/settings/SiteContactInfo.json"
// Presentational component: renders the business contact information
// (phone, email, postal address) from the static site-settings JSON.
// The Check* helpers below each return either a JSX fragment or undefined,
// so missing fields simply render nothing.
class ContactInfo extends React.Component{
	render(){
		return (
			<div className="Left-Column">
				<h2>Contact Information</h2>
				<div>
					{CheckPhone()}
					{CheckEmail()}
					<address>
						<p>
							Address:
						</p>
						<p>
							{/* Whole address is a single link to Google Maps. */}
							<a
								href={CreateMapsLink()}
								style={{
									color: "inherit",
									fontFamily: "inherit"
								}}>
								{CheckAddressLine1()}
								{CheckAddressLine2()}
								{CheckCity()} {ContactInfoJSON.Address[0].State} {ContactInfoJSON.Address[0].Zipcode}
								<br/>
								{ContactInfoJSON.Address[0].Country}
							</a>
						</p>
					</address>
				</div>
			</div>
		)
	}
}
function CheckPhone(){
if (ContactInfoJSON.Phone !== ""){
return(
<p>
Contact Number:
<a
href={GetTellLink(ContactInfoJSON.Phone)}
style={{
color: "inherit",
fontFamily: "inherit"
}}>
{FormatTellLink(ContactInfoJSON.Phone)}
</a>
</p>
)
}
}
function CheckEmail(){
if (ContactInfoJSON.Email !== ""){
return(
<p>
Contact Email:
<a
href={GetEmailLink(ContactInfoJSON.Email)}
style={{
color: "inherit",
fontFamily: "inherit"
}}>
{ContactInfoJSON.Email}
</a>
</p>
)
}
}
// Builds a tel: URI for the given phone number string.
function GetTellLink(PhoneNumber){
	return "tel:" + PhoneNumber
}
// Builds a mailto: URI for the given email address string.
function GetEmailLink(Email){
	return "mailto:" + Email
}
// Builds a Google Maps place URL from the configured address parts;
// RemoveSpaces turns each part into a URL-safe "+"-separated token.
function CreateMapsLink(){
	const addr = ContactInfoJSON.Address[0]
	const parts = [
		RemoveSpaces(addr.AddressLine1),
		RemoveSpaces(addr.AddressLine2),
		RemoveSpaces(addr.City) + ",",
		RemoveSpaces(addr.State),
		RemoveSpaces(addr.Zipcode)
	]
	return "https://www.google.com/maps/place/" + parts.join("+")
}
// Replaces every space with "+" for use in URL paths.
// Fix: the original only guarded against "", so a missing JSON field
// (undefined/null) would throw on .replace; any falsy input now yields "".
function RemoveSpaces(Text){
	if (!Text) {
		return ""
	}
	return Text.replace(/ /g, "+")
}
// Strips all non-digits and pretty-prints a ten-digit number as
// "(AAA) BBB-CCCC". Any other digit count yields null, so nothing is
// rendered for non-US-style numbers (matches original behavior).
function FormatTellLink(phone) {
	const digits = phone.replace(/[^\d]/g, "");
	if (digits.length !== 10) {
		return null;
	}
	return "(" + digits.slice(0, 3) + ") " + digits.slice(3, 6) + "-" + digits.slice(6);
}
function CheckAddressLine1(){
if (ContactInfoJSON.Address[0].AddressLine1 === ""){
return
}
if (ContactInfoJSON.Address[0].AddressLine1 !== ""){
return(
<span>
{ContactInfoJSON.Address[0].AddressLine1}
<br/>
</span>
)
}
}
function CheckAddressLine2(){
if (ContactInfoJSON.Address[0].AddressLine2 === ""){
return
}
if (ContactInfoJSON.Address[0].AddressLine2 !== ""){
return(
<span>
{ContactInfoJSON.Address[0].AddressLine2}
<br/>
</span>
)
}
}
function CheckCity(){
if (ContactInfoJSON.Address[0].City === ""){
return
}
if (ContactInfoJSON.Address[0].City !== ""){
return(
<span>
{ContactInfoJSON.Address[0].City},
</span>
)
}
}
export default ContactInfo |
cdcchain/cdc-core | include/consensus/api_extern.hpp | // Copyright (c) 2017-2018 The CDC developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#pragma once

#include <client/Client.hpp>

namespace cdcchain {
    namespace client {
        // Process-wide client instance; defined in a .cpp elsewhere.
        extern Client* g_client;
        // NOTE(review): presumably a shutdown-request flag read by the
        // client's main loop — confirm against the defining translation unit.
        extern bool g_client_quit;
    }
}
|
alexhenrie/owltools | OWLTools-NCBI/src/main/java/owltools/ncbi/NCBIOWL.java | package owltools.ncbi;
import java.io.File;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Set;
import org.apache.log4j.Logger;
import org.obolibrary.obo2owl.Obo2OWLConstants.Obo2OWLVocabulary;
import org.obolibrary.obo2owl.Obo2Owl;
import org.obolibrary.oboformat.parser.OBOFormatConstants.OboFormatTag;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLAnnotationProperty;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLLiteral;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologyManager;
/**
 * Provides static methods for creating and initializing an OWLOntology
 * into which taxa can be inserted.
 *
 * @author <a href="mailto:<EMAIL>"><NAME></a>
 */
public class NCBIOWL extends NCBIConverter {
    /**
     * Create a logger.
     */
    protected final static Logger logger =
        Logger.getLogger(NCBIOWL.class);

    /**
     * Create OWLAPI utilities: A data factory.
     */
    protected final static OWLDataFactory dataFactory =
        OWLManager.getOWLDataFactory();

    /**
     * Create OWLAPI utilities: An ontology manager.
     */
    protected final static OWLOntologyManager manager =
        OWLManager.createOWLOntologyManager();

    /**
     * Names of the NCBI ranks.
     */
    public final static HashSet<String> ranks = initializeRanks();

    /**
     * Initialize the ranks. The list mirrors the rank names that appear in
     * the NCBI taxonomy dump.
     *
     * @return a set of rank names.
     */
    private static HashSet<String> initializeRanks() {
        HashSet<String> ranks = new HashSet<String>();
        ranks.add("class");
        ranks.add("family");
        ranks.add("forma");
        ranks.add("genus");
        ranks.add("infraclass");
        ranks.add("infraorder");
        ranks.add("kingdom");
        ranks.add("order");
        ranks.add("parvorder");
        ranks.add("phylum");
        ranks.add("species group");
        ranks.add("species subgroup");
        ranks.add("species");
        ranks.add("subclass");
        ranks.add("subfamily");
        ranks.add("subgenus");
        ranks.add("subkingdom");
        ranks.add("suborder");
        ranks.add("subphylum");
        ranks.add("subspecies");
        ranks.add("subtribe");
        ranks.add("superclass");
        ranks.add("superfamily");
        ranks.add("superkingdom");
        ranks.add("superorder");
        ranks.add("superphylum");
        ranks.add("tribe");
        ranks.add("varietas");
        return ranks;
    }

    /**
     * A map from NCBI synonym types to OboInOwl annotation properties.
     */
    public final static HashMap<String,String> synonymTypes =
        initializeFieldMap();

    /**
     * Initialize the synonymTypes.
     *
     * @return a map from field names to CURIE strings for the annotation
     * properties.
     */
    private static HashMap<String,String> initializeFieldMap() {
        HashMap<String,String> map = new HashMap<String,String>();
        map.put("acronym",             "oio:hasBroadSynonym");
        map.put("anamorph",            "oio:hasRelatedSynonym");
        map.put("blast name",          "oio:hasRelatedSynonym");
        map.put("common name",         "oio:hasExactSynonym");
        map.put("equivalent name",     "oio:hasExactSynonym");
        map.put("genbank acronym",     "oio:hasBroadSynonym");
        map.put("genbank anamorph",    "oio:hasRelatedSynonym");
        map.put("genbank common name", "oio:hasExactSynonym");
        map.put("genbank synonym",     "oio:hasRelatedSynonym");
        map.put("in-part",             "oio:hasRelatedSynonym");
        map.put("misnomer",            "oio:hasRelatedSynonym");
        map.put("misspelling",         "oio:hasRelatedSynonym");
        map.put("synonym",             "oio:hasRelatedSynonym");
        map.put("scientific name",     "oio:hasExactSynonym");
        map.put("teleomorph",          "oio:hasRelatedSynonym");
        return map;
    }

    /**
     * Create and initialize the ontology. This involves adding several
     * annotation properties from OboInOwl and has_rank,
     * adding the ranks themselves, and setting annotation properties
     * on the ontology itself.
     *
     * @see org.obolibrary.obo2owl.Obo2OWLConstants.Obo2OWLVocabulary
     * @return the ontology, ready for adding taxa
     * @throws OWLOntologyCreationException
     */
    public static OWLOntology createOWLOntology()
            throws OWLOntologyCreationException {
        IRI iri = IRI.create(OBO + "ncbitaxon.owl");
        OWLOntology ontology = manager.createOntology(iri);

        // Add OBO annotation properties
        Obo2OWLVocabulary[] terms = {
            Obo2OWLVocabulary.IRI_OIO_hasExactSynonym,
            Obo2OWLVocabulary.IRI_OIO_hasRelatedSynonym,
            Obo2OWLVocabulary.IRI_OIO_hasBroadSynonym,
            Obo2OWLVocabulary.IRI_OIO_hasDbXref,
            Obo2OWLVocabulary.IRI_OIO_hasOboNamespace,
            Obo2OWLVocabulary.IRI_OIO_hasScope,
            Obo2OWLVocabulary.IRI_OIO_hasOBOFormatVersion,
            Obo2OWLVocabulary.hasSynonymType,
            Obo2OWLVocabulary.IRI_IAO_0000115 // IAO definition
        };
        for (Obo2OWLVocabulary term : terms) {
            createAnnotationProperty(ontology, term);
        }

        // Add has_rank annotation property
        OWLAnnotationProperty rank = createAnnotationProperty(
                ontology, "obo:ncbitaxon#has_rank");
        annotate(ontology, rank, "rdfs:label", "has_rank");
        annotate(ontology, rank, "iao:0000115", "A metadata relation between a class and its taxonomic rank (eg species, family)");
        annotate(ontology, rank, "rdfs:comment", "This is an abstract class for use with the NCBI taxonomy to name the depth of the node within the tree. The link between the node term and the rank is only visible if you are using an obo 1.3 aware browser/editor; otherwise this can be ignored");
        annotate(ontology, rank, "oio:hasOBONamespace", "ncbi_taxonomy");

        // Add ranks: one class per rank name, all subclasses of
        // taxonomic_rank.
        OWLClass taxonomicRank = createTaxon(ontology,
                "taxonomic_rank");
        annotate(ontology, taxonomicRank, "rdfs:label",
                "taxonomic rank");
        annotate(ontology, taxonomicRank, "rdfs:comment",
                "This is an abstract class for use with the NCBI taxonomy to name the depth of the node within the tree. The link between the node term and the rank is only visible if you are using an obo 1.3 aware browser/editor; otherwise this can be ignored.");
        for(String rankName : ranks) {
            String rankString = reformatName(rankName);
            OWLClass rankClass = createTaxon(ontology, rankString);
            assertSubClass(ontology, rankClass, taxonomicRank);
            annotate(ontology, rankClass, "rdfs:label", rankName);
        }

        // Add synonym type classes: each synonym type becomes a
        // sub-annotation-property of oio:SynonymTypeProperty, scoped by the
        // CURIE recorded in synonymTypes.
        OWLAnnotationProperty synonymTypeProperty =
            createAnnotationProperty(ontology,
                "oio:SynonymTypeProperty");
        annotate(ontology, synonymTypeProperty, "rdfs:label",
                "synonym_type_property");
        Set<String> synonymNames = synonymTypes.keySet();
        for(String synonymName : synonymNames) {
            String synonymString = reformatName(
                    "ncbitaxon:" + synonymName);
            OWLAnnotationProperty synonymProperty =
                createAnnotationProperty(
                    ontology, synonymString);
            annotate(ontology, synonymProperty, "rdfs:label",
                    synonymName);
            annotate(ontology, synonymProperty, "oio:hasScope",
                    synonymTypes.get(synonymName));
            assertSubAnnotationProperty(ontology,
                    synonymProperty, synonymTypeProperty);
        }

        // Annotate the ontology itself.
        annotate(ontology, "rdfs:comment",
                "Autogenerated by OWLTools-NCBIConverter.");

        logger.debug("Initialized ontology. Axioms: " +
                ontology.getAxiomCount());
        return ontology;
    }

    /**
     * Setup the alternative identifier annotation property.
     *
     * @param ontology
     * @return annotation property
     *
     * @see NCBIOWL#addAltId(OWLOntology, String, String, OWLAnnotationProperty)
     */
    public static OWLAnnotationProperty setupAltIdProperty(OWLOntology ontology) {
        IRI iri = Obo2Owl.trTagToIRI(OboFormatTag.TAG_ALT_ID.getTag());
        OWLAnnotationProperty ap = createAnnotationProperty(ontology, iri);
        // Label the property with the standard "has alternative id" label so
        // downstream tools recognize it.
        OWLAnnotationProperty prop = dataFactory.getRDFSLabel();
        OWLLiteral value = dataFactory.getOWLLiteral(Obo2OWLVocabulary.hasAlternativeId.getLabel());
        manager.addAxiom(ontology, dataFactory.getOWLAnnotationAssertionAxiom(prop, iri, value));
        return ap;
    }

    /**
     * Add an alternative identifier for a main term.
     *
     * @param ontology
     * @param merged the main term
     * @param altId the alternate id to be added
     * @param ap annotation property as generated by the setup method
     *
     * @see NCBIOWL#setupAltIdProperty(OWLOntology)
     */
    public static void addAltId(OWLOntology ontology, String merged, String altId, OWLAnnotationProperty ap) {
        altId = altId.trim();
        merged = merged.trim();
        // add alternate id information to owl class
        IRI mergedIRI = createNCBIIRI(merged);
        OWLLiteral value = dataFactory.getOWLLiteral("NCBITaxon:"+altId);
        OWLAxiom axiom = dataFactory.getOWLAnnotationAssertionAxiom(ap, mergedIRI, value);
        manager.addAxiom(ontology, axiom);
    }

    /**
     * Load an ontology from a file.
     *
     * @param inputFile the path to the OWL file
     * @return the loaded ontology
     * @throws OWLOntologyCreationException if the ontology can't be loaded
     */
    public static OWLOntology loadOWLOntology(File inputFile)
            throws OWLOntologyCreationException {
        return manager.loadOntologyFromOntologyDocument(inputFile);
    }
}
|
isifeddi/42-red-tetris | client/src/Components/Stage.test.js | <filename>client/src/Components/Stage.test.js<gh_stars>0
// NOTE(review): JSX is used but React is not imported — confirm the build
// uses the automatic JSX runtime, otherwise these tests fail to compile.
import renderer from "react-test-renderer";
import Stage from "./Stage";
import { Createstage } from "../gameHelper";

// Shared stage fixture, built once with Createstage(12, 20).
let stage = Createstage(12, 20)

// Snapshot of the stage while the game-over state is active.
test("Stage render test GameOver ", () => {
  const tree = renderer.create(<Stage gameOver={true} stage={stage}/>).toJSON();
  expect(tree).toMatchSnapshot();
});

// Snapshot of the bare stage with no props.
// NOTE(review): no `stage` prop is passed here — confirm <Stage /> tolerates
// an undefined stage, otherwise this snapshot only captures fallback output.
test("Stage render test Stage", () => {
  const tree = renderer.create(<Stage />).toJSON();
  expect(tree).toMatchSnapshot();
});
abouteiller/ucx | test/examples/active_message.c | <gh_stars>0
/**
* Copyright (C) UT-Battelle, LLC. 2015. ALL RIGHTS RESERVED.
* See file LICENSE for terms.
*/
#include <ucs/type/status.h>
#include <ucs/async/async.h>
#include <uct/api/uct.h>
#include <mpi.h>
#include <unistd.h>
#include <stdio.h>
#include <stdlib.h>
/* Jump to `label` with a diagnostic when `cond` is true; drives the
 * goto-based cleanup chains used throughout this example. */
#define CHKERR_JUMP(cond, msg, label) \
do { \
    if (cond) { \
        fprintf(stderr, "Failed to %s\n", msg); \
        goto label; \
    } \
} while (0)

/* Spin flag: cleared by the active-message callback (hello_world) to stop
 * the receiver's progress loop in main(). */
static int holder = 1;

/* Bundle of per-process UCT state shared between setup helpers and main(). */
struct iface_info {
    /* Interface attributes: capabilities and limitations */
    uct_iface_attr_t attr;
    /* Communication interface context */
    uct_iface_h iface;
    /* Memory domain */
    uct_md_h pd;
    /* Workers represent allocated resources in a communication thread */
    uct_worker_h worker;
};
/* Callback for active message: invoked from uct_worker_progress() when the
 * short active message sent by rank 0 arrives. Clearing `holder` releases
 * the receive loop in main(). The arg/data/length/desc parameters are
 * required by the handler signature but unused here. */
static ucs_status_t hello_world(void *arg, void *data, size_t length, void *desc)
{
    printf("Hello World!!!\n");fflush(stdout);
    holder = 0;
    return UCS_OK;
}
/* init the transport by its name: opens an interface on iface_p->pd for the
 * given device/transport pair and fills iface_p->attr. On success the
 * interface is left open for the caller; on any failure (including lack of
 * short active-message support) it is closed and UCS_ERR_UNSUPPORTED is
 * returned. */
static ucs_status_t init_iface(char *dev_name, char *tl_name, struct iface_info *iface_p)
{
    ucs_status_t status;
    uct_iface_config_t *config; /* Defines interface configuration options */

    /* Read transport-specific interface configuration */
    status = uct_iface_config_read(tl_name, NULL, NULL, &config);
    CHKERR_JUMP(UCS_OK != status, "setup iface_config", error_ret);

    /* Open communication interface; the config is released either way. */
    status = uct_iface_open(iface_p->pd, iface_p->worker, tl_name, dev_name, 0, config,
                            &iface_p->iface);
    uct_config_release(config);
    CHKERR_JUMP(UCS_OK != status, "open temporary interface", error_ret);

    /* Get interface attributes */
    status = uct_iface_query(iface_p->iface, &iface_p->attr);
    CHKERR_JUMP(UCS_OK != status, "query iface", error_iface);

    /* Check if current device and transport support short active messages */
    if (iface_p->attr.cap.flags & UCT_IFACE_FLAG_AM_SHORT) {
        return UCS_OK;
    }

error_iface:
    uct_iface_close(iface_p->iface);
error_ret:
    return UCS_ERR_UNSUPPORTED;
}
/* Find and open the requested device/transport pair: iterates every memory
 * domain, scans its transport resources for an exact dev_name/tl_name match,
 * and initializes the interface via init_iface(). On success iface_p->pd is
 * left open (owned by the caller); on failure everything opened here is
 * closed. Note the label layout: `close_pd` sits after the return and is
 * reachable only via goto from inside the loop. */
static ucs_status_t dev_tl_lookup(const char *dev_name, const char *tl_name, struct iface_info *iface_p)
{
    int i;
    int j;
    ucs_status_t status;
    uct_md_resource_desc_t *md_resources; /* Memory domain resource descriptor */
    uct_tl_resource_desc_t *tl_resources; /*Communication resource descriptor */
    unsigned num_md_resources; /* Number of protected domain */
    unsigned num_tl_resources; /* Number of transport resources resource objects created */
    uct_md_config_t *md_config;

    status = uct_query_md_resources(&md_resources, &num_md_resources);
    CHKERR_JUMP(UCS_OK != status, "query for protected domain resources", error_ret);

    /* Iterate through protected domain resources */
    for (i = 0; i < num_md_resources; ++i) {
        status = uct_md_config_read(md_resources[i].md_name, NULL, NULL, &md_config);
        CHKERR_JUMP(UCS_OK != status, "read PD config", release_pd);

        status = uct_md_open(md_resources[i].md_name, md_config, &iface_p->pd);
        uct_config_release(md_config);
        CHKERR_JUMP(UCS_OK != status, "open protected domains", release_pd);

        status = uct_md_query_tl_resources(iface_p->pd, &tl_resources, &num_tl_resources);
        CHKERR_JUMP(UCS_OK != status, "query transport resources", close_pd);

        /* Go through each available transport and find the proper name */
        for (j = 0; j < num_tl_resources; ++j) {
            if (!strcmp(dev_name, tl_resources[j].dev_name) &&
                !strcmp(tl_name, tl_resources[j].tl_name)) {
                status = init_iface(tl_resources[j].dev_name, tl_resources[j].tl_name, iface_p);
                if (UCS_OK == status) {
                    /* Found it: keep pd/iface open and clean up the lists. */
                    printf("Using %s with %s.\n", tl_resources[j].dev_name, tl_resources[j].tl_name);
                    fflush(stdout);
                    uct_release_tl_resource_list(tl_resources);
                    goto release_pd;
                }
            }
        }
        uct_release_tl_resource_list(tl_resources);
        uct_md_close(iface_p->pd);
    }

    fprintf(stderr, "No supported (dev/tl) found (%s/%s)\n", dev_name, tl_name);
    status = UCS_ERR_UNSUPPORTED;

release_pd:
    uct_release_md_resource_list(md_resources);
error_ret:
    return status;
close_pd:
    /* Only reached via goto when tl-resource query fails for an open pd. */
    uct_md_close(iface_p->pd);
    goto release_pd;
}
/* Two MPI ranks exchange UCT addresses out-of-band, connect endpoints, and
 * rank 0 sends one short active message that rank 1 receives via its
 * progress loop.
 *
 * Fixes over the original:
 *  - `ep` is initialized to NULL and only destroyed if created; previously
 *    an early failure (e.g. unsupported connect mode) jumped to out_free_ep
 *    and called uct_ep_destroy() on an uninitialized handle.
 *  - `arg` was passed to uct_iface_set_am_handler() uninitialized; it is
 *    now NULL.
 *  - `header` and `payload` were sent uninitialized (undefined behavior on
 *    read); they are now zero-initialized. */
int main(int argc, char **argv)
{
    /* MPI is initially used to swap the endpoint and interface addresses so each
     * process has knowledge of the others. */
    int partner;
    int size, rank;
    uct_device_addr_t *own_dev, *peer_dev;
    uct_iface_addr_t *own_iface, *peer_iface;
    uct_ep_addr_t *own_ep, *peer_ep;
    ucs_status_t status;          /* status codes for UCS */
    uct_ep_h ep = NULL;           /* Remote endpoint; NULL until created */
    ucs_async_context_t async;    /* Async event context manages times and fd notifications */
    uint8_t id = 0;
    void *arg = NULL;             /* Handler argument (unused by hello_world) */
    const char *tl_name = NULL;
    const char *dev_name = NULL;
    struct iface_info if_info;
    int exit_fail = 1;

    optind = 1;
    if (3 == argc) {
        dev_name = argv[1];
        tl_name = argv[2];
    } else {
        printf("Usage: %s (<dev-name> <tl-name>)\n", argv[0]);
        fflush(stdout);
        return 1;
    }

    MPI_Init(&argc, &argv);

    MPI_Comm_size(MPI_COMM_WORLD, &size);
    if (size < 2) {
        fprintf(stderr, "Failed to create enough mpi processes\n");
        goto out;
    }

    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
    if (0 == rank) {
        partner = 1;
    } else if (1 == rank) {
        partner = 0;
    } else {
        /* just wait for other processes in MPI_Finalize */
        exit_fail = 0;
        goto out;
    }

    /* Initialize context */
    status = ucs_async_context_init(&async, UCS_ASYNC_MODE_THREAD);
    CHKERR_JUMP(UCS_OK != status, "init async context", out);

    /* Create a worker object */
    status = uct_worker_create(&async, UCS_THREAD_MODE_SINGLE, &if_info.worker);
    CHKERR_JUMP(UCS_OK != status, "create worker", out_cleanup_async);

    /* Search for the desired transport */
    status = dev_tl_lookup(dev_name, tl_name, &if_info);
    CHKERR_JUMP(UCS_OK != status, "find supported device and transport", out_destroy_worker);

    /* Expect that addr len is the same on both peers */
    own_dev = (uct_device_addr_t*)calloc(2, if_info.attr.device_addr_len);
    CHKERR_JUMP(NULL == own_dev, "allocate memory for dev addrs", out_destroy_iface);
    peer_dev = (uct_device_addr_t*)((char*)own_dev + if_info.attr.device_addr_len);

    own_iface = (uct_iface_addr_t*)calloc(2, if_info.attr.iface_addr_len);
    CHKERR_JUMP(NULL == own_iface, "allocate memory for if addrs", out_free_dev_addrs);
    peer_iface = (uct_iface_addr_t*)((char*)own_iface + if_info.attr.iface_addr_len);

    /* Get device address and verify the peer is reachable through it */
    status = uct_iface_get_device_address(if_info.iface, own_dev);
    CHKERR_JUMP(UCS_OK != status, "get device address", out_free_if_addrs);

    MPI_Sendrecv(own_dev, if_info.attr.device_addr_len, MPI_BYTE, partner, 0,
                 peer_dev, if_info.attr.device_addr_len, MPI_BYTE, partner, 0,
                 MPI_COMM_WORLD, MPI_STATUS_IGNORE);

    status = uct_iface_is_reachable(if_info.iface, peer_dev, NULL);
    CHKERR_JUMP(0 == status, "reach the peer", out_free_if_addrs);

    /* Get interface address */
    if (if_info.attr.cap.flags & UCT_IFACE_FLAG_CONNECT_TO_IFACE) {
        status = uct_iface_get_address(if_info.iface, own_iface);
        CHKERR_JUMP(UCS_OK != status, "get interface address", out_free_if_addrs);

        MPI_Sendrecv(own_iface, if_info.attr.iface_addr_len, MPI_BYTE, partner, 0,
                     peer_iface, if_info.attr.iface_addr_len, MPI_BYTE, partner, 0,
                     MPI_COMM_WORLD, MPI_STATUS_IGNORE);
    }

    /* Again, expect that ep addr len is the same on both peers */
    own_ep = (uct_ep_addr_t*)calloc(2, if_info.attr.ep_addr_len);
    CHKERR_JUMP(NULL == own_ep, "allocate memory for ep addrs", out_free_if_addrs);
    peer_ep = (uct_ep_addr_t*)((char*)own_ep + if_info.attr.ep_addr_len);

    if (if_info.attr.cap.flags & UCT_IFACE_FLAG_CONNECT_TO_EP) {
        /* Create new endpoint */
        status = uct_ep_create(if_info.iface, &ep);
        CHKERR_JUMP(UCS_OK != status, "create endpoint", out_free_ep_addrs);

        /* Get endpoint address */
        status = uct_ep_get_address(ep, own_ep);
        CHKERR_JUMP(UCS_OK != status, "get endpoint address", out_free_ep);
    }

    MPI_Sendrecv(own_ep, if_info.attr.ep_addr_len, MPI_BYTE, partner, 0,
                 peer_ep, if_info.attr.ep_addr_len, MPI_BYTE, partner, 0,
                 MPI_COMM_WORLD, MPI_STATUS_IGNORE);

    if (if_info.attr.cap.flags & UCT_IFACE_FLAG_CONNECT_TO_EP) {
        /* Connect endpoint to a remote endpoint */
        status = uct_ep_connect_to_ep(ep, peer_dev, peer_ep);
        MPI_Barrier(MPI_COMM_WORLD);
    } else if (if_info.attr.cap.flags & UCT_IFACE_FLAG_CONNECT_TO_IFACE) {
        /* Create an endpoint which is connected to a remote interface */
        status = uct_ep_create_connected(if_info.iface, peer_dev, peer_iface, &ep);
    } else {
        status = UCS_ERR_UNSUPPORTED;
    }
    CHKERR_JUMP(UCS_OK != status, "connect endpoint", out_free_ep);

    /* Set active message handler */
    status = uct_iface_set_am_handler(if_info.iface, id, hello_world, arg, UCT_AM_CB_FLAG_SYNC);
    CHKERR_JUMP(UCS_OK != status, "set callback", out_free_ep);

    if (0 == rank) {
        uint64_t header = 0;
        char payload[8] = {0};
        unsigned length = sizeof(payload);
        /* Send active message to remote endpoint */
        status = uct_ep_am_short(ep, id, header, payload, length);
        CHKERR_JUMP(UCS_OK != status, "send active msg", out_free_ep);
    } else if (1 == rank) {
        while (holder) {
            /* Explicitly progress any outstanding active message requests */
            uct_worker_progress(if_info.worker);
        }
    }

    /* Everything is fine, we need to call MPI_Finalize rather than MPI_Abort */
    exit_fail = 0;

out_free_ep:
    if (ep != NULL) {
        uct_ep_destroy(ep);
    }
out_free_ep_addrs:
    free(own_ep);
out_free_if_addrs:
    free(own_iface);
out_free_dev_addrs:
    free(own_dev);
out_destroy_iface:
    uct_iface_close(if_info.iface);
    uct_md_close(if_info.pd);
out_destroy_worker:
    uct_worker_destroy(if_info.worker);
out_cleanup_async:
    ucs_async_context_cleanup(&async);
out:
    (0 == exit_fail) ? MPI_Finalize() : MPI_Abort(MPI_COMM_WORLD, 1);
    return exit_fail;
}
|
Fita1488/recon | packages/recon-engine/src/parse/__tests__/__fixtures__/basic-components/src.js | <filename>packages/recon-engine/src/parse/__tests__/__fixtures__/basic-components/src.js
/* eslint-disable */
import React from 'react';
// Function-declaration component form.
export function FunctionalComponent() {
  return <div>Hello world!</div>;
}

// Arrow-function component form.
export const ArrowFunctionalComponent = () => <div />;

// Class component form.
// NOTE(review): does not extend React.Component — presumably acceptable for
// a parser fixture, but confirm the parser does not require the superclass.
export default class ClassComponent {
  render() {
    return <div>Hello world! <a href="link">Click Here!</a></div>;
  }
}
|
bluememon/Anxiety-Monitor | app/controllers/categoList.js | var args = arguments[0] || {};
// Fix: read idPatient via the defaulted `args` object; the original read
// arguments[0].idPatient directly, which throws when the controller is
// opened without arguments despite the `|| {}` fallback one line above.
var idPaciente = args.idPatient;
// [name, y] pairs accumulated by loadData() for the web-view chart.
var dataArrayCatego = [];
// Tracks whether the floating action buttons are currently expanded.
var buttonToggle = false;

getTodoList(idPaciente);
$.activityIndicator.show();
loadData();
// Fetches the patient's mood chart data and accumulates [name, y] pairs
// into dataArrayCatego for the web-view chart.
function loadData(){
	var sendit = Ti.Network.createHTTPClient({
		onerror: function(e){
			Ti.API.debug(e.error);
			alert('There was an error during the connection');
			//$.connectionError.show();
		},
		timeout: 3000,
	});
	//Here you have to change it for your local ip
	sendit.open('POST', 'http://app.bluecoreservices.com/webservices/getMoodInfo.php');
	var params = ({
		"idPaciente": idPaciente,
	});
	sendit.send(params);
	//Function to be called upon a successful response
	sendit.onload = function(){
		// Fix: the original declared `json` twice with `var` and leaked
		// `arrayTemp` as an implicit global.
		var json = JSON.parse(this.responseText).lineData;
		//if the database is empty show an alert
		if(json.length == 0){
			$.noInfoView.show();
		}
		//Insert the JSON data into the chart series
		for(var i = 0; i < json.length; i++){
			var arrayTemp = [];
			arrayTemp.push(json[i].name);
			arrayTemp.push(json[i].y);
			dataArrayCatego.push(arrayTemp);
		}
	};
};
// Opens the DASInstrument questionnaire controller for this patient.
$.addDAS.addEventListener("click", function(){
	var temp = Alloy.createController('DASInstrument', { idPatient: idPaciente }).getView();
});

// Opens the breathing-exercise controller (the mood questionnaire launch
// was intentionally left commented out).
$.addShort.addEventListener("click", function(){
	//var temp = Alloy.createController('moodInstrument', { idPatient: idPaciente }).getView();
	var temp = Alloy.createController('respirationGame', { idPatient: idPaciente }).getView();
});

// Once the web view finishes loading, hand it the accumulated chart data.
// NOTE(review): loadData() is asynchronous, so dataArrayCatego may still be
// empty when this fires — confirm the race is acceptable.
$.chartWebView.addEventListener('load', function() {
	$.chartWebView.evalJS('crearGrafica(' + JSON.stringify(dataArrayCatego) + ')');
});

// Retry button shown alongside the connection-error view.
$.reintentar.addEventListener("click", function(){
	loadData();
});
// Toggles the floating action menu: rotates the "+" button 45°, fades a
// dim background in/out, and slides the two action buttons up or down.
$.expandButtons.addEventListener("click", function(){
	if (buttonToggle == false){
		// Expand: rotate the toggle, show the dimmed backdrop, slide
		// both action buttons into place.
		var m = Ti.UI.create2DMatrix({
			rotate: 45
		});
		var a1 = Ti.UI.createAnimation();
		a1.transform = m;
		a1.duration = 200;
		$.expandButtons.animate(a1);
		$.buttonBackground.setHeight(Titanium.UI.FILL);
		$.buttonBackground.setWidth(Titanium.UI.FILL);
		$.buttonBackground.show();
		$.buttonBackground.animate({
			opacity: '.5',
			duration: 200
		});
		$.addShort.animate({
			bottom: '80sp',
			opacity: '1',
			duration: 200
		});
		$.addDAS.animate({
			bottom: '160sp',
			opacity: '1',
			duration: 200
		}, function () {
			// Only flip the flag once the last animation completes.
			buttonToggle = true;
		});
	}
	else{
		// Collapse: reverse the rotation, fade out and hide the
		// backdrop, slide the action buttons back down.
		var m = Ti.UI.create2DMatrix({
			rotate: 0
		});
		var a1 = Ti.UI.createAnimation();
		a1.transform = m;
		a1.duration = 200;
		$.expandButtons.animate(a1);
		$.buttonBackground.animate({
			opacity: '0',
			duration: 200
		}, function(){
			// Shrink to zero after the fade so it stops catching taps.
			$.buttonBackground.setHeight(0);
			$.buttonBackground.setWidth(0);
			$.buttonBackground.hide();
		});
		$.addShort.animate({
			bottom: '0',
			opacity: '0',
			duration: 200
		});
		$.addDAS.animate({
			bottom: '0',
			opacity: '0',
			duration: 200
		}, function () {
			buttonToggle = false;
		});
	}
});
// Maps a severity score to a display color:
//   > 80      -> dark red
//   (50, 80]  -> orange
//   <= 50     -> light blue
// Non-numeric input falls through every guard and yields undefined,
// matching the original behavior.
function agregarColor(resultado) {
	if (resultado > 80) {
		return "#800000";
	}
	if (resultado > 50) {
		return "#FF6600";
	}
	if (resultado <= 50) {
		return "#66CCFF";
	}
};
// Fetches the patient's categorized DAS results and builds one card row per
// entry (a colored severity bubble plus the category name) inside
// $.DASCatego. Fixes over the original: the duplicate `var json`
// declaration is gone and the unused implicit-global `dataArray` was
// removed.
function getTodoList (idPatient) {
	//function to use HTTP to connect to a web server and transfer the data.
	var sendit = Ti.Network.createHTTPClient({
		onerror: function(e){
			Ti.API.debug(e.error);
			alert('There was an error during the connection');
			$.connectionError.show();
		},
		timeout: 3000,
	});
	//Here you have to change it for your local ip
	sendit.open('POST', 'http://app.bluecoreservices.com/webservices/ListCatego.php');
	var params = ({
		"idPaciente": idPatient,
	});
	sendit.send(params);
	//Function to be called upon a successful response
	sendit.onload = function(){
		var json = JSON.parse(this.responseText).DasList;
		//if the database is empty show an alert
		if(json.length == 0){
			$.noInfoView.show();
		}
		//Build one card per result
		for(var i = 0; i < json.length; i++){
			// Outer card with rounded corners.
			var wrapper = Titanium.UI.createView({
				className: 'cardWrapper',
				layout: 'vertical',
				height: Titanium.UI.SIZE,
				width: Titanium.UI.FILL,
				backgroundColor: "#C4C4C4",
				bottom: 0,
				top: "10sp",
				borderRadius: "5sp"
			});
			// Horizontal row: severity bubble on the left, name on the right.
			var row = Titanium.UI.createView({
				className: 'elementRow',
				layout: 'horizontal',
				height: Titanium.UI.SIZE,
				width: Titanium.UI.FILL,
				backgroundColor: "#FFFFFF",
				top: 0,
				bottom: "2sp",
				borderRadius: "5sp"
			});
			var viewResult = Titanium.UI.createView({
				className: 'rowResult',
				height: Titanium.UI.SIZE,
				width: Titanium.UI.SIZE,
				bubbleParent: true,
				layout: 'composite'
			});
			// Circular bubble tinted by severity (see agregarColor).
			var viewResultColor = Titanium.UI.createView({
				className: 'rowResult',
				height: '60sp',
				width: '60sp',
				borderRadius: '30sp',
				backgroundColor: agregarColor(json[i].severidad),
				top: '10sp',
				bottom: '10sp',
				left: '10sp',
				right: '10sp',
				bubbleParent: true,
				layout: 'composite'
			});
			var viewDate = Titanium.UI.createView({
				className: 'rowDate',
				height: Titanium.UI.SIZE,
				width: Titanium.UI.SIZE,
				left: "10sp",
				bubbleParent: true
			});
			var resultLabel = Titanium.UI.createLabel({
				text: Math.round(json[i].severidad),
				font: {
					fontSize: '20sp'
				},
				color: '#FFFFFF'
			});
			var dateLabel = Titanium.UI.createLabel({
				text: json[i].nombre,
				font: {
					fontSize: '20sp'
				},
				color: '#000000'
			});
			viewResult.add(resultLabel);
			viewDate.add(dateLabel);
			viewResultColor.add(viewResult);
			wrapper.add(row);
			row.add(viewResultColor);
			row.add(viewDate);
			$.DASCatego.add(wrapper);
		};
		$.activityIndicator.hide();
	};
};
ExpediaDotCom/haystack-pipes | commons/src/main/java/com/expedia/www/haystack/pipes/commons/serialization/SpanProtobufSerializer.java | <reponame>ExpediaDotCom/haystack-pipes<gh_stars>1-10
/*
* Copyright 2018 Expedia, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.expedia.www.haystack.pipes.commons.serialization;
import com.expedia.open.tracing.Span;
import com.netflix.servo.monitor.Stopwatch;
import com.netflix.servo.monitor.Timer;
import org.apache.kafka.common.serialization.Serializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Kafka {@link Serializer} that converts a Haystack {@link Span} into its
 * protobuf wire format, recording the request count, serialized byte count,
 * and serialization time through Servo monitors.
 */
public class SpanProtobufSerializer extends SerializerDeserializerBase implements Serializer<Span> {
    static final String PROTOBUF_SERIALIZATION_TIMER_NAME = "PROTOBUF_SERIALIZATION";
    // Package-visible and non-final, presumably so tests can inject a mock
    // logger -- TODO(review): confirm before tightening to private static final.
    static Logger logger = LoggerFactory.getLogger(SpanProtobufSerializer.class);
    // Shared across all instances; keyed by timer name (see getOrCreateTimer).
    static final Map<String, Timer> PROTOBUF_SERIALIZATION_TIMERS = new ConcurrentHashMap<>();
    // Timer used to measure each protobuf serialization in serialize().
    private final Timer protobufSerialization;
    /**
     * Creates a serializer whose metrics are qualified by the given application name.
     *
     * @param application application name used to qualify the metrics
     */
    @SuppressWarnings("WeakerAccess")
    public SpanProtobufSerializer(String application) {
        super(application);
        protobufSerialization = getOrCreateTimer(PROTOBUF_SERIALIZATION_TIMERS, PROTOBUF_SERIALIZATION_TIMER_NAME);
    }
    @Override
    public void configure(Map<String, ?> map, boolean b) {
        // Nothing to do
    }
    /**
     * Serializes the span to protobuf bytes, incrementing the request and
     * bytes-in counters and timing the conversion.
     */
    @Override
    public byte[] serialize(String key, Span span) {
        request.increment();
        final Stopwatch stopwatch = protobufSerialization.start();
        final byte[] bytes = span.toByteArray();
        bytesIn.increment(bytes.length);
        stopwatch.stop();
        return bytes;
    }
    @Override
    public void close() {
        // Nothing to do
    }
}
|
bufan1228/jdonframework | src/main/java/com/jdon/util/UtilDateTime.java | <reponame>bufan1228/jdonframework
/*
* $Id: UtilDateTime.java,v 1.2 2005/01/31 05:27:55 jdon Exp $
*
* Copyright (c) 2001, 2002 The Open For Business Project - www.ofbiz.org
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT
* OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package com.jdon.util;
import java.text.DateFormat;
import java.util.Calendar;
import java.util.Date;
/**
 * Utility class for handling java.util.Date, the java.sql date/time classes and
 * related information.
 *
 * @author <a href="mailto:<EMAIL>"><NAME></a>
 * @author <a href="mailto:<EMAIL>"><NAME></a>
 * @version $Revision: 1.2 $
 * @since 2.0
 */
public class UtilDateTime {
    /**
     * Return a Timestamp for right now
     *
     * @return Timestamp for right now
     */
    public static java.sql.Timestamp nowTimestamp() {
        return new java.sql.Timestamp(System.currentTimeMillis());
    }
    /**
     * Return a Date for right now
     *
     * @return Date for right now
     */
    public static java.util.Date nowDate() {
        return new java.util.Date();
    }
    /**
     * Return a Timestamp for the start (00:00:00) of the day of the given stamp.
     */
    public static java.sql.Timestamp getDayStart(java.sql.Timestamp stamp) {
        return getDayStart(stamp, 0);
    }
    /**
     * Return a Timestamp for the start (00:00:00) of the day that is
     * daysLater days after the day of the given stamp.
     */
    public static java.sql.Timestamp getDayStart(java.sql.Timestamp stamp, int daysLater) {
        Calendar tempCal = Calendar.getInstance();
        tempCal.setTime(new java.util.Date(stamp.getTime()));
        tempCal.set(tempCal.get(Calendar.YEAR), tempCal.get(Calendar.MONTH), tempCal.get(Calendar.DAY_OF_MONTH), 0, 0, 0);
        tempCal.add(Calendar.DAY_OF_MONTH, daysLater);
        return new java.sql.Timestamp(tempCal.getTime().getTime());
    }
    /**
     * Return a Timestamp for the start of the day after the day of the given stamp.
     */
    public static java.sql.Timestamp getNextDayStart(java.sql.Timestamp stamp) {
        return getDayStart(stamp, 1);
    }
    /**
     * Return a Timestamp for the end (23:59:59) of the day of the given stamp.
     */
    public static java.sql.Timestamp getDayEnd(java.sql.Timestamp stamp) {
        return getDayEnd(stamp, 0);
    }
    /**
     * Return a Timestamp for the end (23:59:59) of the day that is
     * daysLater days after the day of the given stamp.
     */
    public static java.sql.Timestamp getDayEnd(java.sql.Timestamp stamp, int daysLater) {
        Calendar tempCal = Calendar.getInstance();
        tempCal.setTime(new java.util.Date(stamp.getTime()));
        tempCal.set(tempCal.get(Calendar.YEAR), tempCal.get(Calendar.MONTH), tempCal.get(Calendar.DAY_OF_MONTH), 23, 59, 59);
        tempCal.add(Calendar.DAY_OF_MONTH, daysLater);
        return new java.sql.Timestamp(tempCal.getTime().getTime());
    }
    /**
     * Converts a date String into a java.sql.Date
     *
     * @param date
     *            The date String: MM/DD/YYYY
     * @return A java.sql.Date made from the date String
     */
    public static java.sql.Date toSqlDate(String date) {
        java.util.Date newDate = toDate(date, "00:00:00");
        if (newDate != null)
            return new java.sql.Date(newDate.getTime());
        else
            return null;
    }
    /**
     * Makes a java.sql.Date from separate Strings for month, day, year
     *
     * @param monthStr
     *            The month String
     * @param dayStr
     *            The day String
     * @param yearStr
     *            The year String
     * @return A java.sql.Date made from separate Strings for month, day, year
     */
    public static java.sql.Date toSqlDate(String monthStr, String dayStr, String yearStr) {
        java.util.Date newDate = toDate(monthStr, dayStr, yearStr, "0", "0", "0");
        if (newDate != null)
            return new java.sql.Date(newDate.getTime());
        else
            return null;
    }
    /**
     * Makes a java.sql.Date from separate ints for month, day, year
     *
     * @param month
     *            The month int
     * @param day
     *            The day int
     * @param year
     *            The year int
     * @return A java.sql.Date made from separate ints for month, day, year
     */
    public static java.sql.Date toSqlDate(int month, int day, int year) {
        java.util.Date newDate = toDate(month, day, year, 0, 0, 0);
        if (newDate != null)
            return new java.sql.Date(newDate.getTime());
        else
            return null;
    }
    /**
     * Converts a time String into a java.sql.Time
     *
     * @param time
     *            The time String: either HH:MM or HH:MM:SS
     * @return A java.sql.Time made from the time String
     */
    public static java.sql.Time toSqlTime(String time) {
        // Anchor the time on the epoch date so only the time-of-day matters
        java.util.Date newDate = toDate("1/1/1970", time);
        if (newDate != null)
            return new java.sql.Time(newDate.getTime());
        else
            return null;
    }
    /**
     * Makes a java.sql.Time from separate Strings for hour, minute, and second.
     *
     * @param hourStr
     *            The hour String
     * @param minuteStr
     *            The minute String
     * @param secondStr
     *            The second String
     * @return A java.sql.Time made from separate Strings for hour, minute, and
     *         second.
     */
    public static java.sql.Time toSqlTime(String hourStr, String minuteStr, String secondStr) {
        java.util.Date newDate = toDate("0", "0", "0", hourStr, minuteStr, secondStr);
        if (newDate != null)
            return new java.sql.Time(newDate.getTime());
        else
            return null;
    }
    /**
     * Makes a java.sql.Time from separate ints for hour, minute, and second.
     *
     * @param hour
     *            The hour int
     * @param minute
     *            The minute int
     * @param second
     *            The second int
     * @return A java.sql.Time made from separate ints for hour, minute, and
     *         second.
     */
    public static java.sql.Time toSqlTime(int hour, int minute, int second) {
        java.util.Date newDate = toDate(0, 0, 0, hour, minute, second);
        if (newDate != null)
            return new java.sql.Time(newDate.getTime());
        else
            return null;
    }
    /**
     * Converts a date and time String into a Timestamp
     *
     * @param dateTime
     *            A combined date and time string in the format
     *            "MM/DD/YYYY HH:MM:SS", the seconds are optional
     * @return The corresponding Timestamp
     */
    public static java.sql.Timestamp toTimestamp(String dateTime) {
        java.util.Date newDate = toDate(dateTime);
        if (newDate != null)
            return new java.sql.Timestamp(newDate.getTime());
        else
            return null;
    }
    /**
     * Converts a date String and a time String into a Timestamp
     *
     * @param date
     *            The date String: MM/DD/YYYY
     * @param time
     *            The time String: either HH:MM or HH:MM:SS
     * @return A Timestamp made from the date and time Strings
     */
    public static java.sql.Timestamp toTimestamp(String date, String time) {
        java.util.Date newDate = toDate(date, time);
        if (newDate != null)
            return new java.sql.Timestamp(newDate.getTime());
        else
            return null;
    }
    /**
     * Makes a Timestamp from separate Strings for month, day, year, hour,
     * minute, and second.
     *
     * @param monthStr
     *            The month String
     * @param dayStr
     *            The day String
     * @param yearStr
     *            The year String
     * @param hourStr
     *            The hour String
     * @param minuteStr
     *            The minute String
     * @param secondStr
     *            The second String
     * @return A Timestamp made from separate Strings for month, day, year,
     *         hour, minute, and second.
     */
    public static java.sql.Timestamp toTimestamp(String monthStr, String dayStr, String yearStr, String hourStr, String minuteStr, String secondStr) {
        java.util.Date newDate = toDate(monthStr, dayStr, yearStr, hourStr, minuteStr, secondStr);
        if (newDate != null)
            return new java.sql.Timestamp(newDate.getTime());
        else
            return null;
    }
    /**
     * Makes a Timestamp from separate ints for month, day, year, hour, minute,
     * and second.
     *
     * @param month
     *            The month int
     * @param day
     *            The day int
     * @param year
     *            The year int
     * @param hour
     *            The hour int
     * @param minute
     *            The minute int
     * @param second
     *            The second int
     * @return A Timestamp made from separate ints for month, day, year, hour,
     *         minute, and second.
     */
    public static java.sql.Timestamp toTimestamp(int month, int day, int year, int hour, int minute, int second) {
        java.util.Date newDate = toDate(month, day, year, hour, minute, second);
        if (newDate != null)
            return new java.sql.Timestamp(newDate.getTime());
        else
            return null;
    }
    /**
     * Converts a date and time String into a Date
     *
     * @param dateTime
     *            A combined date and time string in the format
     *            "MM/DD/YYYY HH:MM:SS", the seconds are optional
     * @return The corresponding Date
     */
    public static java.util.Date toDate(String dateTime) {
        // dateTime must have one space between the date and time...
        String date = dateTime.substring(0, dateTime.indexOf(" "));
        String time = dateTime.substring(dateTime.indexOf(" ") + 1);
        return toDate(date, time);
    }
    /**
     * Converts a date String and a time String into a Date
     *
     * @param date
     *            The date String: MM/DD/YYYY
     * @param time
     *            The time String: either HH:MM or HH:MM:SS
     * @return A Date made from the date and time Strings, or null if either
     *         String is null or malformed
     */
    public static java.util.Date toDate(String date, String time) {
        if (date == null || time == null)
            return null;
        String month;
        String day;
        String year;
        String hour;
        String minute;
        String second;
        // The date must contain two distinct slashes; the time at least one colon
        int dateSlash1 = date.indexOf("/");
        int dateSlash2 = date.lastIndexOf("/");
        if (dateSlash1 <= 0 || dateSlash1 == dateSlash2)
            return null;
        int timeColon1 = time.indexOf(":");
        int timeColon2 = time.lastIndexOf(":");
        if (timeColon1 <= 0)
            return null;
        month = date.substring(0, dateSlash1);
        day = date.substring(dateSlash1 + 1, dateSlash2);
        year = date.substring(dateSlash2 + 1);
        hour = time.substring(0, timeColon1);
        if (timeColon1 == timeColon2) {
            // HH:MM form -- seconds default to 0
            minute = time.substring(timeColon1 + 1);
            second = "0";
        } else {
            minute = time.substring(timeColon1 + 1, timeColon2);
            second = time.substring(timeColon2 + 1);
        }
        return toDate(month, day, year, hour, minute, second);
    }
    /**
     * Makes a Date from separate Strings for month, day, year, hour, minute,
     * and second.
     *
     * @param monthStr
     *            The month String
     * @param dayStr
     *            The day String
     * @param yearStr
     *            The year String
     * @param hourStr
     *            The hour String
     * @param minuteStr
     *            The minute String
     * @param secondStr
     *            The second String
     * @return A Date made from separate Strings for month, day, year, hour,
     *         minute, and second, or null if any part is not a number
     */
    public static java.util.Date toDate(String monthStr, String dayStr, String yearStr, String hourStr, String minuteStr, String secondStr) {
        int month, day, year, hour, minute, second;
        try {
            month = Integer.parseInt(monthStr);
            day = Integer.parseInt(dayStr);
            year = Integer.parseInt(yearStr);
            hour = Integer.parseInt(hourStr);
            minute = Integer.parseInt(minuteStr);
            second = Integer.parseInt(secondStr);
        } catch (Exception e) {
            return null;
        }
        return toDate(month, day, year, hour, minute, second);
    }
    /**
     * Makes a Date from separate ints for month, day, year, hour, minute, and
     * second.
     *
     * @param month
     *            The month int (1-based, unlike Calendar's 0-based months)
     * @param day
     *            The day int
     * @param year
     *            The year int
     * @param hour
     *            The hour int
     * @param minute
     *            The minute int
     * @param second
     *            The second int
     * @return A Date made from separate ints for month, day, year, hour,
     *         minute, and second.
     */
    public static java.util.Date toDate(int month, int day, int year, int hour, int minute, int second) {
        Calendar calendar = Calendar.getInstance();
        try {
            calendar.set(year, month - 1, day, hour, minute, second);
        } catch (Exception e) {
            return null;
        }
        return new java.util.Date(calendar.getTime().getTime());
    }
    /**
     * Makes a date String in the format MM/DD/YYYY from a Date
     *
     * @param date
     *            The Date
     * @return A date String in the format MM/DD/YYYY, or "" if date is null
     */
    public static String toDateString(java.util.Date date) {
        if (date == null)
            return "";
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(date);
        int month = calendar.get(Calendar.MONTH) + 1;
        int day = calendar.get(Calendar.DAY_OF_MONTH);
        int year = calendar.get(Calendar.YEAR);
        String monthStr;
        String dayStr;
        String yearStr;
        // Zero-pad month and day to two digits
        if (month < 10) {
            monthStr = "0" + month;
        } else {
            monthStr = "" + month;
        }
        if (day < 10) {
            dayStr = "0" + day;
        } else {
            dayStr = "" + day;
        }
        yearStr = "" + year;
        return monthStr + "/" + dayStr + "/" + yearStr;
    }
    /**
     * Makes a time String in the format HH:MM:SS from a Date. If the seconds
     * are 0, then the output is in HH:MM.
     *
     * @param date
     *            The Date
     * @return A time String in the format HH:MM:SS or HH:MM, or "" if date is null
     */
    public static String toTimeString(java.util.Date date) {
        if (date == null)
            return "";
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(date);
        return (toTimeString(calendar.get(Calendar.HOUR_OF_DAY), calendar.get(Calendar.MINUTE), calendar.get(Calendar.SECOND)));
    }
    /**
     * Makes a time String in the format HH:MM:SS from separate ints for hour,
     * minute, and second. If the seconds are 0, then the output is in HH:MM.
     *
     * @param hour
     *            The hour int
     * @param minute
     *            The minute int
     * @param second
     *            The second int
     * @return A time String in the format HH:MM:SS or HH:MM
     */
    public static String toTimeString(int hour, int minute, int second) {
        String hourStr;
        String minuteStr;
        String secondStr;
        // Zero-pad each component to two digits
        if (hour < 10) {
            hourStr = "0" + hour;
        } else {
            hourStr = "" + hour;
        }
        if (minute < 10) {
            minuteStr = "0" + minute;
        } else {
            minuteStr = "" + minute;
        }
        if (second < 10) {
            secondStr = "0" + second;
        } else {
            secondStr = "" + second;
        }
        if (second == 0)
            return hourStr + ":" + minuteStr;
        else
            return hourStr + ":" + minuteStr + ":" + secondStr;
    }
    /**
     * Makes a combined date and time string in the format "MM/DD/YYYY HH:MM:SS"
     * from a Date. If the seconds are 0 they are left off.
     *
     * @param date
     *            The Date
     * @return A combined date and time string in the format
     *         "MM/DD/YYYY HH:MM:SS" where the seconds are left off if they are
     *         0.
     */
    public static String toDateTimeString(java.util.Date date) {
        if (date == null)
            return "";
        String dateString = toDateString(date);
        String timeString = toTimeString(date);
        if (dateString != null && timeString != null)
            return dateString + " " + timeString;
        else
            return "";
    }
    /**
     * Makes a Timestamp for the beginning of the current month
     *
     * @return A Timestamp of the beginning of the month
     */
    public static java.sql.Timestamp monthBegin() {
        Calendar mth = Calendar.getInstance();
        mth.set(Calendar.DAY_OF_MONTH, 1);
        mth.set(Calendar.HOUR_OF_DAY, 0);
        mth.set(Calendar.MINUTE, 0);
        mth.set(Calendar.SECOND, 0);
        mth.set(Calendar.AM_PM, Calendar.AM);
        return new java.sql.Timestamp(mth.getTime().getTime());
    }
    // Pool of zeros used by zeroPadString; supports padding up to 16 characters.
    private static final char[] zeroArray = "0000000000000000".toCharArray();
    /**
     * Left-pads the given string with zeros up to the requested length.
     * Returns the string unchanged if it is null or already longer than length.
     */
    public static final String zeroPadString(String string, int length) {
        if (string == null || string.length() > length) {
            return string;
        }
        StringBuilder buf = new StringBuilder(length);
        buf.append(zeroArray, 0, length - string.length()).append(string);
        return buf.toString();
    }
    /**
     * Formats a System.currentTimeMillis() value as a zero-padded 15-character
     * string suitable for database persistence (lexicographic order matches
     * chronological order).
     *
     * @param now the millisecond timestamp to format
     * @return the zero-padded string form of the timestamp
     */
    public static final String dateToMillis(long now) {
        return zeroPadString(Long.toString(now), 15);
    }
    /**
     * Renders a String-encoded System.currentTimeMillis() value as a
     * human-readable date/time string, using the default locale's MEDIUM
     * date and time format. (The original documentation described this as the
     * standard Chinese (default) display format.)
     *
     * @param datetime the millisecond timestamp as a decimal String
     * @return the formatted date/time, or "" if datetime is null or empty
     */
    public static String getDateTimeDisp(String datetime) {
        if ((datetime == null) || (datetime.equals("")))
            return "";
        DateFormat formatter = DateFormat.getDateTimeInstance(DateFormat.MEDIUM, DateFormat.MEDIUM);
        long datel = Long.parseLong(datetime);
        return formatter.format(new Date(datel));
    }
}
|
star-finder/jpf-star | expected-output/random_after_reset_index/sll/Input_withNextDown1.java | package random.sll;
import common.Utilities;
import org.junit.Test;
import gov.nasa.jpf.util.test.TestJPF;
/**
 * JPF regression tests exercising Input.withNextDown on a singly linked list.
 */
public class Input_withNextDown1 extends TestJPF {

    /** Calls withNextDown on a single node (elem = -22, no successor). */
    @Test
    public void test_withNextDown1() throws Exception {
        Input obj = new Input();
        Node root = new Node();
        root.elem = -22;
        root.next = null;
        obj.withNextDown(root);
    }

    /** Calls withNextDown on a null list head. */
    @Test
    public void test_withNextDown2() throws Exception {
        Input obj = new Input();
        random.sll.Node root = null;
        obj.withNextDown(root);
    }
}
|
mxc-foundation/lora-app-server | internal/devprovision/devprovision.go | package devprovision
import (
"bytes"
"context"
"encoding/binary"
"encoding/hex"
"sync"
"time"
"github.com/jacobsa/crypto/cmac"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
"github.com/brocaar/chirpstack-api/go/v3/as"
gwV3 "github.com/brocaar/chirpstack-api/go/v3/gw"
"github.com/brocaar/lorawan"
duration "github.com/golang/protobuf/ptypes/duration"
nsPb "github.com/mxc-foundation/lpwan-app-server/api/networkserver"
psPb "github.com/mxc-foundation/lpwan-app-server/api/ps-serves-appserver"
nsd "github.com/mxc-foundation/lpwan-app-server/internal/api/external/ns"
"github.com/mxc-foundation/lpwan-app-server/internal/devprovision/ecdh"
gwd "github.com/mxc-foundation/lpwan-app-server/internal/modules/gateway/data"
"github.com/mxc-foundation/lpwan-app-server/internal/nscli"
"github.com/mxc-foundation/lpwan-app-server/internal/pscli"
)
// LoRa Frame Message and Response Type identifiers, carried in the first
// byte of the proprietary MacPayload.
//#define MAX_MESSAGE_SIZE 256
const (
	upMessageHello     = 0x01
	upMessageAuth      = 0x11
	downRespHello      = 0x81
	downRespAuthAccept = 0x91
	downRespAuthReject = 0x92
)

// Expected MacPayload sizes of the uplink messages; frames of any other
// size are ignored (see processMessage).
const (
	sizeUpMessageHello = 74
	sizeUpMessageAuth  = 61
)

// proprietaryPayload collects everything needed to build a downlink
// Proprietary Payload request for the network server (see sendProprietary).
type proprietaryPayload struct {
	MacPayload      []byte        // raw downlink MacPayload bytes
	GatewayMAC      lorawan.EUI64 // gateway that should transmit the downlink
	DownlinkFreq    uint32
	UplinkFreq      uint32
	UplinkBandwidth uint32
	UplinkSf        uint32
	Context         []byte // gateway context echoed from the uplink
	Delay           *duration.Duration
	Mic             []byte // recomputed in sendProprietary before sending
}

// DeviceSessionList defines a struct maintaining device session information, this data shall be stored in app package
// on start and shared between as and devprovision packages
type DeviceSessionList struct {
	maxNumberOfDevSession  int //5000
	sessionlist            map[uint64]deviceSession
	mutexSessionList       sync.RWMutex // guards sessionlist
	deviceSessionLifeCycle time.Duration //time.Minute * 5
}

// nsClient abstracts access to a network server's extra-service gRPC client.
type nsClient interface {
	GetNetworkServerExtraServiceClient(networkServerID int64) (nsPb.NetworkServerExtraServiceClient, error)
}

// controller bundles the provisioning-server client, network-server client
// factory and the shared device session list used by the message handlers.
type controller struct {
	psCli          psPb.DeviceProvisionClient
	nsCli          nsClient
	devSessionList *DeviceSessionList
}
// sendToNs forwards a delayed proprietary payload request to the network
// server identified by networkServerID.
func (c *controller) sendToNs(networkServerID int64, req *nsPb.SendDelayedProprietaryPayloadRequest) error {
	client, err := c.nsCli.GetNetworkServerExtraServiceClient(networkServerID)
	if err != nil {
		return err
	}
	if _, err = client.SendDelayedProprietaryPayload(context.Background(), req); err != nil {
		return err
	}
	return nil
}
// Start prepares the device provisioning service module: it builds the shared
// device session list, wires up a controller, and launches the background
// session clean-up loop. The returned session list is shared with the as package.
func Start(psCli *pscli.Client, nsCli *nscli.Client) (*DeviceSessionList, error) {
	sessions := &DeviceSessionList{
		sessionlist:            make(map[uint64]deviceSession),
		mutexSessionList:       sync.RWMutex{},
		maxNumberOfDevSession:  5000,
		deviceSessionLifeCycle: time.Minute * 5,
	}
	ctrl := &controller{
		psCli:          psCli.GetDeviceProvisionServiceClient(),
		nsCli:          nsCli,
		devSessionList: sessions,
	}
	ctrl.devSessionList.clearDeviceSessionList()
	go ctrl.cleanUpLoop()
	return sessions, nil
}
// cleanUpLoop never returns; every ten seconds it purges expired device
// sessions from the shared session list.
func (c *controller) cleanUpLoop() {
	const interval = 10 * time.Second
	for {
		c.devSessionList.clearExpiredDevSession()
		time.Sleep(interval)
	}
}
// processMessage dispatches a verified proprietary uplink by its message type
// and size. It reports whether the frame was recognized and handled.
func (c *controller) processMessage(ctx context.Context, nID int64, req *as.HandleProprietaryUplinkRequest,
	targetgateway *gwV3.UplinkRXInfo) (bool, error) {
	msgType := req.MacPayload[0]
	msgSize := len(req.MacPayload)
	switch {
	case msgType == upMessageHello && msgSize == sizeUpMessageHello:
		if err := c.handleHello(ctx, nID, req, targetgateway); err != nil {
			return false, errors.Wrap(err, "process HELLO msg error")
		}
		return true, nil
	case msgType == upMessageAuth && msgSize == sizeUpMessageAuth:
		if err := c.handleAuth(ctx, nID, req, targetgateway); err != nil {
			return false, errors.Wrap(err, "process AUTH msg error")
		}
		return true, nil
	default:
		logrus.Debug("Unknown Message.")
		return false, nil
	}
}
// Store defines the db API used by the device provision service.
type Store interface {
	// GetGateway returns the gateway record for the given MAC address.
	GetGateway(ctx context.Context, mac lorawan.EUI64, forUpdate bool) (gwd.Gateway, error)
	// GetNetworkServer returns the network server record with the given id.
	GetNetworkServer(ctx context.Context, id int64) (nsd.NetworkServer, error)
}
// HandleReceivedFrame handles a provisioning frame received by one or multiple
// gateways. It selects the gateway with the strongest RSSI, resolves its
// network server, verifies the frame MIC, and dispatches the message to the
// appropriate handler. The bool result reports whether the frame was processed.
func HandleReceivedFrame(ctx context.Context, req *as.HandleProprietaryUplinkRequest, h Store,
	psCli psPb.DeviceProvisionClient, nsCli nsClient, sessionList *DeviceSessionList) (bool, error) {
	// logrus.Debugf("Rx MacPayload:\n%s", hex.Dump(req.MacPayload))
	// logrus.Debugf("  MIC: %s", hex.EncodeToString(req.Mic))
	// Find the receiving gateway with the strongest RSSI
	var maxRssiRx *gwV3.UplinkRXInfo
	for _, rx := range req.RxInfo {
		if maxRssiRx == nil || rx.Rssi > maxRssiRx.Rssi {
			maxRssiRx = rx
		}
	}
	if maxRssiRx == nil {
		return false, errors.Errorf("No gateway found.")
	}
	logrus.Debugf("  MAC:%s, RSSI: %d, Context: %s", hex.EncodeToString(maxRssiRx.GatewayId), maxRssiRx.Rssi,
		hex.EncodeToString(maxRssiRx.Context))
	// Resolve the gateway record and its network server
	var mac lorawan.EUI64
	copy(mac[:], maxRssiRx.GatewayId)
	gw, err := h.GetGateway(ctx, mac, false)
	if err != nil {
		return false, errors.Wrap(err, "get gateway error")
	}
	n, err := h.GetNetworkServer(ctx, gw.NetworkServerID)
	if err != nil {
		return false, errors.Wrap(err, "get network-server error")
	}
	logrus.Debugf("  NetworkServer: %s", n.Server)
	// Check MIC.
	// Bug fix: the original wrapped a nil err here, and errors.Wrap(nil, ...)
	// returns nil, so a wrong MIC was silently reported as (false, nil).
	// Return a concrete error instead.
	calmic := calProprietaryMic(req.MacPayload)
	if !bytes.Equal(calmic, req.Mic) {
		logrus.Debugf("Wrong MIC calmic=%s, rxed mic=%s", hex.EncodeToString(calmic), hex.EncodeToString(req.Mic))
		return false, errors.Errorf("wrong MIC for MacPayload")
	}
	ctrl := &controller{
		psCli:          psCli,
		nsCli:          nsCli,
		devSessionList: sessionList,
	}
	return ctrl.processMessage(ctx, n.ID, req, maxRssiRx)
}
// calProprietaryMic computes the 4-byte CMAC-based MIC over the constant
// prefix byte 0xe0 followed by the MacPayload, keyed with the fixed key.
// On any CMAC failure it falls back to an all-zero MIC.
func calProprietaryMic(macpayload []byte) []byte {
	mic := make([]byte, 4)
	hash, err := cmac.New(getFixedKey())
	if err != nil {
		return mic
	}
	for _, part := range [][]byte{{0xe0}, macpayload} {
		if _, err = hash.Write(part); err != nil {
			return mic
		}
	}
	copy(mic, hash.Sum(nil))
	return mic
}
// sendProprietary builds a delayed proprietary downlink request from payload,
// computes its MIC (payload.Mic is ignored and replaced), and sends it to the
// network server identified by networkServerID.
func (c *controller) sendProprietary(networkServerID int64, payload proprietaryPayload) error {
	req := nsPb.SendDelayedProprietaryPayloadRequest{
		MacPayload:            payload.MacPayload,
		GatewayMacs:           [][]byte{payload.GatewayMAC[:]},
		PolarizationInversion: true,
		UplinkFreq:            payload.UplinkFreq,
		DownlinkFreq:          payload.DownlinkFreq,
		UplinkBandwidth:       payload.UplinkBandwidth,
		UplinkSf:              payload.UplinkSf,
		Context:               payload.Context,
		Delay:                 payload.Delay,
		// MIC is always recomputed over the MacPayload being sent
		Mic: calProprietaryMic(payload.MacPayload),
	}
	logrus.Debugf("  sendProprietary() MIC: %s", hex.EncodeToString(req.Mic))
	err := c.sendToNs(networkServerID, &req)
	if err != nil {
		return errors.Wrap(err, "send proprietary payload error")
	}
	logrus.WithFields(logrus.Fields{
		"gateway_mac": payload.GatewayMAC,
		"up_freq":     payload.UplinkFreq,
		"up_bw":       payload.UplinkBandwidth,
		"up_sf":       payload.UplinkSf,
		"down_freq":   payload.DownlinkFreq,
	}).Infof("gateway proprietary payload sent to network server %d", networkServerID)
	return nil
}
// makeHelloResponse assembles the HELLO downlink payload:
// response type byte, random DevEui, server public key, server nonce.
func makeHelloResponse(session deviceSession) []byte {
	resp := make([]byte, 0, 1+len(session.rDevEui)+len(session.serverPublicKey)+len(session.serverNonce))
	resp = append(resp, downRespHello)
	resp = append(resp, session.rDevEui...)
	resp = append(resp, session.serverPublicKey...)
	resp = append(resp, session.serverNonce...)
	return resp
}
// handleHello processes an uplink HELLO message: it looks up (or creates) the
// device session keyed by the 8-byte random DevEui at MacPayload[1:9], drops
// duplicate frames that were already answered for the same gateway context,
// and replies with a HELLO response carrying the server public key and nonce.
func (c *controller) handleHello(ctx context.Context, nID int64, req *as.HandleProprietaryUplinkRequest,
	targetgateway *gwV3.UplinkRXInfo) error {
	logrus.Debug("  HELLO Message.")
	var err error
	var frameversion byte
	rdeveui := make([]byte, 8)
	copy(rdeveui[0:], req.MacPayload[1:])
	// Session id is the big-endian integer value of the random DevEui
	sessionid := binary.BigEndian.Uint64(rdeveui)
	logrus.Debugf("  sessionid=%X", sessionid)
	frameversion = req.MacPayload[73]
	ok, currentsession := c.devSessionList.searchDeviceSession(sessionid)
	if !ok {
		// No existing session: extract the device public key at MacPayload[9:]
		// and create one
		rdeveui := make([]byte, 8)
		devicepublickey := make([]byte, ecdh.K233PubKeySize)
		logrus.Debugf("  Creating new session")
		copy(rdeveui[0:], req.MacPayload[1:])
		copy(devicepublickey[0:], req.MacPayload[9:])
		ok, currentsession = c.devSessionList.createDeviceSession(sessionid, rdeveui, devicepublickey)
		if !ok {
			// Create session failed. drop this frame. return true to mark is processed.
			logrus.Errorf("create session failed")
			return nil
		}
	}
	// Drop if already sent to the same Gateway context
	ok, currentsession = c.devSessionList.checkDeviceSession(sessionid, targetgateway.Context)
	if !ok {
		return nil
	}
	logrus.Debugf("  rDevEui: %s", hex.EncodeToString(currentsession.rDevEui))
	logrus.Debugf("  devicePublicKey: %s", hex.EncodeToString(currentsession.devicePublicKey))
	logrus.Debugf("  serverPrivateKey: %s", hex.EncodeToString(currentsession.serverPrivateKey))
	logrus.Debugf("  serverPublicKey: %s", hex.EncodeToString(currentsession.serverPublicKey))
	logrus.Debugf("  serverNonce: %s", hex.EncodeToString(currentsession.serverNonce))
	logrus.Debugf("  sharedKey: %s", hex.EncodeToString(currentsession.sharedKey))
	logrus.Debugf("  version: %d", frameversion)
	// Build and send the HELLO response, echoing the uplink radio parameters
	var mac lorawan.EUI64
	copy(mac[:], targetgateway.GatewayId)
	payload := proprietaryPayload{
		MacPayload:      makeHelloResponse(currentsession),
		GatewayMAC:      mac,
		UplinkFreq:      req.TxInfo.Frequency,
		UplinkBandwidth: req.TxInfo.GetLoraModulationInfo().GetBandwidth(),
		UplinkSf:        req.TxInfo.GetLoraModulationInfo().SpreadingFactor,
		DownlinkFreq:    0,
		Delay:           &duration.Duration{Seconds: 5, Nanos: 0},
		Context:         targetgateway.Context,
		Mic:             []byte{0x00, 0x00, 0x00, 0x00},
	}
	// logrus.Debugf("Tx MacPayload:\n%s", hex.Dump(payload.MacPayload))
	err = c.sendProprietary(nID, payload)
	if err != nil {
		return err
	}
	return nil
}
// makeAuthAccept builds the AUTH-accept downlink payload: response type byte,
// random DevEui, then the encrypted block of assigned DevEui, assigned AppEui
// and the server verify code.
func makeAuthAccept(session deviceSession, verifycode []byte) []byte {
	plain := make([]byte, 32)
	copy(plain, session.assignedDevEui[:])
	copy(plain[8:], session.assignedAppEui[:])
	copy(plain[16:], verifycode)
	resp := append([]byte{downRespAuthAccept}, session.rDevEui...)
	return append(resp, session.encryptAuthPayload(plain, false)...)
}
// makeAuthReject builds the AUTH-reject downlink payload: response type byte
// followed by the session's random DevEui.
func makeAuthReject(session deviceSession) []byte {
	return append([]byte{downRespAuthReject}, session.rDevEui...)
}
// handleAuth processes an uplink AUTH message: it decrypts the auth block,
// verifies the device against the provisioning server by its serial-number
// hash and verify code, updates and persists the device on success, and
// answers with either an AUTH-accept or AUTH-reject downlink.
func (c *controller) handleAuth(ctx context.Context, nID int64, req *as.HandleProprietaryUplinkRequest,
	targetgateway *gwV3.UplinkRXInfo) error {
	logrus.Debug("  AUTH Message.")
	// Session id is the big-endian value of the random DevEui at MacPayload[1:9]
	rdeveui := make([]byte, 8)
	copy(rdeveui[0:], req.MacPayload[1:])
	sessionid := binary.BigEndian.Uint64(rdeveui)
	logrus.Debugf("  sessionid=%X", sessionid)
	// AUTH is only valid within an existing HELLO-established session
	ok, currentsession := c.devSessionList.searchDeviceSession(sessionid)
	if !ok {
		logrus.Debugf("  Auth message without active session. Frame dropped.")
		return nil
	}
	// Drop if already sent to the same Gateway context
	ok, currentsession = c.devSessionList.checkDeviceSession(sessionid, targetgateway.Context)
	if !ok {
		return nil
	}
	// Decrypt the 52-byte auth block: provision-id hash (32) + verify code (16)
	// + device nonce (4)
	authpayload := make([]byte, 52)
	copy(authpayload[:], req.MacPayload[9:])
	authpayload = currentsession.encryptAuthPayload(authpayload, true)
	privisionidhash := make([]byte, 32)
	verifycode := make([]byte, 16)
	copy(privisionidhash[:], authpayload[0:])
	copy(verifycode[:], authpayload[32:])
	copy(currentsession.devNonce[:], authpayload[48:])
	logrus.Debugf("  rDevEui: %s", hex.EncodeToString(currentsession.rDevEui))
	logrus.Debugf("  devNonce: %s", hex.EncodeToString(currentsession.devNonce))
	logrus.Debugf("  privisionidhash: %s", hex.EncodeToString(privisionidhash))
	logrus.Debugf("  verifycode: %s", hex.EncodeToString(verifycode))
	// Look up the device on the provisioning server; reject if disabled or
	// already registered to another server
	authaccepted := true
	found, deviceinfo := findDeviceBySnHash(ctx, privisionidhash, c.psCli)
	if !found {
		return errors.Errorf("Device %s not found.", hex.EncodeToString(privisionidhash))
	} else if deviceinfo.Status == "DISABLED" {
		logrus.Errorf("Device %s disabled.", deviceinfo.ProvisionID)
		authaccepted = false
	}
	logrus.Debugf("  Device found. %s, mfgID=%d, server=%s", deviceinfo.ProvisionID, deviceinfo.ManufacturerID, deviceinfo.Server)
	logrus.Debugf("    devEUI=%s, appEUI=%s, appKey=%s, nwkKey=%s",
		hex.EncodeToString(deviceinfo.DevEUI), hex.EncodeToString(deviceinfo.AppEUI),
		hex.EncodeToString(deviceinfo.AppKey), hex.EncodeToString(deviceinfo.NwkKey))
	logrus.Debugf("    status=%v, model=%v, fixedDevEUI=%v, created=%v", deviceinfo.Status, deviceinfo.Model, deviceinfo.FixedDevEUI,
		deviceinfo.TimeCreated)
	if deviceinfo.Server != "" {
		logrus.Errorf("Device %s registered to %v, provisioning not allowed.", deviceinfo.ProvisionID, deviceinfo.Server)
		authaccepted = false
	}
	if authaccepted {
		// The device's verify code must match the one derived from the session
		calverifycode := currentsession.calVerifyCode(deviceinfo.ProvisionID, true)
		if !bytes.Equal(verifycode, calverifycode) {
			return errors.Errorf("Incorrect verify code at Auth message")
		}
		currentsession, deviceinfo, err := c.updateDevice(ctx, currentsession, deviceinfo)
		if err != nil {
			return errors.Wrap(err, "updateDevice error")
		}
		c.devSessionList.updateDeviceSession(sessionid, currentsession)
		err = saveDevice(ctx, deviceinfo, c.psCli)
		if err != nil {
			return errors.Wrap(err, "saveDevice error")
		}
	}
	// Build and send the AUTH response, echoing the uplink radio parameters
	var mac lorawan.EUI64
	copy(mac[:], targetgateway.GatewayId)
	verifycode = currentsession.calVerifyCode(deviceinfo.ProvisionID, false)
	payload := proprietaryPayload{
		GatewayMAC:      mac,
		UplinkFreq:      req.TxInfo.Frequency,
		DownlinkFreq:    0,
		UplinkBandwidth: req.TxInfo.GetLoraModulationInfo().GetBandwidth(),
		UplinkSf:        req.TxInfo.GetLoraModulationInfo().SpreadingFactor,
		Delay:           &duration.Duration{Seconds: 5, Nanos: 0},
		Context:         targetgateway.Context,
		Mic:             []byte{0x00, 0x00, 0x00, 0x00},
	}
	if authaccepted {
		payload.MacPayload = makeAuthAccept(currentsession, verifycode)
	} else {
		payload.MacPayload = makeAuthReject(currentsession)
	}
	// logrus.Debugf("Tx MacPayload:\n%s", hex.Dump(payload.MacPayload))
	err := c.sendProprietary(nID, payload)
	if err != nil {
		return err
	}
	return nil
}
// Device session handling
// searchDeviceSession looks up the session for sessionid; it returns false and
// a zero session when none exists. This is a pure read, so it takes the read
// lock (the original took the exclusive lock, serializing all readers).
func (l *DeviceSessionList) searchDeviceSession(sessionid uint64) (bool, deviceSession) {
	l.mutexSessionList.RLock()
	currentsession, sessionfound := l.sessionlist[sessionid]
	l.mutexSessionList.RUnlock()
	if !sessionfound {
		return false, deviceSession{}
	}
	return true, currentsession
}
// updateDeviceSession replaces the session stored under sessionid with
// newsession, but only if an entry for that id already exists.
//
// Fix: the original released the mutex after the existence check and then
// wrote to the map unlocked — a data race with the other methods that
// mutate l.sessionlist (Go maps are not safe for concurrent use). The
// check and the write now happen inside a single critical section.
func (l *DeviceSessionList) updateDeviceSession(sessionid uint64, newsession deviceSession) {
	l.mutexSessionList.Lock()
	defer l.mutexSessionList.Unlock()
	if _, sessionfound := l.sessionlist[sessionid]; sessionfound {
		l.sessionlist[sessionid] = newsession
	}
}
// checkDeviceSession looks up the session for sessionid and deduplicates
// uplinks: a frame whose gateway context matches the last one handled for
// this session is reported as already processed. On success it records the
// new gateway context and returns (true, session); it returns
// (false, zero-value) when the session is unknown or the frame is a duplicate.
func (l *DeviceSessionList) checkDeviceSession(sessionid uint64, gwcontext []byte) (bool, deviceSession) {
	l.mutexSessionList.Lock()
	defer l.mutexSessionList.Unlock()
	currentsession, sessionfound := l.sessionlist[sessionid]
	if !sessionfound {
		return false, deviceSession{}
	}
	if bytes.Equal(currentsession.lastGwContext, gwcontext) {
		// Same gateway context already handled, return false to cause the frame to be dropped
		return false, deviceSession{}
	}
	// Save gateway context (own copy, so later mutation of gwcontext by the
	// caller cannot affect the stored session) and write the updated session
	// back, since deviceSession is held by value in the map.
	currentsession.lastGwContext = make([]byte, len(gwcontext))
	copy(currentsession.lastGwContext[:], gwcontext)
	l.sessionlist[sessionid] = currentsession
	return true, currentsession
}
// createDeviceSession creates and stores a fresh provisioning session for
// sessionid, seeded with the device's random DevEUI and public key, then
// generates the server key pair and derives the shared/session keys.
// It returns (false, zero-value) when the session table is full or key
// generation fails, and (true, session) on success.
func (l *DeviceSessionList) createDeviceSession(sessionid uint64, rdeveui []byte, devicepublickey []byte) (bool, deviceSession) {
	l.mutexSessionList.Lock()
	defer l.mutexSessionList.Unlock()
	if len(l.sessionlist) >= l.maxNumberOfDevSession {
		logrus.Warnf("Maximum number (%d) of device provisioning session reached. Request dropped.", l.maxNumberOfDevSession)
		return false, deviceSession{}
	}
	// New session
	currentsession := makeDeviceSession(l.deviceSessionLifeCycle)
	copy(currentsession.rDevEui[0:], rdeveui)
	copy(currentsession.devicePublicKey[0:], devicepublickey)
	if err := currentsession.genServerKeys(); err != nil {
		// Fix: the original dropped this error silently, making key-generation
		// failures indistinguishable from a full session table.
		logrus.Errorf("createDeviceSession: genServerKeys failed: %v", err)
		return false, deviceSession{}
	}
	currentsession.genSharedKey()
	currentsession.deriveKeys()
	l.sessionlist[sessionid] = currentsession
	return true, currentsession
}
// clearExpiredDevSession removes every session whose expireTime lies in the
// past. Deleting entries while ranging over a map is safe in Go.
func (l *DeviceSessionList) clearExpiredDevSession() {
	l.mutexSessionList.Lock()
	defer l.mutexSessionList.Unlock()
	cutoff := time.Now()
	for id, session := range l.sessionlist {
		if cutoff.After(session.expireTime) {
			delete(l.sessionlist, id)
		}
	}
}
// clearDeviceSessionList empties the session map in place (the map value
// itself is kept, only its entries are removed).
func (l *DeviceSessionList) clearDeviceSessionList() {
	l.mutexSessionList.Lock()
	defer l.mutexSessionList.Unlock()
	for id := range l.sessionlist {
		delete(l.sessionlist, id)
	}
}
// updateDevice binds EUIs and session keys to a device when the session has
// not yet been associated with it (assigned DevEUI still all-zero, or it
// differs from the device's DevEUI). If the device has no fixed DevEUI (or a
// zero one), a fresh DevEUI is requested from the provisioning service.
// The session's keys (appKey/nwkKey) are copied into the device record.
// Both session and deviceinfo are passed and returned by value; callers must
// use the returned copies.
func (c *controller) updateDevice(ctx context.Context, session deviceSession, deviceinfo deviceInfo) (deviceSession, deviceInfo, error) {
	if isByteArrayAllZero(session.assignedDevEui) || !bytes.Equal(session.assignedDevEui, deviceinfo.DevEUI) {
		// Session is new
		// Work on fixed 8-byte buffers; DevEUI/AppEUI are EUI-64 values.
		deveui := make([]byte, 8)
		appeui := make([]byte, 8)
		copy(deveui[:], deviceinfo.DevEUI)
		copy(appeui[:], deviceinfo.AppEUI)
		if !deviceinfo.FixedDevEUI || isByteArrayAllZero(deveui) {
			// Generate devEUI
			resp, err := c.psCli.GenDevEUI(ctx, &psPb.GenDevEuiRequest{})
			if err != nil {
				return session, deviceinfo, err
			}
			copy(deveui[:], resp.DevEUI[:])
			copy(deviceinfo.DevEUI[:], resp.DevEUI[:])
		}
		copy(session.assignedDevEui[:], deveui[:])
		copy(session.assignedAppEui[:], appeui[:])
		// NOTE(review): keys flow session -> device here; assumes appKey/nwkKey
		// were derived earlier in the session lifecycle — confirm ordering.
		copy(deviceinfo.AppKey[:], session.appKey)
		copy(deviceinfo.NwkKey[:], session.nwkKey)
		copy(deviceinfo.AppEUI[:], appeui)
	}
	return session, deviceinfo, nil
}
//
// fillByteArray overwrites every element of input with value, in place.
// A nil or empty slice is a no-op.
func fillByteArray(input []byte, value uint8) {
	for i := 0; i < len(input); i++ {
		input[i] = value
	}
}
// isByteArrayAllZero reports whether every byte of input is zero.
// A nil or empty slice is vacuously all-zero.
func isByteArrayAllZero(input []byte) bool {
	for _, b := range input {
		if b != 0 {
			return false
		}
	}
	return true
}
|
NBANDROIDTEAM/NBANDROID-V2 | nbandroid.gradle.spi/src/main/java/nbandroid/gradle/spi/GradleJvmConfiguration.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package nbandroid.gradle.spi;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;
import org.netbeans.api.project.Project;
import org.netbeans.spi.project.AuxiliaryProperties;
/**
*
* @author arsi
*/
/**
 * Per-project Gradle JVM launch configuration (min/max heap in MB plus
 * free-form custom arguments), persisted through the project's
 * {@link AuxiliaryProperties}.
 */
public class GradleJvmConfiguration {

    private final Project project;
    private final AuxiliaryProperties auxProps;
    private static final String PREFERENCE_GRADLE_JVM_MIN_HEAP = "PREFERENCE_GRADLE_JVM_MIN_HEAP";
    private static final String PREFERENCE_GRADLE_JVM_MAX_HEAP = "PREFERENCE_GRADLE_JVM_MAX_HEAP";
    private static final String PREFERENCE_GRADLE_JVM_CUSTOM = "PREFERENCE_GRADLE_JVM_CUSTOM";
    /** Default minimum heap, in MB, used when no preference is stored. */
    private static final String DEFAULT_MIN_HEAP = "800";
    /** Default maximum heap, in MB, used when no preference is stored. */
    private static final String DEFAULT_MAX_HEAP = "2000";
    private String minHeap;
    private String maxHeap;
    private String custom;

    public GradleJvmConfiguration(Project project) {
        this.project = project;
        auxProps = project.getLookup().lookup(AuxiliaryProperties.class);
        minHeap = auxProps.get(PREFERENCE_GRADLE_JVM_MIN_HEAP, false);
        maxHeap = auxProps.get(PREFERENCE_GRADLE_JVM_MAX_HEAP, false);
        custom = auxProps.get(PREFERENCE_GRADLE_JVM_CUSTOM, false);
        if (minHeap == null || minHeap.isEmpty()) {
            minHeap = DEFAULT_MIN_HEAP;
        }
        if (maxHeap == null || maxHeap.isEmpty()) {
            maxHeap = DEFAULT_MAX_HEAP;
        }
    }

    /** @return the minimum heap size in MB as a string (never null/empty). */
    public String getMinHeap() {
        return minHeap;
    }

    /** Stores and persists the minimum heap size (MB). */
    public void setMinHeap(String minHeap) {
        this.minHeap = minHeap;
        auxProps.put(PREFERENCE_GRADLE_JVM_MIN_HEAP, minHeap, false);
    }

    /** @return the maximum heap size in MB as a string (never null/empty). */
    public String getMaxHeap() {
        return maxHeap;
    }

    /** @return the minimum heap in MB, falling back to the default on unparsable input. */
    public int getMinHeapInt() {
        try {
            return Integer.parseInt(minHeap);
        } catch (NumberFormatException numberFormatException) {
            // fall through to the default below
        }
        return Integer.parseInt(DEFAULT_MIN_HEAP);
    }

    /** @return the maximum heap in MB, falling back to the default on unparsable input. */
    public int getMaxHeapInt() {
        try {
            return Integer.parseInt(maxHeap);
        } catch (NumberFormatException numberFormatException) {
            // fall through to the default below
        }
        // Fix: previously returned 800 here, contradicting the 2000 MB
        // default max heap applied by the constructor.
        return Integer.parseInt(DEFAULT_MAX_HEAP);
    }

    /** Stores and persists the maximum heap size (MB). */
    public void setMaxHeap(String maxHeap) {
        this.maxHeap = maxHeap;
        auxProps.put(PREFERENCE_GRADLE_JVM_MAX_HEAP, maxHeap, false);
    }

    /** @return the raw custom JVM argument text, or null when unset. */
    public String getCustom() {
        return custom;
    }

    /** Stores and persists the custom JVM argument text. */
    public void setCustom(String custom) {
        this.custom = custom;
        auxProps.put(PREFERENCE_GRADLE_JVM_CUSTOM, custom, false);
    }

    /**
     * Builds the JVM argument list: -Xms/-Xmx from the heap settings plus the
     * custom arguments, which may be separated by newlines or semicolons.
     *
     * @return the arguments in launch order; never null
     */
    public String[] getJvmArguments() {
        List<String> arguments = new ArrayList<>();
        arguments.add("-Xms" + minHeap + "M");
        arguments.add("-Xmx" + maxHeap + "M");
        if (custom != null && !custom.isEmpty()) {
            // Normalize every line-ending flavor to the ';' separator.
            String tmp = custom.replace("\n\r", ";").replace("\r\n", ";").replace("\r", ";").replace("\n", ";");
            StringTokenizer tok = new StringTokenizer(tmp, ";", false);
            while (tok.hasMoreElements()) {
                arguments.add(tok.nextToken().trim());
            }
        }
        return arguments.toArray(new String[arguments.size()]);
    }
}
|
badfic/phil-bot-java | src/main/java/com/badfic/philbot/listeners/phil/swampy/TimeoutCommand.java | <reponame>badfic/phil-bot-java<gh_stars>1-10
package com.badfic.philbot.listeners.phil.swampy;
import static net.dv8tion.jda.api.Permission.MESSAGE_ATTACH_FILES;
import static net.dv8tion.jda.api.Permission.MESSAGE_EMBED_LINKS;
import static net.dv8tion.jda.api.Permission.MESSAGE_WRITE;
import com.badfic.philbot.config.Constants;
import com.badfic.philbot.data.phil.TimeoutCase;
import com.badfic.philbot.data.phil.TimeoutCaseRepository;
import com.jagrosh.jdautilities.command.CommandEvent;
import java.time.LocalDateTime;
import java.time.temporal.ChronoUnit;
import javax.annotation.Resource;
import net.dv8tion.jda.api.Permission;
import net.dv8tion.jda.api.entities.ChannelType;
import net.dv8tion.jda.api.entities.GuildChannel;
import net.dv8tion.jda.api.entities.Member;
import net.dv8tion.jda.api.entities.TextChannel;
import org.apache.commons.collections4.CollectionUtils;
import org.springframework.stereotype.Component;
/**
 * Admin command that places a mentioned user in "timeout": every permission
 * is denied in all text/voice channels except the dedicated timeout channel,
 * where the user may still write. A {@link TimeoutCase} is persisted so the
 * timeout can be lifted later.
 */
@Component
public class TimeoutCommand extends BaseSwampy {

    /** How long a timeout lasts, in minutes. */
    private static final int TIMEOUT_MINUTES = 7;

    @Resource
    private TimeoutCaseRepository timeoutCaseRepository;

    public TimeoutCommand() {
        name = "timeout";
        requiredRole = Constants.ADMIN_ROLE;
        // Fix: the help text previously claimed a 5 minute default while
        // TIMEOUT_MINUTES is 7; derive the text from the constant instead.
        // NOTE(review): the "!!timeout 30 @user" form is advertised but
        // execute() always uses TIMEOUT_MINUTES — confirm whether a duration
        // argument is parsed elsewhere.
        help = """
                put a user in timeout (temporary mute/ban/jail).
                `!!timeout @user` puts a user in timeout for %d minutes.
                `!!timeout 30 @user` puts a user in timeout for 30 minutes.""".formatted(TIMEOUT_MINUTES);
    }

    @Override
    protected void execute(CommandEvent event) {
        TextChannel timeoutChannel = philJda.getTextChannelById(baseConfig.timeoutChannelId);

        // Exactly one non-bot, non-admin member must be mentioned.
        if (CollectionUtils.size(event.getMessage().getMentionedMembers()) != 1) {
            event.replyError("Please mention one user to put in timeout");
            return;
        }

        Member member = event.getMessage().getMentionedMembers().get(0);

        if (member.getUser().isBot()) {
            event.replyError("You can't put a bot in timeout");
            return;
        }

        if (hasRole(member, Constants.ADMIN_ROLE)) {
            event.replyError("You can't put a mod in timeout");
            return;
        }

        if (timeoutCaseRepository.existsById(member.getIdLong())) {
            event.replyError("That user is already in timeout");
            return;
        }

        try {
            // Persist the case first so the expiry job can lift the timeout later.
            timeoutCaseRepository.save(new TimeoutCase(member.getIdLong(),
                    LocalDateTime.now().truncatedTo(ChronoUnit.MINUTES).plusMinutes(TIMEOUT_MINUTES)));

            event.getGuild()
                    .getChannels()
                    .stream()
                    .filter(c -> c.getType() == ChannelType.TEXT || c.getType() == ChannelType.VOICE)
                    .forEach(channel -> assignOverridePermissions(channel, member));

            if (timeoutChannel != null) {
                timeoutChannel.sendMessage(member.getEffectiveName() + " has been put in timeout for " + TIMEOUT_MINUTES + " minutes").queue();
            }
        } catch (Exception e) {
            honeybadgerReporter.reportError(e, null, "Failed to put user " + member.getAsMention() + " in timeout");
        }
    }

    /**
     * In the timeout channel, allow the member to write; in every other
     * channel deny all permissions via an override.
     */
    private void assignOverridePermissions(GuildChannel channel, Member member) {
        if (baseConfig.timeoutChannelId.equals(channel.getId())) {
            channel.upsertPermissionOverride(member).setAllow(MESSAGE_WRITE, MESSAGE_EMBED_LINKS, MESSAGE_ATTACH_FILES).queue();
        } else {
            channel.upsertPermissionOverride(member).deny(Permission.ALL_PERMISSIONS).queue();
        }
    }
}
|
mtunganati/oneops | oneops-admin/lib/chef/knife/model_sync.rb | require 'chef/knife/base_sync'
require 'chef/cookbook_loader'
class Chef
  class Knife
    # Knife plugin that parses cookbook metadata.rb files and uploads the
    # resulting class and relation models into the OneOps CMS.
    class ModelSync < Chef::Knife::CookbookMetadata
      include ::BaseSync
      banner "Loads class and relation metadata into OneOps\nUsage: \n circuit model [OPTIONS] [COOKBOOKS...]"

      option :all,
             :short => "-a",
             :long => "--all",
             :description => "Sync metadata for all class cookbooks, rather than just a single cookbook"

      # NOTE(review): "-r" is declared as the short flag both here (with an
      # argument) and for --relations below (without one); one of them is
      # shadowed — confirm which flag is intended to win.
      option :register,
             :short => "-r REGISTER",
             :long => "--register REGISTER",
             :description => "Specify the source register name to use during uploads"

      option :version,
             :short => "-v VERSION",
             :long => "--version VERSION",
             :description => "Specify the source register version to use during uploads"

      option :classes,
             :long => "--classes",
             :description => "Sync metadata for classes only (by default is ON if not specified and --relations not specified)"

      option :relations,
             :short => "-r",
             :long => "--relations",
             :description => "Sync metadata for relations only (by default is OFF if not specified)"

      option :cookbook_path,
             :short => "-o PATH:PATH",
             :long => "--cookbook-path PATH:PATH",
             :description => "A colon-separated path to look for cookbooks in",
             :proc => lambda {|o| o.split(":")}

      # Entry point: resolves configuration, collects the cookbook list,
      # syncs classes and/or relations, and resets the metadata cache.
      def run
        t1 = Time.now
        ENV['CMS_TRACE'] = 'true' if config[:cms_trace]
        # Fall back to knife config for any option not given on the CLI.
        config[:cookbook_path] ||= Chef::Config[:cookbook_path]
        config[:register] ||= Chef::Config[:register]
        config[:version] ||= (Chef::Config[:version] || '1.0.0')
        if config[:all]
          cookbooks = config[:cookbook_path].inject([]) do |a, path|
            a + Chef::CookbookLoader.new(path).load_cookbooks.keys.sort
          end
        else
          cookbooks = @name_args
        end
        if cookbooks.blank?
          ui.error 'You must specify cookbook name(s) or use the --all option to sync all.'
          exit(1)
        end
        # Classes sync by default unless only --relations was requested.
        sync_relations = config[:relations]
        sync_classes = config[:classes] || sync_relations.nil?
        models = []
        models += sync_cookbooks(cookbooks, true, false) if sync_classes
        models += sync_cookbooks(cookbooks, false, true) if sync_relations
        if models.present?
          ok, error = Cms::MdCache.reset
          # NOTE(review): "tigger" typo lives in the runtime message; left as-is.
          ui.warn("Failed to tigger metadata cache reset: #{error}") unless ok
        else
          ui.warn('Nothing to do - no matching metadata definitions found.')
        end
        t2 = Time.now
        ui.info("\nProcessed #{cookbooks.size} cookbooks, resulting in #{models.size} models.\nDone at #{t2} in #{(t2 - t1).round(1)}sec")
      end

      private

      # Finds each cookbook's metadata.rb across all configured cookbook
      # paths and syncs it; returns the accumulated model list.
      def sync_cookbooks(cookbooks, sync_classes, sync_relations)
        cookbooks.inject([]) do |a, cookbook|
          config[:cookbook_path].inject(a) do |aa, path|
            file = File.expand_path(File.join(path, cookbook, 'metadata.rb'))
            # NOTE(review): File.exists? is deprecated in modern Ruby in
            # favor of File.exist? — confirm the target Ruby version.
            File.exists?(file) ? (aa + sync_cookbook_metadata(cookbook, file, sync_classes, sync_relations)) : aa
          end
        end
      end

      # Parses one metadata.rb and bulk-uploads either its class models or
      # its relation models. Returns the models built (possibly empty).
      def sync_cookbook_metadata(cookbook, file, sync_classes, sync_relations)
        md = Chef::Cookbook::Metadata.new
        md.name(cookbook.capitalize)
        md.from_file(file)
        return [] if md.groupings.blank? # Nothing to do - just a placeholder metadata file.
        ui.info("\n--------------------------------------------------")
        ui.info("#{" #{md.name} ".blue(true)} #{sync_classes ? 'classes' : 'relations'}")
        ui.info('--------------------------------------------------')
        models = []
        # A cookbook is either a relation definition or a class definition,
        # signalled by the :relation flag on its 'default' grouping.
        if md.groupings['default'][:relation]
          models = build_model_relations(md) if sync_relations
        else
          if sync_classes
            models = build_model_classes(md)
            sync_docs(md, file)
          end
        end
        Log.debug(models.to_yaml) if Log.debug?
        if models.present?
          ok, error = (sync_classes ? Cms::CiMd : Cms::RelationMd).bulk(models)
          if ok
            ui.info('Successfully synched models'.green)
          else
            ui.error("Failed to save models: #{error}")
            exit 1
          end
        else
          ui.info("Nothing to do - no #{sync_classes ? 'class' : 'relation'} definitions found.")
        end
        return models
      rescue Exceptions::ObsoleteDependencySyntax, Exceptions::InvalidVersionConstraint => e
        ui.error "ERROR: The cookbook '#{cookbook}' contains invalid or obsolete metadata syntax in #{file}: #{e.message}"
        exit 1
      rescue Exception => e
        ui.error "Failed to process cookbook #{cookbook}: #{e}"
        exit 1
      end

      # Builds CiMd class models for every (grouping, package) combination.
      def build_model_classes(md)
        ui.info('models:')
        classes = []
        # must sync the base class first
        md.groupings.each do |group_name, group_properties|
          group_properties[:packages].select {|v| v == 'base'}.each do |package_name|
            classes << build_class(md, package_name, group_name, group_properties)
          end
        end
        md.groupings.each do |group_name, group_properties|
          group_properties[:packages].reject {|v| v == 'base'}.each do |package_name|
            classes << build_class(md, package_name, group_name, group_properties)
          end
        end
        return classes
      end

      # Builds a single Cms::CiMd class model for one package/grouping pair.
      # Non-base packages inherit from the corresponding base class.
      def build_class(md, package, group, group_props)
        short_name = build_md_name(md.name)
        cms_class = Cms::CiMd.new
        cms_class.className = "#{package}.#{short_name}"
        cms_class.superClassName = "base.#{short_name}" unless package == 'base'
        ui.info(" - #{cms_class.className}")
        cms_class.impl = group_props[:impl] || Chef::Config[:default_impl]
        cms_class.description = group_props[:description] || md.description
        namespace = group_props[:namespace]
        if namespace && !namespace.is_a?(TrueClass) && !namespace.is_a?(FalseClass)
          ui.error 'You must specify boolean value type for namespace attribute.'
          exit 1
        end
        cms_class.isNamespace = !!namespace
        # Who and why hacked this in?
        cloud_classes = %w(mgmt.cloud.service cloud.service service)
        cms_class.useClassNameNS = true if cloud_classes.index {|s| cms_class.className.start_with?(s)} != nil
        cms_class.accessLevel = group_props[:access] || 'global'
        cms_class.fromRelations = []
        cms_class.toRelations = []
        cms_class.actions = []
        # Attributes belong to this class when tagged with its grouping, or,
        # for the base package, when they carry no grouping at all.
        cms_class.mdAttributes = md.attributes.inject([]) do |a, (name, properties)|
          if properties[:grouping] == group || (package == 'base' && !properties[:grouping])
            a << generate_class_attribute(name, properties)
          end
          a
        end
        # Each recipe becomes an action; its args must be valid JSON.
        cms_class.actions = md.recipes.inject([]) do |a, (recipe, properties)|
          action = {:actionName => recipe, :description => properties['description']}
          args = properties['args']
          if args
            args = args.to_json if args.is_a?(Hash)
            JSON.parse(args) # check for valid json.
            action[:arguments] = args
          end
          a << action
        end
        return cms_class
      end

      # Builds RelationMd models for every (grouping, package) combination.
      def build_model_relations(md)
        ui.info('models:')
        relations = []
        # must sync the base relation first
        md.groupings.each do |group_name, group_properties|
          group_properties[:packages].select {|v| v == 'base'}.each do |package_name|
            relations << build_relation(md, package_name, group_name, group_properties)
          end
        end
        md.groupings.each do |group_name, group_properties|
          group_properties[:packages].reject {|v| v == 'base'}.each do |package_name|
            relations << build_relation(md, package_name, group_name, group_properties)
          end
        end
        return relations
      end

      # Builds a single Cms::RelationMd model. Attributes flagged with
      # :relation_target become link targets; the rest become attributes.
      def build_relation(md, package, group, group_props)
        short_name = build_md_name(md.name)
        cms_relation = Cms::RelationMd.new
        cms_relation.relationName = "#{package}.#{short_name}"
        cms_relation.description = group_props[:description] || md.description
        cms_relation.mdAttributes = Array.new
        cms_relation.targets = Array.new
        ui.info("#{cms_relation.relationName}")
        md.attributes.each do |name, properties|
          if properties[:relation_target]
            if properties[:package] && properties[:package] == package
              cms_relation.targets.push(generate_target(name, properties))
            end
          else
            if !properties[:grouping] || properties[:grouping] == group
              attribute = generate_relation_attribute(name, properties)
              cms_relation.mdAttributes.push(attribute)
            end
          end
        end
        return cms_relation
      end

      # Class attributes carry extra flags on top of the common ones.
      def generate_class_attribute(name, properties)
        attribute = generate_attribute(name, properties)
        attribute.isInheritable = properties['inherit'] == 'no' ? false : true
        attribute.isEncrypted = properties['encrypted'] || false
        attribute.isImmutable = properties['immutable'] || false
        attribute
      end

      # Relation attributes only use the common attribute fields.
      def generate_relation_attribute(name, properties)
        generate_attribute(name, properties)
      end

      # Maps a metadata attribute definition to a Cms::AttrMd, inferring the
      # data type from the metadata 'type' when 'data_type' is not given.
      def generate_attribute(name, properties)
        attribute = Cms::AttrMd.new
        attribute.attributeName = name
        attribute.description = properties['description']
        attribute.isMandatory = properties['required'] == 'required'
        attribute.defaultValue = properties['default'] || ''
        attribute.valueFormat = properties['format'].is_a?(Hash) ? properties['format'].to_json : properties['format']
        attribute.dataType = properties['data_type']
        unless properties['data_type']
          case properties['type']
          when 'array'
            attribute.dataType = 'enum'
          when 'hash'
            attribute.dataType = 'object'
          else
            attribute.dataType = 'string'
          end
        end
        return attribute
      end

      # Maps a :relation_target attribute to a Cms::TargetMd link descriptor.
      def generate_target(name, properties)
        target = Cms::TargetMd.new
        target.fromClassName = properties[:from_class]
        target.toClassName = properties[:to_class]
        target.linkType = properties[:link_type]
        target.isStrong = (properties['required'] == 'required')
        target.description = name
        return target
      end

      # Uploads any files under the cookbook's doc/ directory, rewriting
      # their paths to live under the model's short name.
      def sync_docs(md, md_file)
        return unless sync_docs?
        doc_dir = md_file.gsub(/metadata\.rb$/, 'doc')
        files = Dir.glob("#{doc_dir}/**/*")
        if files.present?
          ui.info('docs and images:')
          files.each {|file| sync_doc_file(file, file.gsub(doc_dir, build_md_name(md.name)))}
        end
      end

      # Builds the capitalized model short name, optionally prefixed with the
      # register (and major version) when not running as admin.
      def build_md_name(name)
        suffix = Chef::Config[:admin] ? '' : "#{config[:register]}.#{"#{config[:version].split('.').first}." if Chef::Config[:useversion]}"
        "#{suffix}#{name[0].upcase}#{name[1..-1]}"
      end
    end
  end
end
|
legioner9/Node_Way_source_2 | Store/myNpm/st_doc1/js_DOCS/Function/Docs/Bind/3-callback-named_bind_to_contract.js | 'use strict';
const fs = require ( 'fs' );
const path = require ( 'path' );
// Logs the weekday, file name and line count of the target file.
// `print` takes two extra leading arguments so it can be partially applied
// below via Function.prototype.bind into a standard Node (err, data) callback.
const print = (fileName, day, err, data) => {
  // Fix: the original ignored `err`; on a read failure `data` is undefined
  // and `data.split` would throw. Report the error and bail out instead.
  if (err) {
    console.error({ err });
    return;
  }
  console.log({ day });
  console.log({ fileName });
  console.log({ lines: data.split('\n').length });
};

const fileName = path.join(__dirname, '1-callback.js');
const day = (new Date()).getDay();

// Bind the contract-specific leading arguments; the resulting function has
// exactly the (err, data) signature fs.readFile expects.
const callback = print.bind(null, fileName, day);

fs.readFile(fileName, 'utf8', callback);
|
Neusoft-Technology-Solutions/aws-sdk-cpp | aws-cpp-sdk-config/include/aws/config/model/EvaluationResultQualifier.h | <filename>aws-cpp-sdk-config/include/aws/config/model/EvaluationResultQualifier.h
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#pragma once
#include <aws/config/ConfigService_EXPORTS.h>
#include <aws/core/utils/memory/stl/AWSString.h>
#include <utility>
namespace Aws
{
namespace Utils
{
namespace Json
{
class JsonValue;
class JsonView;
} // namespace Json
} // namespace Utils
namespace ConfigService
{
namespace Model
{
/**
* <p>Identifies an AWS Config rule that evaluated an AWS resource, and provides
* the type and ID of the resource that the rule evaluated.</p><p><h3>See
* Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/config-2014-11-12/EvaluationResultQualifier">AWS
* API Reference</a></p>
*/
  // Generated-SDK-style value class: each field comes with a
  // Get/HasBeenSet/Set(const&/&&/const char*)/With accessor group so the
  // JSON serializer can emit only fields that were explicitly set.
  class AWS_CONFIGSERVICE_API EvaluationResultQualifier
  {
  public:
    EvaluationResultQualifier();
    EvaluationResultQualifier(Aws::Utils::Json::JsonView jsonValue);
    EvaluationResultQualifier& operator=(Aws::Utils::Json::JsonView jsonValue);
    Aws::Utils::Json::JsonValue Jsonize() const;


    /**
     * <p>The name of the AWS Config rule that was used in the evaluation.</p>
     */
    inline const Aws::String& GetConfigRuleName() const{ return m_configRuleName; }

    /**
     * <p>The name of the AWS Config rule that was used in the evaluation.</p>
     */
    inline bool ConfigRuleNameHasBeenSet() const { return m_configRuleNameHasBeenSet; }

    /**
     * <p>The name of the AWS Config rule that was used in the evaluation.</p>
     */
    inline void SetConfigRuleName(const Aws::String& value) { m_configRuleNameHasBeenSet = true; m_configRuleName = value; }

    /**
     * <p>The name of the AWS Config rule that was used in the evaluation.</p>
     */
    inline void SetConfigRuleName(Aws::String&& value) { m_configRuleNameHasBeenSet = true; m_configRuleName = std::move(value); }

    /**
     * <p>The name of the AWS Config rule that was used in the evaluation.</p>
     */
    inline void SetConfigRuleName(const char* value) { m_configRuleNameHasBeenSet = true; m_configRuleName.assign(value); }

    /**
     * <p>The name of the AWS Config rule that was used in the evaluation.</p>
     */
    inline EvaluationResultQualifier& WithConfigRuleName(const Aws::String& value) { SetConfigRuleName(value); return *this;}

    /**
     * <p>The name of the AWS Config rule that was used in the evaluation.</p>
     */
    inline EvaluationResultQualifier& WithConfigRuleName(Aws::String&& value) { SetConfigRuleName(std::move(value)); return *this;}

    /**
     * <p>The name of the AWS Config rule that was used in the evaluation.</p>
     */
    inline EvaluationResultQualifier& WithConfigRuleName(const char* value) { SetConfigRuleName(value); return *this;}


    /**
     * <p>The type of AWS resource that was evaluated.</p>
     */
    inline const Aws::String& GetResourceType() const{ return m_resourceType; }

    /**
     * <p>The type of AWS resource that was evaluated.</p>
     */
    inline bool ResourceTypeHasBeenSet() const { return m_resourceTypeHasBeenSet; }

    /**
     * <p>The type of AWS resource that was evaluated.</p>
     */
    inline void SetResourceType(const Aws::String& value) { m_resourceTypeHasBeenSet = true; m_resourceType = value; }

    /**
     * <p>The type of AWS resource that was evaluated.</p>
     */
    inline void SetResourceType(Aws::String&& value) { m_resourceTypeHasBeenSet = true; m_resourceType = std::move(value); }

    /**
     * <p>The type of AWS resource that was evaluated.</p>
     */
    inline void SetResourceType(const char* value) { m_resourceTypeHasBeenSet = true; m_resourceType.assign(value); }

    /**
     * <p>The type of AWS resource that was evaluated.</p>
     */
    inline EvaluationResultQualifier& WithResourceType(const Aws::String& value) { SetResourceType(value); return *this;}

    /**
     * <p>The type of AWS resource that was evaluated.</p>
     */
    inline EvaluationResultQualifier& WithResourceType(Aws::String&& value) { SetResourceType(std::move(value)); return *this;}

    /**
     * <p>The type of AWS resource that was evaluated.</p>
     */
    inline EvaluationResultQualifier& WithResourceType(const char* value) { SetResourceType(value); return *this;}


    /**
     * <p>The ID of the evaluated AWS resource.</p>
     */
    inline const Aws::String& GetResourceId() const{ return m_resourceId; }

    /**
     * <p>The ID of the evaluated AWS resource.</p>
     */
    inline bool ResourceIdHasBeenSet() const { return m_resourceIdHasBeenSet; }

    /**
     * <p>The ID of the evaluated AWS resource.</p>
     */
    inline void SetResourceId(const Aws::String& value) { m_resourceIdHasBeenSet = true; m_resourceId = value; }

    /**
     * <p>The ID of the evaluated AWS resource.</p>
     */
    inline void SetResourceId(Aws::String&& value) { m_resourceIdHasBeenSet = true; m_resourceId = std::move(value); }

    /**
     * <p>The ID of the evaluated AWS resource.</p>
     */
    inline void SetResourceId(const char* value) { m_resourceIdHasBeenSet = true; m_resourceId.assign(value); }

    /**
     * <p>The ID of the evaluated AWS resource.</p>
     */
    inline EvaluationResultQualifier& WithResourceId(const Aws::String& value) { SetResourceId(value); return *this;}

    /**
     * <p>The ID of the evaluated AWS resource.</p>
     */
    inline EvaluationResultQualifier& WithResourceId(Aws::String&& value) { SetResourceId(std::move(value)); return *this;}

    /**
     * <p>The ID of the evaluated AWS resource.</p>
     */
    inline EvaluationResultQualifier& WithResourceId(const char* value) { SetResourceId(value); return *this;}

  private:

    Aws::String m_configRuleName;
    // NOTE(review): the ...HasBeenSet flags rely on the out-of-line default
    // constructor (declared above) to initialize them to false — confirm in
    // the corresponding .cpp; they are not initialized here.
    bool m_configRuleNameHasBeenSet;

    Aws::String m_resourceType;
    bool m_resourceTypeHasBeenSet;

    Aws::String m_resourceId;
    bool m_resourceIdHasBeenSet;
  };
} // namespace Model
} // namespace ConfigService
} // namespace Aws
|
mowangdk/huskar | huskar_api/models/manifest.py | <gh_stars>10-100
from __future__ import absolute_import
from huskar_sdk_v2.utils import combine
from huskar_sdk_v2.consts import BASE_PATH
from huskar_api.models import huskar_client
from huskar_api.models.znode import ZnodeList
__all__ = ['application_manifest']
class ApplicationManifest(object):
    """Aggregated view of every application registered in ZooKeeper.

    Serves the minimal mode of Huskar API: when the database suffers an
    outage, the application list is answered from these ZooKeeper node
    lists instead.
    """

    # The three znode subtrees under which an application can appear.
    _SUBDOMAINS = ('service', 'switch', 'config')

    def __init__(self, huskar_client):
        # One ZnodeList watcher per subdomain, all sharing the same client.
        self._lists = [
            ZnodeList(huskar_client.client, combine(BASE_PATH, subdomain))
            for subdomain in self._SUBDOMAINS
        ]

    def start(self):
        """Begin watching every subdomain node list."""
        for znode_list in self._lists:
            znode_list.start()

    def check_is_application(self, name):
        """Return True if ``name`` is registered under any subdomain."""
        return any(name in znode_list.children for znode_list in self._lists)

    def as_list(self):
        """Return the sorted, de-duplicated union of all application names."""
        names = set()
        for znode_list in self._lists:
            names.update(znode_list.children)
        return sorted(names)
# Module-level singleton built against the shared client. Watching starts at
# import time, so importing this module has the side effect of registering
# ZooKeeper watches immediately.
application_manifest = ApplicationManifest(huskar_client)
application_manifest.start()
|
mramshaw/alexa-skills-kit-java | src/com/amazon/speech/speechlet/interfaces/core/directive/HintDirective.java | /*
Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file
except in compliance with the License. A copy of the License is located at
http://aws.amazon.com/apache2.0/
or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
the specific language governing permissions and limitations under the License.
*/
package com.amazon.speech.speechlet.interfaces.core.directive;
import com.amazon.speech.speechlet.Directive;
import com.amazon.speech.speechlet.interfaces.core.Hint;
import com.fasterxml.jackson.annotation.JsonTypeName;
/**
 * The hint directive renders a hint to the customer when there is an
 * appropriate rendering mechanism. Serialized with the JSON type name
 * {@code "Hint"}.
 */
@JsonTypeName("Hint")
public class HintDirective extends Directive {
    // The hint payload carried by this directive.
    private Hint hint;

    /**
     * Returns the hint.
     *
     * @return the hint
     */
    public Hint getHint() {
        return hint;
    }

    /**
     * Sets the hint to be rendered.
     *
     * @param hint
     *            the hint to render
     */
    public void setHint(Hint hint) {
        this.hint = hint;
    }
}
|
DangHT/workflow | scheduler/src/main/java/me/danght/workflow/scheduler/element/Node.java | package me.danght.workflow.scheduler.element;
import io.quarkus.redis.client.RedisClient;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.Accessors;
import me.danght.workflow.common.api.schduler.ProcessInstanceService;
import me.danght.workflow.scheduler.dao.TaskInstanceRepository;
import me.danght.workflow.scheduler.dao.TokenRepository;
import me.danght.workflow.scheduler.dataobject.Token;
import me.danght.workflow.scheduler.service.ActivityInstanceService;
import me.danght.workflow.scheduler.service.ProcessParamsRecordService;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/**
 * Base class for process-definition nodes. A node is connected to other
 * nodes by incoming/outgoing {@link SequenceFlow}s; execution moves a
 * {@link Token} through {@code enter} / {@code execute} / {@code leave}.
 */
@EqualsAndHashCode(callSuper = true)
@Data
@Accessors(chain = true)
public class Node extends BaseElement implements Serializable {

    // Flows entering and leaving this node in the process graph.
    public List<SequenceFlow> incomingFlows = new ArrayList<SequenceFlow>();
    public List<SequenceFlow> outgoingFlows = new ArrayList<SequenceFlow>();

    /**
     * Leaves this node along its default (first) outgoing flow.
     * NOTE(review): getDefaultOutgoing() may return null when there are no
     * outgoing flows; the overload below would then NPE on sequenceFlow.take
     * — confirm end nodes never reach this path.
     */
    public void leave(Token token,
                      ProcessParamsRecordService processParamsRecordService,
                      TokenRepository tokenRepository,
                      TaskInstanceRepository taskInstanceRepository,
                      ActivityInstanceService activityInstanceService,
                      ProcessInstanceService processInstanceService,
                      RedisClient redisClient){
        leave(token,getDefaultOutgoing(), processParamsRecordService, tokenRepository, taskInstanceRepository, activityInstanceService, processInstanceService, redisClient);
    }

    /**
     * Leaves this node along the given flow: points the token at this node,
     * records the element number, then lets the flow take the token onward.
     */
    public void leave(Token token,
                      SequenceFlow sequenceFlow,
                      ProcessParamsRecordService processParamsRecordService,
                      TokenRepository tokenRepository,
                      TaskInstanceRepository taskInstanceRepository,
                      ActivityInstanceService activityInstanceService,
                      ProcessInstanceService processInstanceService,
                      RedisClient redisClient){
        token.setCurrentNode(this);
        token.setElementNo(no);
        sequenceFlow.take(token, processParamsRecordService, tokenRepository, taskInstanceRepository, activityInstanceService, processInstanceService, redisClient);
    }

    /**
     * Returns the first outgoing flow, or null when none exist.
     */
    public SequenceFlow getDefaultOutgoing(){
        // TODO: no null handling for now; how the end node (which has no
        // outgoing flow) should be handled is still undecided.
        if(outgoingFlows == null || outgoingFlows.size() == 0)
            return null;
        return outgoingFlows.get(0);
    }

    /**
     * Enters this node with the token and immediately runs its behavior.
     */
    public void enter(Token token,
                      ProcessParamsRecordService processParamsRecordService,
                      TokenRepository tokenRepository,
                      TaskInstanceRepository taskInstanceRepository,
                      ActivityInstanceService activityInstanceService,
                      ProcessInstanceService processInstanceService,
                      RedisClient redisClient){
        token.setCurrentNode(this);
        token.setElementNo(no);
        execute(token,
                processParamsRecordService,
                tokenRepository,
                taskInstanceRepository,
                activityInstanceService,
                processInstanceService,
                redisClient);
    }

    /**
     * Node-specific behavior hook.
     */
    public void execute(Token token,
                        ProcessParamsRecordService processParamsRecordService,
                        TokenRepository tokenRepository,
                        TaskInstanceRepository taskInstanceRepository,
                        ActivityInstanceService activityInstanceService,
                        ProcessInstanceService processInstanceService,
                        RedisClient redisClient){
        // Overridden by subclasses; the base implementation is intentionally empty.
    }
}
|
lxb1226/leetcode_cpp | src/question88.cpp | <filename>src/question88.cpp
#include <iostream>
#include <vector>
#include <algorithm>
using namespace std;
// LeetCode 88: merge nums2 (first n elements, sorted) into nums1 (first m
// elements sorted, tail has room for n more). Merging backward lets the
// result be written in place without overwriting unread nums1 elements.
class Solution {
public:
    void merge(std::vector<int>& nums1, int m, std::vector<int>& nums2, int n) {
        int write = m + n - 1;  // next slot to fill, from the back
        int a = m - 1;          // last unmerged element of nums1
        int b = n - 1;          // last unmerged element of nums2
        while (a >= 0 && b >= 0) {
            if (nums1[a] >= nums2[b]) {
                nums1[write--] = nums1[a--];
            } else {
                nums1[write--] = nums2[b--];
            }
        }
        // Drain any remaining nums2 elements; leftover nums1 elements are
        // already in their final positions.
        while (b >= 0) {
            nums1[write--] = nums2[b--];
        }
    }
};
// Exercises Solution::merge on the m == 0 edge case and prints the merged
// array, one space before each element, followed by a newline.
int main() {
    std::vector<int> nums1{0};
    std::vector<int> nums2{1};
    int m = 0;
    int n = 1;

    Solution solution;
    solution.merge(nums1, m, nums2, n);

    for (const int value : nums1) {
        std::cout << " " << value;
    }
    std::cout << std::endl;
    return 0;
}
richardqiu/pyjanitor | janitor/xarray/functions.py | <reponame>richardqiu/pyjanitor<gh_stars>1-10
"""
Functions to augment XArray DataArrays and Datasets with additional
functionality.
"""
from typing import Union
import numpy as np
import xarray as xr
from pandas_flavor import (
register_xarray_dataarray_method,
register_xarray_dataset_method,
)
@register_xarray_dataarray_method
def clone_using(
    da: xr.DataArray,
    np_arr: np.array,
    use_coords: bool = True,
    use_attrs: bool = False,
    new_name: str = None,
) -> xr.DataArray:
    """
    Wrap a NumPy array in a new ``DataArray`` styled after an existing one.

    The new array keeps the dimension names of ``da`` and, depending on the
    flags, its coordinates and ``attrs``. This is a more selective form of
    ``xr.DataArray.copy()``: you pick which properties of the source carry
    over. When ``use_coords`` is ``False`` only the number of dimensions has
    to match, so the NumPy array may have a different shape.

    Usage example - making a new ``DataArray`` from a previous one, keeping
    the dimension names but dropping the coordinates (the input NumPy array
    is of a different size):

    .. code-block:: python

        da = xr.DataArray(
            np.zeros((512, 512)), dims=['ax_1', 'ax_2'],
            coords=dict(ax_1=np.linspace(0, 1, 512),
                        ax_2=np.logspace(-2, 2, 1024)),
            name='original'
        )
        new_da = da.clone_using(np.ones((4, 6)), new_name='new_and_improved',
                                use_coords=False)

    :param da: The ``DataArray`` supplied by the method itself.
    :param np_arr: The NumPy array to wrap in a new ``DataArray``.
    :param use_coords: If ``True``, reuse the source coordinates (shapes must
        then match). If ``False``, only the number of dimensions must match.
    :param use_attrs: If ``True``, copy the source ``attrs`` mapping (shallow
        copy - the values themselves are shared).
    :param new_name: Name for the returned ``DataArray``; defaults to the
        name of ``da``.
    :return: A ``DataArray`` styled like ``da`` containing the NumPy data.
    :raises ValueError: if the number of dimensions does not match.
    :raises ValueError: if ``use_coords`` is set and the shapes differ.
    """
    if np_arr.ndim != da.ndim:
        raise ValueError(
            "Number of dims in the NumPy array and the DataArray must match."
        )

    # ndim equality was checked above, so a tuple comparison is equivalent to
    # the element-wise axis-length comparison.
    if use_coords and tuple(np_arr.shape) != tuple(da.shape):
        raise ValueError(
            "Input NumPy array and DataArray must have the same "
            "shape if copying over coordinates."
        )

    name = da.name if new_name is None else new_name
    return xr.DataArray(
        np_arr,
        dims=da.dims,
        coords=da.coords if use_coords else None,
        attrs=da.attrs.copy() if use_attrs else None,
        name=name,
    )
@register_xarray_dataset_method
@register_xarray_dataarray_method
def convert_datetime_to_number(
    da_or_ds: Union[xr.DataArray, xr.Dataset],
    time_units: str,
    dim: str = "time",
):
    """
    Convert the coordinates of a datetime axis to a human-readable float
    representation.

    Usage example to convert a ``DataArray``'s time dimension coordinates from
    a ``datetime`` to minutes:

    .. code-block:: python

        timepoints = 60
        da = xr.DataArray(
            np.random.randint(0, 10, size=timepoints),
            dims='time',
            coords=dict(time=np.arange(timepoints) * np.timedelta64(1, 's'))
        )
        da_minutes = da.convert_datetime_to_number('m', dim='time')

    :param da_or_ds: XArray object.
    :param time_units: Numpy timedelta string specification for the unit you
        would like to convert the coordinates to.
    :param dim: the time dimension whose coordinates are datetime objects.
    :return: The original XArray object with the time dimension reassigned.
    """
    # Dividing a timedelta64 coordinate by a one-unit timedelta64 yields plain
    # floats in that unit (e.g. seconds -> minutes for time_units='m').
    # NOTE(review): this assumes the coordinate holds timedelta64 values (as in
    # the example above); numpy does not define datetime64 / timedelta64 -- TODO
    # confirm intended input type.
    times = da_or_ds.coords[dim].data / np.timedelta64(1, time_units)
    return da_or_ds.assign_coords({dim: times})
|
leftjs/qs-manager | src/reducers/index.js | <filename>src/reducers/index.js<gh_stars>0
/* Combine all available reducers to a single root reducer.
*
* CAUTION: When using the generators, this file is modified in some places.
* This is done via AST traversal - Some of your formatting may be lost
* in the process - no functionality should be broken though.
* This modifications only run once when the generator is invoked - if
* you edit them, they are not updated again.
*/
import { combineReducers } from 'redux';
import { routerReducer } from 'react-router-redux'
import * as types from '../actions/const'
import { loadingBarReducer } from 'react-redux-loading-bar'
import * as agent from './agent'
import * as good from './good'
import * as sale from './sale'
import * as admin from './admin'
/* Populated by react-webpack-redux:reducer */
// Local reducers defined directly in this module (the feature slices are
// imported above and merged in the combineReducers call below).
const reducers = {
  hello(state = {}, action) {
    // Only the pending phase of SAY_HELLO is handled; it returns a fresh
    // shallow copy of the state. Everything else passes state through.
    if (action.type === `${types.SAY_HELLO}_PENDING`) {
      return { ...state };
    }
    return state;
  }
};
// Root reducer: local reducers plus the feature slices (agent/good/sale/admin),
// with react-router-redux and react-redux-loading-bar mounted at their
// conventional keys ('routing' and 'loadingBar').
export default combineReducers({
  ...reducers,
  ...agent,
  ...good,
  ...sale,
  ...admin,
  routing: routerReducer,
  loadingBar: loadingBarReducer
});
|
aakoshh/metronome | metronome/checkpointing/models/src/io/iohk/metronome/checkpointing/models/Block.scala | package io.iohk.metronome.checkpointing.models
import io.iohk.metronome.checkpointing.models.Transaction.ProposerBlock
import scodec.bits.ByteVector
/** Represents what the HotStuff paper called "nodes" as the "tree",
* with the transactions in the body being the "commands".
*
* The block contents are specific to the checkpointing application.
*
* The header and body are separated because headers have to part
* of the Checkpoint Certificate; there's no need to repeat all
* the transactions there, the Merkle root will make it possible
* to prove that a given CheckpointCandidate transaction was
* indeed part of the block. The headers are needed for parent-child
* validation in the certificate as well.
*/
/** A checkpointing block: a header plus the body of transactions.
  *
  * Declared as a `sealed abstract case class` with a private constructor so
  * that instances are created through the companion's smart constructors
  * (`make` / `makeUnsafe`) rather than directly.
  */
sealed abstract case class Block private (
    header: Block.Header,
    body: Block.Body
) {
  /** A block is identified by the hash of its header. */
  def hash: Block.Header.Hash = header.hash
}
/** Companion with the smart constructors, the genesis block, and the
  * `Header`/`Body` components.
  */
object Block {
  type Hash = Block.Header.Hash

  /** Create a block from a header and body we received from the network.
    *
    * It will need to be validated before it can be used, to make sure
    * the header really belongs to the body.
    */
  def makeUnsafe(header: Header, body: Body): Block =
    new Block(header, body) {}

  /** Smart constructor for a block, setting the correct hashes in the header. */
  def make(
      parent: Block.Header,
      postStateHash: Ledger.Hash,
      body: Block.Body
  ): Block = {
    val header = Header(
      parentHash = parent.hash,
      height = parent.height + 1,
      postStateHash = postStateHash,
      contentMerkleRoot = Body.contentMerkleRoot(body)
    )
    makeUnsafe(header, body)
  }

  /** Check that the block hashes are valid.
    * (Only the content Merkle root is re-derived and checked here.)
    */
  def isValid(block: Block): Boolean =
    block.header.contentMerkleRoot == Body.contentMerkleRoot(block.body)

  /** The first, empty block: height 0, empty parent hash, empty body. */
  val genesis: Block = {
    val body = Body(Vector.empty)
    val header = Header(
      parentHash = Block.Header.Hash(ByteVector.empty),
      height = 0,
      postStateHash = Ledger.empty.hash,
      contentMerkleRoot = MerkleTree.empty.hash
    )
    makeUnsafe(header, body)
  }

  case class Header(
      parentHash: Header.Hash,
      height: Long,
      // Hash of the Ledger after executing the block.
      postStateHash: Ledger.Hash,
      // Merkle root of the transactions in the body.
      contentMerkleRoot: MerkleTree.Hash
  ) extends RLPHash[Header, Header.Hash]

  object Header extends RLPHashCompanion[Header]()(RLPCodecs.rlpBlockHeader)

  case class Body(
      transactions: IndexedSeq[Transaction]
  ) extends RLPHash[Body, Body.Hash] {
    /** Only the `ProposerBlock` transactions from this body. */
    def proposerBlocks: IndexedSeq[Transaction.ProposerBlock] =
      transactions.collect { case pb: ProposerBlock => pb }
  }

  object Body extends RLPHashCompanion[Body]()(RLPCodecs.rlpBlockBody) {
    val empty = Body(Vector.empty)

    /** Merkle root over the hashes of the body's transactions. */
    def contentMerkleRoot(body: Body): MerkleTree.Hash =
      MerkleTree
        .build(body.transactions.map(tx => MerkleTree.Hash(tx.hash)))
        .hash
  }
}
|
AndrewTimokhin/fizteh-java-2014 | src/ru/fizteh/fivt/students/IvanShafran/shell/commands/Command.java | <reponame>AndrewTimokhin/fizteh-java-2014<filename>src/ru/fizteh/fivt/students/IvanShafran/shell/commands/Command.java<gh_stars>1-10
package ru.fizteh.fivt.students.IvanShafran.shell.commands;
import java.io.File;
import java.nio.file.Paths;
import java.util.ArrayList;
public abstract class Command {
public abstract void execute(ArrayList<String> args) throws Exception;
public void checkArgs(ArrayList<String> args) throws Exception {
}
public static String getAbsolutePath(String workingDirectory, String path,
boolean checkExisting) throws Exception {
String resultPath;
if (!Paths.get(path).isAbsolute()) {
resultPath = Paths.get(workingDirectory, path).toString();
} else {
resultPath = Paths.get(path).toString();
}
File resultFile = new File(resultPath);
if (checkExisting && !resultFile.exists()) {
throw new Exception(path + ": No such file or directory");
}
return resultPath;
}
public static String getAbsolutePath(String workingDirectory, String path) throws Exception {
return getAbsolutePath(workingDirectory, path, true);
}
}
|
domenic/test262 | external/contributions/Google/sputniktests/tests/Conformance/11_Expressions/11.13_Assignment_Operators/11.13.2_Compound_Assignment/S11.13.2_A3.2_T7.js | // Copyright 2009 the Sputnik authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/**
* @name: S11.13.2_A3.2_T7;
* @section: 11.13.2;
* @assertion: Operator x @= y returns x @ y;
* @description: Checking Expression and Variable statements for x >>= y;
*/
//CHECK#1
// A compound assignment is itself an expression: (x >>= 1) both updates x
// and yields the shifted value, so x1 receives 4 >> 1 === 2.
var x = 4;
var x1 = (x >>= 1);
if (x1 !== 2) {
  $ERROR('#1: var x = 4; var x1 = (x >>= 1); x1 === 2. Actual: ' + (x1));
}

//CHECK#2
// Same check without `var`: y and y1 are created as globals by assignment.
y = 4;
y1 = (y >>= 1);
if (y1 !== 2) {
  $ERROR('#2: y = 4; y1 = (y >>= 1); y1 === 2. Actual: ' + (y1));
}
|
comake/yip-yip | src/components/searchbar/search_input.js | import React from 'react';
import { YIPYIP_INPUT_ID } from '../../constants.js';
const SearchInput = (props) => {
const { searchText, updateSearchText, inputRef, onBlur } = props;
const onSearchTextChange = React.useCallback(event => updateSearchText(event.target.value), [updateSearchText])
return (
<div id={'yipyip-input-container'}>
<input
ref={inputRef}
id={YIPYIP_INPUT_ID}
type='text'
placeholder={'YipYip!'}
value={searchText}
onChange={onSearchTextChange}
autoComplete={'off'}
name={'yipyip-search'}
list="autocompleteOff"
data-lpignore='true'
onBlur={onBlur}
/>
</div>
)
}
export default SearchInput;
|
Starcounter/rap | frameheader.go | // Copyright 2018 <NAME>. All rights reserved.
// Use of this source code is governed by the MIT license, see the LICENSE file.
// A frame header consists of four bytes. First byte is the flow-control bit
// and high seven bits of the size data. Second byte is low eight bits of the
// size data. Third byte is high eight bits of Conn ID, and fourth byte
// is the low eight bits of the Conn ID.
//
// If the flow-control bit is set, the frame has no actual payload, but is
// simply acknowledging receipt of the given number of bytes.
//
// Flow control is implemented by allowing up to FrameMaxSize bytes in
// transit per Conn. If the limit would be exceeded, block on sending.
// Before sending, increment the counter with the frame size. On receipt of a
// control flow frame, decrement the counter with the indicated number of
// bytes.
package rap
import "fmt"
/*
FrameHeader is 32 bits, divided into a 16-bit Size value, a 3-bit
control field and a 13-bit Conn ID. If the ID is 0x1fff (highest
possible), the frame is a *Muxer* control frame and the control field
is a 3-bit MSB value specifying the frame type:
* 000 - Panic, sender is shutting down due to error, Size is bytes of optional technical information
* 001 - reserved, but expect Size to reflect payload size
* 010 - Ping, Size is bytes of payload data to return in a Pong
* 011 - Pong, Size is bytes of payload data as received in the Ping
* 100 - reserved, ignore the Size value
* 101 - reserved, ignore the Size value
* 110 - reserved, ignore the Size value
* 111 - reserved, ignore the Size value
If Index is 0..0x1ffe (inclusive), the frame applies to that Conn, and
the control field is mapped to three flags: Flow, Body and Head. The following
table lists the valid flag combinations and their meaning:
* 000 - *reserved*, expect Size to reflect payload size
* 001 - the data bytes starts with a RAP *record*, without any body bytes
* 010 - data bytes form body data, no RAP *record* present
* 011 - data bytes starts with a RAP *record*, remaining bytes form body data
* 100 - flow control acknowledging the receipt of a data frame
* 101 - *reserved*, ignore the Size value
* 110 - final frame, requesting a ack in the form of a (Flow|Body)
* 111 - final frame, sent in response to a (Flow|Head), no response may be sent
*/
// FrameHeader is the raw 4-byte header laid out as described above:
// a big-endian 16-bit Size in bytes 0-1 and, in bytes 2-3, a 3-bit
// control field followed by a 13-bit Conn ID.
type FrameHeader []byte

// FrameFlag enumerates the flags used in the frame control bits
// (the top three bits of header byte 2).
type FrameFlag byte
const (
	// FrameFlagHead indicates the presence of a frame head record at the start
	// of the frame payload when the Flow flag is not set.
	FrameFlagHead FrameFlag = 0x20
	// FrameFlagBody indicates the presence of body data in the frame payload
	// when the Flow flag is not set. If the frame payload also has a Head record,
	// the body data starts after it.
	FrameFlagBody FrameFlag = 0x40
	// FrameFlagFlow signals the final frame for a Conn.
	FrameFlagFlow FrameFlag = 0x80
	// FrameFlagMask is a byte mask of the bits used in the third header byte.
	FrameFlagMask = byte(FrameFlagFlow | FrameFlagBody | FrameFlagHead)
)
// MuxerControl enumerates the different types of Muxer control frames.
type MuxerControl byte
const (
// MuxerControlPanic means sender is shutting down due to error,
// Size is bytes of optional technical information. Abort all active requests
// and log the technical information, if available.
MuxerControlPanic MuxerControl = MuxerControl(0)
// Unused but reserved for future use, Size contains payload size.
muxerControlReserved001 MuxerControl = MuxerControl(FrameFlagHead)
// MuxerControlPing requests a Pong in response with the same payload
// as this Ping message. Note that the other side may choose to
// not respond to all Pings.
MuxerControlPing MuxerControl = MuxerControl(FrameFlagBody)
// MuxerControlPong is in response to a Ping. The Size value must be the
// same as the Size value for last received Ping.
MuxerControlPong MuxerControl = MuxerControl(FrameFlagBody | FrameFlagHead)
// Unused but reserved for future use, ignore Size value
muxerControlReserved100 MuxerControl = MuxerControl(FrameFlagFlow)
// Unused but reserved for future use, ignore Size value
muxerControlReserved101 MuxerControl = MuxerControl(FrameFlagFlow | FrameFlagHead)
// Unused but reserved for future use, ignore Size value
muxerControlReserved110 MuxerControl = MuxerControl(FrameFlagFlow | FrameFlagBody)
// Unused but reserved for future use, ignore Size value
muxerControlReserved111 MuxerControl = MuxerControl(FrameFlagFlow | FrameFlagBody | FrameFlagHead)
)
// muxerControlTexts maps each Muxer control frame type to the short label
// used by FrameHeader.String.
var muxerControlTexts = map[MuxerControl]string{
	MuxerControlPanic:       "Panic",
	muxerControlReserved001: "Rsvd001",
	MuxerControlPing:        "Ping",
	MuxerControlPong:        "Pong",
	muxerControlReserved100: "Rsvd100",
	muxerControlReserved101: "Rsvd101",
	muxerControlReserved110: "Rsvd110",
	muxerControlReserved111: "Rsvd111",
}

// muxerFlagTexts maps each data-frame flag combination to a three-character
// "FBH" display string (dot = flag clear) used by FrameHeader.String.
var muxerFlagTexts = map[FrameFlag]string{
	(0):                            "...",
	(FrameFlagHead):                "..H",
	(FrameFlagBody):                ".B.",
	(FrameFlagBody | FrameFlagHead): ".BH",
	(FrameFlagFlow):                "F..",
	(FrameFlagFlow | FrameFlagHead): "F.H",
	(FrameFlagFlow | FrameFlagBody): "FB.",
	(FrameFlagFlow | FrameFlagBody | FrameFlagHead): "FBH",
}
// String renders the header for debugging: Conn ID, the control field
// (control-frame name or FBH flag string), the Size value and the slice length.
func (fh FrameHeader) String() string {
	midText := muxerFlagTexts[fh.FrameControl()]
	if fh.IsMuxerControl() {
		midText = muxerControlTexts[fh.MuxerControl()]
	}
	return fmt.Sprintf("[FrameHeader %s %s %d (%d)]", fh.ConnID(), midText, fh.SizeValue(), len(fh))
}
// getLargeValue reads the big-endian 16-bit value stored in bytes 0 and 1.
func (fh FrameHeader) getLargeValue() uint16 {
	hi, lo := uint16(fh[0]), uint16(fh[1])
	return hi<<8 | lo
}

// setLargeValue writes n big-endian into bytes 0 and 1.
func (fh FrameHeader) setLargeValue(n uint16) {
	fh[0], fh[1] = byte(n>>8), byte(n)
}

// getSmallValue reads the 13-bit value stored in the low bits of bytes 2 and 3.
func (fh FrameHeader) getSmallValue() uint16 {
	hi := uint16(fh[2] &^ FrameFlagMask) // AND NOT: strip the three control bits
	return hi<<8 | uint16(fh[3])
}

// setSmallValue writes the 13-bit value into the low bits of bytes 2 and 3,
// preserving the control bits in byte 2.
func (fh FrameHeader) setSmallValue(n uint16) {
	fh[2] = (fh[2] & FrameFlagMask) | byte(n>>8)
	fh[3] = byte(n)
}
// SizeValue returns the Size value of the frame.
// This is valid for both MuxerControl frames and data frames.
func (fh FrameHeader) SizeValue() int {
	return int(fh.getLargeValue())
}

// SetSizeValue sets the Size value of the header.
// This is valid for both MuxerControl frames and data frames.
// Values above 16 bits are silently truncated by the uint16 conversion.
func (fh FrameHeader) SetSizeValue(n int) {
	fh.setLargeValue(uint16(n))
}

// ConnID returns the Conn ID of the frame.
// This is valid for both MuxerControl frames and data frames.
func (fh FrameHeader) ConnID() ConnID {
	return ConnID(fh.getSmallValue())
}

// SetConnID sets the Conn ID.
// This is valid for both MuxerControl frames and data frames.
// Panics if connID is greater than MuxerConnID.
func (fh FrameHeader) SetConnID(connID ConnID) {
	if connID > MuxerConnID {
		panic("SetConnID(): connID > MuxerConnID")
	}
	fh.setSmallValue(uint16(connID))
}
// HasPayload reports whether the Size value counts payload bytes: the Flow
// bit is clear and at least one of the Body/Head bits is set.
// This is valid for both MuxerControl frames and data frames.
func (fh FrameHeader) HasPayload() bool {
	return fh.HasBodyOrHead() && !fh.HasFlow()
}

// IsAck reports whether the frame acknowledges a sent frame:
// the Flow bit is set on its own, with Body and Head clear.
func (fh FrameHeader) IsAck() bool {
	return fh.HasFlow() && !fh.HasBodyOrHead()
}

// IsFinal reports whether the frame is a final frame (either type):
// both the Flow and Body bits are set.
func (fh FrameHeader) IsFinal() bool {
	return fh.HasFlow() && fh.HasBody()
}

// IsFinalAck reports whether the frame is a final frame acknowledgement
// (all three control bits set), in which case no response is to be sent.
func (fh FrameHeader) IsFinalAck() bool {
	return fh.HasFlow() && fh.HasBody() && fh.HasHead()
}

// PayloadSize returns the number of payload bytes for a frame, or zero when
// the Size value does not describe a payload.
// If you have ensured HasPayload() returns true, use SizeValue() directly.
// This is valid for both MuxerControl frames and data frames.
func (fh FrameHeader) PayloadSize() int {
	if !fh.HasPayload() {
		return 0
	}
	return fh.SizeValue()
}
// IsMuxerControl returns true if the Conn ID indicates this is a Muxer control frame.
func (fh FrameHeader) IsMuxerControl() bool {
	return fh.ConnID() == MuxerConnID
}

// MuxerControl returns the frame control bits as a MuxerControl value.
// Only valid for Muxer control frames where IsMuxerControl() returns true.
func (fh FrameHeader) MuxerControl() MuxerControl {
	return MuxerControl(fh[2] & FrameFlagMask)
}

// FrameControl returns the frame control bits as a FrameControl bitmask.
// Only valid for data frames where IsMuxerControl() returns false.
func (fh FrameHeader) FrameControl() FrameFlag {
	return FrameFlag(fh[2] & FrameFlagMask)
}

// SetMuxerControl sets the frame header to a Muxer control frame.
// This sets the control bits and also sets the Conn ID to MuxerConnID.
func (fh FrameHeader) SetMuxerControl(sc MuxerControl) {
	fh[2] = (fh[2] & (^FrameFlagMask)) | byte(sc)
	fh.SetConnID(MuxerConnID)
}
// HasFlow returns true if the Flow bit is set in the frame header.
// Only valid for data frames where IsMuxerControl() returns false.
func (fh FrameHeader) HasFlow() bool {
	return FrameFlag(fh[2])&FrameFlagFlow == FrameFlagFlow
}

// SetFlow sets the Flow bit in the header.
// Only valid for data frames where IsMuxerControl() returns false.
func (fh FrameHeader) SetFlow() {
	fh[2] |= byte(FrameFlagFlow)
}

// HasBodyOrHead returns true if either the Body or Head bits are set.
func (fh FrameHeader) HasBodyOrHead() bool {
	return FrameFlag(fh[2])&(FrameFlagBody|FrameFlagHead) != 0
}

// HasHead returns true if the Head bit is set in the frame header.
// Only valid for data frames where IsMuxerControl() returns false.
func (fh FrameHeader) HasHead() bool {
	return FrameFlag(fh[2])&FrameFlagHead == FrameFlagHead
}

// SetHead sets the Head bit in the header. Only valid for data frames.
func (fh FrameHeader) SetHead() {
	fh[2] |= byte(FrameFlagHead)
}

// HasBody returns true if the Body bit is set in the frame header.
// Only valid for data frames where IsMuxerControl() returns false.
func (fh FrameHeader) HasBody() bool {
	return FrameFlag(fh[2])&FrameFlagBody == FrameFlagBody
}

// SetBody sets the Body bit in the header. Only valid for data frames.
func (fh FrameHeader) SetBody() {
	fh[2] |= byte(FrameFlagBody)
}
// Clear zeroes out the four frameheader bytes.
func (fh FrameHeader) Clear() {
	fh[0], fh[1], fh[2], fh[3] = 0, 0, 0, 0
}
// ClearID zeroes out the frameheader bytes and sets the ConnID.
// Panics if connID is greater than MaxConnID (unlike SetConnID, the
// reserved Muxer ID is not allowed here).
func (fh FrameHeader) ClearID(connID ConnID) {
	if connID > MaxConnID {
		// Fixed: the panic message previously named the wrong function
		// ("AppendFrameHeader()"), which was misleading in stack traces.
		panic("ClearID(): connID > MaxConnID")
	}
	fh.Clear()
	fh.SetConnID(connID)
}
|
matkosoric/OCP | src/main/java/edu/matkosoric/execution/output/prrt/PRRT.java | <reponame>matkosoric/OCP
package edu.matkosoric.execution.output.prrt;
/*
* Code examples for Oracle Certified Professional (OCP) Exam
* Java 11 SE, 2021.
* Created by © <NAME>.
*/
// #TAG1
/**
 * OCP exam puzzle: with {@code continue a;} present on LINE 1, the program
 * prints "PRRT".
 */
public class PRRT {
    // what has to be added to line 1 in order to output PRRT
    // continue a;

    public static void main(String[] args) {
        StringBuilder txt1 = new StringBuilder("PPQRRRSTT");
        int i = 0;
        a:
        while (i < txt1.length()) {
            char x = txt1.charAt(i);   // character currently being scanned
            int j = 0;
            i++;                       // advance before the inner scan begins
            b:
            while (j < txt1.length()) {
                char y = txt1.charAt(j);
                if (i != j && y == x) {
                    // Delete the first *other* occurrence of x, then restart
                    // the outer loop with the (already advanced) i.
                    txt1.deleteCharAt(j);
                    continue a; //LINE 1
                }
                j++;
            }
        }
        // Trace: PPQRRRSTT -> PQRRRSTT -> PRRRSTT -> PRRSTT -> PRRTT -> PRRT
        System.out.println(txt1);
    }
}
|
msleprosy/cloud-pipeline | e2e/cli/buckets/mv/test_mv_with_folders.py | <reponame>msleprosy/cloud-pipeline
# Copyright 2017-2019 EPAM Systems, Inc. (https://www.epam.com/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from common_utils.pipe_cli import *
from ..utils.assertions_utils import *
from ..utils.file_utils import *
from ..utils.utilities_for_test import *
class TestMoveWithFolders(object):
bucket_name = "epmcmbibpc-it-mv-folders{}".format(get_test_prefix())
other_bucket_name = "{}-other".format(bucket_name)
current_directory = os.getcwd()
home_dir = "test_cp_home_dir-681%s/" % get_test_prefix()
checkout_dir = "mv-folders-checkout/"
test_prefix = "%s-mv-folders-" % get_test_prefix()
output_folder = test_prefix + TestFiles.TEST_FOLDER_FOR_OUTPUT
test_file_1 = test_prefix + TestFiles.TEST_FILE1
test_file_with_other_extension = test_prefix + TestFiles.TEST_FILE_WITH_OTHER_EXTENSION
test_file_2 = test_prefix + TestFiles.TEST_FILE2
test_folder = test_prefix + TestFiles.TEST_FOLDER
test_folder_2 = test_prefix + TestFiles.TEST_FOLDER2
source_dir = "mv-folders-sources%s/" % get_test_prefix()
    @classmethod
    def setup_class(cls):
        """Create both buckets and the local file fixtures shared by all tests."""
        logging.basicConfig(filename='tests.log', level=logging.INFO,
                            format='%(levelname)s %(asctime)s %(module)s:%(message)s')
        create_buckets(cls.bucket_name, cls.other_bucket_name)
        # /test_folder
        create_test_folder(os.path.abspath(cls.source_dir + cls.test_folder))
        # /cp-files-test_folder_for_outputs
        create_test_folder(os.path.abspath(cls.output_folder))
        # ./test_file.txt
        create_test_file(os.path.abspath(cls.source_dir + cls.test_file_1), TestFiles.DEFAULT_CONTENT)
        # ./test_folder/test_file.txt
        create_test_file(os.path.abspath(cls.source_dir + cls.test_folder + cls.test_file_1), TestFiles.DEFAULT_CONTENT)
        # ./test_folder/test_file.json
        create_test_file(os.path.abspath(cls.source_dir + cls.test_folder + cls.test_file_with_other_extension),
                         TestFiles.DEFAULT_CONTENT)
        # ./test_folder/other/test_file.txt
        create_test_file(os.path.abspath(cls.source_dir + cls.test_folder + cls.test_folder_2 + cls.test_file_1),
                         TestFiles.DEFAULT_CONTENT)
        # ./test_file2.txt -- different content, used to pre-populate --force targets
        create_test_file(os.path.abspath(cls.source_dir + cls.test_file_2), TestFiles.COPY_CONTENT)
        # ~/test_cp_home_dir/test_file.txt
        create_test_file(os.path.join(os.path.expanduser('~'), cls.source_dir + cls.home_dir, cls.test_file_1),
                         TestFiles.DEFAULT_CONTENT)
        # ~/test_cp_home_dir/other/test_file.txt
        create_test_file(os.path.join(os.path.expanduser('~'), cls.source_dir + cls.home_dir, cls.test_folder_2,
                                      cls.test_file_1), TestFiles.DEFAULT_CONTENT)
    @classmethod
    def teardown_class(cls):
        """Delete both buckets and every local directory the suite created."""
        delete_buckets(cls.bucket_name, cls.other_bucket_name)
        clean_test_data(os.path.abspath(cls.source_dir))
        clean_test_data(os.path.join(os.path.expanduser('~'), cls.output_folder))
        clean_test_data(os.path.join(os.path.expanduser('~'), cls.source_dir))
        clean_test_data(os.path.join(os.path.expanduser('~'), cls.home_dir))
        clean_test_data(os.path.abspath(cls.checkout_dir))
        clean_test_data(os.path.abspath(cls.output_folder))
        clean_test_data(os.path.abspath(cls.test_folder))
"""
1. epam test case
2. source path
3. with --force option
4. path to directory if need to switch current directory
"""
test_case_for_upload_folders = [
("EPMCMBIBPC-680", os.path.abspath(test_folder), False, None),
("EPMCMBIBPC-681", "~/" + home_dir, None, None),
("EPMCMBIBPC-684", "./" + test_folder, False, None),
("EPMCMBIBPC-684-1", "./", False, True),
("EPMCMBIBPC-684-2", ".", False, True),
("EPMCMBIBPC-685", os.path.abspath(test_folder) + "/", True, None),
]
    @pytest.mark.run(order=1)
    @pytest.mark.parametrize("case,source,force,switch_dir", test_case_for_upload_folders)
    def test_folder_should_be_uploaded(self, case, source, force, switch_dir):
        """`pipe storage mv` of a local folder into the bucket: the files must
        appear under the case prefix in the bucket and disappear locally."""
        destination = "cp://{}/{}/".format(self.bucket_name, case)
        if force:
            # Pre-create conflicting objects so --force has something to overwrite.
            pipe_storage_cp(os.path.abspath(self.source_dir + self.test_file_2), destination + self.test_file_1,
                            expected_status=0)
            pipe_storage_cp(os.path.abspath(self.source_dir + self.test_file_2),
                            destination + self.test_folder + self.test_file_1,
                            expected_status=0)
        if source.startswith("~"):
            # Expand '~' manually so the filesystem assertions below see the real path.
            source_for_checks = os.path.join(os.path.expanduser('~'), source.strip("~/"))
        else:
            source_for_checks = source
        if switch_dir:
            # Relative-source cases ("./", "."): run the move from inside checkout_dir.
            dir_path = os.path.abspath(self.checkout_dir)
            create_test_files(TestFiles.DEFAULT_CONTENT, os.path.join(dir_path, self.test_file_1),
                              os.path.join(dir_path, self.test_folder, self.test_file_1))
            assert os.path.exists(os.path.join(dir_path, self.test_file_1))
            assert os.path.exists(os.path.join(dir_path, self.test_folder, self.test_file_1))
            source_for_checks = dir_path
            os.chdir(dir_path)
        else:
            create_test_file(os.path.join(source_for_checks, self.test_file_1), TestFiles.DEFAULT_CONTENT)
            create_test_file(os.path.join(source_for_checks, self.test_folder, self.test_file_1),
                             TestFiles.DEFAULT_CONTENT)
        # Snapshot the source objects before the move so they can be compared after.
        source_file_object = ObjectInfo(True).build(os.path.join(source_for_checks, self.test_file_1))
        source_folder_file_object = ObjectInfo(True).build(os.path.join(source_for_checks, self.test_folder,
                                                                        self.test_file_1))
        logging.info("Ready to perform mv operation from {} to {}".format(source, destination))
        pipe_storage_mv(source, destination, force=force, recursive=True, expected_status=0)
        assert_copied_object_info(source_file_object,
                                  ObjectInfo(False).build(self.bucket_name, os.path.join(case, self.test_file_1)),
                                  case)
        assert_copied_object_info(source_folder_file_object,
                                  ObjectInfo(False).build(self.bucket_name, os.path.join(
                                      case, self.test_folder + self.test_file_1)), case)
        # mv semantics: the local originals must be gone afterwards.
        assert_files_deleted(None, os.path.join(source_for_checks, self.test_file_1))
        assert_files_deleted(None, os.path.join(source_for_checks, self.test_folder, self.test_file_1))
        os.chdir(self.current_directory)
"""
1. epam test case
2. source path
3. path to directory if need to switch current directory
4. relative path to file to rewrite (with --force option)
"""
test_case_for_download_folders = [
("EPMCMBIBPC-680", os.path.abspath(output_folder + "EPMCMBIBPC-680") + "/", None, None),
("EPMCMBIBPC-681", "~/" + output_folder + "EPMCMBIBPC-681/", None, None),
("EPMCMBIBPC-684", "./" + output_folder + "EPMCMBIBPC-684/", None, None),
("EPMCMBIBPC-684-1", "./", None, True),
("EPMCMBIBPC-684-2", ".", None, True),
("EPMCMBIBPC-685", os.path.abspath(output_folder + "EPMCMBIBPC-685") + "/", True, None),
]
    @pytest.mark.run(order=2)
    @pytest.mark.parametrize("case,destination,force,switch_dir", test_case_for_download_folders)
    def test_folder_should_be_downloaded(self, case, destination, force, switch_dir):
        """`pipe storage mv` of a bucket folder (uploaded by the order=1 test)
        back to a local directory: files must land locally and be removed from
        the bucket."""
        source = "cp://{}/{}/".format(self.bucket_name, case)
        if force:
            # Pre-create local files with different content for --force to overwrite.
            create_test_file(destination + self.test_file_1, TestFiles.COPY_CONTENT)
            create_test_file(destination + self.test_folder + self.test_file_1, TestFiles.COPY_CONTENT)
        if destination.startswith("~"):
            # Expand '~' manually so the assertions below see the real path.
            destination_for_checks = os.path.join(os.path.expanduser('~'), destination.strip("~/"))
        else:
            destination_for_checks = destination
        if switch_dir:
            # Relative-destination cases ("./", "."): run the move from a scratch dir.
            dir_path = os.path.abspath(os.path.join(destination, self.checkout_dir, case))
            create_test_folder(dir_path)
            os.chdir(dir_path)
        # Snapshot the bucket objects before the move so they can be compared after.
        source_file_object = ObjectInfo(False).build(self.bucket_name, os.path.join(case, self.test_file_1))
        source_folder_file_object = ObjectInfo(False).build(self.bucket_name, os.path.join(
            case, self.test_folder + self.test_file_1))
        logging.info("Ready to perform operation from {} to {}".format(source, destination))
        pipe_storage_mv(source, destination, force=force, recursive=True)
        assert_copied_object_info(source_file_object,
                                  ObjectInfo(True).build(os.path.join(destination_for_checks, self.test_file_1)),
                                  case)
        assert_copied_object_info(source_folder_file_object,
                                  ObjectInfo(True).build(os.path.join(destination_for_checks, self.test_folder,
                                                                      self.test_file_1)), case)
        # mv semantics: the bucket originals must be gone afterwards.
        assert_files_deleted(self.bucket_name, os.path.join(case, self.test_file_1))
        assert_files_deleted(self.bucket_name, os.path.join(case, self.test_folder + self.test_file_1))
        os.chdir(self.current_directory)
"""
1. epam test case
2. --force option
"""
test_case_for_copy_between_buckets_folders = [
("EPMCMBIBPC-680", False),
("EPMCMBIBPC-685", True),
]
    @pytest.mark.run(order=3)
    @pytest.mark.parametrize("case,force", test_case_for_copy_between_buckets_folders)
    def test_folder_should_be_copied(self, case, force):
        """`pipe storage mv` of a folder between two buckets: files must appear
        in the other bucket and be removed from the source bucket."""
        source = "cp://{}/{}/".format(self.bucket_name, case)
        destination = "cp://{}/{}/".format(self.other_bucket_name, case)
        # Seed the source bucket for this case.
        pipe_storage_cp(self.source_dir + self.test_file_1, source + self.test_file_1, expected_status=0)
        pipe_storage_cp(self.source_dir + self.test_folder + self.test_file_1, source + self.test_folder +
                        self.test_file_1, expected_status=0)
        source_file_object = ObjectInfo(False).build(self.bucket_name, os.path.join(case, self.test_file_1))
        source_folder_file_object = ObjectInfo(False).build(self.bucket_name, os.path.join(
            case, self.test_folder, self.test_file_1))
        if force:
            # Pre-create conflicting destination objects for --force to overwrite.
            pipe_storage_cp(self.source_dir + self.test_file_2, destination + self.test_file_1,
                            expected_status=0)
            pipe_storage_cp(self.source_dir + self.test_file_2,
                            destination + self.test_folder + self.test_file_1, expected_status=0)
        logging.info("Ready to perform operation from {} to {}".format(source, destination))
        pipe_storage_mv(source, destination, force=force, recursive=True, expected_status=0)
        assert_copied_object_info(source_file_object,
                                  ObjectInfo(False).build(self.other_bucket_name,
                                                          os.path.join(case, self.test_file_1)), case)
        assert_copied_object_info(source_folder_file_object,
                                  ObjectInfo(False).build(self.other_bucket_name, os.path.join(
                                      case, self.test_folder, self.test_file_1)), case)
        # mv semantics: the source-bucket objects must be gone afterwards.
        assert_files_deleted(self.bucket_name, self.source_dir + self.test_file_1)
        assert_files_deleted(self.bucket_name, self.source_dir + self.test_folder + self.test_file_1)
    @pytest.mark.run(order=1)
    def test_excluded_files_should_be_uploaded(self):
        """`pipe storage mv --exclude`: only the non-excluded file must be moved;
        the *.json file and the nested folder must be skipped (and stay local)."""
        case = "EPMCMBIBPC-686-1"
        source = os.path.abspath(self.test_folder)
        source_test_file_1 = os.path.join(source, self.test_file_1)
        source_test_file_2 = os.path.join(source, self.test_file_with_other_extension)
        source_test_folder_file = os.path.join(source, self.test_folder, self.test_file_1)
        destination = "cp://{}/{}/".format(self.bucket_name, case)
        create_test_files(TestFiles.DEFAULT_CONTENT, source_test_file_1, source_test_file_2, source_test_folder_file)
        # Snapshot the one file that is expected to be moved.
        source_object = ObjectInfo(True).build(source_test_file_1)
        logging.info("Ready to perform mv operation from {} to {}".format(source, destination))
        pipe_storage_mv(source, destination, recursive=True, exclude=["*json", "{}*".format(self.test_folder)],
                        expected_status=0)
        assert_copied_object_info(source_object, ObjectInfo(False).build(self.bucket_name,
                                                                         os.path.join(case, self.test_file_1)), case)
        # Excluded paths must not appear in the bucket...
        assert_copied_object_does_not_exist(ObjectInfo(False).build(self.bucket_name, os.path.join(
            case, self.test_folder, self.test_file_1)), case)
        assert_copied_object_does_not_exist(ObjectInfo(False).build(self.bucket_name, os.path.join(
            case, self.test_file_with_other_extension)), case)
        # ...and must still exist locally (skipped, not deleted).
        assert_files_skipped(None, source_test_folder_file)
        assert_files_skipped(None, source_test_file_2)
    @pytest.mark.run(order=2)
    def test_excluded_files_should_be_downloaded(self):
        """EPMCMBIBPC-686-2: `mv --exclude` on download moves only non-matching
        bucket keys; excluded keys remain in the bucket."""
        case = "EPMCMBIBPC-686-2"
        source = "cp://{}/{}/".format(self.bucket_name, case)
        key_file_1 = os.path.join(case, self.test_file_1)
        key_file_2 = os.path.join(case, self.test_file_with_other_extension)
        key_folder_file = os.path.join(case, self.test_folder, self.test_file_1)
        create_test_files_on_bucket(self.source_dir + self.test_file_1, self.bucket_name, key_file_1,
                                    key_file_2, key_folder_file)
        destination = os.path.abspath(os.path.join(self.output_folder, case))
        source_object = ObjectInfo(False).build(self.bucket_name, key_file_1)
        logging.info("Ready to perform operation from {} to {}".format(source, destination))
        pipe_storage_mv(source, destination, recursive=True, exclude=["*json", "{}*".format(self.test_folder)],
                        expected_status=0)
        assert_copied_object_info(source_object, ObjectInfo(True).build(os.path.join(destination, self.test_file_1)),
                                  case)
        assert_copied_object_does_not_exist(ObjectInfo(True).build(os.path.join(destination, self.test_folder,
                                                                                self.test_file_1)), case)
        assert_copied_object_does_not_exist(ObjectInfo(True).build(os.path.join(
            destination, self.test_file_with_other_extension)), case)
        assert_files_skipped(self.bucket_name, key_file_2)
        assert_files_skipped(self.bucket_name, key_folder_file)
    @pytest.mark.run(order=3)
    def test_excluded_files_should_be_copied(self):
        """EPMCMBIBPC-686-3: `mv --exclude` between buckets moves only
        non-matching keys; excluded keys stay in the source bucket."""
        case = "EPMCMBIBPC-686-3"
        key_file_1 = os.path.join(case, self.test_file_1)
        key_file_2 = os.path.join(case, self.test_file_with_other_extension)
        key_folder_file = os.path.join(case, self.test_folder, self.test_file_1)
        source = "cp://{}/{}/".format(self.bucket_name, case)
        destination = "cp://{}/{}/".format(self.other_bucket_name, case)
        create_test_files_on_bucket(self.source_dir + self.test_file_1, self.bucket_name, key_file_1,
                                    key_folder_file, key_file_2)
        source_object = ObjectInfo(False).build(self.bucket_name, key_file_1)
        logging.info("Ready to perform mv operation from {} to {}".format(source, destination))
        pipe_storage_mv(source, destination, recursive=True, exclude=["*json", "{}*".format(self.test_folder)],
                        expected_status=0)
        assert_copied_object_info(source_object, ObjectInfo(False).build(self.other_bucket_name, key_file_1), case)
        assert_copied_object_does_not_exist(ObjectInfo(False).build(self.other_bucket_name, key_folder_file), case)
        assert_copied_object_does_not_exist(ObjectInfo(False).build(self.other_bucket_name, key_file_2), case)
        assert_files_skipped(self.bucket_name, key_folder_file)
        assert_files_skipped(self.bucket_name, key_file_2)
    @pytest.mark.run(order=1)
    def test_included_files_should_be_uploaded(self):
        """EPMCMBIBPC-688-1: `mv --include` on upload moves only files matching
        the include pattern (*json); all others are left on disk."""
        case = "EPMCMBIBPC-688-1"
        source = os.path.abspath(self.test_folder)
        source_test_file_1 = os.path.join(source, self.test_file_1)
        source_test_file_2 = os.path.join(source, self.test_file_with_other_extension)
        source_test_folder_file = os.path.join(source, self.test_folder, self.test_file_1)
        destination = "cp://{}/{}/".format(self.bucket_name, case)
        create_test_files(TestFiles.DEFAULT_CONTENT, source_test_file_1, source_test_file_2, source_test_folder_file)
        source_object = ObjectInfo(True).build(source_test_file_2)
        logging.info("Ready to perform mv operation from {} to {}".format(source, destination))
        pipe_storage_mv(source, destination, recursive=True, include=["*json"], expected_status=0)
        assert_copied_object_info(source_object, ObjectInfo(False).build(
            self.bucket_name, os.path.join(case, self.test_file_with_other_extension)), case)
        assert_copied_object_does_not_exist(ObjectInfo(False).build(self.bucket_name, os.path.join(
            case, self.test_folder, self.test_file_1)), case)
        assert_copied_object_does_not_exist(ObjectInfo(False).build(self.bucket_name, os.path.join(
            case, self.test_file_1)), case)
        assert_files_skipped(None, source_test_file_1)
        assert_files_skipped(None, source_test_folder_file)
    @pytest.mark.run(order=2)
    def test_included_files_should_be_downloaded(self):
        """EPMCMBIBPC-688-2: `mv --include` on download moves only keys matching
        the include pattern; other keys remain in the bucket."""
        case = "EPMCMBIBPC-688-2"
        source = "cp://{}/{}/".format(self.bucket_name, case)
        key_file_1 = os.path.join(case, self.test_file_1)
        key_file_2 = os.path.join(case, self.test_file_with_other_extension)
        key_folder_file = os.path.join(case, self.test_folder, self.test_file_1)
        create_test_files_on_bucket(self.source_dir + self.test_file_1, self.bucket_name, key_file_1,
                                    key_file_2, key_folder_file)
        destination = os.path.abspath(os.path.join(self.output_folder, case))
        source_object = ObjectInfo(False).build(self.bucket_name, key_file_2)
        logging.info("Ready to perform operation from {} to {}".format(source, destination))
        pipe_storage_mv(source, destination, recursive=True, include=["*json"], expected_status=0)
        assert_copied_object_info(source_object, ObjectInfo(True).build(os.path.join(
            destination, self.test_file_with_other_extension)), case)
        assert_copied_object_does_not_exist(ObjectInfo(True).build(os.path.join(destination, self.test_folder,
                                                                                self.test_file_1)), case)
        assert_copied_object_does_not_exist(ObjectInfo(True).build(os.path.join(
            destination, self.test_file_1)), case)
        assert_files_skipped(self.bucket_name, key_file_1)
        assert_files_skipped(self.bucket_name, key_folder_file)
@pytest.mark.run(order=3)
def test_excluded_files_should_be_copied(self):
case = "EPMCMBIBPC-688-3"
key_file_1 = os.path.join(case, self.test_file_1)
key_file_2 = os.path.join(case, self.test_file_with_other_extension)
key_folder_file = os.path.join(case, self.test_folder, self.test_file_1)
source = "cp://{}/{}/".format(self.bucket_name, case)
destination = "cp://{}/{}/".format(self.other_bucket_name, case)
create_test_files_on_bucket(self.source_dir + self.test_file_1, self.bucket_name, key_file_1,
key_folder_file, key_file_2)
source_object = ObjectInfo(False).build(self.bucket_name, key_file_2)
logging.info("Ready to perform mv operation from {} to {}".format(source, destination))
pipe_storage_mv(source, destination, recursive=True, include=["*json"], expected_status=0)
assert_copied_object_info(source_object, ObjectInfo(False).build(self.other_bucket_name, key_file_2), case)
assert_copied_object_does_not_exist(ObjectInfo(False).build(self.other_bucket_name, key_folder_file), case)
assert_copied_object_does_not_exist(ObjectInfo(False).build(self.other_bucket_name, key_file_1), case)
assert_files_skipped(self.bucket_name, key_folder_file)
assert_files_skipped(self.bucket_name, key_file_1)
    @pytest.mark.run(order=1)
    def test_included_excluded_files_should_be_uploaded(self):
        """EPMCMBIBPC-689-1: combined `--include` and `--exclude` on upload —
        only files matching include AND not matching exclude are moved."""
        case = "EPMCMBIBPC-689-1"
        source = os.path.abspath(self.test_folder) + "/"
        source_test_file_1 = os.path.join(source, self.test_file_1)
        source_test_file_2 = os.path.join(source, self.test_file_with_other_extension)
        source_test_folder_file = os.path.join(source, self.test_folder, self.test_file_1)
        destination = "cp://{}/{}/".format(self.bucket_name, case)
        create_test_files(TestFiles.DEFAULT_CONTENT, source_test_file_1, source_test_file_2, source_test_folder_file)
        source_object = ObjectInfo(True).build(source_test_file_1)
        logging.info("Ready to perform mv operation from {} to {}".format(source, destination))
        pipe_storage_mv(source, destination, recursive=True, include=["*txt"],
                        exclude=["{}*".format(self.test_folder)], expected_status=0)
        assert_copied_object_info(source_object, ObjectInfo(False).build(
            self.bucket_name, os.path.join(case, self.test_file_1)), case)
        assert_copied_object_does_not_exist(ObjectInfo(False).build(self.bucket_name, os.path.join(
            case, self.test_folder, self.test_file_1)), case)
        assert_copied_object_does_not_exist(ObjectInfo(False).build(self.bucket_name, os.path.join(
            case, self.test_file_with_other_extension)), case)
        assert_files_skipped(None, source_test_file_2)
        assert_files_skipped(None, source_test_folder_file)
@pytest.mark.run(order=2)
def test_included_files_should_be_downloaded(self):
case = "EPMCMBIBPC-689-2"
source = "cp://{}/{}/".format(self.bucket_name, case)
key_file_1 = os.path.join(case, self.test_file_1)
key_file_2 = os.path.join(case, self.test_file_with_other_extension)
key_folder_file = os.path.join(case, self.test_folder, self.test_file_1)
create_test_files_on_bucket(self.source_dir + self.test_file_1, self.bucket_name, key_file_1,
key_file_2, key_folder_file)
destination = os.path.abspath(os.path.join(self.output_folder, case))
source_object = ObjectInfo(False).build(self.bucket_name, key_file_1)
logging.info("Ready to perform operation from {} to {}".format(source, destination))
pipe_storage_mv(source, destination, recursive=True, include=["*txt"],
exclude=["{}*".format(self.test_folder)], expected_status=0)
assert_copied_object_info(source_object, ObjectInfo(True).build(os.path.join(
destination, self.test_file_1)), case)
assert_copied_object_does_not_exist(ObjectInfo(True).build(os.path.join(destination, self.test_folder,
self.test_file_1)), case)
assert_copied_object_does_not_exist(ObjectInfo(True).build(os.path.join(
destination, self.test_file_with_other_extension)), case)
assert_files_skipped(self.bucket_name, key_file_2)
assert_files_skipped(self.bucket_name, key_folder_file)
@pytest.mark.run(order=3)
def test_excluded_files_should_be_copied(self):
case = "EPMCMBIBPC-689-3"
key_file_1 = os.path.join(case, self.test_file_1)
key_file_2 = os.path.join(case, self.test_file_with_other_extension)
key_folder_file = os.path.join(case, self.test_folder, self.test_file_1)
source = "cp://{}/{}/".format(self.bucket_name, case)
destination = "cp://{}/{}/".format(self.other_bucket_name, case)
create_test_files_on_bucket(self.source_dir + self.test_file_1, self.bucket_name, key_file_1,
key_folder_file, key_file_2)
source_object = ObjectInfo(False).build(self.bucket_name, key_file_1)
logging.info("Ready to perform mv operation from {} to {}".format(source, destination))
pipe_storage_mv(source, destination, recursive=True, include=["*txt"],
exclude=["{}*".format(self.test_folder)], expected_status=0)
assert_copied_object_info(source_object, ObjectInfo(False).build(self.other_bucket_name, key_file_1), case)
assert_copied_object_does_not_exist(ObjectInfo(False).build(self.other_bucket_name, key_folder_file), case)
assert_copied_object_does_not_exist(ObjectInfo(False).build(self.other_bucket_name, key_file_2), case)
assert_files_skipped(self.bucket_name, key_folder_file)
assert_files_skipped(self.bucket_name, key_file_2)
    @pytest.mark.run(order=4)
    def test_upload_without_recursive(self):
        """EPMCMBIBPC-674: moving a folder without -r must fail with an error
        and leave both the bucket and the local files untouched."""
        case = "EPMCMBIBPC-674"
        source = os.path.abspath(self.source_dir + self.test_folder)
        destination = "cp://{}/".format(os.path.join(self.bucket_name, case))
        # pipe_storage_mv returns (stdout, stderr); index 1 is the error text.
        error_text = pipe_storage_mv(source, destination, expected_status=1)[1]
        assert_error_message_is_present(error_text, "Flag --recursive (-r) is required to copy folders.")
        assert_copied_object_does_not_exist(ObjectInfo(False).build(self.bucket_name,
                                                                    os.path.join(case, self.test_file_1)), case)
        assert_copied_object_does_not_exist(ObjectInfo(False).build(self.bucket_name,
                                                                    os.path.join(case, self.test_folder_2,
                                                                                 self.test_file_1)), case)
        assert_files_skipped(None, os.path.join(source, self.test_file_1))
        assert_files_skipped(None, os.path.join(source, self.test_folder_2, self.test_file_1))
    @pytest.mark.run(order=5)
    def test_download_without_recursive(self):
        """EPMCMBIBPC-674: downloading a bucket folder without -r must fail and
        leave both the bucket keys and the local destination untouched."""
        case = "EPMCMBIBPC-674"
        source = "cp://{}/".format(os.path.join(self.bucket_name, case))
        create_test_files_on_bucket(self.source_dir + self.test_file_1, self.bucket_name,
                                    os.path.join(case, self.test_file_1),
                                    os.path.join(case, self.test_folder, self.test_file_1))
        destination = os.path.abspath(self.output_folder + case) + "/"
        error_text = pipe_storage_mv(source, destination, expected_status=1)[1]
        assert_error_message_is_present(error_text, "Flag --recursive (-r) is required to copy folders.")
        assert_copied_object_does_not_exist(ObjectInfo(True).build(os.path.join(destination, self.test_folder,
                                                                                self.test_file_1)), case)
        assert_copied_object_does_not_exist(ObjectInfo(True).build(os.path.join(destination, self.test_file_1)), case)
        assert_files_skipped(self.bucket_name, os.path.join(case, self.test_file_1))
        assert_files_skipped(self.bucket_name, os.path.join(case, self.test_folder, self.test_file_1))
    @pytest.mark.run(order=6)
    def test_copy_without_recursive(self):
        """EPMCMBIBPC-674: moving a folder between buckets without -r must fail
        and leave the source bucket keys in place."""
        case = "EPMCMBIBPC-674"
        source = "cp://{}/".format(os.path.join(self.bucket_name, case))
        destination = "cp://{}/".format(os.path.join(self.other_bucket_name, case))
        error_text = pipe_storage_mv(source, destination, expected_status=1)[1]
        assert_error_message_is_present(error_text, "Flag --recursive (-r) is required to copy folders.")
        assert_copied_object_does_not_exist(ObjectInfo(True).build(os.path.join(destination, self.test_folder,
                                                                                self.test_file_1)), case)
        assert_copied_object_does_not_exist(ObjectInfo(True).build(os.path.join(destination, self.test_file_1)),
                                            case)
        assert_files_skipped(self.bucket_name, os.path.join(case, self.test_file_1))
        assert_files_skipped(self.bucket_name, os.path.join(case, self.test_folder, self.test_file_1))
    @pytest.mark.run(order=6)
    def test_upload_file_to_bucket_with_folder_with_same_name(self):
        """EPMCMBIBPC-2001: a file can be moved into a bucket folder even when
        a pseudo-folder with the same name as the file already exists there."""
        case = "EPMCMBIBPC-2001"
        source = os.path.abspath(os.path.join(self.source_dir, self.test_folder, case))
        try:
            create_test_folder(source)
            source = os.path.join(source, self.test_file_1)
            create_test_file(source, TestFiles.DEFAULT_CONTENT)
            # Create a bucket "folder" whose name equals the file name.
            pipe_storage_cp(source, "cp://%s/%s/%s/" % (self.bucket_name, case, self.test_file_1))
            assert object_exists(self.bucket_name, "%s/%s/%s" % (case, self.test_file_1, self.test_file_1))
            source_for_check = ObjectInfo(True).build(source)
            pipe_storage_mv(source, "cp://%s/%s/" % (self.bucket_name, case))
            assert_copied_object_info(source_for_check, ObjectInfo(False)
                                      .build(self.bucket_name, os.path.join(case, self.test_file_1)), case)
            assert_files_deleted(None, source)
        except BaseException as e:
            # NOTE(review): e.message is Python 2-only; use str(e) if migrating to Python 3.
            pytest.fail("Test case {} failed. {}".format(case, e.message))
    @pytest.mark.run(order=6)
    def test_upload_folder_structure(self):
        """EPMCMBIBPC-2003: moving a folder with nested subfolders uploads the
        whole structure and deletes the local files afterwards."""
        case = "EPMCMBIBPC-2003"
        source = os.path.abspath(os.path.join(self.source_dir, self.test_folder, case))
        try:
            create_test_folder(source)
            create_test_folder(os.path.join(source, "folder1"))
            source1 = os.path.join(os.path.join(source, "folder1"), self.test_file_1)
            create_test_file(source1, TestFiles.DEFAULT_CONTENT)
            create_test_folder(os.path.join(source, "folder2"))
            source2 = os.path.join(os.path.join(source, "folder2"), self.test_file_2)
            create_test_file(source2, TestFiles.DEFAULT_CONTENT)
            source_for_check1 = ObjectInfo(True).build(source1)
            source_for_check2 = ObjectInfo(True).build(source2)
            pipe_storage_mv(source, "cp://%s/%s/" % (self.bucket_name, case), recursive=True)
            assert_copied_object_info(source_for_check1, ObjectInfo(False)
                                      .build(self.bucket_name, os.path.join(case, "folder1", self.test_file_1)), case)
            assert_files_deleted(None, source1)
            assert_copied_object_info(source_for_check2, ObjectInfo(False)
                                      .build(self.bucket_name, os.path.join(case, "folder2", self.test_file_2)), case)
            assert_files_deleted(None, source2)
        except BaseException as e:
            pytest.fail("Test case {} failed. {}".format(case, e.message))
    @pytest.mark.run(order=6)
    def test_download_folder_structure(self):
        """EPMCMBIBPC-2006: moving a bucket folder with nested pseudo-folders
        downloads the whole structure and removes the bucket keys."""
        case = "EPMCMBIBPC-2006"
        source = os.path.abspath(os.path.join(self.source_dir, self.test_folder, case))
        try:
            create_test_folder(source)
            source1 = os.path.join(source, self.test_file_1)
            create_test_file(source1, TestFiles.DEFAULT_CONTENT)
            pipe_storage_cp(source1, "cp://%s/%s/%s/" % (self.bucket_name, case, "folder1"))
            assert object_exists(self.bucket_name, "%s/%s/%s" % (case, "folder1", self.test_file_1))
            source_for_check1 = ObjectInfo(True).build(source1)
            source2 = os.path.join(source, self.test_file_2)
            create_test_file(source2, TestFiles.COPY_CONTENT)
            pipe_storage_cp(source2, "cp://%s/%s/%s/" % (self.bucket_name, case, "folder2"))
            assert object_exists(self.bucket_name, "%s/%s/%s" % (case, "folder2", self.test_file_2))
            source_for_check2 = ObjectInfo(True).build(source2)
            destination = os.path.join(source, "output") + "/"
            pipe_storage_mv("cp://%s/%s/" % (self.bucket_name, case), destination, recursive=True)
            assert_copied_object_info(source_for_check1, ObjectInfo(True)
                                      .build(os.path.join(destination, "folder1", self.test_file_1)), case)
            assert not object_exists(self.bucket_name, "%s/%s/%s" % (case, "folder1", self.test_file_1))
            assert_copied_object_info(source_for_check2, ObjectInfo(True)
                                      .build(os.path.join(destination, "folder2", self.test_file_2)), case)
            assert not object_exists(self.bucket_name, "%s/%s/%s" % (case, "folder2", self.test_file_2))
        except BaseException as e:
            pytest.fail("Test case {} failed. {}".format(case, e.message))
    @pytest.mark.run(order=6)
    def test_upload_folders_with_similar_keys(self):
        """EPMCMBIBPC-2009: moving "folder" must not also pick up the sibling
        "folder2" whose name shares the same prefix."""
        case = "EPMCMBIBPC-2009"
        source_folder = os.path.abspath(os.path.join(self.test_folder, case))
        test_folder1 = "folder"
        test_folder2 = "folder2"
        try:
            create_test_folder(source_folder)
            create_test_folder(os.path.join(source_folder, test_folder1))
            create_test_folder(os.path.join(source_folder, test_folder2))
            create_test_file(os.path.join(source_folder, test_folder1, self.test_file_1), TestFiles.DEFAULT_CONTENT)
            create_test_file(os.path.join(source_folder, test_folder2, self.test_file_2), TestFiles.COPY_CONTENT)
            pipe_storage_mv(os.path.join(source_folder, test_folder1), "cp://%s/%s/" % (self.bucket_name, case),
                            recursive=True)
            assert object_exists(self.bucket_name, os.path.join(case, self.test_file_1))
            assert not object_exists(self.bucket_name, os.path.join(case, self.test_file_2))
            assert_files_deleted(None, os.path.join(source_folder, test_folder1, self.test_file_1))
            # The prefix-sharing sibling folder's file must survive the move.
            assert os.path.exists(os.path.join(source_folder, test_folder2, self.test_file_2))
        except BaseException as e:
            pytest.fail("Test case {} failed. {}".format(case, e.message))
    @pytest.mark.run(order=6)
    def test_download_folders_with_similar_keys(self):
        """EPMCMBIBPC-2010: downloading bucket "folder" must not also pick up
        "folder2" whose key shares the same prefix."""
        case = "EPMCMBIBPC-2010"
        source_folder = os.path.abspath(os.path.join(self.test_folder, case))
        source1 = os.path.join(source_folder, self.test_file_1)
        source2 = os.path.join(source_folder, self.test_file_2)
        try:
            create_test_folder(source_folder)
            create_test_file(source1, TestFiles.DEFAULT_CONTENT)
            create_test_file(source2, TestFiles.COPY_CONTENT)
            pipe_storage_cp(source1, "cp://%s/%s/folder/" % (self.bucket_name, case))
            assert object_exists(self.bucket_name, os.path.join(case, "folder", self.test_file_1))
            pipe_storage_cp(source2, "cp://%s/%s/folder2/" % (self.bucket_name, case))
            assert object_exists(self.bucket_name, os.path.join(case, "folder2", self.test_file_2))
            pipe_storage_mv("cp://%s/%s/folder" % (self.bucket_name, case),
                            "%s/" % os.path.join(self.output_folder, case), recursive=True)
            assert os.path.exists(os.path.abspath(os.path.join(self.output_folder, case, self.test_file_1)))
            assert not os.path.exists(os.path.abspath(
                os.path.join(self.output_folder, case, self.test_file_2)))
            assert not object_exists(self.bucket_name, os.path.join(case, "folder", self.test_file_1))
            assert object_exists(self.bucket_name, os.path.join(case, "folder2", self.test_file_2))
        except BaseException as e:
            pytest.fail("Test case {} failed. {}".format(case, e.message))
test_case_for_destination_slash = [("EPMCMBIBPC-2160-1", True, True), ("EPMCMBIBPC-2160-2", True, False),
("EPMCMBIBPC-2160-3", False, False), ("EPMCMBIBPC-2160-4", False, True)]
@pytest.mark.run(order=6)
@pytest.mark.parametrize("case,has_destination_slash,has_source_slash", test_case_for_destination_slash)
def test_folder_with_slash_should_upload_content_only(self, case, has_destination_slash, has_source_slash):
source_folder = os.path.abspath(os.path.join(self.test_folder, case))
source1 = os.path.join(source_folder, self.test_file_1)
source2 = os.path.join(source_folder, self.test_file_2)
source = os.path.abspath(os.path.join(self.test_folder, case))
destination = "cp://%s/%s" % (self.bucket_name, case)
source, destination = prepare_paths_with_slash(source, destination, has_source_slash, has_destination_slash)
try:
create_test_folder(source_folder)
create_test_file(source1, TestFiles.DEFAULT_CONTENT)
create_test_file(source2, TestFiles.COPY_CONTENT)
pipe_storage_mv(source, destination, recursive=True)
assert object_exists(self.bucket_name, os.path.join(case, self.test_file_1))
assert object_exists(self.bucket_name, os.path.join(case, self.test_file_2))
assert not os.path.exists(source1)
assert not os.path.exists(source2)
except BaseException as e:
pytest.fail("Test case {} failed. {}".format(case, e.message))
test_case_for_download_with_slash = [("EPMCMBIBPC-2202-1", True, True), ("EPMCMBIBPC-2202-2", True, False),
("EPMCMBIBPC-2202-3", False, False), ("EPMCMBIBPC-2202-4", False, True)]
    @pytest.mark.run(order=6)
    @pytest.mark.parametrize("case,has_destination_slash,has_source_slash", test_case_for_download_with_slash)
    def test_folder_with_slash_should_download_content_only(self, case, has_destination_slash, has_source_slash):
        """EPMCMBIBPC-2202: downloading a bucket folder moves only its content,
        regardless of trailing slashes on either path."""
        source = "cp://%s/%s" % (self.bucket_name, case)
        destination = os.path.abspath(os.path.join(self.output_folder, case))
        source, destination = prepare_paths_with_slash(source, destination, has_source_slash, has_destination_slash)
        try:
            self._create_folder_on_bucket(case)
            pipe_storage_mv(source, destination, recursive=True)
            assert os.path.exists(os.path.join(destination, self.test_file_1))
            assert os.path.exists(os.path.join(destination, self.test_file_2))
            assert not object_exists(self.bucket_name, os.path.join(case, self.test_file_1))
            assert not object_exists(self.bucket_name, os.path.join(case, self.test_file_2))
        except BaseException as e:
            pytest.fail("Test case {} failed. {}".format(case, e.message))
test_case_for_copy_between_buckets_with_slash = [("EPMCMBIBPC-2203-1", True, True),
("EPMCMBIBPC-2203-2", True, False),
("EPMCMBIBPC-2203-3", False, False),
("EPMCMBIBPC-2203-4", False, True)]
    @pytest.mark.run(order=6)
    @pytest.mark.parametrize("case,has_destination_slash,has_source_slash",
                             test_case_for_copy_between_buckets_with_slash)
    def test_folder_with_slash_should_copy_between_buckets_content_only(self, case, has_destination_slash,
                                                                        has_source_slash):
        """EPMCMBIBPC-2203: moving a folder between buckets transfers only its
        content, regardless of trailing slashes on either path."""
        source = "cp://%s/%s" % (self.bucket_name, case)
        destination = "cp://%s/%s" % (self.other_bucket_name, case)
        source, destination = prepare_paths_with_slash(source, destination, has_source_slash, has_destination_slash)
        try:
            self._create_folder_on_bucket(case)
            pipe_storage_mv(source, destination, recursive=True)
            assert object_exists(self.other_bucket_name, os.path.join(case, self.test_file_1))
            assert object_exists(self.other_bucket_name, os.path.join(case, self.test_file_2))
            assert not object_exists(self.bucket_name, os.path.join(case, self.test_file_1))
            assert not object_exists(self.bucket_name, os.path.join(case, self.test_file_2))
        except BaseException as e:
            pytest.fail("Test case {} failed. {}".format(case, e.message))
@pytest.mark.run(order=6)
def test_upload_folder_with_skip_existing_option_should_skip(self):
case = "EPMCMBIBPC-2167"
source_folder = os.path.abspath(os.path.join(self.test_folder, case))
source1 = os.path.join(source_folder, self.test_file_1)
source2 = os.path.join(source_folder, self.test_file_2)
destination = "cp://%s/%s" % (self.bucket_name, case)
key1 = os.path.join(case, self.test_file_1)
key2 = os.path.join(case, self.test_file_2)
try:
create_test_folder(source_folder)
create_test_file(source1, TestFiles.DEFAULT_CONTENT)
create_test_file(source2, TestFiles.COPY_CONTENT)
expected = create_file_on_bucket(self.bucket_name, key1, source1)
pipe_storage_mv(source_folder, destination, force=True, recursive=True, skip_existing=True)
assert object_exists(self.bucket_name, key1)
assert object_exists(self.bucket_name, key2)
actual = ObjectInfo(False).build(self.bucket_name, key1)
assert expected.size == actual.size, \
"Sizes must be the same.\nExpected %s\nActual %s" % (expected.size, actual.size)
assert expected.last_modified == actual.last_modified, \
"Last modified time of destination and source file must be the same.\n" \
"Expected %s\nActual %s".format(expected.last_modified, actual.last_modified)
assert not os.path.exists(source2)
assert os.path.exists(source1)
except BaseException as e:
pytest.fail("Test case {} failed. {}".format(case, e.message))
    @pytest.mark.run(order=6)
    def test_upload_folder_with_skip_existing_option_should_not_skip(self):
        """EPMCMBIBPC-2168: `mv --skip-existing` on upload still overwrites a
        destination object whose content differs from the source."""
        case = "EPMCMBIBPC-2168"
        source_folder = os.path.abspath(os.path.join(self.test_folder, case))
        source1 = os.path.join(source_folder, self.test_file_1)
        source2 = os.path.join(source_folder, self.test_file_2)
        destination = "cp://%s/%s" % (self.bucket_name, case)
        key1 = os.path.join(case, self.test_file_1)
        key2 = os.path.join(case, self.test_file_2)
        try:
            create_test_folder(source_folder)
            create_test_file(source1, TestFiles.DEFAULT_CONTENT)
            create_test_file(source2, TestFiles.COPY_CONTENT)
            # Seed the destination key with DIFFERENT content (source2).
            expected = create_file_on_bucket(self.bucket_name, key1, source2)
            pipe_storage_mv(source_folder, destination, force=True, recursive=True, skip_existing=True)
            assert object_exists(self.bucket_name, key1)
            assert object_exists(self.bucket_name, key2)
            actual = ObjectInfo(False).build(self.bucket_name, key1)
            assert not expected.size == actual.size, "Sizes must be the different."
            assert not expected.last_modified == actual.last_modified, \
                "Last modified time of destination and source file must be different."
            assert not os.path.exists(source2)
            assert not os.path.exists(source1)
        except BaseException as e:
            pytest.fail("Test case {} failed. {}".format(case, e.message))
@pytest.mark.run(order=6)
def test_download_folder_with_skip_existing_option_should_skip(self):
case = "EPMCMBIBPC-2198"
destination_folder = os.path.abspath(os.path.join(self.output_folder, case))
destination1 = os.path.join(destination_folder, self.test_file_1)
destination2 = os.path.join(destination_folder, self.test_file_2)
source_folder = "cp://%s/%s/" % (self.bucket_name, case)
try:
create_test_file(destination1, TestFiles.DEFAULT_CONTENT)
assert os.path.exists(destination1)
expected = ObjectInfo(True).build(destination1)
pipe_storage_cp(os.path.abspath(os.path.join(self.source_dir, self.test_file_1)), source_folder)
pipe_storage_cp(os.path.abspath(os.path.join(self.source_dir, self.test_file_2)), source_folder)
assert object_exists(self.bucket_name, os.path.join(case, self.test_file_1))
assert object_exists(self.bucket_name, os.path.join(case, self.test_file_2))
pipe_storage_mv(source_folder, destination_folder, force=True, recursive=True, skip_existing=True)
assert os.path.exists(destination1)
assert os.path.exists(destination2)
assert object_exists(self.bucket_name, os.path.join(case, self.test_file_1))
actual = ObjectInfo(True).build(destination1)
assert expected.size == actual.size, \
"Sizes must be the same.\nExpected %s\nActual %s" % (expected.size, actual.size)
assert expected.last_modified == actual.last_modified, \
"Last modified time of destination and source file must be the same.\n" \
"Expected %s\nActual %s".format(expected.last_modified, actual.last_modified)
except BaseException as e:
pytest.fail("Test case {} failed. {}".format(case, e.message))
    @pytest.mark.run(order=6)
    def test_download_folder_with_skip_existing_option_should_not_skip(self):
        """EPMCMBIBPC-2199: `mv --skip-existing` on download still overwrites a
        local file whose content differs from the bucket object."""
        case = "EPMCMBIBPC-2199"
        destination_folder = os.path.abspath(os.path.join(self.output_folder, case))
        destination1 = os.path.join(destination_folder, self.test_file_1)
        destination2 = os.path.join(destination_folder, self.test_file_2)
        source_folder = "cp://%s/%s/" % (self.bucket_name, case)
        try:
            # Pre-create the local file with DIFFERENT content.
            create_test_file(destination1, TestFiles.COPY_CONTENT)
            assert os.path.exists(destination1)
            expected = ObjectInfo(True).build(destination1)
            pipe_storage_cp(os.path.abspath(os.path.join(self.source_dir, self.test_file_1)), source_folder)
            pipe_storage_cp(os.path.abspath(os.path.join(self.source_dir, self.test_file_2)), source_folder)
            assert object_exists(self.bucket_name, os.path.join(case, self.test_file_1))
            assert object_exists(self.bucket_name, os.path.join(case, self.test_file_2))
            pipe_storage_mv(source_folder, destination_folder, force=True, recursive=True, skip_existing=True)
            assert os.path.exists(destination1)
            assert os.path.exists(destination2)
            assert not object_exists(self.bucket_name, os.path.join(case, self.test_file_1))
            actual = ObjectInfo(True).build(destination1)
            assert not expected.size == actual.size, "Sizes must be the different."
            assert not expected.last_modified == actual.last_modified, \
                "Last modified time of destination and source file must be different."
        except BaseException as e:
            pytest.fail("Test case {} failed. {}".format(case, e.message))
@pytest.mark.run(order=6)
def test_copy_folder_between_buckets_with_skip_existing_option_should_skip(self):
case = "EPMCMBIBPC-2211"
source_folder = "cp://%s/%s/" % (self.bucket_name, case)
destination_folder = "cp://%s/%s/" % (self.other_bucket_name, case)
key1 = os.path.join(case, self.test_file_1)
key2 = os.path.join(case, self.test_file_2)
source_file1 = os.path.abspath(os.path.join(self.source_dir, self.test_file_1))
source_file2 = os.path.abspath(os.path.join(self.source_dir, self.test_file_2))
try:
expected = create_file_on_bucket(self.other_bucket_name, key1, source_file1)
pipe_storage_cp(source_file1, "cp://%s/%s" % (self.bucket_name, key1))
pipe_storage_cp(source_file2, "cp://%s/%s" % (self.bucket_name, key2))
assert object_exists(self.bucket_name, key1)
assert object_exists(self.bucket_name, key2)
pipe_storage_mv(source_folder, destination_folder, force=True, recursive=True, skip_existing=True)
assert object_exists(self.other_bucket_name, key1)
assert object_exists(self.other_bucket_name, key2)
assert object_exists(self.bucket_name, key1)
assert not object_exists(self.bucket_name, key2)
actual = ObjectInfo(False).build(self.other_bucket_name, key1)
assert expected.size == actual.size, \
"Sizes must be the same.\nExpected %s\nActual %s" % (expected.size, actual.size)
assert expected.last_modified == actual.last_modified, \
"Last modified time of destination and source file must be the same.\n" \
"Expected %s\nActual %s".format(expected.last_modified, actual.last_modified)
except BaseException as e:
pytest.fail("Test case {} failed. {}".format(case, e.message))
    @pytest.mark.run(order=6)
    def test_copy_folder_between_buckets_with_skip_existing_option_should_not_skip(self):
        """EPMCMBIBPC-2212: `mv --skip-existing` between buckets still
        overwrites a destination key whose content differs from the source."""
        case = "EPMCMBIBPC-2212"
        source_folder = "cp://%s/%s/" % (self.bucket_name, case)
        destination_folder = "cp://%s/%s/" % (self.other_bucket_name, case)
        key1 = os.path.join(case, self.test_file_1)
        key2 = os.path.join(case, self.test_file_2)
        source_file1 = os.path.abspath(os.path.join(self.source_dir, self.test_file_1))
        source_file2 = os.path.abspath(os.path.join(self.source_dir, self.test_file_2))
        try:
            # Seed the destination key with DIFFERENT content (source_file2).
            expected = create_file_on_bucket(self.other_bucket_name, key1, source_file2)
            pipe_storage_cp(source_file1, "cp://%s/%s" % (self.bucket_name, key1))
            pipe_storage_cp(source_file2, "cp://%s/%s" % (self.bucket_name, key2))
            assert object_exists(self.bucket_name, key1)
            assert object_exists(self.bucket_name, key2)
            pipe_storage_mv(source_folder, destination_folder, force=True, recursive=True, skip_existing=True)
            assert object_exists(self.other_bucket_name, key1)
            assert object_exists(self.other_bucket_name, key2)
            assert not object_exists(self.bucket_name, key1)
            assert not object_exists(self.bucket_name, key2)
            actual = ObjectInfo(False).build(self.other_bucket_name, key1)
            assert not expected.size == actual.size, "Sizes must be the different."
            assert not expected.last_modified == actual.last_modified, \
                "Last modified time of destination and source file must be different."
        except BaseException as e:
            pytest.fail("Test case {} failed. {}".format(case, e.message))
def _create_folder_on_bucket(self, case):
    """Upload both test files under '<case>/' in the primary bucket."""
    local_dir = os.path.abspath(self.source_dir)
    # Upload file 1 then file 2, mirroring the local layout on the bucket.
    for test_file in (self.test_file_1, self.test_file_2):
        pipe_storage_cp(
            os.path.join(local_dir, test_file),
            "cp://%s/%s/%s" % (self.bucket_name, case, test_file),
        )
|
xmgz/commafeed | src/main/java/com/commafeed/backend/model/FeedEntry.java | package com.commafeed.backend.model;
import java.util.Date;
import java.util.Set;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.Transient;
import org.hibernate.annotations.Index;
import com.google.api.client.util.Sets;
@Entity
@Table(name = "FEEDENTRIES")
@SuppressWarnings("serial")
public class FeedEntry extends AbstractModel {

    /** Entry identifier exactly as supplied by the feed (can be very long). */
    @Column(length = 2048, nullable = false)
    private String guid;

    /** Fixed-width hash of {@link #guid}, indexed for duplicate detection. */
    @Column(length = 40, nullable = false)
    @Index(name = "guidHash_index")
    private String guidHash;

    /** Feeds this entry belongs to (the same entry may appear in several). */
    @ManyToMany
    @JoinTable(name = "FEED_FEEDENTRIES", joinColumns = { @JoinColumn(name = "FEEDENTRY_ID", nullable = false, updatable = false) }, inverseJoinColumns = { @JoinColumn(name = "FEED_ID", nullable = false, updatable = false) })
    private Set<Feed> feeds = Sets.newHashSet();

    /** Entry body, loaded lazily since it can be large. */
    @OneToOne(cascade = CascadeType.ALL, fetch = FetchType.LAZY, optional = false)
    @JoinColumn(nullable = false, updatable = false)
    private FeedEntryContent content;

    /** Link to the original article, if any. */
    @Column(length = 2048)
    private String url;

    // Not persisted: the column mapping is intentionally disabled.
    //@Column(length = 128)
    @Transient
    private String author;

    /** When the entry was first stored locally. */
    @Temporal(TemporalType.TIMESTAMP)
    private Date inserted;

    /** Publication/update timestamp from the feed, indexed for ordering. */
    @Temporal(TemporalType.TIMESTAMP)
    @Index(name = "updated_index")
    private Date updated;

    /** Per-user read/star statuses attached to this entry. */
    @OneToMany(mappedBy = "entry")
    private Set<FeedEntryStatus> statuses;

    public String getGuid() {
        return this.guid;
    }

    public void setGuid(String guid) {
        this.guid = guid;
    }

    public String getGuidHash() {
        return this.guidHash;
    }

    public void setGuidHash(String guidHash) {
        this.guidHash = guidHash;
    }

    public Set<Feed> getFeeds() {
        return this.feeds;
    }

    public void setFeeds(Set<Feed> feeds) {
        this.feeds = feeds;
    }

    public FeedEntryContent getContent() {
        return this.content;
    }

    public void setContent(FeedEntryContent content) {
        this.content = content;
    }

    public String getUrl() {
        return this.url;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public String getAuthor() {
        return this.author;
    }

    public void setAuthor(String author) {
        this.author = author;
    }

    public Date getInserted() {
        return this.inserted;
    }

    public void setInserted(Date inserted) {
        this.inserted = inserted;
    }

    public Date getUpdated() {
        return this.updated;
    }

    public void setUpdated(Date updated) {
        this.updated = updated;
    }

    public Set<FeedEntryStatus> getStatuses() {
        return this.statuses;
    }

    public void setStatuses(Set<FeedEntryStatus> statuses) {
        this.statuses = statuses;
    }
}
|
tranleduy2000/JSPIIJ | src/com/js/interpreter/pascaltypes/CustomType.java | <filename>src/com/js/interpreter/pascaltypes/CustomType.java
package com.js.interpreter.pascaltypes;
import com.js.interpreter.ast.VariableDeclaration;
import com.js.interpreter.ast.expressioncontext.ExpressionContext;
import com.js.interpreter.ast.returnsvalue.RValue;
import com.js.interpreter.ast.returnsvalue.cloning.CloneableObjectCloner;
import com.js.interpreter.exceptions.NonArrayIndexed;
import com.js.interpreter.exceptions.ParsingException;
import com.js.interpreter.pascaltypes.bytecode.RegisterAllocator;
import com.js.interpreter.pascaltypes.bytecode.ScopedRegisterAllocator;
import com.js.interpreter.pascaltypes.bytecode.SimpleRegisterAllocator;
import com.js.interpreter.pascaltypes.bytecode.TransformationInput;
import com.js.interpreter.runtime.variables.ContainsVariables;
import serp.bytecode.*;
import java.util.ArrayList;
import java.util.List;
public class CustomType extends ObjectType {
    /**
     * This class represents a declaration of a new record type in Pascal.
     * Each instance is compiled on the fly into a JVM class implementing
     * {@link ContainsVariables}, with one field per declared variable.
     */

    /**
     * The defined variables of the custom type, in declaration order.
     * Order matters: equals() and hashCode() (and therefore the generated
     * class name) are derived from this list.
     */
    public List<VariableDeclaration> variable_types;

    // Lazily generated JVM class backing this type; see getTransferClass().
    private Class cachedClass = null;

    public CustomType() {
        variable_types = new ArrayList<VariableDeclaration>();
    }

    /**
     * Adds another sub-variable to this user defined type.
     *
     * @param v The name and type of the variable to add.
     */
    public void add_variable_declaration(VariableDeclaration v) {
        variable_types.add(v);
    }

    /**
     * Instantiates the generated class via its default constructor.
     *
     * @return a fresh instance, or null if reflection fails.
     */
    @Override
    public Object initialize() {
        try {
            return getTransferClass().newInstance();
        } catch (InstantiationException e) {
            e.printStackTrace();
        } catch (IllegalAccessException e) {
            e.printStackTrace();
        }
        return null;
    }

    @Override
    public int hashCode() {
        return variable_types.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof CustomType)) {
            return false;
        }
        CustomType other = (CustomType) obj;
        return variable_types.equals(other.variable_types);
    }

    @Override
    public boolean equals(DeclaredType obj) {
        return equals((Object) obj);
    }

    /**
     * Returns (generating and caching on first use) the JVM class for this
     * type. The class name encodes hashCode(), so structurally identical
     * types share a generated class.
     * NOTE(review): the cache is not synchronized — assumed single-threaded
     * use; confirm before calling from multiple threads.
     */
    @Override
    public Class getTransferClass() {
        if (cachedClass != null) {
            return cachedClass;
        }
        String name = "com.js.interpreter.custom_types."
                + Integer.toHexString(hashCode());
        try {
            cachedClass = bcl.loadClass(name);
            return cachedClass;
        } catch (ClassNotFoundException e) {
            // Expected on first use: fall through and generate the class.
        }
        Project p = new Project();
        BCClass c = p.loadClass(name);
        declareClassElements(c);
        cachedClass = bcl.loadThisClass(c.toByteArray());
        return cachedClass;
    }

    /**
     * Declares the fields and the ContainsVariables plumbing on the class
     * being generated.
     *
     * @param c The in-progress bytecode class.
     */
    protected void declareClassElements(BCClass c) {
        c.declareInterface(ContainsVariables.class);
        c.setDeclaredInterfaces(new Class[]{ContainsVariables.class});
        for (VariableDeclaration v : variable_types) {
            Class type = v.type.getStorageClass();
            c.declareField(v.name, type);
        }
        add_constructor(c);
        add_get_var(c);
        add_set_var(c);
        add_clone(c);
    }

    static ByteClassLoader bcl = new ByteClassLoader();

    /** Loader that turns raw generated bytecode into a resolved Class. */
    public static class ByteClassLoader extends ClassLoader {
        public Class<?> loadThisClass(byte[] bytes) {
            Class<?> c = defineClass(null, bytes, 0, bytes.length);
            resolveClass(c);
            return c;
        }
    }

    /**
     * Converts a value to this type: only values of the exact same custom
     * type are accepted, and they are deep-cloned.
     */
    @Override
    public RValue convert(RValue value, ExpressionContext f)
            throws ParsingException {
        RuntimeType other_type = value.get_type(f);
        if (this.equals(other_type.declType)) {
            return cloneValue(value);
        }
        return null;
    }

    /**
     * Looks up the declared type of a member by name; null if absent.
     */
    @Override
    public DeclaredType getMemberType(String name) {
        for (VariableDeclaration v : variable_types) {
            if (v.name.equals(name)) {
                return v.type;
            }
        }
        System.err.println("Could not find member " + name);
        return null;
    }

    /**
     * Emits bytecode that leaves a freshly constructed default instance of
     * this type on the operand stack.
     */
    @Override
    public void pushDefaultValue(Code constructor_code, RegisterAllocator ra) {
        constructor_code.anew().setType(this.getTransferClass());
        // BUGFIX: 'new' must be followed by 'dup' so that a reference
        // survives the constructor call; without it, invokespecial consumes
        // the only reference and nothing is left on the stack for the
        // caller's putfield (compare the new/dup/invokespecial sequence
        // emitted in add_clone).
        constructor_code.dup();
        try {
            constructor_code.invokespecial().setMethod(
                    this.getTransferClass().getConstructor());
        } catch (SecurityException e) {
            e.printStackTrace();
        } catch (NoSuchMethodException e) {
            e.printStackTrace();
        }
    }

    /**
     * Generates the default constructor: calls Object.<init>, then assigns
     * each declared field its type's default value.
     */
    private void add_constructor(BCClass b) {
        BCMethod constructor = b.addDefaultConstructor();
        constructor.removeCode();
        Code constructor_code = constructor.getCode(true);
        // "this" takes up one local slot.
        RegisterAllocator ra = new SimpleRegisterAllocator(1);
        constructor_code.aload().setThis();
        try {
            constructor_code.invokespecial().setMethod(
                    Object.class.getDeclaredConstructor());
        } catch (SecurityException e) {
            e.printStackTrace();
        } catch (NoSuchMethodException e) {
            e.printStackTrace();
        }
        for (VariableDeclaration v : variable_types) {
            constructor_code.aload().setThis();
            v.type.pushDefaultValue(constructor_code,
                    new ScopedRegisterAllocator(ra));
            constructor_code.putfield().setField(v.get_name(),
                    v.type.getStorageClass());
        }
        constructor_code.vreturn();
        constructor_code.calculateMaxLocals();
        constructor_code.calculateMaxStack();
    }

    /**
     * Adds the get_var method to a specified class. This method conforms to
     * the ContainsVariables interface and allows access to all declared
     * fields. The generated code interns the name argument and compares it
     * by reference against each field-name constant, boxing primitive
     * results before returning; unknown names yield null.
     *
     * @param b The class to modify.
     */
    private void add_get_var(BCClass b) {
        BCMethod get_var = b.declareMethod("get_var", Object.class,
                new Class[]{String.class});
        get_var.makePublic();
        Code get_var_code = get_var.getCode(true);
        // Intern the argument so '==' comparison against constants is valid.
        get_var_code.aload().setParam(0);
        get_var_code.invokevirtual().setMethod(String.class, "intern",
                String.class, new Class[]{});
        get_var_code.astore().setParam(0);
        JumpInstruction previous_if = null;
        for (BCField f : b.getFields()) {
            Instruction code_block = get_var_code.constant().setValue(
                    f.getName());
            if (previous_if != null) {
                previous_if.setTarget(code_block);
            }
            get_var_code.aload().setParam(0);
            previous_if = get_var_code.ifacmpne();
            get_var_code.aload().setThis();
            get_var_code.getfield().setField(f);
            // Box primitive field values so the Object return type holds.
            Class return_type = f.getType();
            if (return_type == int.class) {
                get_var_code.invokestatic().setMethod(Integer.class, "valueOf",
                        Integer.class, new Class[]{int.class});
            } else if (return_type == double.class) {
                get_var_code.invokestatic().setMethod(Double.class,
                        "valueOf", Double.class,
                        new Class[]{double.class});
            } else if (return_type == char.class) {
                get_var_code.invokestatic().setMethod(Character.class,
                        "valueOf", Character.class, new Class[]{char.class});
            } else if (return_type == boolean.class) {
                get_var_code.invokestatic().setMethod(Boolean.class, "valueOf",
                        Boolean.class, new Class[]{boolean.class});
            }
            get_var_code.areturn();
        }
        // Fallthrough for unmatched names: return null.
        Instruction i = get_var_code.constant().setNull();
        if (previous_if != null) {
            previous_if.setTarget(i);
        }
        get_var_code.areturn();
        get_var_code.calculateMaxLocals();
        get_var_code.calculateMaxStack();
    }

    /**
     * Adds the set_var method to a specified class. This method conforms to
     * the ContainsVariables interface and allows mutation of all declared
     * fields. Mirrors add_get_var, but unboxes the incoming Object into the
     * field's primitive type (or checkcasts for reference fields).
     *
     * @param b The class to modify.
     */
    private void add_set_var(BCClass b) {
        BCMethod set_var = b.declareMethod("set_var", void.class, new Class[]{
                String.class, Object.class});
        set_var.makePublic();
        Code set_var_code = set_var.getCode(true);
        set_var_code.aload().setParam(0);
        set_var_code.invokevirtual().setMethod(String.class, "intern",
                String.class, new Class[]{});
        set_var_code.astore().setParam(0);
        JumpInstruction previous_if = null;
        for (BCField f : b.getFields()) {
            Instruction jump_to = set_var_code.constant().setValue(f.getName());
            if (previous_if != null) {
                previous_if.setTarget(jump_to);
            }
            set_var_code.aload().setParam(0);
            previous_if = set_var_code.ifacmpne();
            set_var_code.aload().setThis();
            set_var_code.aload().setParam(1);
            Class field_class = f.getType();
            if (field_class == int.class) {
                set_var_code.checkcast().setType(Integer.class);
                set_var_code.invokevirtual().setMethod(Integer.class,
                        "intValue", int.class, new Class[]{});
            } else if (field_class == double.class) {
                set_var_code.checkcast().setType(Double.class);
                set_var_code.invokevirtual().setMethod(Double.class,
                        "doubleValue", double.class, new Class[]{});
            } else if (field_class == boolean.class) {
                set_var_code.checkcast().setType(Boolean.class);
                set_var_code.invokevirtual().setMethod(Boolean.class,
                        "booleanValue", boolean.class, new Class[]{});
            } else if (field_class == char.class) {
                set_var_code.checkcast().setType(Character.class);
                set_var_code.invokevirtual().setMethod(Character.class,
                        "charValue", char.class, new Class[]{});
            } else {
                set_var_code.checkcast().setType(field_class);
            }
            set_var_code.putfield().setField(f);
        }
        Instruction jump_to = set_var_code.vreturn();
        if (previous_if != null) {
            previous_if.setTarget(jump_to);
        }
        set_var_code.calculateMaxLocals();
        set_var_code.calculateMaxStack();
    }

    /**
     * Generates the clone method required by ContainsVariables: constructs
     * a new instance and copies every field, deep-cloning mutable reference
     * fields (StringBuilder gets a copy-constructor; other non-primitive,
     * non-array fields are cloned via their own clone()).
     */
    private void add_clone(BCClass b) {
        BCMethod clone_method = b.declareMethod("clone",
                ContainsVariables.class, new Class[0]);
        clone_method.makePublic();
        Code clone_code = clone_method.getCode(true);
        try {
            clone_code.anew().setType(b);
            clone_code.dup();
            clone_code.invokespecial().setMethod(b.addDefaultConstructor());
            clone_code.astore().setLocal(1);
            for (BCField f : b.getFields()) {
                clone_code.aload().setLocal(1);
                if (f.getType() == StringBuilder.class) {
                    // StringBuilder has no clone(): copy-construct instead.
                    clone_code.anew().setType(StringBuilder.class);
                    clone_code.dup();
                    clone_code.aload().setThis();
                    clone_code.getfield().setField(f);
                    clone_code.invokespecial().setMethod(
                            StringBuilder.class
                                    .getConstructor(CharSequence.class));
                } else if (f.getType().isPrimitive()) {
                    clone_code.aload().setThis();
                    clone_code.getfield().setField(f);
                } else if (f.getType().isArray()) {
                    // NOTE(review): arrays are copied by reference here —
                    // confirm shared-array semantics are intended.
                    clone_code.aload().setThis();
                    clone_code.getfield().setField(f);
                } else {
                    clone_code.aload().setThis();
                    clone_code.getfield().setField(f);
                    clone_code.invokevirtual().setMethod(
                            f.getType().getMethod("clone", new Class[0]));
                }
                clone_code.putfield().setField(f);
            }
            clone_code.aload().setLocal(1);
            clone_code.areturn();
            clone_code.calculateMaxLocals();
            clone_code.calculateMaxStack();
        } catch (SecurityException e) {
            e.printStackTrace();
        } catch (NoSuchMethodException e) {
            e.printStackTrace();
        }
    }

    /**
     * Emits bytecode replacing the value on the stack with its clone().
     */
    @Override
    public void cloneValueOnStack(TransformationInput t) {
        t.pushInputOnStack();
        try {
            t.getCode().invokeinterface()
                    .setMethod("clone", ContainsVariables.class, new Class[0]);
        } catch (SecurityException e) {
            e.printStackTrace();
        }
    }

    /** Custom record types are not indexable. */
    @Override
    public RValue generateArrayAccess(RValue array,
                                      RValue index) throws NonArrayIndexed {
        throw new NonArrayIndexed(array.getLineNumber(), this);
    }

    @Override
    public RValue cloneValue(RValue r) {
        return new CloneableObjectCloner(r);
    }

    @Override
    public Class<?> getStorageClass() {
        return getTransferClass();
    }

    public void lol() {
        // BUGFIX: index order now matches the declared dimensions [5][6][7];
        // the original wrote l[i][k][j], letting k run to 6 against the
        // middle dimension of size 6 and throwing
        // ArrayIndexOutOfBoundsException.
        int[][][] l = new int[5][6][7];
        for (int i = 0; i < 5; i++) {
            for (int j = 0; j < 6; j++) {
                for (int k = 0; k < 7; k++) {
                    l[i][j][k] = 0;
                }
            }
        }
    }

    @Override
    public void arrayStoreOperation(Code c) {
        c.aastore();
    }

    @Override
    public void convertStackToStorageType(Code c) {
        // The storage type is already the stack type: do nothing.
    }

    @Override
    public void pushArrayOfType(Code code, RegisterAllocator ra,
                                List<SubrangeType> ranges) {
        // Because I cannot mix this method into DeclaredType (no multiple
        // inheritance) I have to duplicate it.
        ArrayType.pushArrayOfNonArrayType(this, code, ra, ranges);
    }
}
|
npocmaka/Windows-Server-2003 | inetsrv/msmq/src/admin/mqsnap/infodlg.h | <reponame>npocmaka/Windows-Server-2003
// InfoDlg.h : header file
//
/////////////////////////////////////////////////////////////////////////////
// CInfoDlgDialog dialog
// Modeless information dialog showing a single text string.
// Instances can only be created through CreateObject() and delete
// themselves in PostNcDestroy(), so callers must never delete them.
class CInfoDlgDialog : public CMqDialog
{
// Construction
public:
    // Factory method: the sole way to obtain an instance.
    static CInfoDlgDialog *CreateObject(LPCTSTR szInfoText, CWnd* pParent = NULL);

private:
    //
    // Private constructor - this object can only be created
    // using CreateObject
    //
    CInfoDlgDialog(LPCTSTR szInfoText, CWnd* pParent = NULL);   // standard constructor

// Dialog Data
    //{{AFX_DATA(CInfoDlgDialog)
    enum { IDD = IDD_INFO_DLG };
    CString m_szInfoText;       // text displayed by the dialog
    //}}AFX_DATA

// Overrides
    // ClassWizard generated virtual function overrides
    //{{AFX_VIRTUAL(CInfoDlgDialog)
    protected:
    virtual void DoDataExchange(CDataExchange* pDX);    // DDX/DDV support
    virtual void PostNcDestroy();
    //}}AFX_VIRTUAL

// Implementation
protected:
    // Generated message map functions
    //{{AFX_MSG(CInfoDlgDialog)
    //}}AFX_MSG
    DECLARE_MESSAGE_MAP()

private:
    // Creates the underlying modeless dialog window.
    BOOL Create();
    CWnd* m_pParent;    // parent window passed at construction (may be NULL)
    int m_nID;          // dialog resource / control identifier
};
// Scoped owner of a CInfoDlgDialog: the constructor creates/shows the
// dialog and the destructor tears it down, tying the dialog's visibility
// to this object's lifetime.
class CInfoDlg
{
public:
    CInfoDlg(LPCTSTR szInfoText, CWnd* pParent = NULL);
    ~CInfoDlg();

private:
    CInfoDlgDialog *m_pinfoDlg;     // owned dialog (self-deleting window)
};
|
DaManDOH/Simd | src/Simd/SimdSse2Reduce.cpp | /*
* Simd Library (http://ermig1979.github.io/Simd).
*
* Copyright (c) 2011-2018 <NAME>,
* 2018-2018 <NAME>.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
#include "Simd/SimdMemory.h"
#include "Simd/SimdStore.h"
#include "Simd/SimdBase.h"
#include "Simd/SimdSse2.h"
namespace Simd
{
#ifdef SIMD_SSE2_ENABLE
namespace Sse2
{
// Downscales an interleaved image by 2x2 averaging. Only the single-channel
// case has an SSE2-optimized path; multi-channel images fall back to the
// scalar Base implementation.
void ReduceColor2x2(const uint8_t * src, size_t srcWidth, size_t srcHeight, size_t srcStride,
    uint8_t * dst, size_t dstWidth, size_t dstHeight, size_t dstStride, size_t channelCount)
{
    assert((srcWidth + 1) / 2 == dstWidth && (srcHeight + 1) / 2 == dstHeight);

    switch (channelCount)
    {
    case 1:
        Sse2::ReduceGray2x2(src, srcWidth, srcHeight, srcStride, dst, dstWidth, dstHeight, dstStride);
        break;
    case 2:
    case 3:
    case 4:
        // One shared scalar fallback for all supported multi-channel counts
        // (the original repeated the identical call three times).
        Base::ReduceColor2x2(src, srcWidth, srcHeight, srcStride, dst, dstWidth, dstHeight, dstStride, channelCount);
        break;
    default:
        assert(0);
    }
}
}
#endif// SIMD_SSE2_ENABLE
}
|
SouvikChan/-Leetcode_Souvik | 961-n-repeated-element-in-size-2n-array/961-n-repeated-element-in-size-2n-array.cpp | class Solution {
public:
int repeatedNTimes(vector<int>& nums) {
int occ=nums.size()/2;
unordered_map<int, int> tp;
int ans;
for(auto it:nums)
tp[it]++;
for(auto it:tp)
if(it.second==occ)
ans=it.first;
return ans;
}
}; |
bmeares/Meerschaum | meerschaum/config/_read_config.py | <reponame>bmeares/Meerschaum
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
"""
Import the config yaml file
"""
from __future__ import annotations
from meerschaum.utils.typing import Optional, Dict, Any, List, Tuple, Union
def read_config(
        directory: Optional[pathlib.Path] = None,
        keys: Optional[List[str]] = None,
        write_missing: bool = True,
        substitute: bool = True,
        with_filenames: bool = False,
    ) -> Union[Dict[str, Any], Tuple[Dict[str, Any], List[str]]]:
    """
    Read the configuration directory and return its contents as a dictionary.

    :param directory:
        The directory containing the configuration files (.json and .yaml).
        Defaults to `CONFIG_DIR_PATH`.
    :param keys:
        Which configuration files to read. If `None`, read every file found.
    :param write_missing:
        If a keyfile does not exist but is defined in the default configuration,
        write the file to disk.
        Defaults to True.
    :param substitute:
        Replace `MRSM{}` syntax with configuration values.
        Defaults to True.
    :param with_filenames:
        If True, return a tuple of the configuration dictionary with a list of read filenames.
        Defaults to False.

    Example:
    ```
    >>> read_config(keys=['meerschaum'], with_filenames=True)
    >>> ({...}, ['meerschaum.yaml'])
    ```
    """
    import sys, shutil, os, json, itertools
    from meerschaum.utils.packages import attempt_import
    from meerschaum.utils.yaml import yaml, _yaml
    from meerschaum.config._paths import CONFIG_DIR_PATH
    from meerschaum.config.static import _static_config
    from meerschaum.config._patch import apply_patch_to_config
    if directory is None:
        directory = CONFIG_DIR_PATH

    if _yaml is None:
        ### Without a YAML parser nothing on disk can be read.
        print('Could not import YAML! Reverting to default configuration.')
        from meerschaum.config._default import default_config
        return default_config

    ### Each key corresponds to a YAML or JSON file.
    symlinks_key = _static_config()['config']['symlinks_key']
    config = {}
    config_to_write = {}

    default_filetype = _static_config()['config']['default_filetype']
    filetype_loaders = {
        'yml' : yaml.load,
        'yaml' : yaml.load,
        'json' : json.load,
    }

    ### Construct filekeys (files to parse).
    filekeys = []
    filenames = os.listdir(directory)
    missing_keys, found_keys = set(), set()
    if keys is None:
        _filekeys = filenames
    else:
        _filekeys = []
        for k in keys:
            for ft in filetype_loaders:
                if str(k) + '.' + str(ft) in filenames:
                    _filekeys.append(str(k) + '.' + str(ft))
                    found_keys.add(k)
                    if k in missing_keys:
                        missing_keys.remove(k)
                elif k not in found_keys:
                    missing_keys.add(k)

    ### Check for missing files with default keys.
    if len(missing_keys) > 0:
        from meerschaum.config._default import default_config
        for mk in missing_keys:
            if mk in default_config:
                _default_dict = (
                    search_and_substitute_config(default_config) if substitute
                    else default_config
                )
                ### If default config contains symlinks, add them to the config to write.
                try:
                    _default_symlinks = _default_dict[symlinks_key][mk]
                except Exception as e:
                    _default_symlinks = {}
                config[mk] = _default_dict[mk]
                if _default_symlinks:
                    if symlinks_key not in config:
                        config[symlinks_key] = {}
                    if mk not in config[symlinks_key]:
                        config[symlinks_key][mk] = {}
                    config[symlinks_key][mk] = apply_patch_to_config(
                        config[symlinks_key][mk],
                        _default_symlinks
                    )
                    if symlinks_key not in config_to_write:
                        config_to_write[symlinks_key] = {}
                    config_to_write[symlinks_key][mk] = config[symlinks_key][mk]

                ### Write the default key.
                config_to_write[mk] = config[mk]

    ### Write missing keys if necessary.
    if len(config_to_write) > 0 and write_missing:
        from meerschaum.config._edit import write_config
        write_config(config_to_write, directory)

    ### Check for duplicate files.
    ### Found help on StackOverflow:
    ### https://stackoverflow.com/questions/26618688/python-iterate-over-a-list-
    ### of-files-finding-same-filenames-but-different-exten
    keygroups = {
        key: list(value)
        for key, value in itertools.groupby(
            sorted(_filekeys, key = lambda e: os.path.splitext(e)[0]),
            key = lambda e: os.path.splitext(e)[0]
        )
    }

    for k, v in keygroups.items():
        fn = v[0]
        if len(v) > 1:
            if k + '.' + default_filetype in v:
                fn = k + '.' + default_filetype
            print(
                f"Found multiple config files named '{k}'. " +
                f"Will attempt to parse '{fn}' for key '{k}'."
            )
        filekeys.append(fn)

    _seen_keys = []
    for filename in filekeys:
        filepath = os.path.join(directory, filename)
        _parts = filename.split('.')
        _type = _parts[-1]
        key = '.'.join(_parts[:-1])

        ### Check if we've seen this key before (e.g. test.yaml, test.yml, test.json).
        if key in _seen_keys:
            print(
                f"Multiple files with the name '{key}' found in '{str(directory)}'. " +
                f"Reading from '{filename}'."
            )
        ### BUGFIX: record the key so the duplicate warning above can ever fire.
        _seen_keys.append(key)

        if len(_parts) < 2 or _type not in filetype_loaders:
            print(f"Unknown file '{filename}' in '{str(directory)}'. Skipping...")
            ### BUGFIX: actually skip — the original fell through and raised
            ### a KeyError on filetype_loaders[_type] inside the retry loop.
            continue

        ### Retry until the file parses, letting the user fix syntax errors.
        while True:
            try:
                with open(filepath, 'r') as f:
                    _config_key = filetype_loaders[_type](f)
                    _single_key_config = (
                        search_and_substitute_config({key : _config_key}) if substitute
                        else {key : _config_key}
                    )
                    config[key] = _single_key_config[key]
                    if (
                        symlinks_key in _single_key_config
                        and key in _single_key_config[symlinks_key]
                    ):
                        if symlinks_key not in config:
                            config[symlinks_key] = {}
                        config[symlinks_key][key] = _single_key_config[symlinks_key][key]
                    break
            except Exception as e:
                print(f"Unable to parse '{filename}'!")
                input(f"Press [Enter] to open '{filename}' and fix formatting errors.")
                from meerschaum.utils.misc import edit_file
                edit_file(filepath)

    if with_filenames:
        return config, filekeys
    return config
def search_and_substitute_config(
        config : dict,
        leading_key : str = "MRSM",
        delimiter : str = ":",
        begin_key : str = "{",
        end_key : str = "}",
        literal_key : str = '!',
        keep_symlinks : bool = True,
    ) -> Dict[str, Any]:
    """
    Search the config for Meerschaum substitution syntax and substitute with the value of keys.

    :param config:
        The Meerschaum configuration dictionary to search through.
    :param leading_key:
        The string with which to start the search.
        Defaults to 'MRSM'.
    :param delimiter:
        The string separating the keys inside a pattern.
        Defaults to ':'.
    :param begin_key:
        The string to start the keys list.
        Defaults to '{'.
    :param end_key:
        The string to end the keys list.
        Defaults to '}'.
    :param literal_key:
        The string to force a literal interpretation of a value.
        When the string is isolated, a literal interpretation is assumed and the surrounding
        quotes are replaced.
        E.g. Suppose a:b:c produces a dictionary {'d': 1}.
          - 'MRSM{a:b:c}'   => {'d': 1}          : isolated
          - ' MRSM{a:b:c} ' => ' "{\'d\': 1}"'   : not isolated
          - ' MRSM{!a:b:c} '=> ' {"d": 1}'       : literal
        Defaults to '!'.
    :param keep_symlinks:
        If True, include the symlinks under the top-level key '_symlinks' (never written to a file).
        Defaults to True.

    Example:
        MRSM{meerschaum:connectors:main:host} => cf['meerschaum']['connectors']['main']['host']
    """
    ### Record the dictionary paths holding symlink patterns so they can be
    ### preserved alongside the substituted values.
    _links = []
    def _find_symlinks(d, _keys : List[str] = []):
        for k, v in d.items():
            if isinstance(v, dict):
                _find_symlinks(v, _keys + [k])
            elif (leading_key + begin_key) in str(v):
                _links.append((_keys + [k], v))
    _find_symlinks(config)

    import json
    from meerschaum.config import get_config

    ### Scan the JSON serialization of the config for substitution patterns.
    ### (The original also built an unused compact dump and several dead
    ### locals — mod_haystack, buff, max_index — removed here.)
    needle = leading_key + begin_key
    haystack = json.dumps(config)

    patterns = dict()
    isolated_patterns = dict()
    literal_patterns = dict()

    begin, end, floor = 0, 0, 0
    while needle in haystack[floor:]:
        ### extract the keys
        hs = haystack[floor:]

        ### the first character of the keys
        ### MRSM{key1:key2}
        ###      ^
        begin = hs.find(needle) + len(needle)

        ### The character behind the needle.
        ### "MRSM{key1:key2}"
        ### ^
        prior = haystack[(floor + begin) - (len(needle) + 1)]

        ### number of characters to end of keys
        ### (really it's the index of the beginning of the end_key relative to the beginning
        ### but the math works out)
        ### MRSM{key1}
        ###      ^   ^ => 4
        length = hs[begin:].find(end_key)

        ### index of the end_key (end of `length` characters)
        end = begin + length

        ### The character after the end_key.
        after = haystack[floor + end + 1]

        ### advance the floor to find the next leading key
        floor += end + len(end_key)
        pattern_keys = hs[begin:end].split(delimiter)

        ### Check for isolation key and empty keys (MRSM{}).
        force_literal = False
        keys = [k for k in pattern_keys]
        if str(keys[0]).startswith(literal_key):
            keys[0] = str(keys[0])[len(literal_key):]
            force_literal = True
        if len(keys) == 1 and keys[0] == '':
            keys = []

        ### Evaluate the parsed keys to extract the referenced value.
        ### TODO This needs to be recursive for chaining symlinks together.
        valid, value = get_config(
            *keys, substitute=False, as_tuple=True, write_missing=False, sync_files=False
        )
        if not valid:
            continue

        ### pattern to search and replace
        pattern = leading_key + begin_key + delimiter.join(pattern_keys) + end_key
        ### store patterns and values
        patterns[pattern] = value
        ### Determine whether the pattern occured inside a string or is an isolated, direct symlink.
        isolated_patterns[pattern] = (prior == '"' and after == '"')
        literal_patterns[pattern] = force_literal

    ### replace the patterns with the values
    for pattern, value in patterns.items():
        if isolated_patterns[pattern]:
            haystack = haystack.replace(json.dumps(pattern), json.dumps(value))
        elif literal_patterns[pattern]:
            haystack = haystack.replace(pattern, json.dumps(value).replace('"', '\\"').replace("'", "\\'"))
        else:
            haystack = haystack.replace(pattern, str(value))

    ### parse back into dict
    parsed_config = json.loads(haystack)

    symlinks = dict()
    if keep_symlinks:
        ### Keep track of symlinks for writing back to a file.
        for _keys, _pattern in _links:
            s = symlinks
            for k in _keys[:-1]:
                if k not in s:
                    s[k] = {}
                s = s[k]
            s[_keys[-1]] = _pattern

        from meerschaum.config._patch import apply_patch_to_config
        from meerschaum.config.static import _static_config
        symlinks_key = _static_config()['config']['symlinks_key']
        if symlinks_key not in parsed_config:
            parsed_config[symlinks_key] = {}
        parsed_config[symlinks_key] = apply_patch_to_config(parsed_config[symlinks_key], symlinks)

    return parsed_config
def get_possible_keys() -> List[str]:
    """
    Return a sorted list of possible configuration keys
    (default keys plus keys derived from files on disk).
    """
    import os
    from meerschaum.config._paths import CONFIG_DIR_PATH
    from meerschaum.config._default import default_config
    # Start from the default configuration's top-level keys.
    keys = set(default_config)
    # Add one key per config file, stripping the extension.
    for filename in os.listdir(CONFIG_DIR_PATH):
        keys.add('.'.join(filename.split('.')[:-1]))
    return sorted(keys)
def get_keyfile_path(key : str, create_new : bool = False) -> Optional[pathlib.Path]:
    """
    Determine the path of the file backing a configuration key.
    If the key has no file, return a path for a new default-filetype file
    when `create_new` is True, else None.
    """
    import os, pathlib
    from meerschaum.config._paths import CONFIG_DIR_PATH
    try:
        # read_config's second return element lists the filenames read.
        filename = read_config(
            keys=[key], with_filenames=True, write_missing=False, substitute=False
        )[1][0]
    except IndexError:
        if not create_new:
            return None
        from meerschaum.config.static import _static_config
        filename = key + '.' + _static_config()['config']['default_filetype']
    return pathlib.Path(os.path.join(CONFIG_DIR_PATH, filename))
|
precontext/TaobaoUnion | app/src/main/java/com/program/taobaounion/view/IOnSellPageCallback.java | <reponame>precontext/TaobaoUnion<filename>app/src/main/java/com/program/taobaounion/view/IOnSellPageCallback.java
package com.program.taobaounion.view;
import com.program.taobaounion.base.IBaseCallback;
import com.program.taobaounion.model.domain.OnSellContent;
public interface IOnSellPageCallback extends IBaseCallback {

    /**
     * Called when the initial on-sell (discounted) content has loaded.
     *
     * @param result the loaded content
     */
    void onContentLoadedSuccess(OnSellContent result);

    /**
     * Called with the result of a load-more request.
     *
     * @param moreResult the additional content
     */
    void onMoreLoaded(OnSellContent moreResult);

    /**
     * Called when loading more content failed.
     */
    void onMoreLoadedError();

    /**
     * Called when there is no more content to load.
     * (Note: method name contains a typo — "Loade" — kept for
     * compatibility with existing implementors.)
     */
    void onMoreLoadeEmpty();
}
|
riverar/microsoft-ui-xaml | test/TestAppCX/MainPage.xaml.cpp | <reponame>riverar/microsoft-ui-xaml<filename>test/TestAppCX/MainPage.xaml.cpp
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in the project root for license information.
//
// MainPage.xaml.cpp
// Implementation of the MainPage class.
//
#include "pch.h"
#include "MainPage.xaml.h"
#include "LeakCycleTestCX.xaml.h"
#include "MenuBarTestPage.xaml.h"
#include "CornerRadiusTestPage.xaml.h"
#include "TreeViewTestPage.xaml.h"
using namespace TestAppCX;
using namespace Platform;
using namespace Windows::Foundation;
using namespace Windows::Foundation::Collections;
using namespace Windows::UI::Xaml;
using namespace Windows::UI::Xaml::Controls;
using namespace Windows::UI::Xaml::Controls::Primitives;
using namespace Windows::UI::Xaml::Data;
using namespace Windows::UI::Xaml::Input;
using namespace Windows::UI::Xaml::Interop;
using namespace Windows::UI::Xaml::Media;
using namespace Windows::UI::Xaml::Navigation;
// Constructs the main test page and loads its XAML-defined visual tree.
MainPage::MainPage()
{
    InitializeComponent();
}
// Navigates the root frame to the leak-cycle test page.
void TestAppCX::MainPage::GoToLeakTestControlPage(Platform::Object^ sender, Windows::UI::Xaml::RoutedEventArgs^ e)
{
    // dynamic_cast yields nullptr if Application::Current is not our App;
    // guard instead of dereferencing blindly.
    auto app = dynamic_cast<App^>(Application::Current);
    if (app != nullptr)
    {
        app->RootFrame->Navigate(TypeName(LeakCycleTestCX::typeid), nullptr);
    }
}
// Navigates the root frame to the menu-bar test page.
void TestAppCX::MainPage::GoToMenuBarTestPage(Platform::Object^ sender, Windows::UI::Xaml::RoutedEventArgs^ e)
{
    // Guard against a failed cast (nullptr) before dereferencing.
    auto app = dynamic_cast<App^>(Application::Current);
    if (app != nullptr)
    {
        app->RootFrame->Navigate(TypeName(MenuBarTestPage::typeid), nullptr);
    }
}
// Navigates the root frame to the corner-radius test page.
void TestAppCX::MainPage::GoToCornerRadiusTestPage(Platform::Object^ sender, Windows::UI::Xaml::RoutedEventArgs^ e)
{
    // Guard against a failed cast (nullptr) before dereferencing.
    auto app = dynamic_cast<App^>(Application::Current);
    if (app != nullptr)
    {
        app->RootFrame->Navigate(TypeName(CornerRadiusTestPage::typeid), nullptr);
    }
}
// Navigates the root frame to the tree-view test page.
void TestAppCX::MainPage::GoToTreeViewTestPage(Platform::Object^ sender, Windows::UI::Xaml::RoutedEventArgs^ e)
{
    // Guard against a failed cast (nullptr) before dereferencing.
    auto app = dynamic_cast<App^>(Application::Current);
    if (app != nullptr)
    {
        app->RootFrame->Navigate(TypeName(TreeViewTestPage::typeid), nullptr);
    }
}
|
eenurkka/incubator-nuttx | arch/arm/src/lpc31xx/lpc31_resetclks.c | <reponame>eenurkka/incubator-nuttx
/****************************************************************************
* arch/arm/src/lpc31xx/lpc31_resetclks.c
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership. The
* ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
****************************************************************************/
/****************************************************************************
* Included Files
****************************************************************************/
#include <nuttx/config.h>
#include <stdint.h>
#include <arch/board/board.h>
#include "lpc31_cgudrvr.h"
/****************************************************************************
* Pre-processor Definitions
****************************************************************************/
/****************************************************************************
* Private Data
****************************************************************************/
/****************************************************************************
* Public Data
****************************************************************************/
/****************************************************************************
* Private Functions
****************************************************************************/
/****************************************************************************
* Public Functions
****************************************************************************/
/****************************************************************************
* Name: lpc31_resetclks
*
* Description:
* Put all clocks into a known, initial state
*
****************************************************************************/
void lpc31_resetclks(void)
{
  uint32_t regaddr;   /* Scratch: register address for read-modify-write */
  uint32_t regval;    /* Scratch: register value for read-modify-write */
  int bcrndx;         /* Index of the domain's BCR register, if any */
  int esrndx;         /* Index of the clock's ESR register, if any */
  int i;

  /* Switch all domain reference clocks to FFAST (the fast oscillator),
   * so every domain runs from a known reference before reconfiguration.
   */

  for (i = 0; i < CGU_NDOMAINS; i++)
    {
      /* Switch reference clock in to FFAST */

      lpc31_selectfreqin((enum lpc31_domainid_e)i, CGU_FS_FFAST);

      /* Check if the domain has a BCR (not all domains do) */

      bcrndx = lpc31_bcrndx((enum lpc31_domainid_e)i);
      if (bcrndx != BCRNDX_INVALID)
        {
          /* Yes.. disable all BCRs */

          putreg32(0, LPC31_CGU_BCR(bcrndx));
        }
    }

  /* Disable all clocks except those that are necessary */

  for (i = CLKID_FIRST; i <= CLKID_LAST; i++)
    {
      /* Check if this clock has an ESR register */

      esrndx = lpc31_esrndx((enum lpc31_clockid_e)i);
      if (esrndx != ESRNDX_INVALID)
        {
          /* Yes.. Clear the clocks ESR to deselect fractional divider */

          putreg32(0, LPC31_CGU_ESR(esrndx));
        }

      /* Enable external enabling for all possible clocks to conserve power */

      lpc31_enableexten((enum lpc31_clockid_e)i);

      /* Set enable-out's for only the following clocks; all others have
       * their enable-out cleared to save power.
       */

      regaddr = LPC31_CGU_PCR(i);
      regval = getreg32(regaddr);
      if (i == (int)CLKID_ARM926BUSIFCLK || i == (int)CLKID_MPMCCFGCLK)
        {
          regval |= CGU_PCR_ENOUTEN;
        }
      else
        {
          regval &= ~CGU_PCR_ENOUTEN;
        }

      putreg32(regval, regaddr);

      /* Set/clear the RUN bit in the PCR register of all clocks, depending
       * upon if the clock is needed by the board logic or not
       */

      lpc31_defclk((enum lpc31_clockid_e)i);
    }

  /* Disable all fractional dividers (clear each FDC RUN bit) */

  for (i = 0; i < CGU_NFRACDIV; i++)
    {
      regaddr = LPC31_CGU_FDC(i);
      regval = getreg32(regaddr);
      regval &= ~CGU_FDC_RUN;
      putreg32(regval, regaddr);
    }
}
|
ohmdrj/vax | src/main/java/cz/req/ax/ui/LayoutFiller.java | <reponame>ohmdrj/vax<gh_stars>0
package cz.req.ax.ui;
import com.vaadin.ui.Layout;
/**
* @author <a href="mailto:<EMAIL>"><NAME></a>
* Date: 26.2.2016
*/
public interface LayoutFiller {

    /**
     * Populates the given layout with this filler's components.
     *
     * @param layout the Vaadin layout to fill
     */
    void fillLayout(Layout layout);
}
|
shwilliam/r10-app | js/components/Title.styles.js | import {StyleSheet} from 'react-native'
import THEME from '../theme'
// Shared text style for screen titles. All values come from the central
// THEME object so typography and spacing stay consistent across the app.
const styles = StyleSheet.create({
  title: {
    fontFamily: THEME.FONT.FAMILY.REGULAR,
    fontSize: THEME.FONT.SIZE.TITLE,
    // Asymmetric vertical spacing: half a unit above, a full unit below.
    marginTop: THEME.SPACING.VERTICAL / 2,
    marginBottom: THEME.SPACING.VERTICAL,
  },
})

export default styles
|
Pondorasti/Competitive-Programming | Advent of Code/2021/7/2.py | <filename>Advent of Code/2021/7/2.py
import math
with open('input.txt') as f:
lines = f.readlines()
crabs = []
for interval in lines[0].split(","):
crabs.append(int(interval))
minVal = min(crabs)
maxVal = max(crabs) + 1
crabFreq = [0 for _ in range(maxVal)]
for crab in crabs:
crabFreq[crab] += 1
partialSums = [(0, 0) for _ in range(maxVal)]
reversedPartialSums = [(0, 0) for _ in range(maxVal + 1)]
partialSums[0] = (crabFreq[0], 0)
for i in range(minVal, maxVal):
prevCrab, prevFuel = partialSums[i - 1]
crabCount = prevCrab + crabFreq[i]
for j in range(i, maxVal):
partialSums[j] = (
crabCount,
partialSums[j][1] + crabFreq[i] * (j - i)
)
for i in range(minVal + 1, maxVal):
prevCrab, prevFuel = partialSums[i - 1]
curCrab, curFuel = partialSums[i]
partialSums[i] = (curCrab, prevFuel + curFuel)
reversedPartialSums[maxVal - 1] = (crabFreq[maxVal - 1], 0)
for i in reversed(range(minVal, maxVal)):
prevCrab, prevFuel = reversedPartialSums[i + 1]
crabCount = prevCrab + crabFreq[i]
for j in reversed(range(minVal, i)):
reversedPartialSums[j] = (
crabCount,
reversedPartialSums[j][1] + crabFreq[i] * (i - j)
)
for i in reversed(range(minVal, maxVal - 1)):
prevCrab, prevFuel = reversedPartialSums[i + 1]
curCrab, curFuel = reversedPartialSums[i]
reversedPartialSums[i] = (curCrab, prevFuel + curFuel)
ans = math.inf
for i in range(minVal, maxVal):
ans = min(ans, partialSums[i][1] + reversedPartialSums[i][1])
print(ans)
|
landonreed/GeoGit | src/core/src/main/java/org/geogit/storage/text/TextValueSerializer.java | <reponame>landonreed/GeoGit
/* Copyright (c) 2013 OpenPlans. All rights reserved.
* This code is licensed under the BSD New License, available at the root
* application directory.
*/
package org.geogit.storage.text;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.geogit.storage.FieldType;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.collect.Lists;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.io.ParseException;
import com.vividsolutions.jts.io.WKTReader;
/**
* A text serializer for attribute values
*
*/
/**
 * A text serializer for attribute values.
 * <p>
 * Values are converted to and from plain text, keyed by their
 * {@link FieldType}. Arrays are written as space-separated elements wrapped
 * in square brackets (e.g. {@code [1 2 3]}); every array parser accepts its
 * input with or without the surrounding brackets.
 */
public class TextValueSerializer {

    /** Converts a single attribute value to and from its text form. */
    static interface ValueSerializer {
        public Object fromString(String in) throws ParseException;

        public String toString(Object value);
    }

    /** Base serializer whose text form is the value's own {@code toString()}. */
    static abstract class DefaultValueSerializer implements ValueSerializer {
        @Override
        public String toString(Object value) {
            return value.toString();
        }
    }

    /**
     * Base serializer for array values: elements joined by single spaces,
     * wrapped in square brackets.
     */
    static abstract class ArraySerializer implements ValueSerializer {
        @Override
        public String toString(Object value) {
            return "[" + Joiner.on(" ").join((Object[]) value) + "]";
        }

        /**
         * Splits a serialized array into its element tokens, stripping the
         * surrounding brackets written by {@link #toString(Object)} when
         * present. Previously only the SHORT array parser stripped brackets,
         * so round-tripping any other array type failed to parse.
         */
        protected String[] tokens(String in) {
            return in.replace("[", "").replace("]", "").split(" ");
        }
    }

    /** Registry of per-type serializers used by asString/fromString. */
    static Map<FieldType, ValueSerializer> serializers = new HashMap<FieldType, ValueSerializer>();
    static {
        serializers.put(FieldType.NULL, new DefaultValueSerializer() {
            @Override
            public Object fromString(String in) {
                return null;
            }

            @Override
            public String toString(Object value) {
                return "";
            }
        });
        serializers.put(FieldType.BOOLEAN, new DefaultValueSerializer() {
            @Override
            public Object fromString(String in) {
                // valueOf avoids the deprecated boxing constructors throughout.
                return Boolean.valueOf(in);
            }
        });
        serializers.put(FieldType.BYTE, new DefaultValueSerializer() {
            @Override
            public Object fromString(String in) {
                return Byte.valueOf(in);
            }
        });
        serializers.put(FieldType.SHORT, new DefaultValueSerializer() {
            @Override
            public Object fromString(String in) {
                return Short.valueOf(in);
            }
        });
        serializers.put(FieldType.INTEGER, new DefaultValueSerializer() {
            @Override
            public Object fromString(String in) {
                return Integer.valueOf(in);
            }
        });
        serializers.put(FieldType.LONG, new DefaultValueSerializer() {
            @Override
            public Object fromString(String in) {
                return Long.valueOf(in);
            }
        });
        serializers.put(FieldType.FLOAT, new DefaultValueSerializer() {
            @Override
            public Object fromString(String in) {
                return Float.valueOf(in);
            }
        });
        serializers.put(FieldType.DOUBLE, new DefaultValueSerializer() {
            @Override
            public Object fromString(String in) {
                return Double.valueOf(in);
            }
        });
        serializers.put(FieldType.STRING, new DefaultValueSerializer() {
            @Override
            public Object fromString(String in) {
                return in;
            }
        });
        serializers.put(FieldType.BOOLEAN_ARRAY, new ArraySerializer() {
            @Override
            public Object fromString(String in) {
                List<Boolean> list = Lists.newArrayList();
                for (String token : tokens(in)) {
                    list.add(Boolean.valueOf(token));
                }
                return list.toArray(new Boolean[0]);
            }
        });
        serializers.put(FieldType.BYTE_ARRAY, new ArraySerializer() {
            @Override
            public Object fromString(String in) {
                List<Byte> list = Lists.newArrayList();
                for (String token : tokens(in)) {
                    list.add(Byte.valueOf(token));
                }
                return list.toArray(new Byte[0]);
            }
        });
        serializers.put(FieldType.SHORT_ARRAY, new ArraySerializer() {
            @Override
            public Object fromString(String in) {
                List<Short> list = Lists.newArrayList();
                for (String token : tokens(in)) {
                    list.add(Short.valueOf(token));
                }
                return list.toArray(new Short[0]);
            }
        });
        serializers.put(FieldType.INTEGER_ARRAY, new ArraySerializer() {
            @Override
            public Object fromString(String in) {
                List<Integer> list = Lists.newArrayList();
                for (String token : tokens(in)) {
                    list.add(Integer.valueOf(token));
                }
                return list.toArray(new Integer[0]);
            }
        });
        serializers.put(FieldType.LONG_ARRAY, new ArraySerializer() {
            @Override
            public Object fromString(String in) {
                List<Long> list = Lists.newArrayList();
                for (String token : tokens(in)) {
                    list.add(Long.valueOf(token));
                }
                return list.toArray(new Long[0]);
            }
        });
        serializers.put(FieldType.FLOAT_ARRAY, new ArraySerializer() {
            @Override
            public Object fromString(String in) {
                List<Float> list = Lists.newArrayList();
                for (String token : tokens(in)) {
                    list.add(Float.valueOf(token));
                }
                return list.toArray(new Float[0]);
            }
        });
        serializers.put(FieldType.DOUBLE_ARRAY, new ArraySerializer() {
            @Override
            public Object fromString(String in) {
                List<Double> list = Lists.newArrayList();
                for (String token : tokens(in)) {
                    list.add(Double.valueOf(token));
                }
                // BUGFIX: was toArray(new Byte[0]), which throws
                // ArrayStoreException for any non-empty double array.
                return list.toArray(new Double[0]);
            }
        });
        // All geometry types share a WKT-based serializer.
        ValueSerializer geometry = new ValueSerializer() {
            @Override
            public Object fromString(String in) throws ParseException {
                return new WKTReader().read(in);
            }

            @Override
            public String toString(Object value) {
                return ((Geometry) value).toText();
            }
        };
        serializers.put(FieldType.GEOMETRY, geometry);
        serializers.put(FieldType.POINT, geometry);
        serializers.put(FieldType.LINESTRING, geometry);
        serializers.put(FieldType.POLYGON, geometry);
        serializers.put(FieldType.MULTIPOINT, geometry);
        serializers.put(FieldType.MULTILINESTRING, geometry);
        serializers.put(FieldType.MULTIPOLYGON, geometry);
        serializers.put(FieldType.GEOMETRYCOLLECTION, geometry);
        serializers.put(FieldType.UUID, new DefaultValueSerializer() {
            @Override
            public Object fromString(String in) {
                return UUID.fromString(in);
            }
        });
        serializers.put(FieldType.BIG_INTEGER, new DefaultValueSerializer() {
            @Override
            public Object fromString(String in) {
                return new BigInteger(in);
            }
        });
        serializers.put(FieldType.BIG_DECIMAL, new DefaultValueSerializer() {
            @Override
            public Object fromString(String in) {
                return new BigDecimal(in);
            }
        });
        // Date/time types serialize as epoch milliseconds.
        serializers.put(FieldType.DATETIME, new ValueSerializer() {
            @Override
            public Object fromString(String in) {
                return new java.util.Date(Long.parseLong(in));
            }

            @Override
            public String toString(Object value) {
                return String.valueOf(((java.util.Date) value).getTime());
            }
        });
        serializers.put(FieldType.DATE, new ValueSerializer() {
            @Override
            public Object fromString(String in) {
                return new java.sql.Date(Long.parseLong(in));
            }

            @Override
            public String toString(Object value) {
                return String.valueOf(((java.sql.Date) value).getTime());
            }
        });
        serializers.put(FieldType.TIME, new ValueSerializer() {
            @Override
            public Object fromString(String in) {
                return new java.sql.Time(Long.parseLong(in));
            }

            @Override
            public String toString(Object value) {
                return String.valueOf(((java.sql.Time) value).getTime());
            }
        });
        // Timestamps serialize as "<millis> <nanos>" to keep full precision.
        serializers.put(FieldType.TIMESTAMP, new ValueSerializer() {
            @Override
            public Object fromString(String in) {
                String[] millisnanos = in.split(" ");
                java.sql.Timestamp ts = new java.sql.Timestamp(Long.parseLong(millisnanos[0]));
                ts.setNanos(Integer.parseInt(millisnanos[1]));
                return ts;
            }

            @Override
            public String toString(Object value) {
                java.sql.Timestamp ts = (java.sql.Timestamp) value;
                return new StringBuilder().append(ts.getTime()).append(' ').append(ts.getNanos())
                        .toString();
            }
        });
    }

    /**
     * Returns a string representation of the passed field value.
     *
     * @param opt the value to serialize; an absent value maps to FieldType.NULL
     * @throws IllegalArgumentException if the value's type is unsupported
     */
    public static String asString(Optional<Object> opt) {
        final FieldType type = FieldType.forValue(opt);
        if (serializers.containsKey(type)) {
            return serializers.get(type).toString(opt.orNull());
        } else {
            throw new IllegalArgumentException("The specified type is not supported: " + type);
        }
    }

    /**
     * Creates a value object from its string representation.
     *
     * @param type the declared field type of the serialized value
     * @param in the serialized form
     * @return the deserialized value
     * @throws IllegalArgumentException if parsing fails or the type is unsupported
     */
    public static Object fromString(FieldType type, String in) {
        if (serializers.containsKey(type)) {
            try {
                return serializers.get(type).fromString(in);
            } catch (ParseException e) {
                throw new IllegalArgumentException("Unable to parse wrong value: " + in);
            } catch (NumberFormatException e) {
                throw new IllegalArgumentException("Unable to parse wrong value: " + in);
            }
        } else {
            throw new IllegalArgumentException("The specified type is not supported");
        }
    }
}
|
perfectrecall/aws-sdk-cpp | aws-cpp-sdk-comprehendmedical/source/model/InferSNOMEDCTRequest.cpp | /**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/comprehendmedical/model/InferSNOMEDCTRequest.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::ComprehendMedical::Model;
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
// Default-constructs the request; the flag records that the Text field has
// not been set, so SerializePayload() will omit it.
InferSNOMEDCTRequest::InferSNOMEDCTRequest() :
    m_textHasBeenSet(false)
{
}
// Builds the JSON request body. Only fields explicitly set on the request
// are emitted, so an untouched request serializes to an empty JSON object.
Aws::String InferSNOMEDCTRequest::SerializePayload() const
{
  JsonValue payload;

  if(m_textHasBeenSet)
  {
    payload.WithString("Text", m_text);
  }

  return payload.View().WriteReadable();
}
// Adds the X-Amz-Target header that routes this request to the
// ComprehendMedical InferSNOMEDCT operation.
Aws::Http::HeaderValueCollection InferSNOMEDCTRequest::GetRequestSpecificHeaders() const
{
  Aws::Http::HeaderValueCollection headers;
  headers.insert(Aws::Http::HeaderValuePair("X-Amz-Target", "ComprehendMedical_20181030.InferSNOMEDCT"));
  return headers;
}
|
kawasin73/tcpip-stack | test/raw_tap_test.c | <filename>test/raw_tap_test.c
#include <signal.h>
#include <stdio.h>
#include "raw/tap.h"
/* Set asynchronously from the SIGINT handler to request shutdown;
 * sig_atomic_t guarantees the flag is safe to write from a signal handler. */
volatile sig_atomic_t terminate;

/* SIGINT handler: flag the main loop to exit. */
static void on_signal(int s) { terminate = 1; }

/* Receive callback: report the size of each frame read from the TAP device. */
static void rx_handler(uint8_t *frame, size_t len, void *arg) {
  fprintf(stderr, "receive %zu octets\n", len);
}
/* Smoke test for the raw TAP driver: opens "tap1", prints its MAC address,
 * then pumps received frames until interrupted with SIGINT (Ctrl-C). */
int main(int argc, char const *argv[]) {
  char *name = "tap1";
  struct tap_dev *dev;
  uint8_t addr[6];

  signal(SIGINT, on_signal);
  dev = tap_dev_open(name);
  if (dev == NULL) {
    return -1;
  }
  tap_dev_addr(name, addr, sizeof(addr));
  fprintf(stderr, "[%s] %02x:%02x:%02x:%02x:%02x:%02x\n", name, addr[0],
          addr[1], addr[2], addr[3], addr[4], addr[5]);
  /* 1s poll timeout so the terminate flag is re-checked regularly. */
  while (!terminate) {
    tap_dev_rx(dev, rx_handler, dev, 1000);
  }
  tap_dev_close(dev);
  printf("closed\n"); /* BUGFIX: trailing newline was missing */
  return 0;
}
|
nitrictech/apis | jvm/src/main/java/io/nitric/proto/resource/v1/TopicResourceOrBuilder.java | <filename>jvm/src/main/java/io/nitric/proto/resource/v1/TopicResourceOrBuilder.java
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: proto/resource/v1/resource.proto
package io.nitric.proto.resource.v1;
/**
 * Accessor interface for {@code nitric.resource.v1.TopicResource}. The
 * message declares no fields, so no field accessors are generated here.
 */
public interface TopicResourceOrBuilder extends
    // @@protoc_insertion_point(interface_extends:nitric.resource.v1.TopicResource)
    com.google.protobuf.MessageOrBuilder {
}
|
aklomp/sse-intrinsics-tests | tests/exhaustive_16bit.c | #include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <emmintrin.h>
#include "../lib/mm_cmple_epu16.h"
#include "../lib/mm_cmpgt_epu16.h"
#include "../lib/mm_cmplt_epu16.h"
#include "../lib/mm_cmpge_epu16.h"
#include "../lib/mm_blendv_si128.h"
#include "../lib/mm_min_epu16.h"
#include "../lib/mm_max_epu16.h"
#include "../lib/mm_absdiff_epu16.h"
#include "../lib/mm_div255_epu16.h"
// Inputs to one test invocation: an (i, j) pair that is broadcast across
// all SSE lanes, plus the scalar result every lane is expected to hold.
struct testcase {
	const uint16_t i;	// First input value
	const uint16_t j;	// Second input value
	uint16_t expect;	// Expected output value (filled in by the run callback)
};

// Struct to define a simple testcase: a named vector op together with the
// callback that executes it and computes the scalar expectation.
struct test {
	const char *name;	// Function name printed before the run
	const char *op;		// Operator symbol used in failure messages
	__m128i (*run) (struct testcase *);	// Executes the op under test
};
// Returns true when all eight 16-bit lanes hold the same value.
static bool
epu16_all_same (uint16_t buf[8])
{
	const uint16_t first = buf[0];

	for (int lane = 1; lane < 8; lane++)
		if (buf[lane] != first)
			return false;

	return true;
}
/* Runs one two-operand test exhaustively over all 2^32 (i, j) pairs of
 * 16-bit inputs. Each pair is broadcast across the lanes; the vector
 * result must be lane-uniform and equal to the scalar expectation that
 * the testcase callback computed. Returns true only if every pair passed. */
static bool
test_epu16_two (struct test *test)
{
	uint16_t buf[8] __attribute__ ((aligned(16)));
	bool pass = true;

	puts(test->name);
	for (int i = 0; i < 0x10000; i++) {
		for (int j = 0; j < 0x10000; j++) {

			// Create testcase
			struct testcase tc = {
				.i = i,
				.j = j,
			};

			// Run testcase:
			__m128i c = test->run(&tc);

			// Save result to array:
			_mm_store_si128((__m128i *)buf, c);

			// Check that all elements in the result are identical:
			if (!epu16_all_same(buf)) {
				printf("FAIL: %d %s %d, not all identical\n", tc.i, test->op, tc.j);
				pass = false;
				continue;
			}

			// Does the expected result differ?
			if (buf[0] != tc.expect) {
				printf("FAIL: %d %s %d, expected %d, got %d\n", tc.i, test->op, tc.j, tc.expect, buf[0]);
				pass = false;
				continue;
			}
		}
	}
	return pass;
}
static __m128i
test_mm_cmplt_epu16 (struct testcase *tc)
{
tc->expect = (tc->i < tc->j) ? 0xFFFF : 0x0000;
return _mm_cmplt_epu16(
_mm_set1_epi16(tc->i),
_mm_set1_epi16(tc->j));
}
static __m128i
test_mm_cmple_epu16 (struct testcase *tc)
{
tc->expect = (tc->i <= tc->j) ? 0xFFFF : 0x0000;
return _mm_cmple_epu16(
_mm_set1_epi16(tc->i),
_mm_set1_epi16(tc->j));
}
static __m128i
test_mm_cmpge_epu16 (struct testcase *tc)
{
tc->expect = (tc->i >= tc->j) ? 0xFFFF : 0x0000;
return _mm_cmpge_epu16(
_mm_set1_epi16(tc->i),
_mm_set1_epi16(tc->j));
}
static __m128i
test_mm_cmpgt_epu16 (struct testcase *tc)
{
tc->expect = (tc->i > tc->j) ? 0xFFFF : 0x0000;
return _mm_cmpgt_epu16(
_mm_set1_epi16(tc->i),
_mm_set1_epi16(tc->j));
}
static __m128i
test_mm_min_epu16 (struct testcase *tc)
{
tc->expect = (tc->i > tc->j) ? tc->j : tc->i;
return _mm_min_epu16(
_mm_set1_epi16(tc->i),
_mm_set1_epi16(tc->j));
}
static __m128i
test_mm_max_epu16 (struct testcase *tc)
{
tc->expect = (tc->i > tc->j) ? tc->i : tc->j;
return _mm_max_epu16(
_mm_set1_epi16(tc->i),
_mm_set1_epi16(tc->j));
}
static __m128i
test_mm_absdiff_epu16 (struct testcase *tc)
{
tc->expect = (tc->i > tc->j) ? (tc->i - tc->j) : (tc->j - tc->i);
return _mm_absdiff_epu16(
_mm_set1_epi16(tc->i),
_mm_set1_epi16(tc->j));
}
/* Exhaustively checks _mm_div255_epu16 against plain integer division by
 * 255 over its valid input domain. */
static bool
test_mm_div255_epu16 (void)
{
	bool pass = true;

	// Only works when i < 256*255 = 65280:
	// (result is 8-bit):
	puts("_mm_div255_epu16");
	for (int i = 0; i < 0xFF00; i++) {
		uint16_t c = _mm_extract_epi16(_mm_div255_epu16(_mm_set1_epi16(i)), 1);
		if (c != (i / 255)) {
			printf("FAIL: div255(%d), got %d, expected %d\n", i, c, (i / 255));
			pass = false;
		}
	}
	return pass;
}
/* Driver: exhaustively run each two-operand test, then the div255 special
 * case; exit nonzero if anything failed. */
int
main (void)
{
	// Map for testing simple bytewise functions:
	struct test map[] = {
		{ "_mm_cmplt_epu16",   "<",       test_mm_cmplt_epu16   },
		{ "_mm_cmple_epu16",   "<=",      test_mm_cmple_epu16   },
		{ "_mm_cmpge_epu16",   ">=",      test_mm_cmpge_epu16   },
		{ "_mm_cmpgt_epu16",   ">",       test_mm_cmpgt_epu16   },
		{ "_mm_min_epu16",     "min",     test_mm_min_epu16     },
		{ "_mm_max_epu16",     "max",     test_mm_max_epu16     },
		{ "_mm_absdiff_epu16", "absdiff", test_mm_absdiff_epu16 },
	};
	const size_t ntests = sizeof(map) / sizeof(map[0]);
	bool pass = true;

	for (size_t i = 0; i < ntests; i++) {
		pass &= test_epu16_two(&map[i]);
	}

	// div255 has a restricted input domain, so it is handled separately:
	pass &= test_mm_div255_epu16();

	return pass ? 0 : 1;
}
|
bence21/Projector | Projector-server/src/main/java/com/bence/projector/server/backend/service/SongCollectionService.java | <reponame>bence21/Projector<filename>Projector-server/src/main/java/com/bence/projector/server/backend/service/SongCollectionService.java
package com.bence.projector.server.backend.service;
import com.bence.projector.server.backend.model.Language;
import com.bence.projector.server.backend.model.Song;
import com.bence.projector.server.backend.model.SongCollection;
import java.util.Date;
import java.util.List;
/**
 * Persistence operations for {@link SongCollection} entities beyond the
 * generic {@link BaseService} CRUD methods.
 */
public interface SongCollectionService extends BaseService<SongCollection> {

    /**
     * Finds collections in the given language that were modified after the
     * given date. (The doubled "AndAnd" in the name is kept for caller
     * compatibility.)
     */
    List<SongCollection> findAllByLanguageAndAndModifiedDateGreaterThan(Language language, Date lastModifiedDate);

    /** Returns whether the two collections are considered matching — see implementation for the exact criteria. */
    boolean matches(SongCollection savedSongCollection, SongCollection songCollection1);

    /** Finds every collection containing the given song. */
    List<SongCollection> findAllBySong(Song song);

    /** Saves the collection without cascading to its foreign references. */
    SongCollection saveWithoutForeign(SongCollection songCollection);

    /** Looks up a single collection by its UUID. */
    SongCollection findOneByUuid(String uuid);
}
|
mattinsler/com.lowereast.guiceymongo | src/examples/CollectionConfigurationExample.java | /**
* Copyright (C) 2010 Lowereast Software
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.lowereast.guiceymongo.guice.GuiceyMongo;
import com.lowereast.guiceymongo.guice.annotation.GuiceyMongoCollection;
import com.mongodb.DBCollection;
/**
 * Example showing how to map a logical collection name to a concrete
 * MongoDB collection inside a named GuiceyMongo configuration, and then
 * inject the resulting {@link DBCollection}.
 */
public class CollectionConfigurationExample {

    /** Prints the injected collection and its database to show the wiring worked. */
    @Inject
    CollectionConfigurationExample(@GuiceyMongoCollection(Collections.Data) DBCollection collection) {
        System.out.println(collection.getDB() + " : " + collection);
    }

    public static void main(String[] args) {
        Injector injector = Guice.createInjector(
                // create the test configuration
                GuiceyMongo.configure(Configurations.Test)
                        .mapDatabase(Databases.Main).to("test_db")
                        // collection names are global, you just need to assign the collection to a database
                        .mapCollection(Collections.Data).to("data").inDatabase(Databases.Main),
                // choose the configuration to use
                GuiceyMongo.chooseConfiguration(Configurations.Test)
        );
        // Instantiation triggers the injected constructor above.
        injector.getInstance(CollectionConfigurationExample.class);
    }
}
|
rpiaggio/ocs | bundle/jsky.app.ot/src/main/scala/jsky/app/ot/gemini/editor/targetComponent/details2/ForwardingTelescopePosWatcher.scala | package jsky.app.ot.gemini.editor.targetComponent.details2
import edu.gemini.pot.sp.ISPNode
import edu.gemini.shared.util.immutable.{None => GNone, Option => GOption}
import edu.gemini.spModel.obs.context.ObsContext
import edu.gemini.spModel.target.{SPSkyObject, TelescopePosWatcher, WatchablePos}
import jsky.app.ot.gemini.editor.targetComponent.TelescopePosEditor
/**
 * A TelescopePosEditor that also watches the sky object it edits: whenever
 * the watched position reports a change, the wrapped editor's `edit` is
 * re-invoked with the most recently seen context, object, and node.
 *
 * `initializer` supplies the object to watch before the first `edit` call.
 */
final class ForwardingTelescopePosWatcher[T <: SPSkyObject](tpe: TelescopePosEditor[T],
                                                            initializer: () => T)
  extends TelescopePosEditor[T] with TelescopePosWatcher {

  // Most recently edited object; starts as the initializer's object.
  private[this] var spt: T                    = initializer()
  // Last observation context passed to `edit` (empty until the first edit).
  private[this] var ctx: GOption[ObsContext]  = GNone.instance[ObsContext]
  // Last node passed to `edit`; null until the first edit.
  private[this] var node: ISPNode             = null

  def edit(obsContext: GOption[ObsContext], spSkyObject: T, ispNode: ISPNode): Unit = {
    require(obsContext != null, "obsContext should never be null")
    require(spSkyObject != null, "spSkyObject should never be null")

    // If this is a new target, switch our watchers
    if (spt != spSkyObject) {
      spt.deleteWatcher(this)
      spSkyObject.addWatcher(this)
    }

    // Remember the context and target so `telescopePosUpdate` can call `edit`
    ctx  = obsContext
    spt  = spSkyObject
    node = ispNode
  }

  // Forward position-change notifications to the wrapped editor using the
  // cached context/object/node from the last `edit`.
  def telescopePosUpdate(tp: WatchablePos): Unit =
    tpe.edit(ctx, spt, node)
}
|
SnehlataKumari/lems | dist/services/liveClass.service.js | "use strict";
// NOTE(review): this is compiled TypeScript output (see the sourceMappingURL
// trailer) — fix the TypeScript source rather than editing this file.

// TypeScript emit helper: applies decorators to a class or member.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
// TypeScript emit helper: records design-time type metadata for DI.
var __metadata = (this && this.__metadata) || function (k, v) {
    if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
// TypeScript emit helper: wraps a parameter decorator.
var __param = (this && this.__param) || function (paramIndex, decorator) {
    return function (target, key) { decorator(target, key, paramIndex); }
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.LiveClassService = void 0;
const common_1 = require("@nestjs/common");
const db_service_1 = require("./db.service");
const mongoose_1 = require("mongoose");
const mongoose_2 = require("@nestjs/mongoose");
const teachers_service_1 = require("./teachers.service");
// NestJS service for live-class documents, layered over the generic DBService.
let LiveClassService = (() => {
    let LiveClassService = class LiveClassService extends db_service_1.DBService {
        constructor(model, teacherService) {
            super(model);
            this.teacherService = teacherService;
        }
        // All live classes, newest first, with related documents populated.
        findAll() {
            return super.findAll().populate('posterDocumentId').populate('teacher').populate('user').sort('-_id');
        }
        // Creates a live class owned by the teacher linked to the given user.
        async createLiveClass(body, userId) {
            const teacher = await this.teacherService.findOne({ userId: userId });
            const teacherId = teacher._id;
            return this.create(Object.assign(Object.assign({}, body), { teacher: teacherId, user: userId }));
        }
        // NOTE(review): errors are logged and swallowed here, so callers get
        // undefined on failure — confirm against the TypeScript source.
        async createLiveClassByAdmin(body) {
            try {
                const response = await this.create(Object.assign(Object.assign({}, body), { hasAcceptedRequest: true, isCreatedByAdmin: true }));
            }
            catch (error) {
                console.log(error);
            }
        }
        // Live classes for the teacher linked to the given user id, newest first.
        async getLiveClassByTeacherId(userId) {
            const teacher = await this.teacherService.findOne({ userId: userId });
            const teacherId = teacher._id;
            const liveClassesList = await this.find({
                teacher: teacherId,
            }).sort('-_id');
            return liveClassesList;
        }
        async getAllLiveClasses() {
            const liveClassesList = await this.findAll().populate('posterDocumentId').populate('teacher').populate('user').sort('-_id');
            console.log(liveClassesList.length);
            return liveClassesList;
        }
        async getLiveClassCreatedByTeacher() {
            const liveClassesList = await this.find({ isCreatedByAdmin: false }).populate('posterDocumentId').populate('teacher').populate('user').sort('-_id');
            return liveClassesList;
        }
        async getLiveClassCreatedByAdmin() {
            const liveClassesList = await this.find({ isCreatedByAdmin: true }).populate('posterDocumentId').populate('teacher').populate('user').sort('-_id');
            return liveClassesList;
        }
        async getLiveDemoClasses() {
            const liveDemoClassesList = await this.find({ isDemoClass: true }).populate('posterDocumentId').populate('teacher').populate('user').sort('-_id');
            return liveDemoClassesList;
        }
        // Removes the live class document; always resolves to true when found.
        async deleteLiveClassById(liveClassId) {
            const liveClassModel = await this.findById(liveClassId);
            liveClassModel.remove();
            return true;
        }
    };
    LiveClassService = __decorate([
        common_1.Injectable(),
        __param(0, mongoose_2.InjectModel('LiveClass')),
        __metadata("design:paramtypes", [mongoose_1.Model,
            teachers_service_1.TeachersService])
    ], LiveClassService);
    return LiveClassService;
})();
exports.LiveClassService = LiveClassService;
//# sourceMappingURL=liveClass.service.js.map
BearerPipelineTest/fae | spec/services/netlify_api_spec.rb | require 'rails_helper'
# Model-level spec for Fae::NetlifyApi.
describe Fae::NetlifyApi, type: :model do
  describe '#get_deploys' do
    it 'should return deploys' do
      deploys = described_class.new.get_deploys
      expect(deploys).not_to be_nil
    end
  end
end
|
AlgoLab/BEETL | src/frontends/BeetlSearch.cpp | /**
** Copyright (c) 2011 Illumina, Inc.
**
**
** This software is covered by the "Illumina Non-Commercial Use Software
** and Source Code License Agreement" and any user of this software or
** source file is bound by the terms therein (see accompanying file
** Illumina_Non-Commercial_Use_Software_and_Source_Code_License_Agreement.pdf)
**
** This file is part of the BEETL software package.
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "BeetlSearch.hh"
#include "config.h"
#include "search/SearchUsingBacktracker.hh"
#include "parameters/SearchParameters.hh"
SearchParameters params;
// Prints the standard parameter usage text followed by notes specific to
// beetl-search's mutually exclusive k-mer input options.
void printUsage()
{
    params.printUsage();
    cout << "Notes:" << endl;
    cout << " -j and -k are mutually exclusive, one of them being required.\n" << endl;
    cout << endl;
}
// Runs the backtracking-based search using the globally parsed parameters.
void launchBeetlSearch()
{
    SearchUsingBacktracker search( params );
    search.run();
}
// Entry point: prints the banner and the invoking command line, validates
// arguments, and launches the search. Exits 0 when help was requested,
// 1 on argument errors.
int main( const int argc, const char **argv )
{
    // Generated using: http://patorjk.com/software/taag/#p=display&f=Soft&t=BEETL%20search
    clog << ",-----. ,------.,------.,--------.,--. ,--. " << endl;
    clog << "| |) /_ | .---'| .---''--. .--'| | ,---. ,---. ,--,--.,--.--. ,---.| ,---. " << endl;
    clog << "| .-. \\| `--, | `--, | | | | ( .-' | .-. :' ,-. || .--'| .--'| .-. | " << endl;
    clog << "| '--' /| `---.| `---. | | | '--. .-' `)\\ --.\\ '-' || | \\ `--.| | | | " << endl;
    clog << "`------' `------'`------' `--' `-----' `----' `----' `--`--'`--' `---'`--' `--' " << endl;
    clog << "Version " << PACKAGE_VERSION << endl;
    clog << endl;

    // Echo the exact invocation for reproducibility in logs.
    clog << "Command called:" << endl << " ";
    for ( int i = 0; i < argc; ++i )
    {
        clog << " " << argv[i];
    }
    clog << "\n" << endl;

    if ( !params.parseArgv( argc, argv ) || params["help"] == 1 || !params.chechRequiredParameters() )
    {
        printUsage();
        // Exit 0 if help was explicitly requested, 1 otherwise (error).
        exit( params["help"] == 0 );
    }

    // Use default parameter values where needed
    params.commitDefaultValues();

    // Checking for required parameters: exactly one of -j / -k must be set.
    if ( ! ( params["kmers input file"].isSet() ^ params["one kmer string"].isSet() ) )
    {
        cerr << "Error: Missing or incorrect arguments: -i is required; -j and -k are mutually exclusive, one of them being required\n" << endl;
        printUsage();
        exit( 1 );
    }

    // Launch
    launchBeetlSearch();

    return 0;
}
|
hjuinj/RDKit_mETKDG | modified_rdkit/Code/GraphMol/MolTransforms/MolTransforms.cpp | <filename>modified_rdkit/Code/GraphMol/MolTransforms/MolTransforms.cpp
//
// Copyright (C) 2003-2016 <NAME> and Rational Discovery LLC
//
// @@ All Rights Reserved @@
// This file is part of the RDKit.
// The contents are covered by the terms of the BSD license
// which is included in the file license.txt, found at the root
// of the RDKit source tree.
//
#include "MolTransforms.h"
#include <GraphMol/RDKitBase.h>
#include <GraphMol/QueryOps.h>
#include <Numerics/EigenSolvers/PowerEigenSolver.h>
#include <Numerics/SymmMatrix.h>
#include <Numerics/Matrix.h>
#include <Geometry/Transform3D.h>
#include <stack>
#include <boost/dynamic_bitset.hpp>
#include <RDGeneral/Exceptions.h>
#define EIGEN_TOLERANCE 5.0e-2
namespace MolTransforms {
using namespace RDKit;
// Applies the given 3D transform to this atom's position in every
// conformer of its owning molecule (positions live on conformers, not on
// the atom itself).
void transformAtom(Atom *atom, RDGeom::Transform3D &tform) {
  PRECONDITION(atom, "no atom");
  ROMol &mol = atom->getOwningMol();
  for (ROMol::ConstConformerIterator ci = mol.beginConformers();
       ci != mol.endConformers(); ci++) {
    RDGeom::Point3D &pos = (*ci)->getAtomPos(atom->getIdx());
    tform.TransformPoint(pos);
  }
  // atom->setPos(pos);
}
// Applies `tform` to every atom of the molecule (and therefore to every
// conformer's coordinates) by delegating to transformAtom().
void transformMolsAtoms(ROMol *mol, RDGeom::Transform3D &tform) {
  PRECONDITION(mol, "no molecule");
  for (auto atIt = mol->beginAtoms(); atIt != mol->endAtoms(); ++atIt) {
    transformAtom(*atIt, tform);
  }
}
// Computes the (unweighted) centroid of the conformer's atomic positions.
// Hydrogens (atomic number 1) are skipped when `ignoreHs` is set.
// Returns the origin (0,0,0) when no atoms contribute — previously this
// divided by zero (e.g. for an all-hydrogen conformer with ignoreHs=true).
RDGeom::Point3D computeCentroid(const Conformer &conf, bool ignoreHs) {
  RDGeom::Point3D res(0.0, 0.0, 0.0);
  const ROMol &mol = conf.getOwningMol();
  unsigned int nAtms = 0;
  for (ROMol::ConstAtomIterator cai = mol.beginAtoms(); cai != mol.endAtoms();
       ++cai) {
    if (ignoreHs && (*cai)->getAtomicNum() == 1) {
      continue;
    }
    res += conf.getAtomPos((*cai)->getIdx());
    ++nAtms;
  }
  // Guard against division by zero when nothing contributed.
  if (nAtms) {
    res /= nAtms;
  }
  return res;
}
namespace {
// Accumulates the six independent second-moment (covariance) terms of the
// conformer's atom positions about `center`:
//   xx = sum w*x*x, xy = sum w*x*y, ..., zz = sum w*z*z.
// `weights` (if non-null) supplies a per-atom weight indexed by atom idx;
// otherwise every atom weighs 1. Hydrogens are skipped when `ignoreHs` is
// set. When `normalize` is true each term is divided by the total weight.
// NOTE(review): when normalize is true and no atoms contribute (wSum == 0)
// the divisions below produce NaN/inf — callers presumably pass non-empty,
// positively-weighted selections; confirm before relying on this.
void computeCovarianceTerms(const Conformer &conf,
const RDGeom::Point3D &center, double &xx,
double &xy, double &xz, double &yy, double &yz,
double &zz, bool normalize, bool ignoreHs,
const std::vector<double> *weights) {
PRECONDITION(!weights || weights->size() >= conf.getNumAtoms(),
"bad weights vector");
xx = xy = xz = yy = yz = zz = 0.0;
const ROMol &mol = conf.getOwningMol();
double wSum = 0.0;
for (ROMol::ConstAtomIterator cai = mol.beginAtoms(); cai != mol.endAtoms();
cai++) {
// skip hydrogens (atomic number 1) when requested
if (((*cai)->getAtomicNum() == 1) && (ignoreHs)) {
continue;
}
// position relative to the requested center
RDGeom::Point3D loc = conf.getAtomPos((*cai)->getIdx());
loc -= center;
double w = 1.0;
if (weights) {
w = (*weights)[(*cai)->getIdx()];
}
wSum += w;
xx += w * loc.x * loc.x;
xy += w * loc.x * loc.y;
xz += w * loc.x * loc.z;
yy += w * loc.y * loc.y;
yz += w * loc.y * loc.z;
zz += w * loc.z * loc.z;
}
if (normalize) {
// convert raw sums into weighted averages
xx /= wSum;
xy /= wSum;
xz /= wSum;
yy /= wSum;
yz /= wSum;
zz /= wSum;
}
}
// Builds the 3x3 symmetric covariance matrix of the conformer's positions
// about `center` (unweighted; hydrogens optionally skipped). Only the upper
// triangle is set, which is sufficient for a DoubleSymmMatrix.
// The caller owns the returned matrix and must delete it.
RDNumeric::DoubleSymmMatrix *computeCovarianceMatrix(
const Conformer &conf, const RDGeom::Point3D &center, bool normalize,
bool ignoreHs) {
double xx, xy, xz, yy, yz, zz;
computeCovarianceTerms(conf, center, xx, xy, xz, yy, yz, zz, normalize,
ignoreHs, nullptr);
auto *res = new RDNumeric::DoubleSymmMatrix(3, 3);
res->setVal(0, 0, xx);
res->setVal(0, 1, xy);
res->setVal(0, 2, xz);
res->setVal(1, 1, yy);
res->setVal(1, 2, yz);
res->setVal(2, 2, zz);
return res;
}
// Accumulates the terms of the (optionally weighted) inertia tensor about
// `center`: diagonal terms are sums of squared distances in the two other
// axes; off-diagonal terms carry the conventional negative sign. Unlike
// computeCovarianceTerms there is no normalization option.
void computeInertiaTerms(const Conformer &conf, const RDGeom::Point3D &center,
double &xx, double &xy, double &xz, double &yy,
double &yz, double &zz, bool ignoreHs,
const std::vector<double> *weights) {
PRECONDITION(!weights || weights->size() >= conf.getNumAtoms(),
"bad weights vector");
xx = xy = xz = yy = yz = zz = 0.0;
const ROMol &mol = conf.getOwningMol();
for (ROMol::ConstAtomIterator cai = mol.beginAtoms(); cai != mol.endAtoms();
cai++) {
// skip hydrogens (atomic number 1) when requested
if (((*cai)->getAtomicNum() == 1) && (ignoreHs)) {
continue;
}
RDGeom::Point3D loc = conf.getAtomPos((*cai)->getIdx());
loc -= center;
double w = 1.0;
if (weights) {
w = (*weights)[(*cai)->getIdx()];
}
// inertia tensor: I_xx = sum w*(y^2+z^2), I_xy = -sum w*x*y, etc.
xx += w * (loc.y * loc.y + loc.z * loc.z);
yy += w * (loc.x * loc.x + loc.z * loc.z);
zz += w * (loc.y * loc.y + loc.x * loc.x);
xy -= w * loc.x * loc.y;
xz -= w * loc.x * loc.z;
yz -= w * loc.z * loc.y;
}
}
}
#ifdef RDK_HAS_EIGEN3
#include <Eigen/Dense>
// Computes the principal axes (eigenvectors) and principal moments
// (eigenvalues) of the conformer's inertia tensor, taken about the weighted
// centroid. Results are cached as molecule properties (names depend on
// ignoreHs); `force` bypasses the cache and weighted results are never
// cached. Returns false only if the eigen decomposition fails to converge.
bool computePrincipalAxesAndMoments(const RDKit::Conformer &conf,
Eigen::Matrix3d &axes,
Eigen::Vector3d &moments, bool ignoreHs,
bool force,
const std::vector<double> *weights) {
PRECONDITION((!weights || weights->size() >= conf.getNumAtoms()),
"bad weights vector");
const char *axesPropName = ignoreHs ? "_principalAxes_noH" : "_principalAxes";
const char *momentsPropName =
ignoreHs ? "_principalMoments_noH" : "_principalMoments";
// serve from the cache when possible (unweighted, not forced, both present)
if (!weights && !force && conf.getOwningMol().hasProp(axesPropName) &&
conf.getOwningMol().hasProp(momentsPropName)) {
conf.getOwningMol().getProp(axesPropName, axes);
conf.getOwningMol().getProp(momentsPropName, moments);
return true;
}
const ROMol &mol = conf.getOwningMol();
// weighted centroid of the contributing atoms
RDGeom::Point3D origin(0, 0, 0);
double wSum = 0.0;
for (unsigned int i = 0; i < conf.getNumAtoms(); ++i) {
if (ignoreHs && mol.getAtomWithIdx(i)->getAtomicNum() == 1) continue;
double w = 1.0;
if (weights) {
w = (*weights)[i];
}
wSum += w;
origin += conf.getAtomPos(i) * w;
}
// std::cerr<<" origin: "<<origin<<" "<<wSum<<std::endl;
origin /= wSum;
// assemble the symmetric 3x3 inertia tensor and diagonalize it
double sumXX, sumXY, sumXZ, sumYY, sumYZ, sumZZ;
computeInertiaTerms(conf, origin, sumXX, sumXY, sumXZ, sumYY, sumYZ, sumZZ,
ignoreHs, weights);
Eigen::Matrix3d mat;
mat << sumXX, sumXY, sumXZ, sumXY, sumYY, sumYZ, sumXZ, sumYZ, sumZZ;
// std::cerr<<" matrix: "<<mat<<std::endl;
Eigen::SelfAdjointEigenSolver<Eigen::Matrix3d> eigensolver(mat);
if (eigensolver.info() != Eigen::Success) {
BOOST_LOG(rdErrorLog) << "eigenvalue calculation did not converge"
<< std::endl;
return false;
}
axes = eigensolver.eigenvectors();
moments = eigensolver.eigenvalues();
// only cache the plain (unweighted) computation
if (!weights) {
conf.getOwningMol().setProp(axesPropName, axes, true);
conf.getOwningMol().setProp(momentsPropName, moments, true);
}
return true;
}
// Same contract as computePrincipalAxesAndMoments(), but diagonalizes the
// normalized covariance (gyration) matrix instead of the inertia tensor.
// Cached under distinct "_cov"-suffixed property names; weighted results
// are never cached. Returns false only on eigen-solver non-convergence.
bool computePrincipalAxesAndMomentsFromGyrationMatrix(
const RDKit::Conformer &conf, Eigen::Matrix3d &axes,
Eigen::Vector3d &moments, bool ignoreHs, bool force,
const std::vector<double> *weights) {
PRECONDITION((!weights || weights->size() >= conf.getNumAtoms()),
"bad weights vector");
const char *axesPropName =
ignoreHs ? "_principalAxes_noH_cov" : "_principalAxes_cov";
const char *momentsPropName =
ignoreHs ? "_principalMoments_noH_cov" : "_principalMoments_cov";
// serve from the cache when possible (unweighted, not forced, both present)
if (!weights && !force && conf.getOwningMol().hasProp(axesPropName) &&
conf.getOwningMol().hasProp(momentsPropName)) {
conf.getOwningMol().getProp(axesPropName, axes);
conf.getOwningMol().getProp(momentsPropName, moments);
return true;
}
const ROMol &mol = conf.getOwningMol();
// weighted centroid of the contributing atoms
RDGeom::Point3D origin(0, 0, 0);
double wSum = 0.0;
for (unsigned int i = 0; i < conf.getNumAtoms(); ++i) {
if (ignoreHs && mol.getAtomWithIdx(i)->getAtomicNum() == 1) continue;
double w = 1.0;
if (weights) {
w = (*weights)[i];
}
wSum += w;
origin += conf.getAtomPos(i) * w;
}
// std::cerr<<" origin: "<<origin<<" "<<wSum<<std::endl;
origin /= wSum;
// normalized covariance terms (normalize=true) about the centroid
double sumXX, sumXY, sumXZ, sumYY, sumYZ, sumZZ;
computeCovarianceTerms(conf, origin, sumXX, sumXY, sumXZ, sumYY, sumYZ, sumZZ,
true, ignoreHs, weights);
Eigen::Matrix3d mat;
mat << sumXX, sumXY, sumXZ, sumXY, sumYY, sumYZ, sumXZ, sumYZ, sumZZ;
// std::cerr<<" matrix: "<<mat<<std::endl;
Eigen::SelfAdjointEigenSolver<Eigen::Matrix3d> eigensolver(mat);
if (eigensolver.info() != Eigen::Success) {
BOOST_LOG(rdErrorLog) << "eigenvalue calculation did not converge"
<< std::endl;
return false;
}
axes = eigensolver.eigenvectors();
moments = eigensolver.eigenvalues();
// only cache the plain (unweighted) computation
if (!weights) {
conf.getOwningMol().setProp(axesPropName, axes, true);
conf.getOwningMol().setProp(momentsPropName, moments, true);
}
return true;
}
#endif
// Builds the transform that canonicalizes a conformer: translates `center`
// (or the centroid if center is null) to the origin and rotates so the
// covariance eigenvectors align with the coordinate axes. Degenerate
// (near-zero) eigenvalues — planar or linear systems — are repaired by
// synthesizing perpendicular axes. Caller owns the returned transform.
RDGeom::Transform3D *computeCanonicalTransform(const Conformer &conf,
const RDGeom::Point3D *center,
bool normalizeCovar,
bool ignoreHs) {
RDGeom::Point3D origin;
if (!center) {
origin = computeCentroid(conf, ignoreHs);
} else {
origin = (*center);
}
RDNumeric::DoubleSymmMatrix *covMat =
computeCovarianceMatrix(conf, origin, normalizeCovar, ignoreHs);
// find the eigen values and eigen vectors for the covMat
RDNumeric::DoubleMatrix eigVecs(3, 3);
RDNumeric::DoubleVector eigVals(3);
// if we have a single atom system we don't need to do anyhting other than
// setting translation
// translation
unsigned int nAtms = conf.getNumAtoms();
auto *trans = new RDGeom::Transform3D;
// set the translation
origin *= -1.0;
// trans->SetTranslation(origin);
// if we have a single atom system we don't need to do anyhting setting
// translation is sufficient
if (nAtms > 1) {
RDNumeric::EigenSolvers::powerEigenSolver(3, *covMat, eigVals, eigVecs,
conf.getNumAtoms());
// deal with zero eigen value systems: count the non-degenerate dimensions
unsigned int i, j, dim = 3;
for (i = 0; i < 3; ++i) {
// std::cerr<<" ev: "<<i<<": "<<eigVals.getVal(i)<<std::endl;
if (fabs(eigVals.getVal(i)) < EIGEN_TOLERANCE) {
dim--;
}
}
CHECK_INVARIANT(dim >= 1, "");
if (dim < 3) {
RDGeom::Point3D first(eigVecs.getVal(0, 0), eigVecs.getVal(0, 1),
eigVecs.getVal(0, 2));
if (dim == 1) {
// pick an arbitrary eigen vector perpendicular to the first vector
RDGeom::Point3D second(first.getPerpendicular());
eigVecs.setVal(1, 0, second.x);
eigVecs.setVal(1, 1, second.y);
eigVecs.setVal(1, 2, second.z);
// synthesize a plausible second eigenvalue below the first
if (eigVals.getVal(0) > 1.0) {
eigVals.setVal(1, 1.0);
} else {
eigVals.setVal(1, eigVals.getVal(0) / 2.0);
}
}
RDGeom::Point3D second(eigVecs.getVal(1, 0), eigVecs.getVal(1, 1),
eigVecs.getVal(1, 2));
// pick the third eigen vector perpendicular to the first two
RDGeom::Point3D third = first.crossProduct(second);
eigVecs.setVal(2, 0, third.x);
eigVecs.setVal(2, 1, third.y);
eigVecs.setVal(2, 2, third.z);
// synthesize a plausible third eigenvalue below the second
if (eigVals.getVal(1) > 1.0) {
eigVals.setVal(2, 1.0);
} else {
eigVals.setVal(2, eigVals.getVal(1) / 2.0);
}
}
// now set the transformation: rotation rows are the eigenvectors
for (i = 0; i < 3; ++i) {
for (j = 0; j < 3; ++j) {
trans->setVal(i, j, eigVecs.getVal(i, j));
}
}
} // end of multiple atom system
// rotate the (negated) center before storing it as the translation so the
// final transform maps the center onto the origin
trans->TransformPoint(origin);
trans->SetTranslation(origin);
delete covMat;
return trans;
}
// Applies `trans` in place to every atomic position of the conformer.
void transformConformer(Conformer &conf, const RDGeom::Transform3D &trans) {
  RDGeom::POINT3D_VECT &positions = conf.getPositions();
  for (auto &pt : positions) {
    trans.TransformPoint(pt);
  }
}
// Canonicalizes a single conformer: computes the canonical transform (about
// `center`, or the centroid when null) and applies it in place.
void canonicalizeConformer(Conformer &conf, const RDGeom::Point3D *center,
                           bool normalizeCovar, bool ignoreHs) {
  RDGeom::Transform3D *tform =
      computeCanonicalTransform(conf, center, normalizeCovar, ignoreHs);
  transformConformer(conf, *tform);
  delete tform;
}
// Canonicalizes every conformer of the molecule independently, each about
// its own centroid.
void canonicalizeMol(RDKit::ROMol &mol, bool normalizeCovar, bool ignoreHs) {
  for (auto ci = mol.beginConformers(); ci != mol.endConformers(); ++ci) {
    canonicalizeConformer(*(*ci), nullptr, normalizeCovar, ignoreHs);
  }
}
namespace {
// Collects into `alist` the indices of all atoms reachable from jAtomId
// without crossing iAtomId — i.e. everything on the j side of the (i,j)
// bond, including jAtomId itself but never iAtomId. Uses an iterative
// depth-first traversal with an explicit stack so deep molecules cannot
// overflow the call stack.
void _toBeMovedIdxList(const ROMol &mol, unsigned int iAtomId,
unsigned int jAtomId, std::list<unsigned int> &alist) {
unsigned int nAtoms = mol.getNumAtoms();
boost::dynamic_bitset<> visitedIdx(nAtoms);
std::stack<unsigned int> stack;
stack.push(jAtomId);
// marking iAtomId visited blocks the traversal from crossing the bond
visitedIdx[iAtomId] = 1;
visitedIdx[jAtomId] = 1;
unsigned int tIdx;
unsigned int wIdx;
ROMol::ADJ_ITER nbrIdx;
ROMol::ADJ_ITER endNbrs;
bool doMainLoop;
while (stack.size()) {
doMainLoop = false;
tIdx = stack.top();
const Atom *tAtom = mol.getAtomWithIdx(tIdx);
boost::tie(nbrIdx, endNbrs) = mol.getAtomNeighbors(tAtom);
unsigned int eIdx;
// descend into the first unvisited neighbor, if any
for (eIdx = 0; nbrIdx != endNbrs; ++nbrIdx, ++eIdx) {
wIdx = (mol[*nbrIdx])->getIdx();
if (!visitedIdx[wIdx]) {
visitedIdx[wIdx] = 1;
stack.push(wIdx);
doMainLoop = true;
break;
}
}
if (doMainLoop) {
continue;
}
// all neighbors handled: backtrack
visitedIdx[tIdx] = 1;
stack.pop();
}
// emit every visited atom except iAtomId itself
alist.clear();
for (unsigned int i = 0; i < nAtoms; ++i) {
if (visitedIdx[i] && (i != iAtomId)) {
alist.push_back(i);
}
}
}
}
double getBondLength(const Conformer &conf, unsigned int iAtomId,
unsigned int jAtomId) {
const RDGeom::POINT3D_VECT &pos = conf.getPositions();
URANGE_CHECK(iAtomId, pos.size());
URANGE_CHECK(jAtomId, pos.size());
return (pos[iAtomId] - pos[jAtomId]).length();
}
// Sets the length of the acyclic bond (i,j) to `value` by translating every
// atom on the j side of the bond along the bond direction. Throws
// ValueErrorException if the atoms are not bonded, the bond is in a ring,
// or the atoms are coincident.
void setBondLength(Conformer &conf, unsigned int iAtomId, unsigned int jAtomId,
double value) {
RDGeom::POINT3D_VECT &pos = conf.getPositions();
URANGE_CHECK(iAtomId, pos.size());
URANGE_CHECK(jAtomId, pos.size());
ROMol &mol = conf.getOwningMol();
Bond *bond = mol.getBondBetweenAtoms(iAtomId, jAtomId);
if (!bond) throw ValueErrorException("atoms i and j must be bonded");
if (queryIsBondInRing(bond))
throw ValueErrorException("bond (i,j) must not belong to a ring");
RDGeom::Point3D v = pos[iAtomId] - pos[jAtomId];
double origValue = v.length();
if (origValue <= 1.e-8)
throw ValueErrorException("atoms i and j have identical 3D coordinates");
// collect the atoms on the j side of the (i,j) bond; they translate together
std::list<unsigned int> alist;
_toBeMovedIdxList(mol, iAtomId, jAtomId, alist);
// scale the bond vector by the fractional change in length and shift the
// j-side fragment by that amount
v *= (value / origValue - 1.);
for (unsigned int &it : alist) {
pos[it] -= v;
}
}
double getAngleRad(const Conformer &conf, unsigned int iAtomId,
unsigned int jAtomId, unsigned int kAtomId) {
const RDGeom::POINT3D_VECT &pos = conf.getPositions();
URANGE_CHECK(iAtomId, pos.size());
URANGE_CHECK(jAtomId, pos.size());
URANGE_CHECK(kAtomId, pos.size());
RDGeom::Point3D rJI = pos[iAtomId] - pos[jAtomId];
double rJISqLength = rJI.lengthSq();
if (rJISqLength <= 1.e-16)
throw ValueErrorException("atoms i and j have identical 3D coordinates");
RDGeom::Point3D rJK = pos[kAtomId] - pos[jAtomId];
double rJKSqLength = rJK.lengthSq();
if (rJKSqLength <= 1.e-16)
throw ValueErrorException("atoms j and k have identical 3D coordinates");
return rJI.angleTo(rJK);
}
// Sets the i-j-k angle to `value` radians by rotating the k-side fragment
// about the normal of the i,j,k plane through atom j. Throws
// ValueErrorException for missing bonds, a fully in-ring angle, or
// coincident atoms.
void setAngleRad(Conformer &conf, unsigned int iAtomId, unsigned int jAtomId,
                 unsigned int kAtomId, double value) {
  RDGeom::POINT3D_VECT &pos = conf.getPositions();
  URANGE_CHECK(iAtomId, pos.size());
  URANGE_CHECK(jAtomId, pos.size());
  URANGE_CHECK(kAtomId, pos.size());
  ROMol &mol = conf.getOwningMol();
  Bond *bondJI = mol.getBondBetweenAtoms(jAtomId, iAtomId);
  if (!bondJI) throw ValueErrorException("atoms i and j must be bonded");
  Bond *bondJK = mol.getBondBetweenAtoms(jAtomId, kAtomId);
  if (!bondJK) throw ValueErrorException("atoms j and k must be bonded");
  if (queryIsBondInRing(bondJI) && queryIsBondInRing(bondJK))
    throw ValueErrorException(
        "bonds (i,j) and (j,k) must not both belong to a ring");
  RDGeom::Point3D rJI = pos[iAtomId] - pos[jAtomId];
  double rJISqLength = rJI.lengthSq();
  if (rJISqLength <= 1.e-16)
    throw ValueErrorException("atoms i and j have identical 3D coordinates");
  RDGeom::Point3D rJK = pos[kAtomId] - pos[jAtomId];
  double rJKSqLength = rJK.lengthSq();
  if (rJKSqLength <= 1.e-16)
    throw ValueErrorException("atoms j and k have identical 3D coordinates");
  // we only need to rotate by delta with respect to the current angle value
  value -= rJI.angleTo(rJK);
  RDGeom::Point3D &rotAxisBegin = pos[jAtomId];
  // our rotation axis is the normal to the plane of atoms i, j, k
  RDGeom::Point3D rotAxisEnd = rJI.crossProduct(rJK) + pos[jAtomId];
  RDGeom::Point3D rotAxis = rotAxisEnd - rotAxisBegin;
  rotAxis.normalize();
  // the rotation is identical for every moved atom, so build the transform
  // once instead of reconstructing it on every loop iteration
  RDGeom::Transform3D rotByAngle;
  rotByAngle.SetRotation(value, rotAxis);
  // collect the atoms on the k side of the (j,k) bond and rotate them
  std::list<unsigned int> alist;
  _toBeMovedIdxList(mol, jAtomId, kAtomId, alist);
  for (unsigned int &it : alist) {
    // translate atom so that it coincides with the origin of rotation
    pos[it] -= rotAxisBegin;
    // rotate around our rotation axis
    rotByAngle.TransformPoint(pos[it]);
    // translate atom back
    pos[it] += rotAxisBegin;
  }
}
// Returns the signed dihedral (torsion) angle i-j-k-l in radians, computed
// from the normals of the i,j,k and j,k,l planes. atan2 is used (rather
// than acos of the normals' dot product) so the sign of the torsion is
// preserved. Throws ValueErrorException if consecutive atoms coincide.
double getDihedralRad(const Conformer &conf, unsigned int iAtomId,
unsigned int jAtomId, unsigned int kAtomId,
unsigned int lAtomId) {
const RDGeom::POINT3D_VECT &pos = conf.getPositions();
URANGE_CHECK(iAtomId, pos.size());
URANGE_CHECK(jAtomId, pos.size());
URANGE_CHECK(kAtomId, pos.size());
URANGE_CHECK(lAtomId, pos.size());
RDGeom::Point3D rIJ = pos[jAtomId] - pos[iAtomId];
double rIJSqLength = rIJ.lengthSq();
if (rIJSqLength <= 1.e-16)
throw ValueErrorException("atoms i and j have identical 3D coordinates");
RDGeom::Point3D rJK = pos[kAtomId] - pos[jAtomId];
double rJKSqLength = rJK.lengthSq();
if (rJKSqLength <= 1.e-16)
throw ValueErrorException("atoms j and k have identical 3D coordinates");
RDGeom::Point3D rKL = pos[lAtomId] - pos[kAtomId];
double rKLSqLength = rKL.lengthSq();
if (rKLSqLength <= 1.e-16)
throw ValueErrorException("atoms k and l have identical 3D coordinates");
// normals of the two planes that share the central (j,k) bond
RDGeom::Point3D nIJK = rIJ.crossProduct(rJK);
double nIJKSqLength = nIJK.lengthSq();
RDGeom::Point3D nJKL = rJK.crossProduct(rKL);
double nJKLSqLength = nJKL.lengthSq();
RDGeom::Point3D m = nIJK.crossProduct(rJK);
// we want a signed dihedral, that's why we use atan2 instead of acos
return -atan2(m.dotProduct(nJKL) / sqrt(nJKLSqLength * m.lengthSq()),
nIJK.dotProduct(nJKL) / sqrt(nIJKSqLength * nJKLSqLength));
}
// Sets the i-j-k-l dihedral to `value` radians by rotating the fragment on
// the k side of the acyclic central bond (j,k) about that bond. Throws
// ValueErrorException if j and k are not bonded, the bond is in a ring, or
// consecutive atoms coincide.
void setDihedralRad(Conformer &conf, unsigned int iAtomId, unsigned int jAtomId,
                    unsigned int kAtomId, unsigned int lAtomId, double value) {
  RDGeom::POINT3D_VECT &pos = conf.getPositions();
  URANGE_CHECK(iAtomId, pos.size());
  URANGE_CHECK(jAtomId, pos.size());
  URANGE_CHECK(kAtomId, pos.size());
  URANGE_CHECK(lAtomId, pos.size());
  ROMol &mol = conf.getOwningMol();
  Bond *bondJK = mol.getBondBetweenAtoms(jAtomId, kAtomId);
  if (!bondJK) throw ValueErrorException("atoms j and k must be bonded");
  if (queryIsBondInRing(bondJK))
    throw ValueErrorException("bond (j,k) must not belong to a ring");
  RDGeom::Point3D rIJ = pos[jAtomId] - pos[iAtomId];
  double rIJSqLength = rIJ.lengthSq();
  if (rIJSqLength <= 1.e-16)
    throw ValueErrorException("atoms i and j have identical 3D coordinates");
  RDGeom::Point3D rJK = pos[kAtomId] - pos[jAtomId];
  double rJKSqLength = rJK.lengthSq();
  if (rJKSqLength <= 1.e-16)
    throw ValueErrorException("atoms j and k have identical 3D coordinates");
  RDGeom::Point3D rKL = pos[lAtomId] - pos[kAtomId];
  double rKLSqLength = rKL.lengthSq();
  if (rKLSqLength <= 1.e-16)
    throw ValueErrorException("atoms k and l have identical 3D coordinates");
  // normals of the two planes sharing the central bond; same construction
  // as getDihedralRad
  RDGeom::Point3D nIJK = rIJ.crossProduct(rJK);
  double nIJKSqLength = nIJK.lengthSq();
  RDGeom::Point3D nJKL = rJK.crossProduct(rKL);
  double nJKLSqLength = nJKL.lengthSq();
  RDGeom::Point3D m = nIJK.crossProduct(rJK);
  // we only need to rotate by delta with respect to the current dihedral value
  value -= -atan2(m.dotProduct(nJKL) / sqrt(nJKLSqLength * m.lengthSq()),
                  nIJK.dotProduct(nJKL) / sqrt(nIJKSqLength * nJKLSqLength));
  // our rotation axis is the (j,k) bond
  RDGeom::Point3D &rotAxisBegin = pos[jAtomId];
  RDGeom::Point3D &rotAxisEnd = pos[kAtomId];
  RDGeom::Point3D rotAxis = rotAxisEnd - rotAxisBegin;
  rotAxis.normalize();
  // the rotation is identical for every moved atom, so build the transform
  // once instead of reconstructing it on every loop iteration
  RDGeom::Transform3D rotByAngle;
  rotByAngle.SetRotation(value, rotAxis);
  // collect the atoms on the k side of the (j,k) bond and rotate them
  std::list<unsigned int> alist;
  _toBeMovedIdxList(mol, jAtomId, kAtomId, alist);
  for (unsigned int &it : alist) {
    // translate atom so that it coincides with the origin of rotation
    pos[it] -= rotAxisBegin;
    // rotate around our rotation axis
    rotByAngle.TransformPoint(pos[it]);
    // translate atom back
    pos[it] += rotAxisBegin;
  }
}
}
|
tetrafolium/luci-go | common/retry/defaults.go | // Copyright 2015 The LUCI Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package retry
import (
"context"
"time"
)
// defaultIteratorTemplate defines the default retry parameters that should
// be used throughout the program: up to 10 retries with exponential backoff
// starting at 200ms, doubling each attempt, capped at 10s.
var defaultIteratorTemplate = ExponentialBackoff{
	Limited: Limited{
		Delay:   200 * time.Millisecond,
		Retries: 10,
	},
	MaxDelay:   10 * time.Second,
	Multiplier: 2,
}
// Default is a Factory that returns a new instance of the default iterator
// configuration. A fresh copy of the template is returned on every call so
// callers cannot mutate the shared defaults.
func Default() Iterator {
	instance := defaultIteratorTemplate
	return &instance
}
// noneItTemplate is a stateless Iterator that never permits a retry.
type noneItTemplate struct{}

// Next always returns Stop, ending the retry loop after the first attempt.
func (noneItTemplate) Next(context.Context, error) time.Duration { return Stop }

// None is a Factory that returns an Iterator that explicitly calls Stop after
// the first try. This is helpful to pass to libraries which use retry.Default
// if given nil, but where you don't want any retries at all (e.g. tests).
func None() Iterator {
	return noneItTemplate{}
}
|
jbruggem/jingles-impro | src/workspace/track.cpp | <filename>src/workspace/track.cpp
#include "track.h"
//xxx do we want the copy constructor to use a new fileRef or just copy the other object's pointer?
// Copy constructor. Now also copies showFilename (previously left
// uninitialized, making shouldShowFilename() undefined on copies) and the
// cached tag data (title/artist), so a copy displays identically to the
// original.
Track::Track(const Track& track):
    QObject(track.parent()),
    fileInfo(track.fileInfo),
    loopEnabled(track.loopEnabled),
    startTime(track.startTime),
    endTime(track.endTime),
    fadeInDuration(track.fadeInDuration),
    fadeOutDuration(track.fadeOutDuration),
    showFilename(track.showFilename) //,
    //fileRef(track.fileRef)
{
    // copy the tag metadata as well; no signals are emitted during construction
    title = track.title;
    artist = track.artist;
    QLOG_TRACE() << "Building Track from other track";
}
// Constructs a fully-specified track.
// NOTE(review): the units of startTime/endTime and the fade durations are
// not visible here — presumably milliseconds; confirm with the player code.
// The TagLib-based metadata loading is currently disabled (commented out),
// so title/artist start empty until set via setTitle()/setArtist().
Track::Track(const QString &url, bool loop, long startTime, long endTime, int fadeInDuration, int fadeOutDuration,bool showFilename,QObject *parent) :
QObject(parent),
//path(url),
loopEnabled(loop),
startTime(startTime),
endTime(endTime),
fadeInDuration(fadeInDuration),
fadeOutDuration(fadeOutDuration),
showFilename(showFilename)
{
QLOG_TRACE() << "Building Track with full params";
this->extractFilename(url);
// conversion of QString to const char * according to
// http://qt-project.org/faq/answer/how_can_i_convert_a_qstring_to_char_and_vice_versa
//QByteArray temp = url.toLocal8Bit();
//fileRef = new TagLib::FileRef(temp.data());
}
// Convenience constructor: builds a track from a file path/URL with all
// playback options at their defaults (no loop, full duration, no fades,
// tag title preferred over filename).
Track::Track(const QString &url,QObject *parent) :
QObject(parent),
//path(url),
loopEnabled(false),
startTime(0),
endTime(0),
fadeInDuration(0),
fadeOutDuration(0),
showFilename(false)
{
QLOG_TRACE() << "Building Track from URL";
this->extractFilename(url);
// conversion of QString to const char * according to
// http://qt-project.org/faq/answer/how_can_i_convert_a_qstring_to_char_and_vice_versa
//QByteArray temp = url.toLocal8Bit();
//fileRef = new TagLib::FileRef(temp.data());
}
void Track::extractFilename(const QString &url){
fileInfo = QFileInfo(url);
}
// Returns the name to show in the UI: the tag title when one is present and
// the user has not asked to always see filenames; otherwise the file's base
// name.
QString Track::getDisplayName() const{
    if(this->getTitle()->length() == 0 || this->shouldShowFilename()){
        return getFilename();
    }
    return title;
}
// Returns the absolute path (directory + file name) of the underlying file.
QString Track::getPath() const{
return fileInfo.absoluteFilePath();
}
// Returns the file's base name. Note: QFileInfo::baseName() strips the
// directory, the extension, AND anything after the first '.' in the name.
QString Track::getFilename() const{
return fileInfo.baseName();
}
// Three-way comparison of two tracks: primarily by canonical file path,
// then by the playback settings. Returns <0, 0 or >0.
int Track::compare(const Track &a, const Track &b) {
    int returnValue = QString::localeAwareCompare(a.fileInfo.canonicalFilePath(), b.fileInfo.canonicalFilePath());
    // if the strings are different, we stop the comparison
    if (returnValue) {return returnValue;}
    // startTime/endTime are long: compare explicitly instead of subtracting,
    // since truncating a long difference into an int can overflow and flip
    // the sign of the result.
    if (a.startTime != b.startTime) {
        return (a.startTime < b.startTime) ? -1 : 1;
    }
    if (a.endTime != b.endTime) {
        return (a.endTime < b.endTime) ? -1 : 1;
    }
    returnValue = a.fadeInDuration - b.fadeInDuration;
    if (returnValue) {return returnValue;}
    returnValue = a.fadeOutDuration - b.fadeOutDuration;
    if (returnValue) {return returnValue;}
    // bool minus bool yields -1, 0 or 1
    returnValue = a.loopEnabled - b.loopEnabled;
    if (returnValue) {return returnValue;}
    // additional checks can be added here
    return 0;
}
// Strict weak ordering based on compare(); lets Tracks be sorted/stored in
// ordered containers.
bool Track::operator<(const Track &other) const {
return compare(*this, other) < 0;
}
// Updates the artist tag and notifies listeners. `artist` must be non-null:
// it is dereferenced without a check.
void Track::setArtist(const QString * artist) {
//QLOG_TRACE() << this << "Track's artist updated.";
this->artist = *artist;
emit tagUpdated();
emit tagArtistUpdate();
}
// Updates the title tag and notifies listeners. `title` must be non-null:
// it is dereferenced without a check.
void Track::setTitle(const QString * title) {
//QLOG_TRACE() << this << "Track's title updated.";
this->title = *title;
emit tagUpdated();
emit tagTitleUpdate();
}
// Debugging aid: dumps this track's configuration to the info log.
void Track::print() const {
QLOG_INFO() << "Track::print()";
QLOG_INFO() << "path: " << this->getPath();
QLOG_INFO() << "startTime: " << startTime;
QLOG_INFO() << "endTime: " << endTime;
QLOG_INFO() << "fadeInDuration: " << fadeInDuration;
QLOG_INFO() << "fadeOutDuration:" << fadeOutDuration;
QLOG_INFO() << "loopEnabled: " << loopEnabled;
//QLOG_INFO() << "isValid?: " << isValid();
}
|
pdpdds/sdldualsystem | sdl1/VisualBoyAdvance/src/win32/GBMemoryViewerDlg.cpp | // VisualBoyAdvance - Nintendo Gameboy/GameboyAdvance (TM) emulator.
// Copyright (C) 1999-2003 Forgotten
// Copyright (C) 2004 Forgotten and the VBA development team
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2, or(at your option)
// any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software Foundation,
// Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
// GBMemoryViewerDlg.cpp : implementation file
//
#include "stdafx.h"
#include "vba.h"
#include "FileDlg.h"
#include "GBMemoryViewerDlg.h"
#include "MemoryViewerAddressSize.h"
#include "Reg.h"
#include "WinResUtil.h"
#include "../System.h"
#include "../gb/gbGlobals.h"
#ifdef _DEBUG
#define new DEBUG_NEW
#undef THIS_FILE
static char THIS_FILE[] = __FILE__;
#endif
// Constructs the Game Boy memory viewer control.
// NOTE(review): setAddressSize(1) presumably selects the 16-bit address
// format suited to the GB's 64KB address space — confirm in MemoryViewer.
GBMemoryViewer::GBMemoryViewer()
: MemoryViewer()
{
setAddressSize(1);
}
// Copies `len` bytes of Game Boy memory starting at `address` (wrapped to
// 16 bits) into `data`, reading through the banked gbMemoryMap. When no ROM
// is loaded (or emulation is stopped) the region is presented as zeros.
void GBMemoryViewer::readData(u32 address, int len, u8 *data)
{
  u16 addr = address & 0xffff;
  if(emulating && gbRom != NULL) {
    for(int i = 0; i < len; i++) {
      // high nibble selects the 4KB bank, low 12 bits the offset within it
      *data++ = gbMemoryMap[addr >> 12][addr & 0xfff];
      addr++;
    }
  } else {
    // zero-fill; no address bookkeeping is needed here
    for(int i = 0; i < len; i++) {
      *data++ = 0;
    }
  }
}
#define GB_READBYTE_QUICK(addr) \
gbMemoryMap[(addr) >> 12][(addr) & 0xfff]
#define GB_WRITEBYTE_QUICK(addr,v) \
gbMemoryMap[(addr) >> 12][(addr) & 0xfff] = (v)
// Writes `value` into GB memory at `address` with the given bit width
// (8/16/32). `mask` selects which bits of the existing value to keep; the
// new value is OR'ed in. Multi-byte values are stored low byte first
// (little-endian). Any other `size` is silently ignored.
void GBMemoryViewer::editData(u32 address, int size, int mask, u32 value)
{
u32 oldValue;
u16 addr = (u16)address & 0xffff;
switch(size) {
case 8:
oldValue = GB_READBYTE_QUICK(addr);
oldValue &= mask;
oldValue |= (u8)value;
GB_WRITEBYTE_QUICK(addr, oldValue);
break;
case 16:
// read-modify-write of two consecutive bytes, low byte first
oldValue = GB_READBYTE_QUICK(addr) |
(GB_READBYTE_QUICK(addr + 1) << 8);
oldValue &= mask;
oldValue |= (u16)value;
GB_WRITEBYTE_QUICK(addr, (oldValue & 255));
GB_WRITEBYTE_QUICK(addr+1, (oldValue >> 8));
break;
case 32:
// read-modify-write of four consecutive bytes, low byte first
oldValue = GB_READBYTE_QUICK(addr) |
(GB_READBYTE_QUICK(addr + 1) << 8) |
(GB_READBYTE_QUICK(addr + 2) << 16) |
(GB_READBYTE_QUICK(addr + 3) << 24);
oldValue &= mask;
oldValue |= (u32)value;
GB_WRITEBYTE_QUICK(addr, (oldValue & 255));
GB_WRITEBYTE_QUICK(addr+1, (oldValue >> 8));
GB_WRITEBYTE_QUICK(addr+2, (oldValue >> 16));
GB_WRITEBYTE_QUICK(addr+3, (oldValue >> 24));
break;
}
}
/////////////////////////////////////////////////////////////////////////////
// GBMemoryViewerDlg dialog
// Constructs the dialog. m_size starts at -1 (unset); OnInitDialog loads the
// persisted data-size preference from the registry. Auto-update is off until
// the user toggles it.
GBMemoryViewerDlg::GBMemoryViewerDlg(CWnd* pParent /*=NULL*/)
: ResizeDlg(GBMemoryViewerDlg::IDD, pParent)
{
//{{AFX_DATA_INIT(GBMemoryViewerDlg)
m_size = -1;
//}}AFX_DATA_INIT
autoUpdate = false;
}
// MFC DDX hook: binds the dialog controls to their member variables (both
// directions, depending on pDX) and the size radio group to m_size.
void GBMemoryViewerDlg::DoDataExchange(CDataExchange* pDX)
{
CDialog::DoDataExchange(pDX);
//{{AFX_DATA_MAP(GBMemoryViewerDlg)
DDX_Control(pDX, IDC_CURRENT_ADDRESS, m_current);
DDX_Control(pDX, IDC_ADDRESS, m_address);
DDX_Control(pDX, IDC_ADDRESSES, m_addresses);
DDX_Radio(pDX, IDC_8_BIT, m_size);
//}}AFX_DATA_MAP
DDX_Control(pDX, IDC_VIEWER, m_viewer);
}
// Message map: routes button clicks and the address combo's selection
// changes to their respective handlers below.
BEGIN_MESSAGE_MAP(GBMemoryViewerDlg, CDialog)
//{{AFX_MSG_MAP(GBMemoryViewerDlg)
ON_BN_CLICKED(IDC_CLOSE, OnClose)
ON_BN_CLICKED(IDC_REFRESH, OnRefresh)
ON_BN_CLICKED(IDC_8_BIT, On8Bit)
ON_BN_CLICKED(IDC_16_BIT, On16Bit)
ON_BN_CLICKED(IDC_32_BIT, On32Bit)
ON_BN_CLICKED(IDC_AUTO_UPDATE, OnAutoUpdate)
ON_BN_CLICKED(IDC_GO, OnGo)
ON_CBN_SELCHANGE(IDC_ADDRESSES, OnSelchangeAddresses)
ON_BN_CLICKED(IDC_SAVE, OnSave)
ON_BN_CLICKED(IDC_LOAD, OnLoad)
//}}AFX_MSG_MAP
END_MESSAGE_MAP()
/////////////////////////////////////////////////////////////////////////////
// GBMemoryViewerDlg message handlers
// One-time dialog setup: registers the resize behavior of each control,
// configures the hex-viewer child, fills the address-region combo, sizes
// its drop-down list, and restores the persisted data-size preference.
BOOL GBMemoryViewerDlg::OnInitDialog()
{
CDialog::OnInitDialog();
// resize rules: the viewer stretches; the buttons/labels track the edges
DIALOG_SIZER_START( sz )
DIALOG_SIZER_ENTRY( IDC_VIEWER, DS_SizeX | DS_SizeY )
DIALOG_SIZER_ENTRY( IDC_REFRESH, DS_MoveY)
DIALOG_SIZER_ENTRY( IDC_CLOSE, DS_MoveY)
DIALOG_SIZER_ENTRY( IDC_LOAD, DS_MoveY)
DIALOG_SIZER_ENTRY( IDC_SAVE, DS_MoveY)
DIALOG_SIZER_ENTRY( IDC_AUTO_UPDATE, DS_MoveY)
DIALOG_SIZER_ENTRY( IDC_CURRENT_ADDRESS_LABEL, DS_MoveY | DS_MoveX)
DIALOG_SIZER_ENTRY( IDC_CURRENT_ADDRESS, DS_MoveY | DS_MoveX)
DIALOG_SIZER_END()
// persist the window geometry under this registry key
SetData(sz,
TRUE,
HKEY_CURRENT_USER,
"Software\\Emulators\\VisualBoyAdvance\\Viewer\\GBMemoryView",
NULL);
m_viewer.setDialog(this);
m_viewer.ShowScrollBar(SB_VERT, TRUE);
m_viewer.EnableScrollBar(SB_VERT, ESB_ENABLE_BOTH);
// the GB memory regions offered in the jump-to combo (order must match
// OnSelchangeAddresses)
LPCTSTR s[] = {
"0x0000 - ROM",
"0x4000 - ROM",
"0x8000 - VRAM",
"0xA000 - SRAM",
"0xC000 - RAM",
"0xD000 - WRAM",
"0xFF00 - I/O",
"0xFF80 - RAM"
};
for(int i = 0; i < 8; i++)
m_addresses.AddString(s[i]);
m_addresses.SetCurSel(0);
// compute a drop-down height tall enough to show all entries
RECT cbSize;
int Height;
m_addresses.GetClientRect(&cbSize);
Height = m_addresses.GetItemHeight(-1);
Height += m_addresses.GetItemHeight(0) * (9);
// Note: The use of SM_CYEDGE assumes that we're using Windows '95
// Now add on the height of the border of the edit box
Height += GetSystemMetrics(SM_CYEDGE) * 2; // top & bottom edges
// The height of the border of the drop-down box
Height += GetSystemMetrics(SM_CYEDGE) * 2; // top & bottom edges
// now set the size of the window
m_addresses.SetWindowPos(NULL,
0, 0,
cbSize.right, Height,
SWP_NOMOVE | SWP_NOZORDER);
// addresses are at most 8 hex digits
m_address.LimitText(8);
// restore the persisted data-size radio selection (0=8, 1=16, 2=32 bit)
m_size = regQueryDwordValue("memViewerDataSize", 0);
if(m_size < 0 || m_size > 2)
m_size = 0;
m_viewer.setSize(m_size);
UpdateData(FALSE);
// fixed-pitch font so the current-address readout lines up
m_current.SetFont(CFont::FromHandle((HFONT)GetStockObject(SYSTEM_FIXED_FONT)));
return TRUE; // return TRUE unless you set the focus to a control
// EXCEPTION: OCX Property Pages should return FALSE
}
// Unregisters from emulator update notifications and destroys the modeless
// dialog; final cleanup happens in PostNcDestroy().
void GBMemoryViewerDlg::OnClose()
{
theApp.winRemoveUpdateListener(this);
DestroyWindow();
}
// Forces the hex viewer to repaint with the current memory contents.
void GBMemoryViewerDlg::OnRefresh()
{
m_viewer.Invalidate();
}
// Update-listener callback (invoked when auto-update is on): just refresh.
void GBMemoryViewerDlg::update()
{
OnRefresh();
}
// Switches the viewer to 8-bit (byte) granularity and persists the choice.
void GBMemoryViewerDlg::On8Bit()
{
m_viewer.setSize(0);
regSetDwordValue("memViewerDataSize", 0);
}
// Switches the viewer to 16-bit granularity and persists the choice.
void GBMemoryViewerDlg::On16Bit()
{
m_viewer.setSize(1);
regSetDwordValue("memViewerDataSize", 1);
}
// Switches the viewer to 32-bit granularity and persists the choice.
void GBMemoryViewerDlg::On32Bit()
{
m_viewer.setSize(2);
regSetDwordValue("memViewerDataSize", 2);
}
// Toggles automatic refresh: when enabled the dialog registers itself as an
// update listener so update() (and thus a repaint) runs on every emulator
// update; when disabled it unregisters.
void GBMemoryViewerDlg::OnAutoUpdate()
{
autoUpdate = !autoUpdate;
if(autoUpdate) {
theApp.winAddUpdateListener(this);
} else {
theApp.winRemoveUpdateListener(this);
}
}
// Jumps the viewer to the hex address typed into the address box, aligning
// it to the current data width. Previously `address` was read uninitialized
// when sscanf failed to parse the text; now unparsable input leaves the
// view unchanged.
void GBMemoryViewerDlg::OnGo()
{
  CString buffer;
  m_address.GetWindowText(buffer);
  u32 address = 0;
  if(sscanf(buffer, "%x", &address) != 1)
    return; // not a hex number: ignore
  // align to the element size (16-bit: even; 32-bit: multiple of 4)
  if(m_viewer.getSize() == 1)
    address &= ~1;
  else if(m_viewer.getSize() == 2)
    address &= ~3;
  m_viewer.setAddress(address);
}
void GBMemoryViewerDlg::OnSelchangeAddresses()
{
int cur = m_addresses.GetCurSel();
switch(cur) {
case 0:
m_viewer.setAddress(0x0000);
break;
case 1:
m_viewer.setAddress(0x4000);
break;
case 2:
m_viewer.setAddress(0x8000);
break;
case 3:
m_viewer.setAddress(0xa000);
break;
case 4:
m_viewer.setAddress(0xc000);
break;
case 5:
m_viewer.setAddress(0xd000);
break;
case 6:
m_viewer.setAddress(0xff00);
break;
case 7:
m_viewer.setAddress(0xff80);
break;
}
}
// Displays `address` in the current-address readout as 8 hex digits.
void GBMemoryViewerDlg::setCurrentAddress(u32 address)
{
CString buffer;
buffer.Format("0x%08X", address);
m_current.SetWindowText(buffer);
}
// Dumps a user-chosen range of GB memory to a .dmp file: first asks for the
// start address and byte count (pre-filled with the viewer's position),
// then for the destination file, then writes the bytes one at a time via
// the banked memory map.
void GBMemoryViewerDlg::OnSave()
{
MemoryViewerAddressSize dlg;
CString buffer;
dlg.setAddress(m_viewer.getCurrentAddress());
LPCTSTR exts[] = { ".dmp" };
CString filter = theApp.winLoadFilter(IDS_FILTER_DUMP);
CString title = winResLoadString(IDS_SELECT_DUMP_FILE);
if(dlg.DoModal() == IDOK) {
// save-style file picker (last arg true)
FileDlg file(this,
buffer,
filter,
0,
"DMP",
exts,
"",
title,
true);
if(file.DoModal() == IDOK) {
buffer = file.GetPathName();
FILE *f = fopen(buffer, "wb");
if(f == NULL) {
systemMessage(IDS_ERROR_CREATING_FILE, buffer);
return;
}
int size = dlg.getSize();
// the address wraps within the GB's 16-bit space
u16 addr = dlg.getAddress() & 0xffff;
for(int i = 0; i < size; i++) {
fputc(gbMemoryMap[addr >> 12][addr & 0xfff], f);
addr++;
}
fclose(f);
}
}
}
// Loads a .dmp file into GB memory: asks for the file, then for the target
// address and byte count (defaulting to the viewer position and the file's
// length), then copies bytes into the banked memory map and refreshes.
void GBMemoryViewerDlg::OnLoad()
{
CString buffer;
LPCTSTR exts[] = { ".dmp" };
CString filter = theApp.winLoadFilter(IDS_FILTER_DUMP);
CString title = winResLoadString(IDS_SELECT_DUMP_FILE);
// open-style file picker (last arg false)
FileDlg file(this,
buffer,
filter,
0,
"DMP",
exts,
"",
title,
false);
if(file.DoModal() == IDOK) {
buffer = file.GetPathName();
FILE *f = fopen(buffer, "rb");
if(f == NULL) {
systemMessage(IDS_CANNOT_OPEN_FILE,
"Cannot open file %s",
buffer);
return;
}
MemoryViewerAddressSize dlg;
// determine the file length to pre-fill the size field
fseek(f, 0, SEEK_END);
int size = ftell(f);
fseek(f, 0, SEEK_SET);
dlg.setAddress(m_viewer.getCurrentAddress());
dlg.setSize(size);
if(dlg.DoModal() == IDOK) {
// note: this inner `size` (the user's choice) shadows the file length
int size = dlg.getSize();
// the address wraps within the GB's 16-bit space
u16 addr = dlg.getAddress() & 0xffff;
for(int i = 0; i < size; i++) {
int c = fgetc(f);
if(c == -1)
break; // stop at EOF even if the requested size is larger
gbMemoryMap[addr >> 12][addr & 0xfff] = c;
addr++;
}
OnRefresh();
}
fclose(f);
}
}
// Called by MFC after the window's HWND has been destroyed. This dialog is
// modeless and heap-allocated, so it frees itself here — the standard MFC
// self-deletion idiom for modeless dialogs.
void GBMemoryViewerDlg::PostNcDestroy()
{
  delete this;
}
|
atmelino/JATexperimental | src/jat/coreNOSA/cm/FiniteBurn.java | /* JAT: Java Astrodynamics Toolkit
*
* Copyright (c) 2003 National Aeronautics and Space Administration. All rights reserved.
*
* This file is part of JAT. JAT is free software; you can
* redistribute it and/or modify it under the terms of the
* NASA Open Source Agreement
*
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* NASA Open Source Agreement for more details.
*
* You should have received a copy of the NASA Open Source Agreement
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*
*
* File Created on Aug 28, 2003
*/
package jat.coreNOSA.cm;
import jat.coreNOSA.math.MatrixVector.data.VectorN;
import java.io.Serializable;
/**
* The FiniteBurn.java Class represents a single finite burn.
*
* @author
* @version 1.0
*/
public class FiniteBurn implements Serializable {

  /** Serialization version identifier. */
  private static final long serialVersionUID = 8812477331675719590L;

  /** Burn start time in sim time (seconds) */
  public double tstart;

  /** Burn stop time in sim time (seconds) */
  public double tstop;

  /** Burn acceleration in m/s^2 */
  public double accel;

  /** Thrust direction unit vector */
  public VectorN unitVector;

  /** Constructor
   * @param t0 Time of burn initiation in sim time (sec).
   * @param tf time of burn cutoff in sim time (sec)
   * @param acc burn acceleration in m/s^2
   * @param unitv thrust direction unit vector
   */
  public FiniteBurn(double t0, double tf, double acc, VectorN unitv) {
    this.tstart = t0;
    this.tstop = tf;
    this.accel = acc;
    this.unitVector = unitv;
  }
}
|
tangjwtj/jparsec | jparsec/src/main/java/org/jparsec/ParseContext.java | /*****************************************************************************
* Copyright (C) jparsec.org *
* ------------------------------------------------------------------------- *
* Licensed under the Apache License, Version 2.0 (the "License"); *
* you may not use this file except in compliance with the License. *
* You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, software *
* distributed under the License is distributed on an "AS IS" BASIS, *
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
* See the License for the specific language governing permissions and *
* limitations under the License. *
*****************************************************************************/
package org.jparsec;
import static org.jparsec.internal.util.Checks.checkState;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.jparsec.error.ParseErrorDetails;
import org.jparsec.error.ParserException;
import org.jparsec.internal.annotations.Private;
import org.jparsec.internal.util.Lists;
/**
* Represents the context state during parsing.
*
* @author <NAME>
*/
abstract class ParseContext {

  static final String EOF = "EOF";

  final String module;
  final CharSequence source;
  final SourceLocator locator;

  /** The current position of the input. Points to the token array for token level. */
  int at;

  /** The current logical step. */
  int step;

  /** The current parse result. */
  Object result;

  // No-op trace used until enableTrace() is called, so trace calls never need a null check.
  private ParserTrace trace = new ParserTrace() {
    @Override public void push(String name) {}
    @Override public void pop() {}
    @Override public TreeNode getCurrentNode() { return null; }
    @Override public void setCurrentResult(Object result) {}
    @Override public TreeNode getLatestChild() { return null; }
    @Override public void setLatestChild(TreeNode node) {}
    @Override public void startFresh(ParseContext context) {}
    @Override public void setStateAs(ParserTrace that) {}
  };

  enum ErrorType {

    /** Default value, no error. */
    NONE(false),

    /** When the error is mostly lenient (as a delimiter of repetitions for example). */
    DELIMITING(false),

    /** When {@link Parser#not()} is called. Signals that something isn't expected. */
    UNEXPECTED(false),

    /** When any expected input isn't found. */
    MISSING(true),

    /** When {@link Parser#label()} is called. Signals that a logical stuff isn't found. */
    EXPECTING(true),

    /** When {@link Parsers#fail(String)} is called. Signals a serious problem. */
    FAILURE(false);

    ErrorType(boolean mergeable) {
      this.mergeable = mergeable;
    }

    // If true, errors of this type at the same location accumulate; otherwise they replace.
    final boolean mergeable;
  }

  private ErrorType currentErrorType = ErrorType.NONE;
  private int currentErrorAt;
  private int currentErrorIndex = 0; // TODO: is it necessary to set this to the starting index?
  private final ArrayList<Object> errors = Lists.arrayList(32);
  private String encountered = null; // for explicitly setting encountered token into ScannerState.
  private TreeNode currentErrorNode = null;

  // explicit suppresses error recording if true.
  private boolean errorSuppressed = false;
  private ErrorType overrideErrorType = ErrorType.NONE;

  //caller should not change input after it is passed in.
  ParseContext(CharSequence source, int at, String module, SourceLocator locator) {
    this(source, null, at, module, locator);
  }

  ParseContext(
      CharSequence source, Object ret, int at, String module, SourceLocator locator) {
    this.source = source;
    this.result = ret;
    this.step = 0;
    this.at = at;
    this.module = module;
    this.locator = locator;
    this.currentErrorAt = at;
  }

  /** Runs {@code parser} with error recording suppressed. */
  final boolean withErrorSuppressed(Parser<?> parser) {
    boolean oldValue = errorSuppressed;
    errorSuppressed = true;
    boolean ok = parser.apply(this);
    errorSuppressed = oldValue;
    return ok;
  }

  /**
   * Runs {@code parser} as a delimiter: any error it records is downgraded to
   * {@link ErrorType#DELIMITING}, and on success the logical step is restored so that
   * matching the delimiter alone does not count as parser progress.
   */
  final boolean applyAsDelimiter(Parser<?> parser) {
    ErrorType oldValue = overrideErrorType;
    overrideErrorType = ErrorType.DELIMITING;
    int oldStep = step;
    boolean ok = parser.apply(this);
    if (ok) step = oldStep;
    overrideErrorType = oldValue;
    return ok;
  }

  /**
   * Applies {@code parser} as a new tree node with {@code name}, and if fails, reports
   * "expecting $name".
   */
  final boolean applyNewNode(Parser<?> parser, String name) {
    int physical = at;
    int logical = step;
    TreeNode latestChild = trace.getLatestChild();
    trace.push(name);
    if (parser.apply(this)) {
      trace.setCurrentResult(result);
      trace.pop();
      return true;
    }
    if (stillThere(physical, logical)) expected(name);
    trace.pop();
    // On failure, the erroneous path shouldn't be counted in the parse tree.
    trace.setLatestChild(latestChild);
    return false;
  }

  /** Runs {@code parser} against {@code nestedState}, merging result, position and errors back. */
  final boolean applyNested(Parser<?> parser, ParseContext nestedState) {
    // nested is either the token-level parser, or the inner scanner of a subpattern.
    try {
      if (parser.apply(nestedState)) {
        set(nestedState.step, at, nestedState.result);
        return true;
      }
      // index on token level is the "at" on character level
      set(step, nestedState.getIndex(), null);
      // always copy error because there could be false alarms in the character level.
      // For example, a "or" parser nested in a "many" failed in one of its branches.
      copyErrorFrom(nestedState);
      return false;
    } finally {
      trace.setStateAs(nestedState.trace);
    }
  }

  /** Applies {@code parser} exactly {@code n} times, failing fast on the first failure. */
  final boolean repeat(Parser<?> parser, int n) {
    for (int i = 0; i < n; i++) {
      if (!parser.apply(this)) return false;
    }
    return true;
  }

  /** Applies {@code parser} exactly {@code n} times, collecting each result into {@code collection}. */
  final <T> boolean repeat(
      Parser<? extends T> parser, int n, Collection<T> collection) {
    for (int i = 0; i < n; i++) {
      if (!parser.apply(this)) return false;
      collection.add(parser.getReturn(this));
    }
    return true;
  }

  final ParserTrace getTrace() {
    return trace;
  }

  /** The physical index of the current most relevant error, {@code 0} if none. */
  final int errorIndex() {
    return currentErrorIndex;
  }

  /** Freezes the current trace into a {@link ParseTree}, or {@code null} if tracing is off. */
  final ParseTree buildParseTree() {
    TreeNode currentNode = trace.getCurrentNode();
    if (currentNode == null) return null;
    return currentNode.freeze(getIndex()).toParseTree();
  }

  final ParseTree buildErrorParseTree() {
    // The current node is partially done because there was an error.
    // So orphanize it. But at the same time, all ancestor nodes should have their endIndex set to
    // where we are now.
    if (currentErrorNode == null) return null;
    return currentErrorNode.orphanize().freeze(getIndex()).toParseTree();
  }

  /** Only called when rendering the error in {@link ParserException}. */
  final ParseErrorDetails renderError() {
    final int errorIndex = toIndex(currentErrorAt);
    final String encounteredName = getEncountered();
    final ArrayList<String> errorStrings = Lists.arrayList(errors.size());
    for (Object error : errors) {
      errorStrings.add(String.valueOf(error));
    }
    switch (currentErrorType) {
      case UNEXPECTED :
        return new EmptyParseError(errorIndex, encounteredName) {
          @Override public String getUnexpected() {
            return errorStrings.get(0);
          }
        };
      case FAILURE :
        return new EmptyParseError(errorIndex, encounteredName) {
          @Override public String getFailureMessage() {
            return errorStrings.get(0);
          }
        };
      case EXPECTING:
      case MISSING:
      case DELIMITING:
        return new EmptyParseError(errorIndex, encounteredName) {
          @Override public List<String> getExpected() {
            return errorStrings;
          }
        };
      default:
        return new EmptyParseError(errorIndex, encounteredName);
    }
  }

  // Returns the explicitly-set encountered token if any; otherwise derives it from the input.
  private String getEncountered() {
    if (encountered != null) {
      return encountered;
    }
    return getInputName(currentErrorAt);
  }

  /** Returns the string representation of the current input (character or token). */
  abstract String getInputName(int pos);

  abstract boolean isEof();

  /** Returns the current index in the original source. */
  final int getIndex() {
    return toIndex(at);
  }

  /** Returns the current token. Only applicable to token level parser. */
  abstract Token getToken();

  /** Peeks the current character. Only applicable to character level parser. */
  abstract char peekChar();

  /** Translates the logical position to physical index in the original source. */
  abstract int toIndex(int pos);

  /**
   * Records an error of {@code type} about {@code subject} at the current location. Only the
   * farthest error is kept; at equal locations the more severe type wins, and mergeable types
   * accumulate their subjects.
   */
  @Private final void raise(ErrorType type, Object subject) {
    if (errorSuppressed) return;
    if (at < currentErrorAt) return;
    if (overrideErrorType != ErrorType.NONE) type = overrideErrorType;
    if (at > currentErrorAt) {
      setErrorState(at, getIndex(), type);
      errors.add(subject);
      return;
    }
    // now error location is same
    if (type.ordinal() < currentErrorType.ordinal()) {
      return;
    }
    if (type.ordinal() > currentErrorType.ordinal()) {
      setErrorState(at, getIndex(), type);
      errors.add(subject);
      return;
    }
    // now even error type is same
    if (type.mergeable) {
      // merge expected error.
      errors.add(subject);
    }
  }

  final void fail(String message) {
    raise(ErrorType.FAILURE, message);
  }

  final void missing(Object what) {
    raise(ErrorType.MISSING, what);
  }

  final void expected(Object what) {
    raise(ErrorType.EXPECTING, what);
  }

  final void unexpected(String what) {
    raise(ErrorType.UNEXPECTED, what);
  }

  /** Returns true (and rewinds the physical position) if the logical step hasn't advanced. */
  final boolean stillThere(int wasAt, int originalStep) {
    if (step == originalStep) {
      // logical step didn't change, so logically we are still there, undo any physical offset
      setAt(originalStep, wasAt);
      return true;
    }
    return false;
  }

  /** Sets the logical step, physical position and current result in one shot. */
  final void set(int step, int at, Object ret) {
    this.step = step;
    this.at = at;
    this.result = ret;
  }

  /** Sets the logical step and physical position without touching the result. */
  final void setAt(int step, int at) {
    this.step = step;
    this.at = at;
  }

  /** Advances both the physical position and the logical step by one. */
  final void next() {
    at ++;
    step ++;
  }

  /** Advances the physical position by {@code n}; counts as one logical step if n &gt; 0. */
  final void next(int n) {
    at += n;
    if (n > 0) step++;
  }

  /** Enables parse tree tracing with {@code rootName} as the name of the root node. */
  final void enableTrace(final String rootName) {
    this.trace = new ParserTrace() {
      private TreeNode current = new TreeNode(rootName, getIndex());

      @Override public void push(String name) {
        this.current = current.addChild(name, getIndex());
      }

      @Override public void pop() {
        current.setEndIndex(getIndex());
        this.current = current.parent();
      }

      @Override public TreeNode getCurrentNode() {
        return current;
      }

      @Override public void setCurrentResult(Object result) {
        current.setResult(result);
      }

      @Override public TreeNode getLatestChild() {
        return current.latestChild;
      }

      @Override public void setLatestChild(TreeNode latest) {
        checkState(latest == null || latest.parent() == current,
            "Trying to set a child node not owned by the parent node");
        current.latestChild = latest;
      }

      @Override public void startFresh(ParseContext context) {
        context.enableTrace(rootName);
      }

      @Override public void setStateAs(ParserTrace that) {
        current = that.getCurrentNode();
      }
    };
  }

  /** Allows tracing of parsing progress during error condition, to ease debugging. */
  interface ParserTrace {

    /**
     * Upon applying a parser with {@link Parser#label}, the label name is used to create a new
     * child node under the current node. The new child node is set to be the current node.
     */
    void push(String name);

    /** When a parser finishes, the current node is popped so we are back to the parent parser. */
    void pop();

    /** Returns the current node, that is being parsed (not necessarily finished). */
    TreeNode getCurrentNode();

    /** Whenever a labeled parser succeeds, it calls this method to set its result in the trace. */
    void setCurrentResult(Object result);

    /**
     * Called by branching parsers, to save the current state of tree, before trying parsers that
     * could modify the tree state.
     */
    TreeNode getLatestChild();

    /**
     * Called by labeled parser to reset the current child node when the current node failed.
     * Also called by {@link BestParser} to set the optimum parse tree.
     */
    void setLatestChild(TreeNode node);

    /** Called when tokenizer passes on to token-level parser. */
    void startFresh(ParseContext context);

    /**
     * Set the enclosing parser's tree state into the nested parser's state. Called for both nested
     * token-level parser and nested scanner.
     */
    void setStateAs(ParserTrace that);
  }

  // Replaces the error state and then appends the given error subjects.
  private void setErrorState(
      int errorAt, int errorIndex, ErrorType errorType, List<Object> errors) {
    setErrorState(errorAt, errorIndex, errorType);
    this.errors.addAll(errors);
  }

  // Resets all error bookkeeping to a new location/type, clearing previously recorded subjects.
  private void setErrorState(int errorAt, int errorIndex, ErrorType errorType) {
    this.currentErrorIndex = errorIndex;
    this.currentErrorAt = errorAt;
    this.currentErrorType = errorType;
    this.currentErrorNode = trace.getCurrentNode();
    this.encountered = null;
    this.errors.clear();
  }

  // Imports the error state of a nested context; its physical index serves as both at and index.
  private void copyErrorFrom(ParseContext that) {
    int errorIndex = that.errorIndex();
    setErrorState(errorIndex, errorIndex, that.currentErrorType, that.errors);
    if (!that.isEof()) {
      this.encountered = that.getEncountered();
    }
    currentErrorNode = that.currentErrorNode;
  }

  /** Reads the characters as input. Only applicable to character level parsers. */
  abstract CharSequence characters();

  @Override public String toString() {
    return source.subSequence(getIndex(), source.length()).toString();
  }
} |
inodeman/kie-tools | packages/stunner-editors/kie-wb-common-stunner/kie-wb-common-stunner-core/kie-wb-common-stunner-api/kie-wb-common-stunner-client-api/src/main/java/org/kie/workbench/common/stunner/core/client/canvas/controls/keyboard/shortcut/KeyboardShortcut.java | /*
* Copyright 2018 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.stunner.core.client.canvas.controls.keyboard.shortcut;
import org.appformer.client.keyboardShortcuts.KeyboardShortcutsApiOpts;
import org.kie.workbench.common.stunner.core.client.canvas.CanvasHandler;
import org.kie.workbench.common.stunner.core.client.event.keyboard.KeyboardEvent;
import org.kie.workbench.common.stunner.core.graph.Element;
/**
 * Interface representing an action that is invoked by pressing some Keys Combination on the keyboard
 * @param <H> the concrete {@link CanvasHandler} type the action operates on
 */
public interface KeyboardShortcut<H extends CanvasHandler> {

    /**
     * @param pressedKeys the keys currently pressed
     * @return true if pressed keys should invoke the action
     */
    boolean matchesPressedKeys(final KeyboardEvent.Key... pressedKeys);

    /**
     * @param selectedElement the element currently selected on the canvas
     * @return true if action can be executed for the selected element
     */
    boolean matchesSelectedElement(final Element selectedElement);

    /** Executes the shortcut's action on the given canvas handler for the selected node. */
    void executeAction(final H canvasHandler,
                       final String selectedNodeId);

    /** @return the key combination that triggers this shortcut. */
    KeyboardEvent.Key[] getKeyCombination();

    /** @return a human-readable label describing this shortcut. */
    String getLabel();

    /** @return options passed to the keyboard-shortcuts API; defaults to {@code DEFAULT}. */
    default KeyboardShortcutsApiOpts getOpts() {
        return KeyboardShortcutsApiOpts.DEFAULT;
    }
}
|
PavelZX/rekit-studio | src/features/plugin-default/core/file.js | const { vio, refactor } = rekit.core;
function add(filePath) {
if (vio.fileExists(filePath)) throw new Error('File already exists: ' + filePath);
vio.save(filePath, '');
}
function move(source, target) {
if (vio.fileExists(target)) throw new Error('File already exists: ' + target);
if (!vio.fileExists(source)) throw new Error('File doesn\'t exist: ' + source);
vio.move(source, target);
}
function remove(filePath) {
vio.del(filePath);
}
module.exports = {
add,
remove,
move,
};
|
joeistyping/runelite | runescape-client/src/main/java/class233.java | import net.runelite.mapping.ObfuscatedGetter;
import net.runelite.mapping.ObfuscatedName;
import net.runelite.mapping.ObfuscatedSignature;
@ObfuscatedName("hx")
// Deobfuscated RuneScape client class. Appears to decode a packed binary
// instrument/patch definition (128 note slots, per-note sample references,
// tuning, volume/pan tables and envelope-like parameter blocks) from a byte
// buffer. Names are compiler-generated; semantics below are inferred from the
// visible code and hedged accordingly — TODO confirm against the client cache format.
public class class233 extends Node {
	@ObfuscatedName("o")
	@ObfuscatedGetter(
		intValue = 1927342867
	)
	int field2762;
	@ObfuscatedName("k")
	@ObfuscatedSignature(
		signature = "[Ldp;"
	)
	RawAudioNode[] field2757; // resolved audio sample per note (filled by method4307)
	@ObfuscatedName("t")
	short[] field2759; // per-note 16-bit value, accumulated from two byte passes plus a flag bit
	@ObfuscatedName("d")
	byte[] field2756;
	@ObfuscatedName("h")
	byte[] field2760;
	@ObfuscatedName("m")
	@ObfuscatedSignature(
		signature = "[Lhq;"
	)
	class228[] field2763; // per-note parameter block (shared instances)
	@ObfuscatedName("z")
	byte[] field2761;
	@ObfuscatedName("i")
	int[] field2758; // per-note encoded sample id; consumed and zeroed by method4307
	// Decodes the packed definition from the raw byte array.
	class233(byte[] var1) {
		this.field2757 = new RawAudioNode[128];
		this.field2759 = new short[128];
		this.field2756 = new byte[128];
		this.field2760 = new byte[128];
		this.field2763 = new class228[128];
		this.field2761 = new byte[128];
		this.field2758 = new int[128];
		Buffer var2 = new Buffer(var1);
		// Read four NUL-terminated run-length tables; each drives how later
		// per-note values are repeated across the 128 slots.
		int var3;
		for(var3 = 0; var2.payload[var3 + var2.offset] != 0; ++var3) {
			;
		}
		byte[] var4 = new byte[var3];
		int var5;
		for(var5 = 0; var5 < var3; ++var5) {
			var4[var5] = var2.readByte();
		}
		++var2.offset;
		++var3;
		var5 = var2.offset;
		var2.offset += var3;
		int var6;
		for(var6 = 0; var2.payload[var6 + var2.offset] != 0; ++var6) {
			;
		}
		byte[] var7 = new byte[var6];
		int var8;
		for(var8 = 0; var8 < var6; ++var8) {
			var7[var8] = var2.readByte();
		}
		++var2.offset;
		++var6;
		var8 = var2.offset;
		var2.offset += var6;
		int var9;
		for(var9 = 0; var2.payload[var9 + var2.offset] != 0; ++var9) {
			;
		}
		byte[] var10 = new byte[var9];
		for(int var11 = 0; var11 < var9; ++var11) {
			var10[var11] = var2.readByte();
		}
		++var2.offset;
		++var9;
		// Build an index table mapping entries to shared class228 instances
		// (move-to-front style decoding: 0 allocates a new slot).
		byte[] var36 = new byte[var9];
		int var12;
		int var14;
		if(var9 > 1) {
			var36[1] = 1;
			int var13 = 1;
			var12 = 2;
			for(var14 = 2; var14 < var9; ++var14) {
				int var15 = var2.readUnsignedByte();
				if(var15 == 0) {
					var13 = var12++;
				} else {
					if(var15 <= var13) {
						--var15;
					}
					var13 = var15;
				}
				var36[var14] = (byte)var13;
			}
		} else {
			var12 = var9;
		}
		class228[] var37 = new class228[var12];
		class228 var38;
		for(var14 = 0; var14 < var37.length; ++var14) {
			var38 = var37[var14] = new class228();
			int var16 = var2.readUnsignedByte();
			if(var16 > 0) {
				var38.field2680 = new byte[var16 * 2];
			}
			var16 = var2.readUnsignedByte();
			if(var16 > 0) {
				var38.field2684 = new byte[var16 * 2 + 2];
				var38.field2684[1] = 64;
			}
		}
		// Optional global curve tables (presumably volume and pan envelopes).
		var14 = var2.readUnsignedByte();
		byte[] var45 = var14 > 0?new byte[var14 * 2]:null;
		var14 = var2.readUnsignedByte();
		byte[] var39 = var14 > 0?new byte[var14 * 2]:null;
		int var17;
		for(var17 = 0; var2.payload[var17 + var2.offset] != 0; ++var17) {
			;
		}
		byte[] var18 = new byte[var17];
		int var19;
		for(var19 = 0; var19 < var17; ++var19) {
			var18[var19] = var2.readByte();
		}
		++var2.offset;
		++var17;
		// Two delta-coded byte passes accumulate the 16-bit per-note values.
		var19 = 0;
		int var20;
		for(var20 = 0; var20 < 128; ++var20) {
			var19 += var2.readUnsignedByte();
			this.field2759[var20] = (short)var19;
		}
		var19 = 0;
		for(var20 = 0; var20 < 128; ++var20) {
			var19 += var2.readUnsignedByte();
			this.field2759[var20] = (short)(this.field2759[var20] + (var19 << 8));
		}
		// Per-note sample ids, run-length expanded via var18.
		var20 = 0;
		int var21 = 0;
		int var22 = 0;
		int var23;
		for(var23 = 0; var23 < 128; ++var23) {
			if(var20 == 0) {
				if(var21 < var18.length) {
					var20 = var18[var21++];
				} else {
					var20 = -1;
				}
				var22 = var2.readVarInt();
			}
			this.field2759[var23] = (short)(this.field2759[var23] + ((var22 - 1 & 2) << 14));
			this.field2758[var23] = var22;
			--var20;
		}
		// Per-note values from the first two tables (only for notes with a sample id).
		var20 = 0;
		var21 = 0;
		var23 = 0;
		int var24;
		for(var24 = 0; var24 < 128; ++var24) {
			if(this.field2758[var24] != 0) {
				if(var20 == 0) {
					if(var21 < var4.length) {
						var20 = var4[var21++];
					} else {
						var20 = -1;
					}
					var23 = var2.payload[var5++] - 1;
				}
				this.field2761[var24] = (byte)var23;
				--var20;
			}
		}
		var20 = 0;
		var21 = 0;
		var24 = 0;
		for(int var25 = 0; var25 < 128; ++var25) {
			if(this.field2758[var25] != 0) {
				if(var20 == 0) {
					if(var21 < var7.length) {
						var20 = var7[var21++];
					} else {
						var20 = -1;
					}
					var24 = var2.payload[var8++] + 16 << 2;
				}
				this.field2760[var25] = (byte)var24;
				--var20;
			}
		}
		// Assign the shared class228 parameter blocks per note.
		var20 = 0;
		var21 = 0;
		class228 var40 = null;
		int var26;
		for(var26 = 0; var26 < 128; ++var26) {
			if(this.field2758[var26] != 0) {
				if(var20 == 0) {
					var40 = var37[var36[var21]];
					if(var21 < var10.length) {
						var20 = var10[var21++];
					} else {
						var20 = -1;
					}
				}
				this.field2763[var26] = var40;
				--var20;
			}
		}
		var20 = 0;
		var21 = 0;
		var26 = 0;
		int var27;
		for(var27 = 0; var27 < 128; ++var27) {
			if(var20 == 0) {
				if(var21 < var18.length) {
					var20 = var18[var21++];
				} else {
					var20 = -1;
				}
				if(this.field2758[var27] > 0) {
					var26 = var2.readUnsignedByte() + 1;
				}
			}
			this.field2756[var27] = (byte)var26;
			--var20;
		}
		this.field2762 = var2.readUnsignedByte() + 1;
		// Fill envelope value bytes (odd indices) for each parameter block.
		class228 var28;
		int var29;
		for(var27 = 0; var27 < var12; ++var27) {
			var28 = var37[var27];
			if(var28.field2680 != null) {
				for(var29 = 1; var29 < var28.field2680.length; var29 += 2) {
					var28.field2680[var29] = var2.readByte();
				}
			}
			if(var28.field2684 != null) {
				for(var29 = 3; var29 < var28.field2684.length - 2; var29 += 2) {
					var28.field2684[var29] = var2.readByte();
				}
			}
		}
		if(var45 != null) {
			for(var27 = 1; var27 < var45.length; var27 += 2) {
				var45[var27] = var2.readByte();
			}
		}
		if(var39 != null) {
			for(var27 = 1; var27 < var39.length; var27 += 2) {
				var39[var27] = var2.readByte();
			}
		}
		// Fill envelope position bytes (even indices), delta-coded.
		for(var27 = 0; var27 < var12; ++var27) {
			var28 = var37[var27];
			if(var28.field2684 != null) {
				var19 = 0;
				for(var29 = 2; var29 < var28.field2684.length; var29 += 2) {
					var19 = var19 + 1 + var2.readUnsignedByte();
					var28.field2684[var29] = (byte)var19;
				}
			}
		}
		for(var27 = 0; var27 < var12; ++var27) {
			var28 = var37[var27];
			if(var28.field2680 != null) {
				var19 = 0;
				for(var29 = 2; var29 < var28.field2680.length; var29 += 2) {
					var19 = 1 + var19 + var2.readUnsignedByte();
					var28.field2680[var29] = (byte)var19;
				}
			}
		}
		byte var30;
		int var32;
		int var33;
		int var34;
		int var42;
		byte var44;
		// Apply the first curve table to field2756, interpolating between key points.
		if(var45 != null) {
			var19 = var2.readUnsignedByte();
			var45[0] = (byte)var19;
			for(var27 = 2; var27 < var45.length; var27 += 2) {
				var19 = 1 + var19 + var2.readUnsignedByte();
				var45[var27] = (byte)var19;
			}
			var44 = var45[0];
			byte var41 = var45[1];
			for(var29 = 0; var29 < var44; ++var29) {
				this.field2756[var29] = (byte)(var41 * this.field2756[var29] + 32 >> 6);
			}
			for(var29 = 2; var29 < var45.length; var29 += 2) {
				var30 = var45[var29];
				byte var31 = var45[var29 + 1];
				var32 = var41 * (var30 - var44) + (var30 - var44) / 2;
				for(var33 = var44; var33 < var30; ++var33) {
					var34 = World.method1701(var32, var30 - var44);
					this.field2756[var33] = (byte)(var34 * this.field2756[var33] + 32 >> 6);
					var32 += var31 - var41;
				}
				var44 = var30;
				var41 = var31;
			}
			for(var42 = var44; var42 < 128; ++var42) {
				this.field2756[var42] = (byte)(var41 * this.field2756[var42] + 32 >> 6);
			}
			var38 = null; // dead store left by the deobfuscator
		}
		// Apply the second curve table to field2760, clamping to [0, 128].
		if(var39 != null) {
			var19 = var2.readUnsignedByte();
			var39[0] = (byte)var19;
			for(var27 = 2; var27 < var39.length; var27 += 2) {
				var19 = 1 + var19 + var2.readUnsignedByte();
				var39[var27] = (byte)var19;
			}
			var44 = var39[0];
			int var47 = var39[1] << 1;
			for(var29 = 0; var29 < var44; ++var29) {
				var42 = var47 + (this.field2760[var29] & 255);
				if(var42 < 0) {
					var42 = 0;
				}
				if(var42 > 128) {
					var42 = 128;
				}
				this.field2760[var29] = (byte)var42;
			}
			int var43;
			for(var29 = 2; var29 < var39.length; var29 += 2) {
				var30 = var39[var29];
				var43 = var39[var29 + 1] << 1;
				var32 = var47 * (var30 - var44) + (var30 - var44) / 2;
				for(var33 = var44; var33 < var30; ++var33) {
					var34 = World.method1701(var32, var30 - var44);
					int var35 = var34 + (this.field2760[var33] & 255);
					if(var35 < 0) {
						var35 = 0;
					}
					if(var35 > 128) {
						var35 = 128;
					}
					this.field2760[var33] = (byte)var35;
					var32 += var43 - var47;
				}
				var44 = var30;
				var47 = var43;
			}
			for(var42 = var44; var42 < 128; ++var42) {
				var43 = var47 + (this.field2760[var42] & 255);
				if(var43 < 0) {
					var43 = 0;
				}
				if(var43 > 128) {
					var43 = 128;
				}
				this.field2760[var42] = (byte)var43;
			}
			Object var46 = null; // dead store left by the deobfuscator
		}
		// Trailing scalar parameters per parameter block, each gated on the previous.
		for(var27 = 0; var27 < var12; ++var27) {
			var37[var27].field2676 = var2.readUnsignedByte();
		}
		for(var27 = 0; var27 < var12; ++var27) {
			var28 = var37[var27];
			if(var28.field2680 != null) {
				var28.field2679 = var2.readUnsignedByte();
			}
			if(var28.field2684 != null) {
				var28.field2677 = var2.readUnsignedByte();
			}
			if(var28.field2676 > 0) {
				var28.field2681 = var2.readUnsignedByte();
			}
		}
		for(var27 = 0; var27 < var12; ++var27) {
			var37[var27].field2683 = var2.readUnsignedByte();
		}
		for(var27 = 0; var27 < var12; ++var27) {
			var28 = var37[var27];
			if(var28.field2683 > 0) {
				var28.field2682 = var2.readUnsignedByte();
			}
		}
		for(var27 = 0; var27 < var12; ++var27) {
			var28 = var37[var27];
			if(var28.field2682 > 0) {
				var28.field2678 = var2.readUnsignedByte();
			}
		}
	}
	@ObfuscatedName("k")
	@ObfuscatedSignature(
		signature = "(Lde;[B[II)Z",
		garbageValue = "-349740639"
	)
	// Resolves the encoded per-note sample ids (field2758) into RawAudioNode
	// instances via var1 (presumably an audio archive loader); var2, when
	// non-null, acts as a per-note enable mask. Returns false if any lookup
	// failed (those notes keep their id for a later retry).
	boolean method4307(class110 var1, byte[] var2, int[] var3) {
		boolean var4 = true;
		int var5 = 0;
		RawAudioNode var6 = null;
		for(int var7 = 0; var7 < 128; ++var7) {
			if(var2 == null || var2[var7] != 0) {
				int var8 = this.field2758[var7];
				if(var8 != 0) {
					if(var8 != var5) {
						var5 = var8--;
						// Low bit of the id selects which loader method to use.
						if((var8 & 1) == 0) {
							var6 = var1.method2275(var8 >> 2, var3);
						} else {
							var6 = var1.method2276(var8 >> 2, var3);
						}
						if(var6 == null) {
							var4 = false;
						}
					}
					if(var6 != null) {
						this.field2757[var7] = var6;
						this.field2758[var7] = 0; // mark as resolved
					}
				}
			}
		}
		return var4;
	}
	@ObfuscatedName("t")
	@ObfuscatedSignature(
		signature = "(I)V",
		garbageValue = "1623352355"
	)
	// Drops the sample-id table, presumably once all notes are resolved.
	void method4308() {
		this.field2758 = null;
	}
}
|
offlinehacker/NCD | client/DPReceive.c | /**
* @file DPReceive.c
* @author <NAME> <<EMAIL>>
*
* @section LICENSE
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the author nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <stddef.h>
#include <limits.h>
#include <protocol/dataproto.h>
#include <misc/byteorder.h>
#include <misc/offset.h>
#include <base/BLog.h>
#include <client/DPReceive.h>
#include <generated/blog_channel_DPReceive.h>
// Linearly scans the device's peer list for the peer with the given ID.
// Returns NULL when no such peer is registered.
static DPReceivePeer * find_peer (DPReceiveDevice *o, peerid_t id)
{
    LinkedList2Node *n = LinkedList2_GetFirst(&o->peers_list);
    while (n) {
        DPReceivePeer *peer = UPPER_OBJECT(n, DPReceivePeer, list_node);
        if (peer->peer_id == id) {
            return peer;
        }
        n = LinkedList2Node_Next(n);
    }
    return NULL;
}
// Handles one packet received from a peer: validates the dataproto header,
// then either delivers the frame to the local device (if addressed to us) or
// relays it to the destination peer (if relaying is permitted). Malformed or
// disallowed packets are dropped at the 'out' label. The packet is always
// acknowledged to the sender interface before delivery/relay.
static void receiver_recv_handler_send (DPReceiveReceiver *o, uint8_t *packet, int packet_len)
{
    DebugObject_Access(&o->d_obj);
    DPReceivePeer *peer = o->peer;
    DPReceiveDevice *device = peer->device;
    ASSERT(packet_len >= 0)
    ASSERT(packet_len <= device->packet_mtu)
    
    uint8_t *data = packet;
    int data_len = packet_len;
    
    int local = 0;                      // deliver to local device output?
    DPReceivePeer *src_peer;            // valid only when relay_dest_peer or local is set
    DPReceivePeer *relay_dest_peer = NULL;  // non-NULL => relay the frame
    
    // check header
    if (data_len < sizeof(struct dataproto_header)) {
        BLog(BLOG_WARNING, "no dataproto header");
        goto out;
    }
    struct dataproto_header *header = (struct dataproto_header *)data;
    data += sizeof(*header);
    data_len -= sizeof(*header);
    uint8_t flags = ltoh8(header->flags);
    peerid_t from_id = ltoh16(header->from_id);
    int num_ids = ltoh16(header->num_peer_ids);
    
    // check destination ID
    if (!(num_ids == 0 || num_ids == 1)) {
        BLog(BLOG_WARNING, "wrong number of destinations");
        goto out;
    }
    peerid_t to_id;
    if (num_ids == 1) {
        if (data_len < sizeof(to_id)) {
            BLog(BLOG_WARNING, "missing destination");
            goto out;
        }
        to_id = ((struct dataproto_peer_id *)data)->id;
        data += sizeof(to_id);
        data_len -= sizeof(to_id);
    }
    
    // check remaining data
    if (data_len > device->device_mtu) {
        BLog(BLOG_WARNING, "frame too large");
        goto out;
    }
    
    // inform sink of received packet
    if (peer->dp_sink) {
        DataProtoSink_Received(peer->dp_sink, !!(flags & DATAPROTO_FLAGS_RECEIVING_KEEPALIVES));
    }
    
    // num_ids == 0 is a pure keepalive: nothing to deliver or relay.
    if (num_ids == 1) {
        // find source peer
        if (!(src_peer = find_peer(device, from_id))) {
            BLog(BLOG_INFO, "source peer %d not known", (int)from_id);
            goto out;
        }
        
        // is frame for device or another peer?
        if (device->have_peer_id && to_id == device->peer_id) {
            // let the frame decider analyze the frame
            FrameDeciderPeer_Analyze(src_peer->decider_peer, data, data_len);
            
            // pass frame to device
            local = 1;
        } else {
            // check if relaying is allowed
            if (!peer->is_relay_client) {
                BLog(BLOG_WARNING, "relaying not allowed");
                goto out;
            }
            
            // provided source ID must be the peer sending the frame
            if (src_peer != peer) {
                BLog(BLOG_WARNING, "relay source must be the sending peer");
                goto out;
            }
            
            // find destination peer
            DPReceivePeer *dest_peer = find_peer(device, to_id);
            if (!dest_peer) {
                BLog(BLOG_INFO, "relay destination peer not known");
                goto out;
            }
            
            // destination cannot be source
            if (dest_peer == src_peer) {
                BLog(BLOG_WARNING, "relay destination cannot be the source");
                goto out;
            }
            
            relay_dest_peer = dest_peer;
        }
    }
    
out:
    // accept packet
    PacketPassInterface_Done(&o->recv_if);
    
    // pass packet to device
    if (local) {
        o->device->output_func(o->device->output_func_user, data, data_len);
    }
    
    // relay frame
    if (relay_dest_peer) {
        DPRelayRouter_SubmitFrame(&device->relay_router, &src_peer->relay_source, &relay_dest_peer->relay_sink, data, data_len, device->relay_flow_buffer_size, device->relay_flow_inactivity_time);
    }
}
// Initializes the receive device. The output_func callback receives frames
// addressed to the local peer. Returns 1 on success, 0 on failure (in which
// case nothing needs to be freed). The peer ID is unset until
// DPReceiveDevice_SetPeerID is called.
int DPReceiveDevice_Init (DPReceiveDevice *o, int device_mtu, DPReceiveDevice_output_func output_func, void *output_func_user, BReactor *reactor, int relay_flow_buffer_size, int relay_flow_inactivity_time)
{
    ASSERT(device_mtu >= 0)
    ASSERT(device_mtu <= INT_MAX - DATAPROTO_MAX_OVERHEAD)
    ASSERT(output_func)
    ASSERT(relay_flow_buffer_size > 0)
    
    // init arguments
    o->device_mtu = device_mtu;
    o->output_func = output_func;
    o->output_func_user = output_func_user;
    o->reactor = reactor;
    o->relay_flow_buffer_size = relay_flow_buffer_size;
    o->relay_flow_inactivity_time = relay_flow_inactivity_time;
    
    // remember packet MTU (device MTU plus dataproto framing overhead)
    o->packet_mtu = DATAPROTO_MAX_OVERHEAD + o->device_mtu;
    
    // init relay router
    if (!DPRelayRouter_Init(&o->relay_router, o->device_mtu, o->reactor)) {
        BLog(BLOG_ERROR, "DPRelayRouter_Init failed");
        goto fail0;
    }
    
    // have no peer ID
    o->have_peer_id = 0;
    
    // init peers list
    LinkedList2_Init(&o->peers_list);
    
    DebugObject_Init(&o->d_obj);
    return 1;
    
fail0:
    return 0;
}
void DPReceiveDevice_Free (DPReceiveDevice *o)
{
    DebugObject_Free(&o->d_obj);
    ASSERT(LinkedList2_IsEmpty(&o->peers_list))

    // all peers must already be gone (asserted above); tear down the relay router
    DPRelayRouter_Free(&o->relay_router);
}
void DPReceiveDevice_SetPeerID (DPReceiveDevice *o, peerid_t peer_id)
{
    DebugObject_Access(&o->d_obj);

    // from now on, incoming frames addressed to peer_id are treated as local
    o->have_peer_id = 1;
    o->peer_id = peer_id;
}
void DPReceivePeer_Init (DPReceivePeer *o, DPReceiveDevice *device, peerid_t peer_id, FrameDeciderPeer *decider_peer, int is_relay_client)
{
    DebugObject_Access(&device->d_obj);
    ASSERT(is_relay_client == 0 || is_relay_client == 1)

    // store construction parameters
    o->device = device;
    o->peer_id = peer_id;
    o->decider_peer = decider_peer;
    o->is_relay_client = is_relay_client;

    // source side of relaying: frames this peer sends that must be forwarded on
    DPRelaySource_Init(&o->relay_source, &device->relay_router, o->peer_id, device->reactor);

    // sink side of relaying: frames other peers want forwarded to this peer
    DPRelaySink_Init(&o->relay_sink, o->peer_id);

    // no DataProto sink attached yet (see DPReceivePeer_AttachSink)
    o->dp_sink = NULL;

    // register with the owning device
    LinkedList2_Append(&device->peers_list, &o->list_node);

    DebugCounter_Init(&o->d_receivers_ctr);
    DebugObject_Init(&o->d_obj);
}
void DPReceivePeer_Free (DPReceivePeer *o)
{
    DebugObject_Free(&o->d_obj);
    DebugCounter_Free(&o->d_receivers_ctr);
    ASSERT(!o->dp_sink)

    // unregister from the owning device
    LinkedList2_Remove(&o->device->peers_list, &o->list_node);

    // tear down relaying state in reverse order of initialization
    DPRelaySink_Free(&o->relay_sink);
    DPRelaySource_Free(&o->relay_source);
}
void DPReceivePeer_AttachSink (DPReceivePeer *o, DataProtoSink *dp_sink)
{
    DebugObject_Access(&o->d_obj);
    ASSERT(!o->dp_sink)
    ASSERT(dp_sink)

    // start routing relayed frames destined for this peer into the sink
    DPRelaySink_Attach(&o->relay_sink, dp_sink);

    // remember the sink so Detach/Free can verify pairing
    o->dp_sink = dp_sink;
}
void DPReceivePeer_DetachSink (DPReceivePeer *o)
{
    DebugObject_Access(&o->d_obj);
    ASSERT(o->dp_sink)

    // stop routing relayed frames into the sink and forget it
    DPRelaySink_Detach(&o->relay_sink);
    o->dp_sink = NULL;
}
void DPReceiveReceiver_Init (DPReceiveReceiver *o, DPReceivePeer *peer)
{
    DebugObject_Access(&peer->d_obj);

    // keep references to the peer and its device
    DPReceiveDevice *device = peer->device;
    o->peer = peer;
    o->device = device;

    // the interface through which incoming packets for this peer are pushed;
    // packets arrive asynchronously at receiver_recv_handler_send
    PacketPassInterface_Init(&o->recv_if, device->packet_mtu, (PacketPassInterface_handler_send)receiver_recv_handler_send, o, BReactor_PendingGroup(device->reactor));

    DebugCounter_Increment(&peer->d_receivers_ctr);
    DebugObject_Init(&o->d_obj);
}
void DPReceiveReceiver_Free (DPReceiveReceiver *o)
{
    DebugObject_Free(&o->d_obj);
    DebugCounter_Decrement(&o->peer->d_receivers_ctr);

    // tear down the packet input interface
    PacketPassInterface_Free(&o->recv_if);
}
PacketPassInterface * DPReceiveReceiver_GetInput (DPReceiveReceiver *o)
{
    DebugObject_Access(&o->d_obj);

    // expose the interface callers use to feed packets into this receiver
    return &o->recv_if;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.