repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
mcopik/perf-taint | benchmarks/milc/milc_qcd-7.8.1/generic_form/load_momentum.c | <filename>benchmarks/milc/milc_qcd-7.8.1/generic_form/load_momentum.c
/**************************** load_momentum.c ****************************/
/* MIMD version 7 */
/*
* Scalar code to load in the spatial momentum from a disk file.
*
* The total number of momentum components is returned
*/
#include "generic_form_includes.h"
#include <string.h>
#ifndef IF_OK
#define IF_OK if(status==0)
#endif
/* Load spatial momenta (px py pz integer triples) from a disk file.
 *
 * mom_in   : output array, one 3-component momentum per row
 * filename : text file of whitespace-separated integer triples
 * max_mom  : capacity of mom_in (maximum number of triples)
 *
 * Terminates the program if the file cannot be opened/closed, if no
 * momenta are found, or if the file holds more than max_mom triples.
 * Returns the number of momenta read.
 */
int load_momentum_from_disk(int mom_in[][3], char filename[], int max_mom)
{
  int countmom = -1 ;
  int i , j ;
  FILE *fp;
  int v1,v2,v3 ;
  /***--------------------------------------------------***/
  /* Each row only has 3 components; the previous j < 4 bound wrote one
     element past the end of every row (and past the whole array on the
     last row). */
  for(i=0 ; i < max_mom ; ++i)
    for(j=0; j < 3 ; ++j)
      mom_in[i][j] = 0 ;
  /*** open the file *****/
  if( (fp = fopen(filename ,"r")) == NULL )
  {
    printf("ERROR::load_momentum_from_disk::Could not open the file %s\n",filename);
    terminate(1);
  }
  /*** read in the momentum triples ****/
  while( fscanf(fp,"%d %d %d",&v1,&v2,&v3) == 3 )
  {
    /* Check capacity BEFORE storing: the previous code stored first and
       could write one full row past the end of mom_in before the
       overflow was detected. */
    if( countmom + 1 >= max_mom )
    {
      printf("ERROR: Not enough space for the momentum in %s \n",filename);
      terminate(1);
    }
    ++countmom ;
    mom_in[countmom][XUP ] = v1 ;
    mom_in[countmom][YUP ] = v2 ;
    mom_in[countmom][ZUP ] = v3 ;
  }
  if( countmom < 0 )
  {
    printf("ERROR: No momenta have been read from %s\n",filename);
    terminate(1);
  }
  /* convert from last-used index to a count */
  ++countmom ;
  /*** close the file ****/
  if( fclose(fp) != 0 )
  {
    printf("There was an error during the closing of %s \n",filename);
    terminate(1);
  }
  printf("The number of momentum read in = %d\n",countmom);
  printf("p_x p_y p_z\n");
  for(i=0 ; i < countmom ; ++i)
  {
    printf("%d %d %d\n", mom_in[i][0],mom_in[i][1],mom_in[i][2] );
  }
  return countmom ;
}
/*
* Routine to load in the momentum values
* from the input file.
*
* Subroutine arguments
* prompt :: whether to print stuff before the input is required
* no_mom :: return the number of momentum values to run
* mom_in :: array containing the momentum (as integers)
* max_mom :: the maxium number of momentum values allowed by the
* code
*/
int load_momentum(int prompt, char *label, int *no_mom, int mom_in[][3], int max_mom)
{
int status = 0 ;
char savebuf[128] ;
char checklabel[4] ;
int what_to_do ;
enum what_to_do_choices { read_momentum = 50 , stop_reading_momentum } ;
int n_mom ;
/***------------------------------------------------------------****/
/*** read in all the momentum that the user wants to calculate ***/
IF_OK if (prompt==1)
printf("enter 'start_of_momentum %s' \n",label);
IF_OK scanf("%s",savebuf);
IF_OK scanf("%s",checklabel);
IF_OK printf("%s ",savebuf);
IF_OK printf("%s\n",checklabel);
IF_OK {
if(strcmp("start_of_momentum",savebuf) == 0
&& strcmp(label,checklabel) == 0)
{
what_to_do = read_momentum ;
}
else
{
printf("error in input: start_of_momentum %s required, but got %s %s\n",
label,savebuf,checklabel);
status++ ;
what_to_do = stop_reading_momentum ;
}
}
n_mom = 0 ;
/*** now read the momentum to compute ***/
while( what_to_do == read_momentum )
{
IF_OK if (prompt==1)
printf("enter three integers for the momentum\n");
IF_OK scanf("%s",savebuf);
if(strcmp("end_momentum",savebuf) == 0 )
{
what_to_do = stop_reading_momentum ;
}
else if( n_mom >= max_mom )
{
++status ;
what_to_do = stop_reading_momentum ;
printf("Too many momentum values (maximum = %d)\n",max_mom) ;
}
else
{
mom_in[n_mom][0] = atoi(savebuf) ;
if( scanf("%d %d",&mom_in[n_mom][1] ,&mom_in[n_mom][2] ) == 2 )
{
printf("%s= %d %d %d\n",label,mom_in[n_mom][0], mom_in[n_mom][1], mom_in[n_mom][2] ) ;
++n_mom ;
}
else
{
++status ;
what_to_do = stop_reading_momentum ;
printf("Error reading momentum values\n");
}
}
} /*** end the loop over the momentum (while loop) ***/
*no_mom = n_mom ;
return status ;
}
|
Dbevan/SunderingShadows | d/shadow/room/farm/room/kitchen.c | #include <std.h>
#include "../farm.h"
inherit ROOM;
// Interior room: Piaf Huffelmuffin's kitchen on the farm.
// Configures room properties, long/short descriptions, look items,
// ambient smell/listen text, and the exits to the foyer and bedroom.
void create(){
    ::create();
    // Indoor, lit room where "sticks" (wands/staves) are disallowed.
    set_property("indoors",1);
    set_property("light",1);
    set_property("no sticks",1);
    set_terrain(CITY);
    set_travel(PAVED_ROAD);
    // %^...%^ sequences are mudlib color codes embedded in the strings.
    set_name("%^CYAN%^Piaf Huffelmuffin's Kitchen");
    set_short("%^CYAN%^Piaf Huffelmuffin's Kitchen");
    set_long("%^CYAN%^This small area is reserved for the "+
        "kitchen. A %^GREEN%^soapstone%^CYAN%^ sink"+
        " and spigot are mounted into the wall. The "+
        "sink is currently full of %^ORANGE%^dirty "+
        "crusted dishes%^CYAN%^ which are soaking in "+
        "a %^RESET%^grayish %^CYAN%^water. Hanging "+
        "%^ORANGE%^copper%^CYAN%^ baskets are filled"+
        " with cheeses, loaves of breads, and some "+
        "fruit and vegetables. A loud roaring %^BLUE%^"+
        "box%^CYAN%^ with numerous gears and hoses is "+
        "attached to a pump in the kitchen. The window,"+
        " in the shape of a goose, allows some light to"+
        " filter through it's dirty panes.%^RESET%^\n");
    set_smell("default","The kitchen smell putrid!");
    set_listen("default","The roaring of the box drowns out any noise.");
    // Mapping of noun groups to the description shown on "look <noun>".
    set_items(([
        ({"basket","fruit","cheese","bread"}) : "%^YELLOW%^The hanging"+
            " copper basket holds a selection of cheeses, breads, "+
            "fruits and vegetables. Some of the food has spoiled and"+
            " began to sprout greenish mold.",
        ({"sink","dishes","spigot"}) : "%^GREEN%^The gnome sized sink is "+
            "crafted from soapstone and mounted to the wall. A wooden "+
            "spigot allows for water to be drawn from the stream into the"+
            " sink. Currently the sink is full of dirty dishes that are "+
            "soaking in a gray tinged water. No telling how long those "+
            "have been there!",
        ({"box","pump","gears","hoses"}) : "%^BLUE%^This strange wooden box"+
            " rest in the kitchen, taking up nearly all of the space. "+
            "Strange gears and hoses are connected to the box. One thick"+
            " hose leads from the box to a pump next to it. Opening the "+
            "box up confronts you with a blast of cold air and lumps of "+
            "frozen meat."
    ]));
    // ROOMDIR is supplied by ../farm.h.
    set_exits(([ "southwest" : ROOMDIR"foyer",
        "northwest" : ROOMDIR"bedroom"
    ]));
}
|
Mr-Hei7enberg/google-apps-script-snippets | snippets/standalone/standalone_ramda-demo/ramda-demo.js | <reponame>Mr-Hei7enberg/google-apps-script-snippets
/* globals R */
/**
*
*/
/**
 * Demonstrates Ramda currying in Apps Script: calling R.prop with only
 * the key returns a getter function, which is then applied to an object.
 */
function run() {
  // Partially apply `prop`: supplying just the key yields a getter function.
  var getMoo = R.prop('moo');
  // Applying that getter to an object extracts the property value.
  var result = getMoo({ moo: 'cow' }); // => 'cow'
  Logger.log(result);
}
|
phax/ph-masterdata | ph-masterdata/src/main/java/com/helger/masterdata/telephone/TelephoneNumberMicroTypeConverter.java | <gh_stars>1-10
/*
* Copyright (C) 2014-2021 <NAME> (www.helger.com)
* philip[at]helger[dot]com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.helger.masterdata.telephone;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import com.helger.xml.microdom.IMicroElement;
import com.helger.xml.microdom.IMicroQName;
import com.helger.xml.microdom.MicroElement;
import com.helger.xml.microdom.MicroQName;
import com.helger.xml.microdom.convert.IMicroTypeConverter;
/**
 * Micro type converter that (de)serializes {@link TelephoneNumber}
 * objects to and from micro DOM elements. Every field of the telephone
 * number is stored as an attribute of a single element.
 */
public final class TelephoneNumberMicroTypeConverter implements IMicroTypeConverter <TelephoneNumber>
{
  private static final IMicroQName ATTR_TYPE = new MicroQName ("type");
  private static final IMicroQName ATTR_COUNTRYCODE = new MicroQName ("countrycode");
  private static final IMicroQName ATTR_AREACODE = new MicroQName ("areacode");
  private static final IMicroQName ATTR_LINE = new MicroQName ("line");
  private static final IMicroQName ATTR_DIRECTDIAL = new MicroQName ("directdial");

  /**
   * Serialize the passed telephone number into a new micro element.
   *
   * @param aTelNo telephone number to serialize, never <code>null</code>
   * @param sNamespaceURI namespace URI of the created element, may be <code>null</code>
   * @param sTagName tag name of the created element, never <code>null</code>
   * @return the created element, never <code>null</code>
   */
  @Nonnull
  public IMicroElement convertToMicroElement (@Nonnull final TelephoneNumber aTelNo,
                                              @Nullable final String sNamespaceURI,
                                              @Nonnull final String sTagName)
  {
    final IMicroElement ret = new MicroElement (sNamespaceURI, sTagName);
    // The type is optional - only emit the attribute when it is present
    if (aTelNo.getType () != null)
      ret.setAttribute (ATTR_TYPE, aTelNo.getType ().getID ());
    ret.setAttribute (ATTR_COUNTRYCODE, aTelNo.getCountryCode ());
    ret.setAttribute (ATTR_AREACODE, aTelNo.getAreaCode ());
    ret.setAttribute (ATTR_LINE, aTelNo.getLine ());
    ret.setAttribute (ATTR_DIRECTDIAL, aTelNo.getDirectDial ());
    return ret;
  }

  /**
   * Reconstruct a telephone number from the attributes of the passed
   * element. Attributes that are absent yield <code>null</code> fields.
   *
   * @param eTelNo element to read from, never <code>null</code>
   * @return the reconstructed telephone number, never <code>null</code>
   */
  @Nonnull
  public TelephoneNumber convertToNative (@Nonnull final IMicroElement eTelNo)
  {
    return new TelephoneNumber (ETelephoneType.getFromIDOrNull (eTelNo.getAttributeValue (ATTR_TYPE)),
                                eTelNo.getAttributeValue (ATTR_COUNTRYCODE),
                                eTelNo.getAttributeValue (ATTR_AREACODE),
                                eTelNo.getAttributeValue (ATTR_LINE),
                                eTelNo.getAttributeValue (ATTR_DIRECTDIAL));
  }
}
|
svvladimir-ru/ugc_sprint_1 | services/kafka_to_clickhouse/kafka_to_clickhouse/conf.py | <filename>services/kafka_to_clickhouse/kafka_to_clickhouse/conf.py
from pydantic import BaseSettings, PyObject
from typing import Any
class Settings(BaseSettings):
    """Runtime configuration for the Kafka -> ClickHouse ETL service.

    Values are read from the environment (prefix ``UGC_``) with a
    fallback to the ``.env.dev`` file.
    """

    # Kafka broker connection details.
    KAFKA_HOST: str = '0.0.0.0'
    KAFKA_PORT: int = 9092
    # Consumer group id used by the ETL consumer.
    kafka_group: str = 'from-kafka-to-clickhouse-etl'
    # ClickHouse connection details.
    CLICKHOUSE_HOST: str = '0.0.0.0'
    # NOTE(review): name looks like a typo for CLICKHOUSE_PORT (9000 is the
    # ClickHouse native-protocol port) -- renaming would break existing
    # callers and the UGC_CLICKHOUSE_POST env var, so it is left as is;
    # confirm against usage before fixing.
    CLICKHOUSE_POST: int = 9000
    # Kafka topic(s) consumed by the ETL; a single topic name here.
    KAFKA_TOPICS: str = 'movies'

    class Config:
        # Env-file and variable prefix handling for pydantic BaseSettings.
        env_file = '.env.dev'
        env_prefix = 'UGC_'


# Singleton settings instance imported by the rest of the service.
settings = Settings()
|
DanilDr/Dota2Guide | Dota2Guide/src/ru/russianbravedev/dotamania/SteamInventory.java | package ru.russianbravedev.dotamania;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.json.JSONException;
import org.json.JSONObject;
import ru.russianbravedev.dotamania.R;
import com.danildr.androidcomponents.LoadingJSONfromURL;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import android.widget.Button;
import android.annotation.SuppressLint;
import android.content.Intent;
import android.graphics.Bitmap;
/**
 * Activity that shows the Dota 2 Steam inventory of the configured Steam
 * account inside a WebView, restyled via injected CSS for mobile display.
 * Redirects to the account-entry screen when no steamid is set.
 */
public class SteamInventory extends FullScreenActivity {
    // Raw player-summary JSON returned by the Steam Web API.
    private JSONObject accountInfo;
    // Steam community "personaname" resolved from the account info.
    private String playerName;

    @SuppressLint("SetJavaScriptEnabled")
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        if (steamid == 0) {
            // No Steam id configured yet: redirect to the account-entry screen.
            Intent nextActivity = new Intent(SteamInventory.this, SteamEnterAccount.class);
            nextActivity.putExtra("lang", lang);
            nextActivity.putExtra("classname", SteamInventory.class.toString());
            SteamInventory.this.startActivity(nextActivity);
        } else {
            setContentView(R.layout.activity_dota_webpage);
            String playerUrl = "http://api.steampowered.com/ISteamUser/GetPlayerSummaries/v0002/?key=" + getString(R.string.steamkey) + "&steamids=" + steamid;
            try {
                // NOTE(review): blocks the current thread for up to 20 seconds
                // while the player summary is fetched; consider a callback.
                accountInfo = new LoadingJSONfromURL().execute(playerUrl).get(20, TimeUnit.SECONDS);
            } catch (InterruptedException | ExecutionException
                    | TimeoutException e) {
                e.printStackTrace();
            }
            if (accountInfo != null) {
                try {
                    // Extract response.players[0].personaname from the API reply.
                    JSONObject reponseJSON = accountInfo.getJSONObject("response");
                    JSONObject playersJSON = reponseJSON.getJSONArray("players").getJSONObject(0);
                    playerName = playersJSON.getString("personaname");
                } catch (JSONException e) {
                    e.printStackTrace();
                }
            }
            if (playerName != null) {
                // 570 is Dota 2's Steam app id (inventory anchor).
                String playerurl = "http://steamcommunity.com/id/" + playerName + "/inventory/" + getDota2LangLink() + "#570";
                WebView dotaPlayerItems = (WebView) findViewById(R.id.dotaWebView);
                dotaPlayerItems.getSettings().setJavaScriptEnabled(true);
                Button prevAcivity = (Button) findViewById(R.id.prevAcivity);
                prevAcivity.setOnClickListener(new OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        returnBack();
                    }
                });
                dotaPlayerItems.setWebViewClient(new WebViewClient() {
                    public boolean shouldOverrideUrlLoading(WebView view, String url) {
                        // Keep all navigation inside this WebView.
                        view.loadUrl(url);
                        return false;
                    }

                    @Override
                    public void onPageStarted(WebView view, String url, Bitmap favicon) {
                        super.onPageStarted(view, url, favicon);
                        // Hide the page while it loads so the unstyled Steam
                        // layout never flashes on screen.
                        view.setVisibility(View.GONE);
                    }

                    @Override
                    public void onPageFinished(WebView view, String url) {
                        super.onPageFinished(view, url);
                        // Inject CSS that hides the Steam chrome (header,
                        // footer, filters) and resizes the inventory panes.
                        view.loadUrl("javascript:document.getElementsByTagName('head')[0].innerHTML='<style>" +
                                "#global_header, .games_list_tabs, .filter_ctn.inventory_filters, #footer, #footer_spacer, .view_inventory_logo { display: none; }" +
                                "* { margin: 0;}" +
                                ".maincontent, #mainContents {width: 640px !important; padding: 0 !important; }" +
                                ".inventory_page_left, .inventory_page_right, .inventory_ctn, .trade_item_box, .view_inventory_page {width: 320px !important;}" +
                                ".profile_small_header_bg { margin-bottom: 0px !important; height: 100px !important; }" +
                                "#mainContents { margin: 0px auto; }" +
                                "</style>" +
                                "<meta name=\"viewport\" content=\"width=width-display, initial-scale=0.5, user-scalable=no\">' + document.getElementsByTagName('head')[0].innerHTML;");
                        view.setVisibility(View.VISIBLE);
                    }
                });
                dotaPlayerItems.loadUrl(playerurl);
            }
        }
    }

    @Override
    public void onBackPressed() {
        // Hardware back returns to the Steam menu instead of finishing.
        returnBack();
    }

    // Navigate back to the Steam menu activity, preserving the language.
    private void returnBack() {
        Intent showMm = new Intent(SteamInventory.this, Steam.class);
        showMm.putExtra("lang", lang);
        SteamInventory.this.startActivity(showMm);
    }
}
|
vemaeg/urlaubsverwaltung | src/test/java/org/synyx/urlaubsverwaltung/web/AbstractNoResultFoundExceptionTest.java | <reponame>vemaeg/urlaubsverwaltung
package org.synyx.urlaubsverwaltung.web;
import org.junit.Assert;
import org.junit.Test;
/**
 * Unit tests for the message format of {@code AbstractNoResultFoundException},
 * covering both the numeric-ID and the string-identifier constructors.
 */
public class AbstractNoResultFoundExceptionTest {

    @Test
    public void ensureCorrectExceptionMessage() {
        final TestException sut = new TestException(42, "person");
        Assert.assertEquals("Wrong exception message", "No person found for ID = 42", sut.getMessage());
    }

    @Test
    public void ensureCorrectAlternateExceptionMessage() {
        final TestException sut = new TestException("username", "person");
        Assert.assertEquals("Wrong exception message", "No person found for identifier = username",
            sut.getMessage());
    }

    /** Minimal concrete subclass used to exercise the abstract base class. */
    private class TestException extends AbstractNoResultFoundException {

        public TestException(Integer id, String type) {
            super(id, type);
        }

        public TestException(String id, String type) {
            super(id, type);
        }
    }
}
|
janbodnar/Java-Advanced | UniqueNames.java | package com.zetcode;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
public class UniqueNames {

    /**
     * Prints a list of names followed by the same list with duplicates
     * removed, preserving first-occurrence order.
     */
    public static void main(String[] args) {

        List<String> names = List.of("Martin", "Lucy", "Peter",
                "Martin", "Robert", "Peter");

        // LinkedHashSet removes duplicates in O(n) while keeping insertion
        // order, replacing the O(n^2) contains() scan per element.
        List<String> uniqueNames = new ArrayList<>(new LinkedHashSet<>(names));

        System.out.println(names);
        System.out.println(uniqueNames);
    }
}
|
sonali-mishra-22/arangodb | arangod/Aql/ConditionFinder.cpp | <gh_stars>1-10
////////////////////////////////////////////////////////////////////////////////
/// DISCLAIMER
///
/// Copyright 2014-2016 ArangoDB GmbH, Cologne, Germany
/// Copyright 2004-2014 triAGENS GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is ArangoDB GmbH, Cologne, Germany
///
/// @author <NAME>
////////////////////////////////////////////////////////////////////////////////
#include "ConditionFinder.h"
#include "Aql/ExecutionPlan.h"
#include "Aql/IndexNode.h"
#include "Aql/SortCondition.h"
#include "Aql/SortNode.h"
using namespace arangodb::aql;
using EN = arangodb::aql::ExecutionNode;
/// Walker callback invoked for each node while traversing the plan.
/// Collects FILTER input variables, SORT elements and CALCULATION
/// definitions, and replaces EnumerateCollectionNodes with IndexNodes
/// when a usable index for filtering and/or sorting is found.
/// Returning true aborts the traversal.
bool ConditionFinder::before(ExecutionNode* en) {
  switch (en->getType()) {
    case EN::ENUMERATE_LIST:
    case EN::COLLECT:
    case EN::SCATTER:
    case EN::DISTRIBUTE:
    case EN::GATHER:
    case EN::REMOTE:
    case EN::SUBQUERY:
    case EN::INDEX:
    case EN::RETURN:
    case EN::TRAVERSAL:
    case EN::K_SHORTEST_PATHS:
    case EN::SHORTEST_PATH:
    case EN::ENUMERATE_IRESEARCH_VIEW:
    {
      // in these cases we simply ignore the intermediate nodes, note
      // that we have taken care of nodes that could throw exceptions
      // above.
      break;
    }
    case EN::INSERT:
    case EN::REMOVE:
    case EN::REPLACE:
    case EN::UPDATE:
    case EN::UPSERT:
    case EN::LIMIT: {
      // LIMIT or modification invalidates the sort expression we already found
      _sorts.clear();
      _filters.clear();
      break;
    }
    case EN::SINGLETON:
    case EN::NORESULTS: {
      // in all these cases we better abort
      return true;
    }
    case EN::FILTER: {
      // register which variable is used in a FILTER
      _filters.emplace(ExecutionNode::castTo<FilterNode const*>(en)->inVariable()->id);
      break;
    }
    case EN::SORT: {
      // register which variables are used in a SORT
      // (only the first SORT seen is remembered)
      if (_sorts.empty()) {
        for (auto& it : ExecutionNode::castTo<SortNode const*>(en)->elements()) {
          _sorts.emplace_back(it.var, it.ascending);
          TRI_IF_FAILURE("ConditionFinder::sortNode") {
            THROW_ARANGO_EXCEPTION(TRI_ERROR_DEBUG);
          }
        }
      }
      break;
    }
    case EN::CALCULATION: {
      // remember the expression defining each calculation variable so
      // FILTER conditions can later be expanded to full expressions
      _variableDefinitions.emplace(
          ExecutionNode::castTo<CalculationNode const*>(en)->outVariable()->id,
          ExecutionNode::castTo<CalculationNode const*>(en)->expression()->node());
      TRI_IF_FAILURE("ConditionFinder::variableDefinition") {
        THROW_ARANGO_EXCEPTION(TRI_ERROR_DEBUG);
      }
      break;
    }
    case EN::ENUMERATE_COLLECTION: {
      auto node = ExecutionNode::castTo<EnumerateCollectionNode const*>(en);
      if (_changes->find(node->id()) != _changes->end()) {
        // already optimized this node
        break;
      }
      auto condition = std::make_unique<Condition>(_plan->getAst());
      bool ok = handleFilterCondition(en, condition);
      if (!ok) {
        break;
      }
      std::unique_ptr<SortCondition> sortCondition;
      handleSortCondition(en, node->outVariable(), condition, sortCondition);
      if (condition->isEmpty() && sortCondition->isEmpty()) {
        // no filter conditions left
        break;
      }
      std::vector<transaction::Methods::IndexHandle> usedIndexes;
      auto canUseIndex = condition->findIndexes(node, usedIndexes, sortCondition.get());
      if (canUseIndex.first /*filtering*/ || canUseIndex.second /*sorting*/) {
        bool descending = false;
        if (canUseIndex.second && sortCondition->isUnidirectional()) {
          descending = sortCondition->isDescending();
        }
        if (!canUseIndex.first) {
          // index cannot be used for filtering, but only for sorting
          // remove the condition now
          TRI_ASSERT(canUseIndex.second);
          condition.reset(new Condition(_plan->getAst()));
          condition->normalize(_plan);
        }
        TRI_ASSERT(!usedIndexes.empty());
        // We either can find indexes for everything or findIndexes
        // will clear out usedIndexes
        IndexIteratorOptions opts;
        opts.ascending = !descending;
        std::unique_ptr<ExecutionNode> newNode(
            new IndexNode(_plan, _plan->nextId(), node->collection(),
                          node->outVariable(), usedIndexes, std::move(condition), opts));
        TRI_IF_FAILURE("ConditionFinder::insertIndexNode") {
          THROW_ARANGO_EXCEPTION(TRI_ERROR_DEBUG);
        }
        // We keep this node's change
        _changes->emplace(node->id(), newNode.get());
        newNode.release();
      }
      break;
    }
    default: {
      // should not reach this point
      TRI_ASSERT(false);
    }
  }
  return false;
}
/// Never descend into subqueries; they are optimized independently.
bool ConditionFinder::enterSubquery(ExecutionNode*, ExecutionNode*) {
  return false;
}
/// Collects every FILTER condition that applies at node `en`, expands
/// referenced calculation variables into their defining expressions
/// (when deterministic and simple), AND-combines them into `condition`
/// and normalizes the result. Inserts a NoResultsNode when the combined
/// condition is provably always false. Returns false when the condition
/// is impossible or contains variables not valid at `en`.
bool ConditionFinder::handleFilterCondition(ExecutionNode* en,
                                            std::unique_ptr<Condition> const& condition) {
  bool foundCondition = false;
  for (auto& it : _variableDefinitions) {
    if (_filters.find(it.first) == _filters.end()) {
      continue;
    }
    // a variable used in a FILTER
    AstNode* var = const_cast<AstNode*>(it.second);
    if (var->isDeterministic() && var->isSimple()) {
      // replace all variables inside the FILTER condition with the
      // expressions represented by the variables
      var = it.second->clone(_plan->getAst());
      auto func = [&](AstNode* node) -> AstNode* {
        if (node->type == NODE_TYPE_REFERENCE) {
          auto variable = static_cast<Variable*>(node->getData());
          if (variable != nullptr) {
            auto setter = _plan->getVarSetBy(variable->id);
            if (setter != nullptr && setter->getType() == EN::CALCULATION) {
              auto s = ExecutionNode::castTo<CalculationNode*>(setter);
              auto filterExpression = s->expression();
              AstNode* inNode = filterExpression->nodeForModification();
              // only inline expressions that are safe to evaluate here
              if (inNode->isDeterministic() && inNode->isSimple()) {
                return inNode;
              }
            }
          }
        }
        return node;
      };
      var = Ast::traverseAndModify(var, func);
    }
    condition->andCombine(var);
    foundCondition = true;
  }
  // normalize the condition
  condition->normalize(_plan);
  TRI_IF_FAILURE("ConditionFinder::normalizePlan") {
    THROW_ARANGO_EXCEPTION(TRI_ERROR_DEBUG);
  }
  // normalization reduced a real condition to nothing => always false
  bool const conditionIsImpossible = (foundCondition && condition->isEmpty());
  if (conditionIsImpossible) {
    // condition is always false
    for (auto const& x : en->getParents()) {
      auto noRes = new NoResultsNode(_plan, _plan->nextId());
      _plan->registerNode(noRes);
      _plan->insertDependency(x, noRes);
      *_hasEmptyResult = true;
    }
    return false;
  }
  auto const& varsValid = en->getVarsValid();
  // remove all invalid variables from the condition
  if (condition->removeInvalidVariables(varsValid)) {
    // removing left a previously non-empty OR block empty...
    // this means we can't use the index to restrict the results
    return false;
  }
  return true;
}
/// Builds the SortCondition for node `en`. Inside an inner loop the
/// collected sorts must not be optimized away, so an empty condition is
/// produced; otherwise the recorded sorts plus the const/non-null
/// attributes derived from `condition` are used.
void ConditionFinder::handleSortCondition(ExecutionNode* en, Variable const* outVar,
                                          std::unique_ptr<Condition> const& condition,
                                          std::unique_ptr<SortCondition>& sortCondition) {
  if (en->isInInnerLoop()) {
    // we cannot optimize away a sort if we're in an inner loop ourselves
    sortCondition.reset(new SortCondition());
  } else {
    sortCondition.reset(new SortCondition(_plan, _sorts,
                                          condition->getConstAttributes(outVar, false),
                                          condition->getNonNullAttributes(outVar),
                                          _variableDefinitions));
  }
}
|
gaybro8777/usaspending-api | usaspending_api/common/sqs_helpers.py | <filename>usaspending_api/common/sqs_helpers.py<gh_stars>0
import boto3
from django.conf import settings
def get_sqs_queue_resource(queue_name, region_name=None):
    """Return the boto3 SQS queue resource with the given queue name.

    Args:
        queue_name: Name of the SQS queue to look up.
        region_name: AWS region to use; any falsy value falls back to
            ``settings.USASPENDING_AWS_REGION``.

    Returns:
        The boto3 SQS ``Queue`` resource matching ``queue_name``.
    """
    region = region_name or settings.USASPENDING_AWS_REGION
    return boto3.resource("sqs", region_name=region).get_queue_by_name(QueueName=queue_name)
|
scmilee/elrn-universal-wallet | src/components/ShapeShiftTo.js | <reponame>scmilee/elrn-universal-wallet
import React from 'react'
import { connect } from 'react-redux'
import SwipeableViews from 'react-swipeable-views'
import { generateWalletAddress } from '../actions/walletActions'
import { setShapeShiftToSymbol } from '../actions/shapeShiftActions'
import ShapeShiftToSymbol from './ShapeShiftToSymbol'
import styles from '../styles.js'
import Coin from './Coin'
const mapStateToProps = ({shapeShift, wallet}) => {
return {
shapeShift: shapeShift,
mnemonic: wallet.mnemonic,
}
}
const mapDispatchToProps = (dispatch, ownProps) => {
return {
handleButtonPush: (mnemonic, coin) => {
dispatch(generateWalletAddress(mnemonic, coin))
dispatch(setShapeShiftToSymbol(coin.symbol))
}
}
}
/**
 * Presentational "Receive" side of the ShapeShift exchange: shows the
 * current destination symbol and a swipeable list of available coins.
 * Clicking a coin calls handleButtonPush(mnemonic, coin).
 */
export const ShapeShiftTo = ({shapeShift, mnemonic, handleButtonPush, ...rest}) => {
  return (
    <div id="coins">
      Receive
      <br></br>
      <ShapeShiftToSymbol></ShapeShiftToSymbol>
      <SwipeableViews containerStyle={Object.assign({}, styles.slide, styles.slideContainer, {})}>
        <div>
          {(
            shapeShift.coins.map( coin => {
              return (
                <Coin
                  key={coin.name}
                  onClick={() => handleButtonPush(mnemonic, coin)}
                  {...coin}
                >
                </Coin>
              )
            })
          )}
        </div>
      </SwipeableViews>
    </div>
  )
}
export default connect(mapStateToProps, mapDispatchToProps)(ShapeShiftTo) |
iotauth/iotauth | entity/node/accessors/SecureCommServer.js | /*
* Copyright (c) 2016, Regents of the University of California
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* IOTAUTH_COPYRIGHT_VERSION_1
*/
/**
* SecureCommServer accessor for accessing Auth services and listening to SecureCommClients.
* @author <NAME>
*/
"use strict";
var fs = require('fs');
var iotAuth = require('iotAuth')
var common = require('common');
var util = require('util');
var msgType = iotAuth.msgType;
// to be loaded from config file
var entityConfig;
var currentDistributionKey;
// for managing connected clients, can be accessed using socketID
var connectedClients = [];
// parameters for SecureCommServer
var parameters = {
migrationEnabled: false,
authFailureThreshold: 3,
migrationFailureThreshold: 3
};
// migration related variables
var authFailureCount = 0;
var currentMigrationInfoIndex = 0;
var migrationFailureCount = 0;
var trustedAuthPublicKeyList = [];
// session keys for publish-subscribe experiments based individual secure connection using proposed approach
var sessionKeyCacheForClients = [];
var publishSeqNum = 0;
var outputs = {};
var outputHandlers = {};
// constructor
/**
 * Constructor: loads the entity configuration from the given file and,
 * when an Auth public key is configured, registers it as trusted.
 * @param configFilePath path of the entity configuration file
 */
function SecureCommServer(configFilePath) {
    entityConfig = iotAuth.loadEntityConfig(configFilePath);
    var authPublicKey = entityConfig.authInfo.publicKey;
    if (authPublicKey != null) {
        trustedAuthPublicKeyList.push(authPublicKey);
    }
}
/**
 * Callback for session key responses from Auth. Updates the cached
 * distribution key, stores keys destined for clients, and continues a
 * pending client handshake when the request was triggered by one.
 * @param sessionKeyList session keys returned by Auth
 * @param receivedDistKey new distribution key, or null if unchanged
 * @param callbackParams context describing why the request was made
 */
function handleSessionKeyResp(sessionKeyList, receivedDistKey, callbackParams) {
    if (parameters.migrationEnabled) {
        // A successful request resets the failure counter used to decide
        // when to migrate to a backup Auth.
        authFailureCount = 0;
        console.log('handleSessionKeyResp: session key request succeeded! authFailureCount: ' + authFailureCount);
    }
    if (receivedDistKey != null) {
        console.log('updating distribution key: ' + util.inspect(receivedDistKey));
        currentDistributionKey = receivedDistKey;
    }
    console.log('received ' + sessionKeyList.length + ' keys');
    if (callbackParams.targetSessionKeyCache == 'Clients') {
        sessionKeyCacheForClients = sessionKeyCacheForClients.concat(sessionKeyList);
    }
    // session key request was triggered by a client request
    else if (callbackParams.targetSessionKeyCache == 'none') {
        if (sessionKeyList[0].id == callbackParams.keyId) {
            console.log('Session key id is as expected');
            callbackParams.sendHandshake2Callback(callbackParams.handshake1Payload,
                callbackParams.serverSocket, sessionKeyList[0]);
        }
        else {
            console.error('Session key id is NOT as expected');
        }
    }
}
/**
 * Error handler for Auth communication. When migration is enabled, it
 * counts consecutive failures by kind and triggers a migration request
 * once the Auth failure threshold is reached. The error is always
 * forwarded on the 'error' output.
 * @param message error description string
 */
function onServerError(message) {
    if (parameters.migrationEnabled) {
        if (message.includes('Error occurred in migration request')) {
            migrationFailureCount++;
            console.log('failure in migration to another Auth : migrationFailureCount: ' + migrationFailureCount);
        }
        else if (message.includes('Error occurred in session key request')) {
            authFailureCount++;
            console.log('failure in connection with Auth : authFailureCount: ' + authFailureCount);
            if (authFailureCount >= parameters.authFailureThreshold) {
                console.log('failure count reached threshold (' + parameters.authFailureThreshold + '), try migration...');
                sendMigrationRequest();
            }
        }
    }
    var info = 'Error in server - details: ' + message;
    outputs.error = info;
    if (outputHandlers.error) {
        outputHandlers.error(info);
    }
}
/**
 * Requests `numKeys` session keys from Auth for the given purpose and
 * routes the response to handleSessionKeyResp (errors to onServerError).
 */
function sendSessionKeyRequest(purpose, numKeys, callbackParams) {
    var reqOptions = iotAuth.getSessionKeyReqOptions(entityConfig, currentDistributionKey, purpose, numKeys);
    iotAuth.sendSessionKeyReq(reqOptions, handleSessionKeyResp, {onError: onServerError}, callbackParams);
}
/**
 * Applies a successful migration response: switches this server over to
 * the new Auth (id, credential, host/port) and resets failure counters.
 * @param newAuthId id of the Auth we migrated to
 * @param newCredential permanent distribution key when
 *        usePermanentDistKey is set, otherwise the new Auth's public key
 */
function handleMigrationResp(newAuthId, newCredential) {
    authFailureCount = 0;
    migrationFailureCount = 0;
    entityConfig.authInfo.id = newAuthId;
    if (entityConfig.entityInfo.usePermanentDistKey) {
        currentDistributionKey = newCredential;
    }
    else {
        entityConfig.authInfo.publicKey = newCredential;
        trustedAuthPublicKeyList.push(newCredential);
        currentDistributionKey = null; // previous distribution key should be invalidated
    }
    // adopt host/port of the migration target we just moved to
    var currentMigrationInfo = entityConfig.migrationInfo[currentMigrationInfoIndex];
    entityConfig.authInfo.host = currentMigrationInfo.host;
    entityConfig.authInfo.port = currentMigrationInfo.port;
    console.log('migration completed!');
    console.log('new Auth info: !');
    console.log(util.inspect(entityConfig.authInfo));
    rotateMigrationInfoIndex('received migration response, for next round of migration, ');
}
/**
 * Advances the round-robin index into entityConfig.migrationInfo and
 * resets the migration failure counter.
 * @param message log prefix explaining why the rotation happens
 */
function rotateMigrationInfoIndex(message) {
    var previousIndex = currentMigrationInfoIndex;
    console.log(message + 'rotate migration info index from: ' + previousIndex);
    currentMigrationInfoIndex = (previousIndex + 1) % entityConfig.migrationInfo.length;
    console.log('to: ' + currentMigrationInfoIndex);
    migrationFailureCount = 0;
}
/**
 * Attempts to migrate this server to the currently selected backup Auth
 * from entityConfig.migrationInfo. Rotates to the next entry after too
 * many failed attempts and refuses to "migrate" to the Auth we are
 * already using.
 */
function sendMigrationRequest() {
    if (entityConfig.migrationInfo == null || entityConfig.migrationInfo.length == 0) {
        console.log('Failed to migrate! no information for migration.');
        return;
    }
    if (migrationFailureCount >= parameters.migrationFailureThreshold) {
        rotateMigrationInfoIndex('reached migration failure threshold, ');
        migrationFailureCount = 0;
    }
    var currentMigrationInfo = entityConfig.migrationInfo[currentMigrationInfoIndex];
    if ((entityConfig.authInfo.host == currentMigrationInfo.host)
        && (entityConfig.authInfo.port == currentMigrationInfo.port))
    {
        console.log('Failed to migrate! host/port of current Auth is the same as host of the Auth which we migrate to');
    }
    else {
        var options = iotAuth.getMigrationReqOptions(entityConfig, currentMigrationInfo, trustedAuthPublicKeyList);
        var eventHandlers = {
            onError: onServerError
        };
        iotAuth.migrateToTrustedAuth(options, handleMigrationResp, eventHandlers);
    }
}
// event handlers for listening server
/**
 * Fired once the listening server is up; publishes the configured port
 * on the 'listening' output.
 */
function onServerListening() {
    var port = entityConfig.listeningServerInfo.port;
    outputs.listening = port;
    if (outputHandlers.listening) {
        outputHandlers.listening(port);
    }
}
/**
 * Handshake-1 handler for an incoming client. Looks up the requested
 * session key (by the id at the start of the payload) in the local
 * cache; on a miss, asks Auth for that key before continuing.
 * @param handshake1Payload raw handshake payload, begins with the key id
 * @param serverSocket socket of the connecting client
 * @param sendHandshake2Callback continuation invoked with the session key
 */
function onClientRequest(handshake1Payload, serverSocket, sendHandshake2Callback) {
    var keyId = handshake1Payload.readUIntBE(0, common.SESSION_KEY_ID_SIZE);
    console.log('session key id: ' + keyId);
    var sessionKeyFound = false;
    for (var i = 0; i < sessionKeyCacheForClients.length; i++) {
        if (sessionKeyCacheForClients[i].id == keyId) {
            console.log('found session key');
            sendHandshake2Callback(handshake1Payload, serverSocket, sessionKeyCacheForClients[i]);
            sessionKeyFound = true;
            break;
        }
    }
    if (!sessionKeyFound) {
        console.log('session key NOT found! sending session key id to AuthService');
        // context handleSessionKeyResp needs to resume this handshake
        var callbackParams = {
            targetSessionKeyCache: 'none',
            keyId: keyId,
            sendHandshake2Callback: sendHandshake2Callback,
            handshake1Payload: handshake1Payload,
            serverSocket: serverSocket
        }
        sendSessionKeyRequest({keyId: keyId}, 1, callbackParams);
    }
}
// event handlers for individual sockets
/**
 * Per-socket close handler: forgets the client and reports the closed
 * connection on the 'connection' output.
 */
function onClose(socketID) {
    connectedClients[socketID] = null;
    var note = 'secure connection with the client closed.\n' + 'socket #' + socketID + ' closed';
    outputs.connection = note;
    if (outputHandlers.connection) {
        outputHandlers.connection(note);
    }
}
/**
 * Per-socket error handler: publishes the failure on the 'error' output.
 */
function onError(message, socketID) {
    var details = 'Error in secure server socket #' + socketID + ' details: ' + message;
    outputs.error = details;
    if (outputHandlers.error) {
        outputHandlers.error(details);
    }
}
// Fired when a client completes the secure handshake.
// Registers the socket (keyed by its id) so it can later receive publishes.
function onConnection(socketInstance, entityServerSocket) {
    // registering clients as potential subscribers
    connectedClients[socketInstance.id] = entityServerSocket;
    var notice = 'secure connection with the client established.\n' + util.inspect(socketInstance);
    outputs.connection = notice;
    if (outputHandlers.connection) {
        outputHandlers.connection(notice);
    }
}
// Fired when decrypted application data arrives from a client.
// Records the payload and the sender's socket id, then forwards both to the
// registered 'received' handler.
function onData(data, socketID) {
    console.log('data received from server via secure communication');
    outputs.received = data;
    // Fix: receivedID previously stored the payload (data) instead of the
    // socket id, so latestOutput('receivedID') could never identify the sender.
    outputs.receivedID = socketID;
    if (outputHandlers.received) {
        outputHandlers.received({data: data, id: socketID});
    }
}
/*
toSend = {
data: Buffer,
id: Int
}
*/
// Input handler for the 'toSend' port.
// toSend = {data: Buffer, id: Int}. With an id it unicasts to that client's
// socket; with no id it broadcasts to every connected client.
function toSendInputHandler(toSend) {
    console.log('toSend: ' + util.inspect(toSend));
    if (toSend.id != null) {
        // Unicast path: target socket must exist and hold a valid session key.
        console.log('specified socketID: ' + toSend.id);
        if (connectedClients[toSend.id] == null) {
            console.log('client does not exist!');
            return;
        }
        if (!connectedClients[toSend.id].checkSessionKeyValidity()) {
            console.log('session key expired!');
            return;
        }
        try {
            connectedClients[toSend.id].send(toSend.data);
        }
        catch (err) {
            // A failed send is treated as a dead client and dropped.
            console.log('error while sending to client#' + toSend.id + ': ' + err.message);
            console.log('removing this client from the list...');
            connectedClients[toSend.id] = null;
        }
    }
    else {
        // Broadcast path: clients whose session key matches the first cached
        // key all receive one message encrypted once (securePublish is built
        // lazily on the first such client, then reused raw); every other
        // client gets an individually encrypted send().
        var securePublish = null;
        for (var i = 0; i < connectedClients.length; i++) {
            if (connectedClients[i] == null) {
                continue;
            }
            // for shared key publish
            if (sessionKeyCacheForClients.length > 0
                && sessionKeyCacheForClients[0].id == connectedClients[i].sessionKey.id) {
                if (securePublish != null) {
                    connectedClients[i].sendRaw(securePublish);
                }
                else {
                    var enc = common.serializeEncryptSessionMessage(
                        {seqNum: publishSeqNum, data: toSend.data},
                        sessionKeyCacheForClients[0], entityConfig.cryptoInfo.sessionCryptoSpec);
                    publishSeqNum++;
                    securePublish = common.serializeIoTSP({
                        msgType: msgType.SECURE_COMM_MSG,
                        payload: enc
                    });
                    connectedClients[i].sendRaw(securePublish);
                }
                continue;
            }
            // for sending to all with different session keys
            try{
                connectedClients[i].send(toSend.data);
            }
            catch (err) {
                console.log('error while sending to client#' + i + ': ' + err.message);
                console.log('removing this client from the list...');
                connectedClients[i] = null;
            }
        }
    }
}
//////// Main interfaces
// Resets the server's runtime state (distribution key, output channels,
// publish sequence number) and starts the secure listening server.
SecureCommServer.prototype.initialize = function() {
    if (entityConfig.entityInfo.usePermanentDistKey) {
        currentDistributionKey = entityConfig.entityInfo.permanentDistKey;
    }
    else {
        // Distribution key will be obtained from Auth on first key request.
        currentDistributionKey = null;
    }
    // Latest value per output port, readable via latestOutput(key).
    outputs = {
        connection: null,
        error: null,
        listening: null,
        received: null,
        receivedID: null
    };
    // Optional per-port callbacks, installed via setOutputHandler(key, handler).
    outputHandlers = {
        connection: null,
        error: null,
        listening: null,
        received: null // this also outputs receivedID for simplicity, i.e., received = {data: buffer, id: int}
    };
    publishSeqNum = 0; // for experiments with shared key and individual secure connections
    console.log('initializing secure comm server...');
    var options = {
        serverPort: entityConfig.listeningServerInfo.port,
        sessionCryptoSpec: entityConfig.cryptoInfo.sessionCryptoSpec,
        sessionProtocol: entityConfig.entityInfo.distProtocol,
        handshakeTimeout: entityConfig.entityInfo.connectionTimeout
    };
    var eventHandlers = {
        onServerError: onServerError, // for server
        onServerListening: onServerListening,
        onClientRequest: onClientRequest, // for client's communication initialization request
        onClose: onClose, // for individual sockets
        onError: onError,
        onData: onData,
        onConnection: onConnection
    };
    iotAuth.initializeSecureServer(options, eventHandlers);
}
// Routes an input value to the handler for the named port.
// Only the 'toSend' port is currently supported; other ports are ignored.
SecureCommServer.prototype.provideInput = function(port, input) {
    if (port != 'toSend') {
        return;
    }
    toSendInputHandler(input);
}
// Stores a named runtime parameter and echoes the full parameter set.
SecureCommServer.prototype.setParameter = function(key, value) {
    parameters[key] = value;
    var dump = util.inspect(parameters);
    console.log('current parameters: ' + dump);
}
// Returns the most recent value recorded on the named output port
// ('connection', 'error', 'listening', 'received', 'receivedID').
SecureCommServer.prototype.latestOutput = function(key) {
    var latest = outputs[key];
    return latest;
}
// Installs a callback for the named output port and returns it
// (preserves the original's return-of-assignment contract).
SecureCommServer.prototype.setOutputHandler = function(key, handler) {
    outputHandlers[key] = handler;
    return handler;
}
//////// Supportive interfaces
// Accessor for the entity's identity/configuration record.
SecureCommServer.prototype.getEntityInfo = function() {
    var info = entityConfig.entityInfo;
    return info;
}
// Pre-fetches numKeys session keys from Auth, caching them for clients that
// will connect later.
SecureCommServer.prototype.getSessionKeysForFutureClients = function(numKeys) {
    // specify auth ID as a value
    // NOTE(review): {cachedKeys: 101} appears to hard-code a target Auth id of
    // 101 — confirm whether this should come from entityConfig instead.
    sendSessionKeyRequest({cachedKeys: 101}, numKeys, {targetSessionKeyCache: 'Clients'});
}
// Fetches numKeys session keys scoped to the publish topic and adds them to
// the client session-key cache (used by the shared-key broadcast path).
// NOTE(review): topic name 'Ptopic' is hard-coded — confirm it matches the
// topic configured on the Auth side.
SecureCommServer.prototype.getSessionKeysForPublish = function(numKeys) {
    sendSessionKeyRequest({pubTopic: 'Ptopic'}, numKeys, {targetSessionKeyCache: 'Clients'});
}
// Debug helper: renders the current distribution key and the cached client
// session keys as a printable string.
SecureCommServer.prototype.showKeys = function() {
    var lines = '';
    lines += 'distribution key: '+ util.inspect(currentDistributionKey) + '\n';
    lines += 'Session keys for Clients: \n';
    lines += util.inspect(sessionKeyCacheForClients) + '\n';
    return lines;
}
// Debug helper: renders every registered client socket and its session key.
// Fix: entries in connectedClients are set to null on close/send-failure, so
// dereferencing .sessionKey unconditionally threw a TypeError once any client
// had disconnected; null entries are now reported instead.
SecureCommServer.prototype.showSocket = function() {
    var result = '';
    result += 'showSocket command. current client sockets [client count: ' + connectedClients.length + ']: \n';
    for (var i = 0; i < connectedClients.length; i++) {
        if (connectedClients[i] == null) {
            result += 'socket ' + i + ': null (disconnected)\n\n';
            continue;
        }
        result += 'socket ' + i + ': ' + util.inspect(connectedClients[i]) + '\n';
        result += 'socket sessionKey:' + util.inspect(connectedClients[i].sessionKey) + '\n\n';
    }
    return result;
}
// Kicks off migration of this entity to the next trusted Auth in the
// configured migration list.
SecureCommServer.prototype.migrateToTrustedAuth = function() {
    sendMigrationRequest();
}
// Overrides a single field of the entity's configuration and echoes the result.
SecureCommServer.prototype.setEntityInfo = function(key, value) {
    entityConfig.entityInfo[key] = value;
    var dump = util.inspect(entityConfig.entityInfo);
    console.log('current entityInfo: ' + dump);
}
module.exports = SecureCommServer;
|
nowkoai/test | app/services/bulk_imports/lfs_objects_export_service.rb | # frozen_string_literal: true
module BulkImports
  # Exports a project's LFS objects for a bulk import/export: each object's
  # binary is copied (or downloaded) into export_path, and a JSON manifest is
  # written mapping each OID to the repository types that reference it.
  class LfsObjectsExportService
    include Gitlab::ImportExport::CommandLineUtil

    # Number of LFS objects loaded per database batch.
    BATCH_SIZE = 100

    # portable    - the project whose LFS objects are exported
    # export_path - directory that receives the object files and the manifest
    def initialize(portable, export_path)
      @portable = portable
      @export_path = export_path
      # Accumulates oid => [repository_type, ...] across batches.
      @lfs_json = {}
    end

    # Copies every LFS object into export_path and writes the JSON manifest.
    def execute
      portable.lfs_objects.find_in_batches(batch_size: BATCH_SIZE) do |batch| # rubocop: disable CodeReuse/ActiveRecord
        batch.each do |lfs_object|
          save_lfs_object(lfs_object)
        end
        append_lfs_json_for_batch(batch)
      end
      write_lfs_json
    end

    private

    attr_reader :portable, :export_path, :lfs_json

    # Materializes one object under export_path, named by its OID.
    # Local objects are copied from disk; remote (object-storage) ones are
    # downloaded via their URL.
    def save_lfs_object(lfs_object)
      destination_filepath = File.join(export_path, lfs_object.oid)
      if lfs_object.local_store?
        copy_files(lfs_object.file.path, destination_filepath)
      else
        download(lfs_object.file.url, destination_filepath)
      end
    end

    # rubocop: disable CodeReuse/ActiveRecord
    # Merges this batch's oid => repository_types pairs into the manifest hash.
    # One query per batch: repository types are aggregated per OID in SQL.
    def append_lfs_json_for_batch(lfs_objects_batch)
      lfs_objects_projects = LfsObjectsProject
        .select('lfs_objects.oid, array_agg(distinct lfs_objects_projects.repository_type) as repository_types')
        .joins(:lfs_object)
        .where(project: portable, lfs_object: lfs_objects_batch)
        .group('lfs_objects.oid')
      lfs_objects_projects.each do |group|
        oid = group.oid
        lfs_json[oid] ||= []
        lfs_json[oid] += group.repository_types
      end
    end
    # rubocop: enable CodeReuse/ActiveRecord

    # Serializes the accumulated manifest to "<relation>.json" in export_path.
    def write_lfs_json
      filepath = File.join(export_path, "#{BulkImports::FileTransfer::ProjectConfig::LFS_OBJECTS_RELATION}.json")
      File.write(filepath, lfs_json.to_json)
    end
  end
end
|
ldqcarbon/RTK | code/rtkFourDSARTConeBeamReconstructionFilter.hxx | <filename>code/rtkFourDSARTConeBeamReconstructionFilter.hxx
/*=========================================================================
*
* Copyright RTK Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0.txt
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*=========================================================================*/
#ifndef __rtkFourDSARTConeBeamReconstructionFilter_hxx
#define __rtkFourDSARTConeBeamReconstructionFilter_hxx
#include "rtkFourDSARTConeBeamReconstructionFilter.h"

#include <algorithm>
#include <random>

#include <itkTimeProbe.h>
namespace rtk
{
// Constructor: allocates every sub-filter of the composite mini-pipeline and
// wires the connections that never change at runtime. Run-time-dependent
// connections (inputs, geometry) are made in GenerateOutputInformation().
template<class VolumeSeriesType, class ProjectionStackType>
FourDSARTConeBeamReconstructionFilter<VolumeSeriesType, ProjectionStackType>
::FourDSARTConeBeamReconstructionFilter()
{
  // Input 0: volume series; input 1: projection stack.
  this->SetNumberOfRequiredInputs(2);

  // Set default parameters
  m_EnforcePositivity = false;
  m_NumberOfIterations = 3;
  m_Lambda = 0.3;
  m_ProjectionsOrderInitialized = false;

  // Create each filter of the composite filter
  m_ExtractFilter = ExtractFilterType::New();
  m_ZeroMultiplyFilter = MultiplyFilterType::New();
  m_SubtractFilter = SubtractFilterType::New();
  m_AddFilter = AddFilterType::New();
  m_AddFilter2 = AddFilterType::New();
  m_MultiplyFilter = MultiplyFilterType::New();
  m_ConstantVolumeSeriesSource = ConstantVolumeSeriesSourceType::New();
  m_FourDToProjectionStackFilter = FourDToProjectionStackFilterType::New();
  m_ProjectionStackToFourDFilter = ProjectionStackToFourDFilterType::New();

  // Create the filters required for correct weighting of the difference
  // projection
  m_ExtractFilterRayBox = ExtractFilterType::New();
  m_RayBoxFilter = RayBoxIntersectionFilterType::New();
  m_DivideFilter = DivideFilterType::New();
  m_ConstantProjectionStackSource = ConstantProjectionStackSourceType::New();

  // Create the filter that enforces positivity
  m_ThresholdFilter = ThresholdFilterType::New();

  // Permanent internal connections: zero out the forward-projection input,
  // scale the difference projection, and normalize it by the ray/box
  // intersection lengths.
  m_ZeroMultiplyFilter->SetInput1( itk::NumericTraits<typename InputImageType::PixelType>::ZeroValue() );
  m_ZeroMultiplyFilter->SetInput2( m_ExtractFilter->GetOutput() );
  m_MultiplyFilter->SetInput1( itk::NumericTraits<typename InputImageType::PixelType>::ZeroValue() );
  m_MultiplyFilter->SetInput2( m_SubtractFilter->GetOutput() );
  m_ExtractFilterRayBox->SetInput(m_ConstantProjectionStackSource->GetOutput());
  m_RayBoxFilter->SetInput(m_ExtractFilterRayBox->GetOutput());
  m_DivideFilter->SetInput1(m_MultiplyFilter->GetOutput());
  m_DivideFilter->SetInput2(m_RayBoxFilter->GetOutput());

  // Default parameters
  m_ExtractFilter->SetDirectionCollapseToSubmatrix();
  m_ExtractFilterRayBox->SetDirectionCollapseToSubmatrix();
  m_NumberOfProjectionsPerSubset = 1; //Default is the SART behavior
}
// Sets the 4D volume series to reconstruct as primary input (index 0).
template<class VolumeSeriesType, class ProjectionStackType>
void
FourDSARTConeBeamReconstructionFilter<VolumeSeriesType, ProjectionStackType>
::SetInputVolumeSeries(const VolumeSeriesType* VolumeSeries)
{
  // ITK pipeline inputs are non-const; cast away constness as ITK filters do.
  VolumeSeriesType* mutableSeries = const_cast<VolumeSeriesType*>(VolumeSeries);
  this->SetNthInput(0, mutableSeries);
}
// Sets the measured projection stack as secondary input (index 1).
template<class VolumeSeriesType, class ProjectionStackType>
void
FourDSARTConeBeamReconstructionFilter<VolumeSeriesType, ProjectionStackType>
::SetInputProjectionStack(const ProjectionStackType* Projection)
{
  // ITK pipeline inputs are non-const; cast away constness as ITK filters do.
  ProjectionStackType* mutableStack = const_cast<ProjectionStackType*>(Projection);
  this->SetNthInput(1, mutableStack);
}
// Returns the 4D volume series input (index 0) as a const pointer.
template<class VolumeSeriesType, class ProjectionStackType>
typename VolumeSeriesType::ConstPointer
FourDSARTConeBeamReconstructionFilter<VolumeSeriesType, ProjectionStackType>
::GetInputVolumeSeries()
{
  return static_cast<const VolumeSeriesType*>( this->itk::ProcessObject::GetInput(0) );
}
// Returns the projection stack input (index 1) as a non-const pointer.
template<class VolumeSeriesType, class ProjectionStackType>
typename ProjectionStackType::Pointer
FourDSARTConeBeamReconstructionFilter<VolumeSeriesType, ProjectionStackType>
::GetInputProjectionStack()
{
  return static_cast<ProjectionStackType*>( this->itk::ProcessObject::GetInput(1) );
}
// Selects the forward projection implementation by enum value.
// Re-instantiates the internal forward projector only when the requested
// type actually changes, then hands it to the 4D-to-stack sub-filter.
template<class VolumeSeriesType, class ProjectionStackType>
void
FourDSARTConeBeamReconstructionFilter<VolumeSeriesType, ProjectionStackType>
::SetForwardProjectionFilter (int _arg)
{
  if( _arg != this->GetForwardProjectionFilter() )
    {
    Superclass::SetForwardProjectionFilter( _arg );
    m_ForwardProjectionFilter = this->InstantiateForwardProjectionFilter( _arg );
    m_FourDToProjectionStackFilter->SetForwardProjectionFilter( m_ForwardProjectionFilter );
    }
}
// Selects the back projection implementation by enum value.
// Re-instantiates the internal back projector only when the requested type
// actually changes, then hands it to the stack-to-4D sub-filter.
template<class VolumeSeriesType, class ProjectionStackType>
void
FourDSARTConeBeamReconstructionFilter<VolumeSeriesType, ProjectionStackType>
::SetBackProjectionFilter (int _arg)
{
  if( _arg != this->GetBackProjectionFilter() )
    {
    Superclass::SetBackProjectionFilter( _arg );
    m_BackProjectionFilter = this->InstantiateBackProjectionFilter( _arg );
    m_ProjectionStackToFourDFilter->SetBackProjectionFilter( m_BackProjectionFilter );
    }
}
// Propagates the temporal interpolation weights to both internal 4D filters
// and marks this filter modified so the pipeline re-executes.
template<class VolumeSeriesType, class ProjectionStackType>
void
FourDSARTConeBeamReconstructionFilter<VolumeSeriesType, ProjectionStackType>
::SetWeights(const itk::Array2D<float> _arg)
{
  m_FourDToProjectionStackFilter->SetWeights(_arg);
  m_ProjectionStackToFourDFilter->SetWeights(_arg);
  this->Modified();
}
// Propagates the requested region backwards through whichever filter ends the
// internal pipeline (threshold when positivity is enforced, add otherwise).
template<class VolumeSeriesType, class ProjectionStackType>
void
FourDSARTConeBeamReconstructionFilter<VolumeSeriesType, ProjectionStackType>
::GenerateInputRequestedRegion()
{
  typename Superclass::InputImagePointer inputPtr =
    const_cast< VolumeSeriesType * >( this->GetInput() );
  if ( !inputPtr )
    return;
  if(m_EnforcePositivity)
    {
    m_ThresholdFilter->GetOutput()->SetRequestedRegion(this->GetOutput()->GetRequestedRegion() );
    m_ThresholdFilter->GetOutput()->PropagateRequestedRegion();
    }
  else
    {
    m_AddFilter2->GetOutput()->SetRequestedRegion(this->GetOutput()->GetRequestedRegion() );
    m_AddFilter2->GetOutput()->PropagateRequestedRegion();
    }
}
// Makes the run-time connections of the mini-pipeline (inputs, geometry,
// ray-box bounds), initializes the randomized projection processing order
// (once), and computes the output image information.
// Fix: std::random_shuffle was deprecated in C++14 and removed in C++17;
// replaced with std::shuffle and an explicit random engine (requires <random>).
template<class VolumeSeriesType, class ProjectionStackType>
void
FourDSARTConeBeamReconstructionFilter<VolumeSeriesType, ProjectionStackType>
::GenerateOutputInformation()
{
  const unsigned int Dimension = ProjectionStackType::ImageDimension;
  unsigned int numberOfProjections = this->GetInputProjectionStack()->GetLargestPossibleRegion().GetSize(Dimension-1);

  if(!m_ProjectionsOrderInitialized)
    {
    // Fill and shuffle randomly the projection order.
    // Should be tunable with other solutions.
    m_ProjectionsOrder.clear();
    for(unsigned int i = 0; i < numberOfProjections; i++)
      {
      m_ProjectionsOrder.push_back(i);
      }
    std::shuffle( m_ProjectionsOrder.begin(), m_ProjectionsOrder.end(),
                  std::default_random_engine(std::random_device{}()) );
    m_ProjectionsOrderInitialized = true;
    }

  // We only set the first sub-stack at that point, the rest will be
  // requested in the GenerateData function
  typename ProjectionStackType::RegionType projRegion;
  projRegion = this->GetInputProjectionStack()->GetLargestPossibleRegion();
  projRegion.SetSize(Dimension-1,1);
  projRegion.SetIndex(Dimension-1,m_ProjectionsOrder[0]);
  m_ExtractFilter->SetExtractionRegion(projRegion);
  m_ExtractFilterRayBox->SetExtractionRegion(projRegion);

  // Links with the forward and back projection filters should be set here
  // and not in the constructor, as these filters are set at runtime
  m_ConstantVolumeSeriesSource->SetInformationFromImage(const_cast<VolumeSeriesType *>(this->GetInput(0)));
  m_ConstantVolumeSeriesSource->SetConstant(0);
  m_ConstantVolumeSeriesSource->UpdateOutputInformation();

  m_ProjectionStackToFourDFilter->SetInputVolumeSeries( this->GetInputVolumeSeries() );
  m_ProjectionStackToFourDFilter->SetInputProjectionStack( m_DivideFilter->GetOutput() );
  m_AddFilter->SetInput1(m_ProjectionStackToFourDFilter->GetOutput());
  m_AddFilter->SetInput2(m_ConstantVolumeSeriesSource->GetOutput());
  m_AddFilter2->SetInput1(m_AddFilter->GetOutput());
  m_AddFilter2->SetInput2(this->GetInputVolumeSeries());

  m_ExtractFilter->SetInput( this->GetInputProjectionStack() );
  m_FourDToProjectionStackFilter->SetInputProjectionStack( m_ZeroMultiplyFilter->GetOutput() );
  m_FourDToProjectionStackFilter->SetInputVolumeSeries( this->GetInputVolumeSeries() );
  m_SubtractFilter->SetInput2(m_FourDToProjectionStackFilter->GetOutput() );
  m_SubtractFilter->SetInput1(m_ExtractFilter->GetOutput() );

  // For the same reason, set geometry now
  // Check and set geometry
  if(this->GetGeometry().GetPointer() == NULL)
    {
    itkGenericExceptionMacro(<< "The geometry of the reconstruction has not been set");
    }
  m_FourDToProjectionStackFilter->SetGeometry(this->m_Geometry);
  m_ProjectionStackToFourDFilter->SetGeometry(this->m_Geometry.GetPointer());

  m_ConstantProjectionStackSource->SetInformationFromImage(const_cast<ProjectionStackType *>(this->GetInputProjectionStack().GetPointer()));
  m_ConstantProjectionStackSource->SetConstant(0);
  m_ConstantProjectionStackSource->UpdateOutputInformation();

  // Create the m_RayBoxFiltersectionImageFilter: the normalization box spans
  // the physical extent of the input volume series.
  m_RayBoxFilter->SetGeometry(this->GetGeometry().GetPointer());
  itk::Vector<double, 3> Corner1, Corner2;
  Corner1[0] = this->GetInput(0)->GetOrigin()[0];
  Corner1[1] = this->GetInput(0)->GetOrigin()[1];
  Corner1[2] = this->GetInput(0)->GetOrigin()[2];
  Corner2[0] = this->GetInput(0)->GetOrigin()[0] + this->GetInput(0)->GetLargestPossibleRegion().GetSize()[0] * this->GetInput(0)->GetSpacing()[0];
  Corner2[1] = this->GetInput(0)->GetOrigin()[1] + this->GetInput(0)->GetLargestPossibleRegion().GetSize()[1] * this->GetInput(0)->GetSpacing()[1];
  Corner2[2] = this->GetInput(0)->GetOrigin()[2] + this->GetInput(0)->GetLargestPossibleRegion().GetSize()[2] * this->GetInput(0)->GetSpacing()[2];
  m_RayBoxFilter->SetBoxMin(Corner1);
  m_RayBoxFilter->SetBoxMax(Corner2);
  m_RayBoxFilter->UpdateOutputInformation();

  m_ExtractFilter->UpdateOutputInformation();
  m_ZeroMultiplyFilter->UpdateOutputInformation();
  m_FourDToProjectionStackFilter->UpdateOutputInformation();
  m_DivideFilter->UpdateOutputInformation();

  if(m_EnforcePositivity)
    {
    m_ThresholdFilter->SetOutsideValue(0);
    m_ThresholdFilter->ThresholdBelow(0);
    m_ThresholdFilter->SetInput(m_AddFilter2->GetOutput() );

    // Update output information
    m_ThresholdFilter->UpdateOutputInformation();
    this->GetOutput()->SetOrigin( m_ThresholdFilter->GetOutput()->GetOrigin() );
    this->GetOutput()->SetSpacing( m_ThresholdFilter->GetOutput()->GetSpacing() );
    this->GetOutput()->SetDirection( m_ThresholdFilter->GetOutput()->GetDirection() );
    this->GetOutput()->SetLargestPossibleRegion( m_ThresholdFilter->GetOutput()->GetLargestPossibleRegion() );
    }
  else
    {
    // Update output information
    m_AddFilter2->UpdateOutputInformation();
    this->GetOutput()->SetOrigin( m_AddFilter2->GetOutput()->GetOrigin() );
    this->GetOutput()->SetSpacing( m_AddFilter2->GetOutput()->GetSpacing() );
    this->GetOutput()->SetDirection( m_AddFilter2->GetOutput()->GetDirection() );
    this->GetOutput()->SetLargestPossibleRegion( m_AddFilter2->GetOutput()->GetLargestPossibleRegion() );
    }

  // Set memory management flags: intermediate results are released once
  // consumed to limit peak memory.
  m_ZeroMultiplyFilter->ReleaseDataFlagOn();
  m_FourDToProjectionStackFilter->ReleaseDataFlagOn();
  m_SubtractFilter->ReleaseDataFlagOn();
  m_MultiplyFilter->ReleaseDataFlagOn();
  m_RayBoxFilter->ReleaseDataFlagOn();
  m_DivideFilter->ReleaseDataFlagOn();
}
// Runs the SART iterations: for each iteration and each (shuffled) projection,
// forward-projects the current estimate, normalizes the difference projection,
// back-projects it, and accumulates the correction. Subsets of projections
// (m_NumberOfProjectionsPerSubset) share a correction before the estimate is
// updated (OS-SART behavior; subset size 1 is classical SART).
template<class VolumeSeriesType, class ProjectionStackType>
void
FourDSARTConeBeamReconstructionFilter<VolumeSeriesType, ProjectionStackType>
::GenerateData()
{
  const unsigned int Dimension = ProjectionStackType::ImageDimension;

  typename ProjectionStackType::RegionType subsetRegion;
  subsetRegion = this->GetInputProjectionStack()->GetLargestPossibleRegion();
  unsigned int nProj = subsetRegion.GetSize(Dimension-1);
  subsetRegion.SetSize(Dimension-1, 1);

  // The relaxation factor is spread over the projections of a subset.
  m_MultiplyFilter->SetInput1( (const float) m_Lambda/(double)m_NumberOfProjectionsPerSubset );

  // Create the zero projection stack used as input by RayBoxIntersectionFilter
  m_ConstantProjectionStackSource->Update();

  // Declare the image used in the main loop
  typename VolumeSeriesType::Pointer pimg;
  typename VolumeSeriesType::Pointer pimg2;

  // For each iteration, go over each projection
  for(unsigned int iter = 0; iter < m_NumberOfIterations; iter++)
    {
    unsigned int projectionsProcessedInSubset = 0;
    for(unsigned int i = 0; i < nProj; i++)
      {
      // When we reach the number of projections per subset:
      // - plug the output of the pipeline back into the Forward projection filter
      // - set the input of the Back projection filter to zero
      // - reset the projectionsProcessedInSubset to zero
      if (projectionsProcessedInSubset == m_NumberOfProjectionsPerSubset)
        {
        if (m_EnforcePositivity)
          pimg2 = m_ThresholdFilter->GetOutput();
        else
          pimg2 = m_AddFilter2->GetOutput();
        // DisconnectPipeline lets the result survive the next pipeline update.
        pimg2->DisconnectPipeline();
        m_FourDToProjectionStackFilter->SetInputVolumeSeries( pimg2 );
        m_AddFilter2->SetInput2( pimg2 );
        m_AddFilter->SetInput2(m_ConstantVolumeSeriesSource->GetOutput());
        projectionsProcessedInSubset = 0;
        }
      // Otherwise, just plug the output of the add filter
      // back as its input
      else
        {
        if (i)
          {
          pimg = m_AddFilter->GetOutput();
          pimg->DisconnectPipeline();
          m_AddFilter->SetInput2(pimg);
          }
        else
          {
          m_AddFilter->SetInput2(m_ConstantVolumeSeriesSource->GetOutput());
          }
        }

      // Change projection subset: process projections in the shuffled order.
      subsetRegion.SetIndex( Dimension-1, m_ProjectionsOrder[i] );
      m_ExtractFilter->SetExtractionRegion(subsetRegion);
      m_ExtractFilterRayBox->SetExtractionRegion(subsetRegion);

      // This is required to reset the full pipeline
      m_ProjectionStackToFourDFilter->GetOutput()->UpdateOutputInformation();
      m_ProjectionStackToFourDFilter->GetOutput()->PropagateRequestedRegion();

      // Each stage is updated explicitly, bracketed by its time probe.
      m_ExtractProbe.Start();
      m_ExtractFilter->Update();
      m_ExtractFilterRayBox->Update();
      m_ExtractProbe.Stop();
      m_ZeroMultiplyProbe.Start();
      m_ZeroMultiplyFilter->Update();
      m_ZeroMultiplyProbe.Stop();
      m_ForwardProjectionProbe.Start();
      m_FourDToProjectionStackFilter->Update();
      m_ForwardProjectionProbe.Stop();
      m_SubtractProbe.Start();
      m_SubtractFilter->Update();
      m_SubtractProbe.Stop();
      m_MultiplyProbe.Start();
      m_MultiplyFilter->Update();
      m_MultiplyProbe.Stop();
      m_RayBoxProbe.Start();
      m_RayBoxFilter->Update();
      m_RayBoxProbe.Stop();
      m_DivideProbe.Start();
      m_DivideFilter->Update();
      m_DivideProbe.Stop();
      m_BackProjectionProbe.Start();
      m_ProjectionStackToFourDFilter->Update();
      m_BackProjectionProbe.Stop();
      m_AddProbe.Start();
      m_AddFilter->Update();
      m_AddProbe.Stop();

      projectionsProcessedInSubset++;

      // At the end of a subset (or of the last projection), apply the
      // accumulated correction to the estimate and optionally clamp negatives.
      if ((projectionsProcessedInSubset == m_NumberOfProjectionsPerSubset) || (i == nProj - 1))
        {
        m_AddProbe.Start();
        m_AddFilter2->SetInput1(m_AddFilter->GetOutput());
        m_AddFilter2->Update();
        m_AddProbe.Stop();
        if (m_EnforcePositivity)
          {
          m_ThresholdProbe.Start();
          m_ThresholdFilter->Update();
          m_ThresholdProbe.Stop();
          }
        }
      }
    }
  if (m_EnforcePositivity)
    {
    this->GraftOutput( m_ThresholdFilter->GetOutput() );
    }
  else
    {
    this->GraftOutput( m_AddFilter2->GetOutput() );
    }
}
// Prints the accumulated time of every probed pipeline stage to the stream.
template<class VolumeSeriesType, class ProjectionStackType>
void
FourDSARTConeBeamReconstructionFilter<VolumeSeriesType, ProjectionStackType>
::PrintTiming(std::ostream & os) const
{
  os << "FourDSARTConeBeamReconstructionFilter timing:" << std::endl;
  os << " Extraction of projection sub-stacks: " << m_ExtractProbe.GetTotal() << ' ' << m_ExtractProbe.GetUnit() << std::endl;
  os << " Multiplication by zero: " << m_ZeroMultiplyProbe.GetTotal() << ' ' << m_ZeroMultiplyProbe.GetUnit() << std::endl;
  os << " Forward projection: " << m_ForwardProjectionProbe.GetTotal() << ' ' << m_ForwardProjectionProbe.GetUnit() << std::endl;
  os << " Subtraction: " << m_SubtractProbe.GetTotal() << ' ' << m_SubtractProbe.GetUnit() << std::endl;
  os << " Multiplication by lambda: " << m_MultiplyProbe.GetTotal() << ' ' << m_MultiplyProbe.GetUnit() << std::endl;
  os << " Ray box intersection: " << m_RayBoxProbe.GetTotal() << ' ' << m_RayBoxProbe.GetUnit() << std::endl;
  os << " Division: " << m_DivideProbe.GetTotal() << ' ' << m_DivideProbe.GetUnit() << std::endl;
  os << " Back projection: " << m_BackProjectionProbe.GetTotal() << ' ' << m_BackProjectionProbe.GetUnit() << std::endl;
  os << " Volume update: " << m_AddProbe.GetTotal() << ' ' << m_AddProbe.GetUnit() << std::endl;
  if (m_EnforcePositivity)
    {
    os << " Positivity enforcement: " << m_ThresholdProbe.GetTotal() << ' ' << m_ThresholdProbe.GetUnit() << std::endl;
    }
}
} // end namespace rtk
#endif // __rtkFourDSARTConeBeamReconstructionFilter_hxx
|
wuweiweiwu/babel | packages/babel-parser/test/fixtures/flow/classes/good_01/input.js | <reponame>wuweiweiwu/babel
class C { field:*=null }
|
BrUnOXaVIeRLeiTE/Unreal-Magic-Nodes-CS | Plugins/MagicNodeSharp/Source/MagicNodeSharpEditor/Private/CS_Toolkit.cpp | <reponame>BrUnOXaVIeRLeiTE/Unreal-Magic-Nodes-CS
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////
/// Copyright 2021 (C) <NAME>
//////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
#include "CS_Toolkit.h"
#include "CS_EditorStyle.h"
#include "CS_SourceTreeView.h"
#include "KCS_MonoAnalyzer.h"
#include "IMagicNodeSharpKismet.h"
#include "IMagicNodeSharpEditor.h"
#include "MagicNodeSharpEditor_Shared.h"
#include "Runtime/Core/Public/Misc/DateTime.h"
#include "Runtime/SlateCore/Public/Widgets/SOverlay.h"
#include "Runtime/CoreUObject/Public/UObject/Package.h"
#include "Runtime/Slate/Public/Widgets/Notifications/SNotificationList.h"
#include "EditorReimportHandler.h"
#include "Editor/UnrealEd/Public/FileHelpers.h"
#include "Editor/UnrealEd/Public/SourceCodeNavigation.h"
#include "Editor/KismetWidgets/Public/SPinTypeSelector.h"
#include "Editor/KismetWidgets/Public/SSingleObjectDetailsPanel.h"
#include "Interfaces/IPluginManager.h"
#include "GenericPlatform/GenericPlatformFile.h"
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
#define LOCTEXT_NAMESPACE "Synaptech"
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// CS Widget Extensions:
/// Details tab widget: a single-object details panel permanently bound to the
/// script source asset open in the owning toolkit.
class SCS_DetailsTab : public SSingleObjectDetailsPanel {
private:
	// Weak reference: the toolkit owns this tab, not the other way around.
	TWeakPtr<FCS_Toolkit>CodeEditor;
public:
	SLATE_BEGIN_ARGS(SCS_DetailsTab)
	{}
	SLATE_END_ARGS()
public:
	/// The object whose properties the panel inspects (the toolkit's script source).
	virtual UObject* GetObjectToObserve() const override {
		return CodeEditor.Pin()->GET();
	}///
public:
	void Construct(const FArguments &InArgs,TSharedPtr<FCS_Toolkit>CS_Toolkit) {
		CodeEditor = CS_Toolkit;
		//
		SSingleObjectDetailsPanel::Construct(
			SSingleObjectDetailsPanel::FArguments()
			.HostCommandList(CS_Toolkit->GetToolkitCommands()).HostTabManager(CS_Toolkit->GetTabManager()), true, true
		);//
	}///
};
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// CS Toolkit Constructors:
// Toolkit constructor: subscribes to script source export/delete and asset
// deletion delegates, and installs a directory watcher over the C# script
// folder so the source tree view tracks on-disk changes.
FCS_Toolkit::FCS_Toolkit() {
	static FDirectoryWatcherModule &DirectoryWatcherModule = FModuleManager::LoadModuleChecked<FDirectoryWatcherModule>(TEXT("DirectoryWatcher"));
	WatcherHandle = FDelegateHandle();
	//
	UMagicNodeSharpSource::OnScriptSourceExported.AddRaw(this,&FCS_Toolkit::OnScriptExported);
	UMagicNodeSharpSource::OnScriptSourceDeleted.AddRaw(this,&FCS_Toolkit::OnScriptDeleted);
	FEditorDelegates::OnAssetsDeleted.AddRaw(this,&FCS_Toolkit::OnAssetDeleted);
	//
	DirectoryWatcherModule.Get()->RegisterDirectoryChangedCallback_Handle(
		CS_SCRIPT_DIR,IDirectoryWatcher::FDirectoryChanged::CreateRaw(
			this, &FCS_Toolkit::OnProjectDirectoryChanged
		), WatcherHandle, IDirectoryWatcher::WatchOptions::IncludeDirectoryChanges
	);//
	//
	// Only build the script tree if no other toolkit instance has done so yet.
	if (FCS_Toolkit::SourceViewCount()==0) {
		RefreshScriptTreeView();
	}///
	//
	Search.Reset();
}
// Toolkit destructor: mirrors the constructor — removes the directory watcher,
// unsubscribes every delegate registered with AddRaw/this, and unregisters
// from the editor undo system.
FCS_Toolkit::~FCS_Toolkit() {
	static FDirectoryWatcherModule &DirectoryWatcherModule = FModuleManager::LoadModuleChecked<FDirectoryWatcherModule>(TEXT("DirectoryWatcher"));
	DirectoryWatcherModule.Get()->UnregisterDirectoryChangedCallback_Handle(CS_SCRIPT_DIR,WatcherHandle);
	//
	UMagicNodeSharpSource::OnScriptSourceExported.RemoveAll(this);
	UMagicNodeSharpSource::OnScriptSourceDeleted.RemoveAll(this);
	//
	FEditorDelegates::OnAssetsDeleted.RemoveAll(this);
	//
	FReimportManager::Instance()->OnPostReimport().RemoveAll(this);
	FReimportManager::Instance()->OnPreReimport().RemoveAll(this);
	//
	GEditor->UnregisterForUndo(this);
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// CS Toolkit API:
// Initializes the asset editor for the given script source: closes competing
// editors, builds the default tab layout (toolbar / tree view / script /
// details / logs), registers commands and toolbar extensions, and populates
// the source tree view.
void FCS_Toolkit::INIT(const EToolkitMode::Type Mode, const TSharedPtr<IToolkitHost>&InitToolkitHost, UMagicNodeSharpSource* WithScriptSource) {
	GEditor->GetEditorSubsystem<UAssetEditorSubsystem>()->CloseOtherEditors(WithScriptSource,this);
	GEditor->RegisterForUndo(this); check(WithScriptSource);
	//
	ScriptSource = WithScriptSource;
	// Transactional so edits to the asset participate in editor undo/redo.
	ScriptSource->SetFlags(RF_Transactional);
	//
	// Default layout: toolbar on top; below it tree view (left), script editor
	// (center), details (right); logs docked at the bottom.
	const TSharedRef<FTabManager::FLayout>FCodeEditorLayout = FTabManager::NewLayout("CS_CodeEditorLayout_V0001")
	->AddArea
	(
		FTabManager::NewPrimaryArea()
		->SetOrientation(Orient_Vertical)
		->Split
		(
			FTabManager::NewStack()
			->SetHideTabWell(true)->SetSizeCoefficient(0.1f)
			->AddTab(GetToolbarTabId(),ETabState::OpenedTab)
		)
		->Split
		(
			FTabManager::NewSplitter()
			->SetOrientation(Orient_Horizontal)
			->SetSizeCoefficient(0.7f)
			->Split
			(
				FTabManager::NewStack()
				->SetHideTabWell(true)->SetSizeCoefficient(0.15f)
				->AddTab(FCodeEditorTAB::TAB_TreeView,ETabState::OpenedTab)
			)
			->Split
			(
				FTabManager::NewSplitter()
				->SetOrientation(Orient_Vertical)
				->SetSizeCoefficient(0.8f)
				->Split
				(
					FTabManager::NewStack()
					->SetHideTabWell(true)->SetSizeCoefficient(0.75f)
					->AddTab(FCodeEditorTAB::TAB_Script,ETabState::OpenedTab)
				)
			)
			->Split
			(
				FTabManager::NewSplitter()
				->SetOrientation(Orient_Vertical)
				->SetSizeCoefficient(0.2f)
				->Split
				(
					FTabManager::NewStack()
					->SetHideTabWell(true)->SetSizeCoefficient(1.f)
					->AddTab(FCodeEditorTAB::TAB_Details,ETabState::OpenedTab)
				)
			)
		)
		->Split
		(
			FTabManager::NewSplitter()
			->SetOrientation(Orient_Horizontal)
			->SetSizeCoefficient(0.1f)
			->Split
			(
				FTabManager::NewStack()
				->SetHideTabWell(false)->SetSizeCoefficient(1.f)
				->AddTab(FCodeEditorTAB::TAB_Logs,ETabState::OpenedTab)
			)
		)
	);//
	//
	//
	InitAssetEditor(Mode,InitToolkitHost,CS_APP,FCodeEditorLayout,true,true,ScriptSource);
	//
	BindCommands();
	ExtendMenu();
	ExtendToolbar();
	RegenerateMenusAndToolbars();
	//
	//
	// Populate the source tree and expand all root nodes by default.
	RefreshScriptTreeView();
	if (SourceTreeWidget.IsValid()) {
		SourceTreeWidget->RequestTreeRefresh();
		for (const TSharedPtr<FSourceTreeNode>&Node : ScriptSourcePaths) {
			SourceTreeWidget->SetItemExpansion(Node,true);
		}///
	}///
}
// Switches the toolkit to edit a different script source asset.
// No-op when the new source is null or already the one being edited.
void FCS_Toolkit::SET(UMagicNodeSharpSource* NewScriptSource) {
	if ((NewScriptSource==nullptr)||(NewScriptSource==ScriptSource)) {return;}
	//
	UMagicNodeSharpSource* PreviousSource = ScriptSource;
	ScriptSource = NewScriptSource;
	//
	RemoveEditingObject(PreviousSource);
	AddEditingObject(NewScriptSource);
}
// Returns the C# script source asset currently open in this toolkit.
UMagicNodeSharpSource* FCS_Toolkit::GET() const {
	return ScriptSource;
}
// Returns a transient UMagicNodeSharp instance for the current script source,
// lazily (re)creating it in the transient package when the cached weak pointer
// is unset or stale. Returns null when no script source is open.
UMagicNodeSharp* FCS_Toolkit::GetScriptCDO() {
	if (ScriptSource==nullptr) {return nullptr;}
	//
	if (!Instance.IsValid()||Instance.IsStale()) {
		// Unique name derived from the script so parallel toolkits don't clash.
		const FName CheckedID = MakeUniqueObjectName(GetTransientPackage(),UMagicNodeSharp::StaticClass(),*ScriptSource->GetScriptName());
		Instance = NewObject<UMagicNodeSharp>(GetTransientPackage(),UMagicNodeSharp::StaticClass(),CheckedID,RF_Transient);
	}///
	//
	return Instance.Get();
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Registers the spawner for each editor tab (tree view, script, details, logs)
// under a common "Code Editor" workspace menu category, and caches the tab
// manager for later use.
void FCS_Toolkit::RegisterTabSpawners(const TSharedRef<FTabManager>&TABManager) {
	WorkspaceMenuCategory = TABManager->AddLocalWorkspaceMenuCategory(LOCTEXT("CS_MagicNode_Workspace","Code Editor"));
	auto Workspace = WorkspaceMenuCategory.ToSharedRef();
	FAssetEditorToolkit::RegisterTabSpawners(TABManager);
	//
	TABManager->RegisterTabSpawner(FCodeEditorTAB::TAB_TreeView,FOnSpawnTab::CreateSP(this,&FCS_Toolkit::TABSpawn_TreeView))
	.SetDisplayName(LOCTEXT("CS_Types_TabName","Source")).SetGroup(Workspace);
	//
	TABManager->RegisterTabSpawner(FCodeEditorTAB::TAB_Script,FOnSpawnTab::CreateSP(this,&FCS_Toolkit::TABSpawn_Script))
	.SetDisplayName(LOCTEXT("CS_Script_TabName","Script")).SetGroup(Workspace);
	//
	TABManager->RegisterTabSpawner(FCodeEditorTAB::TAB_Details,FOnSpawnTab::CreateSP(this,&FCS_Toolkit::TABSpawn_Details))
	.SetDisplayName(LOCTEXT("CS_Details_TabName","Details")).SetGroup(Workspace);
	//
	TABManager->RegisterTabSpawner(FCodeEditorTAB::TAB_Logs,FOnSpawnTab::CreateSP(this,&FCS_Toolkit::TABSpawn_Logs))
	.SetDisplayName(LOCTEXT("CS_Logs_TabName","Results")).SetGroup(Workspace);
	//
	CS_TabManager = TABManager;
}
void FCS_Toolkit::UnregisterTabSpawners(const TSharedRef<FTabManager>&TABManager) {
FAssetEditorToolkit::UnregisterTabSpawners(TABManager);
//
TABManager->UnregisterTabSpawner(FCodeEditorTAB::TAB_TreeView);
TABManager->UnregisterTabSpawner(FCodeEditorTAB::TAB_Details);
TABManager->UnregisterTabSpawner(FCodeEditorTAB::TAB_Script);
TABManager->UnregisterTabSpawner(FCodeEditorTAB::TAB_Logs);
}
/// Spawns the main script tab: the code editor widget overlaid with a
/// notification list (bottom-left) and a "C#" watermark (bottom-right).
TSharedRef<SDockTab>FCS_Toolkit::TABSpawn_Script(const FSpawnTabArgs &Args) {
	// (Re)creates the core editor widget bound to the current script source.
	SAssignNew(CS_CodeEditor,SCS_CodeEditorCore,ScriptSource);
	//
	const auto Label = FText(LOCTEXT("CS_Script.Watermark","C#"));
	//
	return SNew(SDockTab)
	.Label(LOCTEXT("CS_ScriptTitle","Script"))
	.IsEnabled(this,&FCS_Toolkit::CanCompileScript)
	.Icon(FMagicNodeSharpEditorStyle::Get().Get()->GetBrush("SourceView.Script"))
	[
		SNew(SOverlay)
		+SOverlay::Slot()
		.HAlign(HAlign_Fill)
		.VAlign(VAlign_Fill)
		[
			CS_CodeEditor.ToSharedRef()
		]
		+SOverlay::Slot()
		.HAlign(HAlign_Left)
		.VAlign(VAlign_Bottom)
		.Padding(4,4,4,30)
		[
			SNew(SVerticalBox)
			+SVerticalBox::Slot().AutoHeight()
			[
				SNew(SBox)
				.VAlign(VAlign_Fill)
				.HAlign(HAlign_Fill)
				[
					// Compile notifications pop here; hit-test invisible so
					// clicks fall through to the editor beneath.
					SAssignNew(ScriptNotify,SNotificationList)
					.Visibility(EVisibility::SelfHitTestInvisible)
				]
			]
		]
		+SOverlay::Slot()
		.HAlign(HAlign_Right)
		.VAlign(VAlign_Bottom)
		.Padding(4,4,10,20)
		[
			// Decorative language watermark.
			SNew(STextBlock).Text(Label)
			.Visibility(EVisibility::HitTestInvisible)
			.TextStyle(FEditorStyle::Get(),"Graph.CornerText")
		]
	];
}
/// Spawns the details tab hosting the custom details panel, which is handed
/// a shared reference back to this toolkit.
TSharedRef<SDockTab>FCS_Toolkit::TABSpawn_Details(const FSpawnTabArgs &Args) {
	TSharedPtr<FCS_Toolkit>FCodeEditor = SharedThis(this);
	//
	return SNew(SDockTab)
	.Label(LOCTEXT("CS_DetailsTitle","Details"))
	.Icon(FEditorStyle::GetBrush("LevelEditor.Tabs.Details"))
	[
		SNew(SCS_DetailsTab,FCodeEditor)
	];
}
/// Spawns the compiler-results tab: a single-select list view over the
/// accumulated CompilerResults entries (see AppendCompilerResults()).
TSharedRef<SDockTab>FCS_Toolkit::TABSpawn_Logs(const FSpawnTabArgs &Args) {
	return SNew(SDockTab).Label(LOCTEXT("CS_LogsTitle","Results"))
	.Icon(FEditorStyle::GetBrush(TEXT("LevelEditor.Tabs.StatsViewer")))
	[
		SNew(SBorder)
		.BorderImage(FEditorStyle::GetBrush("Menu.Background"))
		[
			SAssignNew(CompilerResultsWidget,SListView<TSharedPtr<FCompilerResults>>)
			.OnMouseButtonDoubleClick(this,&FCS_Toolkit::OnClickedCompilerResultItem)
			.OnGenerateRow(this,&FCS_Toolkit::OnGenerateCompilerResultRow)
			.SelectionMode(ESelectionMode::Single)
			.ListItemsSource(&CompilerResults)
		]
	];
}
/// Spawns the source-tree tab: a search box on top of two mutually exclusive
/// tree views (full tree vs. flat search results), switched via the
/// GetSourceTree*Visibility() callbacks.
TSharedRef<SDockTab>FCS_Toolkit::TABSpawn_TreeView(const FSpawnTabArgs &Args) {
	TSharedPtr<FCS_Toolkit>FCodeEditor = SharedThis(this);
	// Reset the search filter so the full tree is visible on spawn.
	Search = MakeShared<FString>(TEXT(""));
	//
	//
	SAssignNew(SourceTreeWidget,STreeView<TSharedPtr<FSourceTreeNode>>)
	.OnMouseButtonDoubleClick(this,&FCS_Toolkit::OnClickedSourceViewItem)
	///.OnContextMenuOpening(this,&FCS_Toolkit::OnGetSourceViewContextMenu)
	.OnSelectionChanged(this,&FCS_Toolkit::OnSelectedSourceViewItem)
	.OnExpansionChanged(this,&FCS_Toolkit::OnExpansionChanged)
	.OnGenerateRow(this,&FCS_Toolkit::OnGenerateSourceViewRow)
	.OnGetChildren(this,&FCS_Toolkit::OnGetSourceViewChildren)
	.SelectionMode(ESelectionMode::Single)
	.TreeItemsSource(&ScriptSourcePaths);
	//
	// Populate the tree immediately and expand all root nodes.
	RefreshScriptTreeView();
	SourceTreeWidget->RequestTreeRefresh();
	for (const TSharedPtr<FSourceTreeNode>&Node : ScriptSourcePaths) {
		SourceTreeWidget->SetItemExpansion(Node,true);
	}///
	//
	//
	return SNew(SDockTab)
	.Label(LOCTEXT("CS_TreeViewTitle","Source"))
	.Icon(FEditorStyle::GetBrush("LevelEditor.Tabs.Details"))
	[
		SNew(SVerticalBox)
		+SVerticalBox::Slot()
		.AutoHeight().Padding(0,2,0,2)
		.VAlign(VAlign_Top).HAlign(HAlign_Fill)
		[
			SAssignNew(SourceViewSearchBox,SSearchBox)
			.OnTextCommitted(this,&FCS_Toolkit::OnSearchCommitted)
			.OnTextChanged(this,&FCS_Toolkit::OnSearchChanged)
			.SelectAllTextWhenFocused(true)
		]
		+SVerticalBox::Slot()
		.VAlign(VAlign_Fill).HAlign(HAlign_Fill)
		[
			SNew(SHorizontalBox)
			+SHorizontalBox::Slot()
			.VAlign(VAlign_Fill).HAlign(HAlign_Fill)
			[
				// Full hierarchy; hidden while a search filter is active.
				SNew(SBorder)
				.VAlign(VAlign_Fill).HAlign(HAlign_Fill)
				.Visibility(this,&FCS_Toolkit::GetSourceTreeViewVisibility)
				[
					SourceTreeWidget.ToSharedRef()
				]
			]
			+SHorizontalBox::Slot()
			.VAlign(VAlign_Fill).HAlign(HAlign_Fill)
			[
				// Flat search-result view; visible only while searching.
				SNew(SBorder)
				.VAlign(VAlign_Fill).HAlign(HAlign_Fill)
				.Visibility(this,&FCS_Toolkit::GetSourceTreeSearchVisibility)
				[
					SAssignNew(SourceSearchWidget,STreeView<TSharedPtr<FSourceTreeNode>>)
					.OnMouseButtonDoubleClick(this,&FCS_Toolkit::OnClickedSourceViewItem)
					.OnGenerateRow(this,&FCS_Toolkit::OnGenerateSourceViewRow)
					.OnGetChildren(this,&FCS_Toolkit::OnGetSourceViewChildren)
					.SelectionMode(ESelectionMode::Single)
					.TreeItemsSource(&SourceViewSearch)
				]
			]
		]
	];//
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// Builds one row (icon + label) for the source tree/search views.
///
/// FIX: the original read InItem->Path when constructing the row label even
/// when InItem was invalid — the icon/tooltip code was guarded by
/// InItem.IsValid() but the label was not, so an invalid item crashed.
/// The label is now resolved inside the same validity check (an invalid item
/// yields an empty label instead of a crash).
TSharedRef<ITableRow>FCS_Toolkit::OnGenerateSourceViewRow(TSharedPtr<FSourceTreeNode>InItem, const TSharedRef<STableViewBase>&OwnerTable) {
	FText Tooltip;
	FText Label;
	//
	// Default icon: closed folder (non-file nodes).
	TSharedPtr<SImage>Icon = SNew(SImage)
	.Image(FMagicNodeSharpEditorStyle::Get()->GetBrush("SourceView.FolderClosed"));
	//
	if (InItem.IsValid()) {
		Label = FText::FromString(InItem->Path);
		Tooltip = FText::FromString(InItem->FullPath);
		//
		if (InItem->Path.Len()>InItem->FullPath.Len()){
			Tooltip=FText::FromString(InItem->Path);
		}///
		//
		// Icon is picked by node kind / file extension:
		if (InItem->Path==TEXT("SCRIPTS")) {
			Icon = SNew(SImage).Image(FMagicNodeSharpEditorStyle::Get()->GetBrush("SourceView.CsApp"));
		}///
		//
		if (InItem->Path.EndsWith(".cs")) {
			Icon = SNew(SImage).Image(FMagicNodeSharpEditorStyle::Get()->GetBrush("SourceView.Script"));
		}///
		//
		if (InItem->Path.EndsWith(".txt")) {
			Icon = SNew(SImage).Image(FMagicNodeSharpEditorStyle::Get()->GetBrush("SourceView.Text"));
		}///
		//
		if (InItem->Path.EndsWith(".ini")||InItem->Path.EndsWith(".uproject")||InItem->Path.EndsWith(".uplugin")) {
			Icon = SNew(SImage).Image(FMagicNodeSharpEditorStyle::Get()->GetBrush("SourceView.ConfigFile"));
		}///
	}///
	//
	//
	return SNew(STableRow<TSharedPtr<FSourceTreeNode>>,OwnerTable)
	[
		SNew(SHorizontalBox)
		+SHorizontalBox::Slot()
		.Padding(1.f,0,1.f,0.f)
		.HAlign(HAlign_Left)
		.AutoWidth()
		[
			SNew(SBorder)
			.Padding(FMargin(0.f))
			.VAlign(VAlign_Fill).HAlign(HAlign_Fill)
			.BorderImage(FEditorStyle::GetBrush("ToolPanel.DarkGroupBorder"))
			[
				Icon.ToSharedRef()
			]
		]
		+SHorizontalBox::Slot()
		.HAlign(HAlign_Fill)
		[
			SNew(SBorder)
			.BorderImage(FEditorStyle::GetBrush("Menu.Background"))
			[
				SNew(STextBlock)
				.Text(Label)
				.ToolTip(FSlateApplication::Get().MakeToolTip(Tooltip))
			]
		]
	];//
}
/// Builds one row of the compiler-results list: a severity icon plus a
/// timestamped message (time-of-day with leading digit and sub-second tail
/// trimmed via RightChop/LeftChop).
TSharedRef<ITableRow>FCS_Toolkit::OnGenerateCompilerResultRow(TSharedPtr<FCompilerResults>InItem, const TSharedRef<STableViewBase>&OwnerTable) {
	const FString Date = FDateTime::Now().GetTimeOfDay().ToString().RightChop(1).LeftChop(4);
	//
	const FText Label = FText::FromString(FString::Printf(TEXT("%s %s"),*Date,*InItem->ErrorMessage));
	//
	// Error icon is the default; overridden for warnings and successes.
	TSharedPtr<SImage>Icon = SNew(SImage).Image(FMagicNodeSharpEditorStyle::Get()->GetBrush("CS.Error"));
	if (InItem->Result==EMonoCompilerResult::Warning) {Icon=SNew(SImage).Image(FMagicNodeSharpEditorStyle::Get()->GetBrush("CS.Warning"));}
	if (InItem->Result==EMonoCompilerResult::Success) {Icon=SNew(SImage).Image(FMagicNodeSharpEditorStyle::Get()->GetBrush("CS.Success"));}
	//
	return SNew(STableRow<TSharedPtr<FCompilerResults>>,OwnerTable)
	[
		SNew(SHorizontalBox)
		+SHorizontalBox::Slot()
		.Padding(1.f,0,1.f,0.f)
		.HAlign(HAlign_Left)
		.AutoWidth()
		[
			SNew(SBorder)
			.Padding(FMargin(0.f))
			.VAlign(VAlign_Fill).HAlign(HAlign_Fill)
			.BorderImage(FEditorStyle::GetBrush("ToolPanel.DarkGroupBorder"))
			[
				Icon.ToSharedRef()
			]
		]
		+SHorizontalBox::Slot()
		.HAlign(HAlign_Fill)
		[
			SNew(SBorder)
			.BorderImage(FEditorStyle::GetBrush("Menu.Background"))
			[
				// Full (untrimmed) message is available via the tooltip.
				SNew(STextBlock).Text(Label)
				.ToolTip(FSlateApplication::Get().MakeToolTip(FText::FromString(InItem->ErrorMessage)))
			]
		]
	];//
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
void FCS_Toolkit::BindCommands() {
const FMagicNodeSharpEditorCommands &Commands = FMagicNodeSharpEditorCommands::Get();
const TSharedRef<FUICommandList>&UICommandList = GetToolkitCommands();
{
UICommandList->MapAction(Commands.Compile,
FExecuteAction::CreateSP(this,&FCS_Toolkit::CompileScript),
FCanExecuteAction::CreateSP(this,&FCS_Toolkit::CanCompileScript)
);//
//
UICommandList->MapAction(Commands.VSCode,
FExecuteAction::CreateSP(this,&FCS_Toolkit::LaunchVSCode),
FCanExecuteAction::CreateSP(this,&FCS_Toolkit::CanCompileScript)
);//
//
UICommandList->MapAction(Commands.VSGen,
FExecuteAction::CreateSP(this,&FCS_Toolkit::GenerateVSCode),
FCanExecuteAction::CreateSP(this,&FCS_Toolkit::CanCompileScript)
);//
//
UICommandList->MapAction(Commands.VSLaunch,
FExecuteAction::CreateSP(this,&FCS_Toolkit::LaunchVStudio),
FCanExecuteAction::CreateSP(this,&FCS_Toolkit::CanCompileScript)
);//
//
UICommandList->MapAction(Commands.DiffTool,
FExecuteAction::CreateSP(this,&FCS_Toolkit::LaunchSourceCodeDIFF),
FCanExecuteAction::CreateSP(this,&FCS_Toolkit::CanCompileScript)
);//
//
UICommandList->MapAction(Commands.Help,
FExecuteAction::CreateSP(this,&FCS_Toolkit::LaunchHelpWiki),
FCanExecuteAction::CreateSP(this,&FCS_Toolkit::CanCompileScript)
);//
}///
}
/// Placeholder for menu-bar extensions; intentionally empty for now.
void FCS_Toolkit::ExtendMenu() {
	////... @ToDo
}
/// Extends the asset-editor toolbar with Compile / VS tooling / Diff / Help
/// buttons, then merges any extenders registered by the editor module.
void FCS_Toolkit::ExtendToolbar() {
	// Static filler keeps the toolbar layout in one place; invoked via the
	// extension delegate below.
	struct Local {
		static void FillToolbar(FToolBarBuilder &ToolbarBuilder) {
			//if (FSourceCodeNavigation::IsCompilerAvailable()) {
				ToolbarBuilder.BeginSection("Compile");
				{
					ToolbarBuilder.AddToolBarButton(FMagicNodeSharpEditorCommands::Get().Compile);
				}
				ToolbarBuilder.EndSection();
				//
				ToolbarBuilder.BeginSection("Tools");
				{
					ToolbarBuilder.AddToolBarButton(FMagicNodeSharpEditorCommands::Get().VSCode);
					ToolbarBuilder.AddToolBarButton(FMagicNodeSharpEditorCommands::Get().VSGen);
					ToolbarBuilder.AddToolBarButton(FMagicNodeSharpEditorCommands::Get().VSLaunch);
				}
				ToolbarBuilder.EndSection();
				//
				ToolbarBuilder.BeginSection("Utility");
				{
					ToolbarBuilder.AddToolBarButton(FMagicNodeSharpEditorCommands::Get().DiffTool);
				}
				ToolbarBuilder.EndSection();
				//
				ToolbarBuilder.BeginSection("Documentation");
				{
					ToolbarBuilder.AddToolBarButton(FMagicNodeSharpEditorCommands::Get().Help);
				}
				ToolbarBuilder.EndSection();
			//}///
		}///
	};//
	//
	TSharedPtr<FExtender>ToolbarExtender = MakeShareable(new FExtender);
	ToolbarExtender->AddToolBarExtension(
		"Asset", EExtensionHook::After, GetToolkitCommands(),
		FToolBarExtensionDelegate::CreateStatic(&Local::FillToolbar)
	);//
	//
	AddToolbarExtender(ToolbarExtender);
	// Also honor externally-registered toolbar extenders from the module.
	IMagicNodeSharpEditor* EditorModule = &FModuleManager::LoadModuleChecked<IMagicNodeSharpEditor>("MagicNodeSharpEditor");
	AddToolbarExtender(EditorModule->GetMagicNodeSharpEditorToolBarExtensibilityManager()->GetAllExtenders());
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
void FCS_Toolkit::FocusToolkitTab(const FName &Tab) {
FGlobalTabmanager::Get()->DrawAttentionToTabManager(CS_TabManager.ToSharedRef());
TSharedPtr<SDockTab>TAB = CS_TabManager->FindExistingLiveTab(Tab);
//
if (TAB.IsValid()) {CS_TabManager->DrawAttention(TAB.ToSharedRef());} else {
TAB = CS_TabManager->TryInvokeTab(Tab);
if (TAB.IsValid()) {CS_TabManager->DrawAttention(TAB.ToSharedRef());}
}///
}
void FCS_Toolkit::CloseToolkitTab(const FName &Tab) {
FGlobalTabmanager::Get()->DrawAttentionToTabManager(CS_TabManager.ToSharedRef());
TSharedPtr<SDockTab>TAB = CS_TabManager->FindExistingLiveTab(Tab);
//
if (TAB.IsValid()) {TAB->RequestCloseTab();}
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// Validates the environment (Mono kismet availability, editor widget, tab
/// manager, script validity), then kicks off an async compile of the current
/// script against its transient CDO. Bails out early — with a log entry
/// and/or a compiler-results row — on each failed precondition.
void FCS_Toolkit::CompileScript() {
	static IMagicNodeSharpKismet &MonoKismet = FMagicNodeSharpKismet::Get();
	//
	if (!MonoKismet.CanCompile()) {return;}
	if (!CS_CodeEditor.IsValid()) {return;}
	//
	if (GET()==nullptr) {
		LOG::CS_CHAR(ESeverity::Error,TEXT("{C#}:: Compiler unreachable, Script is invalid.")); return;
	}///
	//
	// Bring the script tab to front (spawning it if needed); abort if neither
	// the live tab nor a freshly-invoked one can be reached.
	if (CS_TabManager.IsValid()) {
		FGlobalTabmanager::Get()->DrawAttentionToTabManager(CS_TabManager.ToSharedRef());
		TSharedPtr<SDockTab>TAB = CS_TabManager->FindExistingLiveTab(FCodeEditorTAB::TAB_Script);
		//
		if (TAB.IsValid()) {CS_TabManager->DrawAttention(TAB.ToSharedRef());} else {
			TSharedPtr<SDockTab>SCRIPT = CS_TabManager->TryInvokeTab(FCodeEditorTAB::TAB_Script);
			if (SCRIPT.IsValid()) {CS_TabManager->DrawAttention(SCRIPT.ToSharedRef());} else {
				LOG::CS_CHAR(ESeverity::Error,TEXT("Core Script Tab is unreachable or invalid. Compilation aborted."));
				if (GEditor){GEditor->PlayEditorSound(TEXT("/Engine/EditorSounds/Notifications/CompileFailed_Cue.CompileFailed_Cue"));}
			return;}
		}///
	} else {
		LOG::CS_CHAR(ESeverity::Error,TEXT("Core Script Tab Manager is unreachable or invalid. Compilation aborted."));
		if (GEditor) {GEditor->PlayEditorSound(TEXT("/Engine/EditorSounds/Notifications/CompileFailed_Cue.CompileFailed_Cue"));}
	return;}
	//
	// Prebuilt failure record reused by the remaining validity checks.
	FCompilerResults Fail;
	Fail.Result = EMonoCompilerResult::Error;
	Fail.ErrorMessage = TEXT("{C#}:: Compiler unreachable, Script is invalid.");
	//
	if (!GET()->IsValidLowLevel()) {
		AppendCompilerResults(Fail); return;
	} else if (GET()==UMagicNodeSharpSource::StaticClass()->ClassDefaultObject) {
		Fail.ErrorMessage = TEXT("{C#}:: Script is invalid (referencing Base Script Class is not allowed).");
		AppendCompilerResults(Fail); return;
	}///
	//
	if (!MonoKismet.MonoKismet_INIT()) {
		Fail.ErrorMessage = TEXT("{C#}:: Compiler unreachable, Mono is busy or not properly initialized.");
		AppendCompilerResults(Fail); LOG::CS_STR(ESeverity::Error,Fail.ErrorMessage); return;
	}///
	//
	// Async: OnCompilationFinished() fires via this delegate and unbinds it.
	MonoKismet.CompilerResult.BindRaw(this,&FCS_Toolkit::OnCompilationFinished);
	MonoKismet.CompileNode(GET(),GetScriptCDO());
}
/// Compile-completion callback (bound in CompileScript()): surfaces the
/// result as a notification + results-list row, optionally prompts to save
/// the dirty package, refreshes the source tree, and tears down the Mono
/// kismet domain.
void FCS_Toolkit::OnCompilationFinished(const UMagicNodeSharpSource* Script, const FCompilerResults Results) {
	static IMagicNodeSharpKismet &MonoKismet = FMagicNodeSharpKismet::Get();
	//
	if (Script==nullptr) {return;}
	if (Script->HasAnyFlags(RF_ClassDefaultObject|RF_ArchetypeObject|RF_BeginDestroyed)) {return;}
	{
		// Non-empty message => error or warning path.
		if (!Results.ErrorMessage.IsEmpty()) {
			FNotificationInfo NInfo = FNotificationInfo(FText::FromString(Results.ErrorMessage));
			NInfo.Image = FEditorStyle::GetBrush(TEXT("NotificationList.DefaultMessage"));
			NInfo.bUseSuccessFailIcons = false;
			NInfo.bFireAndForget = true;
			NInfo.bUseThrobber = false;
			//
			// NOTE(review): for messages of <=11 chars the expire time equals
			// the character count (so e.g. a 5-char message lives 5 seconds),
			// otherwise 10s — presumably a short-message heuristic; confirm.
			NInfo.ExpireDuration = (Results.ErrorMessage.Len()<=11) ? Results.ErrorMessage.Len() : 10.f;
			//
			if (Results.Result==EMonoCompilerResult::Error) {NInfo.Image=FEditorStyle::GetBrush(TEXT("Kismet.Status.Error"));}
			if (Results.Result==EMonoCompilerResult::Warning) {NInfo.Image=FEditorStyle::GetBrush(TEXT("Kismet.Status.Warning"));}
			//
			if (CS_CodeEditor.IsValid()) {CS_CodeEditor->SetScriptError(Results.Result,Results.ErrorInfo);}
			if ((GEditor)&&(Results.Result==EMonoCompilerResult::Error)) {GEditor->PlayEditorSound(TEXT("/Engine/EditorSounds/Notifications/CompileFailed_Cue.CompileFailed_Cue"));}
			if ((GEditor)&&(Results.Result==EMonoCompilerResult::Warning)) {GEditor->PlayEditorSound(TEXT("/Engine/EditorSounds/Notifications/CompileSuccess_Cue.CompileSuccess_Cue"));}
			//
			AddNotification(NInfo,false);
			AppendCompilerResults(Results);
		} else {
			// Empty message => clean success notification.
			FNotificationInfo NInfo = FNotificationInfo(FText::FromString(TEXT("Success")));
			NInfo.Image = FEditorStyle::GetBrush(TEXT("NotificationList.SuccessImage"));
			NInfo.bUseSuccessFailIcons = false;
			NInfo.bFireAndForget = true;
			NInfo.bUseThrobber = false;
			NInfo.ExpireDuration = 2.f;
			//
			if (Results.Result==EMonoCompilerResult::Success) {NInfo.Image=FEditorStyle::GetBrush(TEXT("Kismet.Status.Good"));}
			if (Results.Result==EMonoCompilerResult::Warning) {NInfo.Image=FEditorStyle::GetBrush(TEXT("Kismet.Status.Warning"));}
			//
			if (CS_CodeEditor.IsValid()) {CS_CodeEditor->SetScriptError(Results.Result,Results.ErrorInfo);}
			if (GEditor) {GEditor->PlayEditorSound(TEXT("/Engine/EditorSounds/Notifications/CompileSuccess_Cue.CompileSuccess_Cue"));}
			//
			AddNotification(NInfo,true);
			AppendCompilerResults(Results);
		}///
	}
	//
	// On success/warning: show the results tab and offer to save the asset.
	if (Results.Result != EMonoCompilerResult::Error) {
		FocusToolkitTab(FCodeEditorTAB::TAB_Logs);
		//
		UPackage* Package = GET()->GetOutermost();
		//
		if (Package->IsDirty()) {
			TArray<UPackage*>PackagesToSave; PackagesToSave.Add(Package);
			FEditorFileUtils::PromptForCheckoutAndSave(PackagesToSave,true,false);
		}///
	}///
	//
	//
	RefreshScriptTreeView();
	//
	if (SourceTreeWidget.IsValid()) {
		SourceTreeWidget->RequestTreeRefresh();
		for (const TSharedPtr<FSourceTreeNode>&Node : ScriptSourcePaths) {
			SourceTreeWidget->SetItemExpansion(Node,true);
		}///
	}///
	//
	//
	// One-shot delegate: unbind and shut down the compile domain.
	MonoKismet.CompilerResult.Unbind();
	MonoKismet.MonoKismetDomain_STOP();
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// Opens the asset editor for the script asset whose name matches
/// LaunchTargetSource (set by the tree-view click handlers).
///
/// Fixes vs. original: removed the unused IPlatformFile local; hoisted the
/// member-mutating RemoveFromEnd() out of the asset loop (it is idempotent,
/// so stripping the extension once up front is equivalent); guarded the
/// GEditor dereference.
void FCS_Toolkit::LaunchSourceCodeEditor() {
	FAssetRegistryModule &AssetRegistry=FModuleManager::LoadModuleChecked<FAssetRegistryModule>("AssetRegistry");
	//
	TArray<FAssetData>AssetData;
	FARFilter AssetFilter;
	//
	AssetFilter.bRecursiveClasses = true;
	AssetFilter.ClassNames.Add(UMagicNodeSharpSource::StaticClass()->GetFName());
	//
	// Asset names carry no ".cs" extension; strip it from the target once.
	LaunchTargetSource.RemoveFromEnd(CS_SCRIPT_EXT);
	//
	if (AssetRegistry.Get().GetAssets(AssetFilter,AssetData)&&(AssetData.Num()>0)) {
		for (auto &Data : AssetData) {
			if (UObject*CDO=Data.GetAsset()) {
				if (LaunchTargetSource.Equals(CDO->GetName(),ESearchCase::CaseSensitive)) {
					if (GEditor) {GEditor->GetEditorSubsystem<UAssetEditorSubsystem>()->OpenEditorForAsset(CDO);}
				}///
			}///
		}///
	}///
}
void FCS_Toolkit::LaunchVSCode() {
if (!ScriptSource->IsValidLowLevelFast()) {return;}
//
const FString SCN = ScriptSource->GetScriptName();
//
const FString LC = FString::Printf(TEXT(":%i:%i"),CS_CodeEditor->GetCursorOffset().GetLineIndex()+1,CS_CodeEditor->GetCursorOffset().GetOffset()+1);
const FString IURL = FPaths::Combine(TEXT("vscode://file"),CS_SCRIPT_DIR,SCN+CS_SCRIPT_EXT+LC);
//
LOG::CS_STR(ESeverity::Info,IURL);
UKismetSystemLibrary::LaunchURL(IURL);
}
/// Generates a VS Code/MSBuild solution (SLN_<Script>) for the current
/// script from the plugin's *.gen templates, wiring in included scripts and
/// assembly references, then opens the scripts folder in VS Code.
///
/// Fixes vs. original: 'Generated' previously started at true, so VS Code
/// was launched even when every template failed to load; settings.json was
/// overwritten even when SETTINGS.gen failed to load (clobbering it with an
/// empty string); and the unused 'INFG' GUID local was removed.
void FCS_Toolkit::GenerateVSCode() {
	IPlatformFile &PlatformManager = FPlatformFileManager::Get().GetPlatformFile();
	if (!ScriptSource->IsValidLowLevelFast()) {return;}
	//
	static const FString Content = FPaths::ConvertRelativePathToFull(IPluginManager::Get().FindPlugin(CS_PLUGIN_NAME)->GetContentDir());
	// Placeholder GUIDs in the templates, replaced with fresh ones below.
	static const FString SGuid = TEXT("F0000000-0000-0000-0000-000000000000");
	static const FString PGuid = TEXT("E0000000-0000-0000-0000-000000000000");
	//
	// Template anchor lines that extra includes/references are appended after.
	static const FString CINC = TEXT("<Compile Include=\"..\\{PROJECT}.cs\" />");
	static const FString RINC = TEXT("<Reference Include=\"Microsoft.CSharp\" />");
	//
	const FString SCN = ScriptSource->GetScriptName();
	const FString DIR = FPaths::Combine(CS_SCRIPT_DIR,TEXT("SLN_")+SCN);
	//
	if (!PlatformManager.DirectoryExists(*DIR)) {
		PlatformManager.CreateDirectory(*DIR);
		PlatformManager.CopyFile(
			*FPaths::Combine(CS_SCRIPT_DIR,TEXT(".vscode"),TEXT("settings.json")),
			*FPaths::Combine(Content,TEXT("Gen"),TEXT("settings.json"))
		);///
	}///
	//
	FString SLN = TEXT("\0");
	FString PRJ = TEXT("\0");
	FString INF = TEXT("\0");
	FString SET = TEXT("\0");
	//
	const bool INF_Loaded = FFileHelper::LoadFileToString(INF,*FPaths::Combine(Content,TEXT("Gen"),TEXT("INFO.gen")));
	const bool PRJ_Loaded = FFileHelper::LoadFileToString(PRJ,*FPaths::Combine(Content,TEXT("Gen"),TEXT("PROJECT.gen")));
	const bool SLN_Loaded = FFileHelper::LoadFileToString(SLN,*FPaths::Combine(Content,TEXT("Gen"),TEXT("SOLUTION.gen")));
	const bool SET_Loaded = FFileHelper::LoadFileToString(SET,*FPaths::Combine(Content,TEXT("Gen"),TEXT("SETTINGS.gen")));
	//
	// Only claim success when the mandatory templates actually loaded.
	bool Generated = (SLN_Loaded && PRJ_Loaded && INF_Loaded);
	//
	if (SLN_Loaded && PRJ_Loaded && INF_Loaded) {
		const FString SLNG = IKCS_MonoAnalyzer::GenerateGUID(SGuid);
		const FString PRJG = IKCS_MonoAnalyzer::GenerateGUID(PGuid);
		//
		// Add every included sibling script as an extra <Compile> entry.
		for (const auto &INC : ScriptSource->Include) {
			if (INC==nullptr||INC->GetScriptName().Equals(SCN)) {continue;}
			//
			const FString Include = FString::Printf(TEXT("    <Compile Include=\"..\\%s.cs\" />"),*INC->GetScriptName());
			//
			PRJ.ReplaceInline(*CINC,*(CINC+TEXT("\n")+Include));
		}///
		//
		// Add every assembly reference (except engine-provided ones).
		for (const auto &REF : ScriptSource->References) {
			if (REF.FilePath.EndsWith(TEXT("UnrealEngine.dll"))) {continue;}
			if (REF.FilePath.EndsWith(TEXT("MagicNodes.dll"))) {continue;}
			//
			const FString PATH = FPaths::ConvertRelativePathToFull(REF.FilePath);
			const FString LIB = FPaths::GetBaseFilename(PATH);
			//
			const FString Reference = FString::Printf(
				TEXT("    <Reference Include=\"%s, Version=1.0.0.0, Culture=neutral, processorArchitecture=AMD64\">\n      <SpecificVersion>False</SpecificVersion>\n      <HintPath>%s</HintPath>\n    </Reference>"),
				*LIB, *PATH
			);///
			//
			PRJ.ReplaceInline(*RINC,*(RINC+TEXT("\n")+Reference));
		}///
		//
		INF.ReplaceInline(*PGuid,*PRJG,ESearchCase::CaseSensitive);
		PRJ.ReplaceInline(*PGuid,*PRJG,ESearchCase::CaseSensitive);
		SLN.ReplaceInline(*PGuid,*PRJG,ESearchCase::CaseSensitive);
		SLN.ReplaceInline(*SGuid,*SLNG,ESearchCase::CaseSensitive);
		//
		INF.ReplaceInline(TEXT("{PROJECT}"),*SCN,ESearchCase::CaseSensitive);
		PRJ.ReplaceInline(TEXT("{PROJECT}"),*SCN,ESearchCase::CaseSensitive);
		SLN.ReplaceInline(TEXT("{PROJECT}"),*SCN,ESearchCase::CaseSensitive);
		SLN.ReplaceInline(TEXT("{SOLUTION}"),*(TEXT("SLN_")+SCN),ESearchCase::CaseSensitive);
		//
		Generated = (Generated && FFileHelper::SaveStringToFile(PRJ,*FPaths::Combine(DIR,SCN+TEXT(".csproj"))));
		Generated = (Generated && FFileHelper::SaveStringToFile(SLN,*FPaths::Combine(DIR,TEXT("SLN_")+SCN+TEXT(".sln"))));
		Generated = (Generated && FFileHelper::SaveStringToFile(INF,*FPaths::Combine(DIR,TEXT("Properties"),TEXT("AssemblyInfo.cs"))));
		//
		// Best-effort: only touch settings.json when its template loaded.
		if (SET_Loaded) {
			FFileHelper::SaveStringToFile(SET,*FPaths::Combine(CS_SCRIPT_DIR,TEXT(".vscode"),TEXT("settings.json")));
		}///
	}///
	//
	if (Generated) {
		LOG::CS_STR(ESeverity::Info,DIR);
		//
		LOG::CS_CHAR(ESeverity::Info,TEXT("--------------------------------------------------"));
		LOG::CS_STR(ESeverity::Info,SLN);
		LOG::CS_CHAR(ESeverity::Info,TEXT("--------------------------------------------------"));
		LOG::CS_STR(ESeverity::Info,PRJ);
		LOG::CS_CHAR(ESeverity::Info,TEXT("--------------------------------------------------"));
		LOG::CS_STR(ESeverity::Info,INF);
		LOG::CS_CHAR(ESeverity::Info,TEXT("--------------------------------------------------"));
		//
		static FString IURL = FPaths::Combine(TEXT("vscode://file"),CS_SCRIPT_DIR);
		UKismetSystemLibrary::LaunchURL(IURL);
	}///
}
void FCS_Toolkit::LaunchVStudio() {
IPlatformFile &PlatformManager = FPlatformFileManager::Get().GetPlatformFile();
if (!ScriptSource->IsValidLowLevelFast()) {return;}
//
const FString SCN = ScriptSource->GetScriptName();
const FString DIR = FPaths::Combine(CS_SCRIPT_DIR,TEXT("SLN_")+SCN);
const FString TARGET = FPaths::Combine(DIR,TEXT("SLN_")+SCN+TEXT(".sln"));
//
if (PlatformManager.FileExists(*TARGET)) {
UKismetSystemLibrary::LaunchURL(FPaths::Combine(TEXT("file:///")+TARGET));
}///
}
/// Opens a diff tab comparing the content-browser-selected script against
/// the one being edited; logs an informational message when the selection
/// is missing, not a script, or identical to the edited script.
void FCS_Toolkit::LaunchSourceCodeDIFF() {
	UObject* CDO = FMagicNodeSharpEditor::GetSelectedAsset();
	if (CDO==nullptr) {LOG::CS_CHAR(ESeverity::Info,TEXT("No script selected to compare sources.")); return;}
	//
	UMagicNodeSharpSource* Source = Cast<UMagicNodeSharpSource>(CDO);
	if (Source==nullptr) {LOG::CS_CHAR(ESeverity::Info,TEXT("Selection is not a script to compare.")); return;}
	if (Source==GET()) {LOG::CS_CHAR(ESeverity::Info,TEXT("Script and selection are the same.")); return;}
	//
	FMagicNodeSharpEditor::DIFF_InvokeTAB(Source,GET());
}
/// Opens the project's GitHub wiki in the system browser.
void FCS_Toolkit::LaunchHelpWiki() {
	UKismetSystemLibrary::LaunchURL(TEXT("https://github.com/BrUnOXaVIeRLeiTE/Unreal-Magic-Nodes/wiki"));
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// Script exported to disk: rebuild the source tree and re-expand all roots.
void FCS_Toolkit::OnScriptExported(UMagicNodeSharpSource* Source) {
	RefreshScriptTreeView();
	//
	if (SourceTreeWidget.IsValid()) {
		SourceTreeWidget->RequestTreeRefresh();
		for (const TSharedPtr<FSourceTreeNode>&Node : ScriptSourcePaths) {
			SourceTreeWidget->SetItemExpansion(Node,true);
		}///
	}///
}
/// Script removed: same refresh-and-expand cycle as OnScriptExported().
void FCS_Toolkit::OnScriptDeleted() {
	RefreshScriptTreeView();
	//
	if (SourceTreeWidget.IsValid()) {
		SourceTreeWidget->RequestTreeRefresh();
		for (const TSharedPtr<FSourceTreeNode>&Node : ScriptSourcePaths) {
			SourceTreeWidget->SetItemExpansion(Node,true);
		}///
	}///
}
/// Editor-wide asset deletion: refresh only when something was deleted.
void FCS_Toolkit::OnAssetDeleted(const TArray<UClass*>&DeletedAssetClasses) {
	if (DeletedAssetClasses.Num()>0) {
		RefreshScriptTreeView();
		//
		if (SourceTreeWidget.IsValid()) {
			SourceTreeWidget->RequestTreeRefresh();
			for (const TSharedPtr<FSourceTreeNode>&Node : ScriptSourcePaths) {
				SourceTreeWidget->SetItemExpansion(Node,true);
			}///
		}///
	}///
}
/// File-watcher callback: rebuild the tree data only (no widget refresh —
/// presumably the watcher fires often; confirm this is intentional).
void FCS_Toolkit::OnProjectDirectoryChanged(const TArray<FFileChangeData>&Data) {
	RefreshScriptTreeView();
}
// IToolkitHost notifications — intentionally unhandled by this toolkit.
void FCS_Toolkit::OnToolkitHostingStarted(const TSharedRef<IToolkit>&Toolkit) {}
void FCS_Toolkit::OnToolkitHostingFinished(const TSharedRef<IToolkit>&Toolkit){}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// Pushes a large-font notification into the script tab's notification list
/// and colors it by success/failure. No-op when the list widget is gone.
void FCS_Toolkit::AddNotification(FNotificationInfo &Info, bool Success) {
	if (!ScriptNotify.IsValid()) {return;}
	//
	Info.bUseLargeFont = true;
	//
	const SNotificationItem::ECompletionState State = Success ? SNotificationItem::CS_Success : SNotificationItem::CS_Fail;
	TSharedPtr<SNotificationItem>Item = ScriptNotify->AddNotification(Info);
	//
	if (Item.IsValid()) {Item->SetCompletionState(State);}
}
void FCS_Toolkit::AppendCompilerResults(const FCompilerResults &Result) {
if (GET()==nullptr) {return;}
//
TSharedPtr<FCompilerResults>NewResult = MakeShareable(new FCompilerResults(Result));
//
if (NewResult->ErrorMessage.IsEmpty()) {
NewResult->ErrorMessage = FString::Printf(TEXT("%s: Success."),*(GET()->GetScriptName()));
}///
//
CompilerResults.Add(NewResult);
//
if (CompilerResultsWidget.IsValid()) {CompilerResultsWidget->RequestListRefresh();}
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// Full tree is shown only while no search filter (>=2 chars) is active.
EVisibility FCS_Toolkit::GetSourceTreeViewVisibility() const {
	const bool Searching = (Search.IsValid() && Search->Len()>=2);
	return Searching ? EVisibility::Collapsed : EVisibility::Visible;
}
/// Search-result view is the exact complement of the full tree above.
EVisibility FCS_Toolkit::GetSourceTreeSearchVisibility() const {
	const bool Searching = (Search.IsValid() && Search->Len()>=2);
	return Searching ? EVisibility::Visible : EVisibility::Collapsed;
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// Double-click handler: a .cs leaf opens its asset editor; any other node
/// is simply expanded in the tree.
void FCS_Toolkit::OnClickedSourceViewItem(TSharedPtr<FSourceTreeNode>TreeItem) {
	if (!TreeItem.IsValid()) {return;}
	//
	if (!IsSourceFile(TreeItem->FullPath)) {
		SourceTreeWidget->SetItemExpansion(TreeItem,true);
		return;
	}///
	//
	LaunchTargetSource = TreeItem->Path;
	LaunchSourceCodeEditor();
}
/// Double-click handler for a compiler-results row; currently only logs
/// (navigation to the offending line is still unimplemented).
void FCS_Toolkit::OnClickedCompilerResultItem(TSharedPtr<FCompilerResults>Item) {
	LOG::CS_CHAR(ESeverity::Warning,TEXT("OnClickedCompilerResultItem()!"));
	/// @ToDo...
}
/// Selection handler: remembers the selected .cs node as the launch target
/// for LaunchSourceCodeEditor(); non-file nodes are ignored.
void FCS_Toolkit::OnSelectedSourceViewItem(TSharedPtr<FSourceTreeNode>TreeItem, ESelectInfo::Type SelectInfo) {
	if (!TreeItem.IsValid()) {return;}
	if (!IsSourceFile(TreeItem->FullPath)) {return;}
	//
	LaunchTargetSource = TreeItem->Path;
}
/// Tree children callback; the search view shows a flat list, so children
/// are only reported while no search results exist.
void FCS_Toolkit::OnGetSourceViewChildren(TSharedPtr<FSourceTreeNode>InItem, TArray<TSharedPtr<FSourceTreeNode>>&OutChildren) {
	if (SourceViewSearch.Num()==0) {OutChildren=InItem->ChildNodes;}
}
// Unused tree-view notifications — intentionally empty.
void FCS_Toolkit::OnExpansionChanged(TSharedPtr<FSourceTreeNode>InItem, bool WasExpanded){}
void FCS_Toolkit::OnSourceViewCheckStatusChanged(ECheckBoxState NewCheckState, TSharedPtr<FSourceTreeNode>NodeChanged){}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// FGCObject hook: keeps the edited script source alive across GC while
/// the toolkit holds only a raw pointer to it.
void FCS_Toolkit::AddReferencedObjects(FReferenceCollector &Collector) {
	Collector.AddReferencedObject(ScriptSource);
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// Stable internal identifier for this toolkit type.
FName FCS_Toolkit::GetToolkitFName() const {
	return FName("CS_CodeEditor");
}
/// User-facing name of the editor (used in menus/window chrome).
FText FCS_Toolkit::GetBaseToolkitName() const {
	return LOCTEXT("CS_CodeEditor.Label","Magic Node Editor");
}
/// Tab/window title: the asset name, suffixed with "*" while its package
/// has unsaved changes.
FText FCS_Toolkit::GetToolkitName() const {
	FFormatNamedArguments Args;
	Args.Add(TEXT("McsName"),FText::FromString(ScriptSource->GetName()));
	//
	const FText DirtyMark = ScriptSource->GetOutermost()->IsDirty() ? FText::FromString(TEXT("*")) : FText::GetEmpty();
	Args.Add(TEXT("DirtyState"),DirtyMark);
	//
	return FText::Format(LOCTEXT("CS_CodeEditor.Label","{McsName}{DirtyState}"),Args);
}
/// Builds a display title by splitting the script's CamelCase name into
/// words (underscores become spaces; a space is inserted before an
/// uppercase letter that follows a lowercase one).
///
/// FIX: the original indexed SName[0] without checking for an empty script
/// name, which trips FString's bounds assert; an empty name now falls back
/// to the base title.
FText FCS_Toolkit::GetToolkitTitle() const {
	FText Title = LOCTEXT("CS_CodeEditor_BaseTitle","Magic Node (C#)");
	//
	if (GET()==nullptr) {return Title;}
	//
	FString SName = GET()->GetScriptName();
	if (SName.IsEmpty()) {return Title;}
	//
	FString SCaps; SCaps.AppendChar(SName[0]);
	SName.ReplaceInline(TEXT("_"),TEXT(" "));
	//
	for (int32 I=1; I < SName.Len(); I++) {
		if (FChar::IsUpper(SName[I])) {
			// Word boundary: uppercase preceded by neither space nor uppercase.
			if (SName[I-1] != TEXT(' ') && !FChar::IsUpper(SName[I-1])) {SCaps.AppendChar(TEXT(' '));}
		} SCaps.AppendChar(SName[I]);
	}///
	//
	FText Name = FText::FromString(SCaps);
	FFormatNamedArguments Args; Args.Add(TEXT("Name"),Name);
	//
	Title = FText::Format(LOCTEXT("CS_CodeEditor_Title","{Name}"),Args);
	//
	return Title;
}
/// Standard asset tooltip for the edited script source.
FText FCS_Toolkit::GetToolkitToolTipText() const {
	return GetToolTipTextForObject(ScriptSource);
}
/// Prefix used for world-centric tab labels.
FString FCS_Toolkit::GetWorldCentricTabPrefix() const {
	return TEXT("CS_CodeEditor");
}
/// External documentation link surfaced by the editor framework.
FString FCS_Toolkit::GetDocumentationLink() const {
	return TEXT("https://brunoxavierleite.wordpress.com/2019/01/16/unreal-magic-nodes-programming/");
}
/// Accent color for world-centric tabs.
FLinearColor FCS_Toolkit::GetWorldCentricTabColorScale() const {
	return FLinearColor::White;
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// True while the Mono kismet backend is running a compile.
bool FCS_Toolkit::IsCompiling() const {
	static IMagicNodeSharpKismet &MonoKismet = FMagicNodeSharpKismet::Get();
	//
	return MonoKismet.IsCompiling();
}
/// Gate for all toolbar commands: requires an idle compiler, a valid script
/// source, and no active play-in-editor session.
bool FCS_Toolkit::CanCompileScript() const {
	static IMagicNodeSharpKismet &MonoKismet = FMagicNodeSharpKismet::Get();
	//
	return (
		MonoKismet.CanCompile() && ScriptSource &&
		(GEditor==nullptr||(!GEditor->IsPlaySessionInProgress()))
	);//
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// Rebuilds ScriptSourcePaths from the on-disk <GameSource>/Scripts folder:
/// a single "SCRIPTS" root node whose descendants mirror the directory
/// structure of every *.cs file found.
void FCS_Toolkit::RefreshScriptTreeView() {
	// NOTE(review): ProjectSource is function-static and shared across calls;
	// FindFilesRecursive is presumably clearing it each call — confirm the
	// engine overload used here does (otherwise results would accumulate).
	static FString ProjectRoot = FPaths::GameSourceDir();
	static TArray<FString>ProjectSource;
	//
	IFileManager &FileManager = IFileManager::Get();
	FileManager.FindFilesRecursive(ProjectSource,*(ProjectRoot+TEXT("Scripts")),*FString::Printf(TEXT("/*%s"),*CS_SCRIPT_EXT),true,false);
	if (ProjectSource.Num()==0) {return;}
	//
	// Derive the absolute ".../Scripts/" prefix from the first hit.
	FString RootPath;
	FString FullPath = FPaths::ConvertRelativePathToFull(ProjectSource[0]);
	//
	FullPath.Split(TEXT("/Scripts/"),&RootPath,nullptr);
	RootPath += TEXT("/Scripts/");
	//
	TSharedPtr<FSourceTreeNode>RootNode = MakeShared<FSourceTreeNode>();
	RootNode->Path = TEXT("SCRIPTS"); RootNode->FullPath = RootPath;
	//
	// Replace any previous root so the tree is rebuilt from scratch.
	TSharedRef<FSourceTreeNode>OldRoot = RootNode.ToSharedRef();
	for (const TSharedPtr<FSourceTreeNode>&Old : ScriptSourcePaths) {
		if (Old->Path==RootNode->Path) {OldRoot=Old.ToSharedRef(); break;}
	} ScriptSourcePaths.Remove(OldRoot);
	//
	ScriptSourcePaths.Add(RootNode);
	TSharedRef<FSourceTreeNode>ParentNode = RootNode.ToSharedRef();
	//
	// Normalize every hit to a root-relative path ("Sub/Dir/File.cs").
	for (FString &Path : ProjectSource) {
		Path = FPaths::ConvertRelativePathToFull(Path);
		Path.ReplaceInline(*RootPath,TEXT(""));
	}///
	//
	// Insert each file, creating intermediate directory nodes on demand.
	for (const FString &Path : ProjectSource) {
		if (!IsSourceFile(Path)) {continue;}
		if (Path.IsEmpty()) {continue;}
		//
		TArray<FString>Nodes;
		Path.ParseIntoArray(Nodes,TEXT("/"));
		FString Source = ParentNode->FullPath;
		//
		for (int32 I=0; I<Nodes.Num(); I++) {
			const FString &Node = Nodes[I];
			Source = FPaths::Combine(Source,Node);
			TSharedPtr<FSourceTreeNode>TreeNode = ParentNode->FindNode(Node);
			//
			if (!TreeNode.IsValid()) {
				TreeNode = MakeShared<FSourceTreeNode>();
				TreeNode->ParentNode = ParentNode;
				TreeNode->FullPath = Source;
				TreeNode->Path = Node;
				//
				ParentNode->ChildNodes.Add(TreeNode);
			} ParentNode = TreeNode.ToSharedRef();
		}///
		//
		// Rewind to the root before processing the next file.
		ParentNode = RootNode.ToSharedRef();
	}///
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
void FCS_Toolkit::OnSearchChanged(const FText &Filter) {
Search = MakeShared<FString>(Filter.ToString());
SourceViewSearch.Empty();
//
for (const TSharedPtr<FSourceTreeNode>&Node : ScriptSourcePaths) {
if (Node->Path.Contains(**Search.Get())) {
SourceViewSearch.Add(Node);
}///
//
for (const TSharedPtr<FSourceTreeNode>&N1 : Node->ChildNodes) {
if (N1->Path.Contains(**Search.Get())) {
SourceViewSearch.Add(N1);
}///
//
for (const TSharedPtr<FSourceTreeNode>&N2 : N1->ChildNodes) {
if (N2->Path.Contains(**Search.Get())) {
SourceViewSearch.Add(N2);
}///
//
for (const TSharedPtr<FSourceTreeNode>&N3 : N2->ChildNodes) {
if (N3->Path.Contains(**Search.Get())) {
SourceViewSearch.Add(N3);
}///
//
for (const TSharedPtr<FSourceTreeNode>&N4 : N3->ChildNodes) {
if (N4->Path.Contains(**Search.Get())) {
SourceViewSearch.Add(N4);
}///
//
for (const TSharedPtr<FSourceTreeNode>&N5 : N4->ChildNodes) {
if (N5->Path.Contains(**Search.Get())) {
SourceViewSearch.Add(N5);
}///
//
for (const TSharedPtr<FSourceTreeNode>&N6 : N5->ChildNodes) {
if (N6->Path.Contains(**Search.Get())) {
SourceViewSearch.Add(N6);
}///
}///
}///
}///
}///
}///
}///
}///
//
//
SourceSearchWidget->RequestListRefresh();
}
void FCS_Toolkit::OnSearchCommitted(const FText &NewText, ETextCommit::Type CommitInfo) {
 // Commit handler for the search box: terms shorter than two characters are
 // treated as "no search", so the result list is emptied before refreshing.
 const bool IsTooShort = (NewText.ToString().Len()<2);
 if (IsTooShort) {SourceViewSearch.Empty();}
 //
 SourceSearchWidget->RequestListRefresh();
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
bool FCS_Toolkit::IsSourceFile(const FString &Path) {
 // A path counts as script source purely by carrying the ".cs" extension.
 const bool HasScriptExtension = Path.EndsWith(TEXT(".cs"));
 return HasScriptExtension;
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
#undef LOCTEXT_NAMESPACE
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// |
intj-t/openvmsft | WRK-V1.2/TOOLS/crt/src/a_loc.c | <reponame>intj-t/openvmsft<gh_stars>1-10
/***
*a_loc.c - A versions of GetLocaleInfo.
*
* Copyright (c) Microsoft Corporation. All rights reserved.
*
*Purpose:
* Use either GetLocaleInfoA or GetLocaleInfoW depending on which is
* available
*
*******************************************************************************/
#include <cruntime.h>
#include <internal.h>
#include <stdlib.h>
#include <awint.h>
#include <dbgint.h>
#include <malloc.h>
#include <locale.h>
#include <mtdll.h>
#include <setlocal.h>
#define USE_W 1
#define USE_A 2
/***
*int __cdecl __crtGetLocaleInfoA - Get locale info and return it as an ASCII
* string
*
*Purpose:
* Internal support function. Assumes info in ANSI string format. Tries
* to use NLS API call GetLocaleInfoW if available (NT) and uses
* GetLocaleInfoA if it must (Chicago). If neither are available it fails
* and returns 0.
*
*Entry:
* LCID Locale - locale context for the comparison.
* LCTYPE LCType - see NT\Chicago docs
* LPSTR lpLCData - pointer to memory to return data
* int cchData - char (byte) count of buffer (including NULL)
* (if 0, lpLCData is not referenced, size needed
* is returned)
* int code_page - for MB/WC conversion. If 0, use __lc_codepage
*
*Exit:
* Success: the number of characters copied (including NULL).
* Failure: 0
*
*Exceptions:
*
*******************************************************************************/
static int __cdecl __crtGetLocaleInfoA_stat(
        _locale_t plocinfo,
        LCID Locale,
        LCTYPE LCType,
        LPSTR lpLCData,
        int cchData,
        int code_page
        )
{
    /* Cached API flavor: 0 = not yet probed, then USE_W or USE_A.
     * NOTE(review): written without synchronization; the worst case under
     * concurrent first calls is a redundant re-probe, which is benign. */
    static int f_use = 0;
    /*
     * Look for unstubbed 'preferred' flavor. Otherwise use available flavor.
     * Must actually call the function to ensure it's not a stub.
     */
    if (0 == f_use)
    {
        if (0 != GetLocaleInfoW(0, LOCALE_ILANGUAGE, NULL, 0))
            f_use = USE_W;
        else if (GetLastError() == ERROR_CALL_NOT_IMPLEMENTED)
            f_use = USE_A;
    }
    /* Use "A" version (also the fallback when probing was inconclusive) */
    if (USE_A == f_use || f_use == 0)
    {
        return GetLocaleInfoA(Locale, LCType, lpLCData, cchData);
    }
    /* Use "W" version: fetch wide data, then convert to the ANSI code page */
    if (USE_W == f_use)
    {
        int retval = 0;
        int buff_size;
        wchar_t *wbuffer;
        /*
         * Use __lc_codepage for conversion if code_page not specified
         */
        if (0 == code_page)
            code_page = plocinfo->locinfo->lc_codepage;
        /* find out how big buffer needs to be */
        if (0 == (buff_size = GetLocaleInfoW(Locale, LCType, NULL, 0)))
            return 0;
        /* allocate buffer (stack or heap; must be released with _freea) */
        wbuffer = (wchar_t *)_calloca( buff_size, sizeof(wchar_t) );
        if ( wbuffer == NULL ) {
            return 0;
        }
        /* get the info in wide format */
        if (0 == GetLocaleInfoW(Locale, LCType, wbuffer, buff_size))
            goto error_cleanup;
        /* convert from Wide Char to ANSI */
        if (0 == cchData)
        {
            /* convert into local buffer: caller only wants the required size */
            retval = WideCharToMultiByte( code_page,
                                          0,
                                          wbuffer,
                                          -1,
                                          NULL,
                                          0,
                                          NULL,
                                          NULL );
        }
        else {
            /* convert into user buffer */
            retval = WideCharToMultiByte( code_page,
                                          0,
                                          wbuffer,
                                          -1,
                                          lpLCData,
                                          cchData,
                                          NULL,
                                          NULL );
        }
error_cleanup:
        _freea(wbuffer);
        return retval;
    }
    else /* f_use is neither USE_A nor USE_W */
        return 0;
}
/* Public entry point: refreshes the thread's locale snapshot via
 * _LocaleUpdate, then delegates to the static worker above. See the
 * worker's header comment for parameter and return-value semantics. */
extern "C" int __cdecl __crtGetLocaleInfoA(
        _locale_t plocinfo,
        LCID Locale,
        LCTYPE LCType,
        LPSTR lpLCData,
        int cchData,
        int code_page
        )
{
    _LocaleUpdate _loc_update(plocinfo);
    return __crtGetLocaleInfoA_stat(
            _loc_update.GetLocaleT(),
            Locale,
            LCType,
            lpLCData,
            cchData,
            code_page
            );
}
|
mibac138/byte-buddy | byte-buddy-dep/src/main/java/net/bytebuddy/description/enumeration/package-info.java | <reponame>mibac138/byte-buddy<filename>byte-buddy-dep/src/main/java/net/bytebuddy/description/enumeration/package-info.java
/**
* A package that contains classes for describing enumeration values.
*/
package net.bytebuddy.description.enumeration;
|
edude545/system0 | src/main/java/net/ethobat/system0/auxiliary/S0ArmorItem.java | package net.ethobat.system0.auxiliary;
import net.ethobat.system0.System0;
import net.ethobat.system0.registry.S0Registrar;
import net.minecraft.entity.EquipmentSlot;
import net.minecraft.item.ArmorItem;
import net.minecraft.item.ArmorMaterial;
public class S0ArmorItem extends ArmorItem implements IS0Item {
 // Registry name this item was registered under.
 public final String NAME;
 // Whether this item has been granted a tooltip (off by default).
 public boolean HAS_TOOLTIP = false;
 /** Convenience constructor that uses the default item settings. */
 public S0ArmorItem(ArmorMaterial armorMaterial, EquipmentSlot armorSlot, String registryName) {
  this(armorMaterial, armorSlot, IS0Item.s(), registryName);
 }
 /**
  * Creates the armor item, forces it into the armor creative tab with a
  * maximum stack size of one, and registers it under the given name.
  */
 public S0ArmorItem(ArmorMaterial armorMaterial, EquipmentSlot armorSlot, Settings settings, String registryName) {
  super(armorMaterial, armorSlot, settings.group(System0.ITEM_GROUP_ARMOR).maxCount(1));
  this.NAME = registryName;
  S0Registrar.register(this, registryName);
 }
 @Override
 public void giveTooltip() {
  HAS_TOOLTIP = true;
 }
}
|
GrapeCity/ComponentOne-ASPNET-MVC-Samples | HowTo/FlexGrid/FilterPanel/FilterPanel/wwwroot/js/FilterPanel.js | <gh_stars>1-10
// TypeScript-emitted inheritance helper: makes `d` extend `b` by copying
// static members and linking the prototype chains.
var __extends = (this && this.__extends) || (function () {
    // Prefer the standard ways of setting a prototype; fall back to a manual
    // own-property copy on engines without __proto__/setPrototypeOf support.
    var extendStatics = Object.setPrototypeOf ||
        ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
        function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
    return function (d, b) {
        extendStatics(d, b);
        // Temporary-constructor trick: `new __()` yields an object whose
        // prototype is b.prototype while keeping d as the constructor.
        function __() { this.constructor = d; }
        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
    };
})();
/**
* Extension that provides a drag and drop UI for editing
* groups in bound @see:FlexGrid controls.
*/
var wijmo;
(function (wijmo) {
var grid;
(function (grid) {
var filter;
(function (filter_1) {
'use strict';
var FilterPanel = (function (_super) {
__extends(FilterPanel, _super);
/**
* Initializes a new instance of the @see:FilterPanel class.
*/
function FilterPanel(element, options) {
var _this = _super.call(this, element) || this;
// check dependencies
var depErr = 'Missing dependency: GroupPanel requires ';
wijmo.assert(grid != null, depErr + 'wijmo.grid.');
// instantiate and apply template
// using wj-grouppanel to pick up styles
var tpl = _this.getTemplate();
_this.applyTemplate('wj-filterpanel wj-grouppanel wj-control', tpl, {
_divMarkers: 'div-markers',
_divPH: 'div-ph'
});
// click markers to delete filters
var e = _this.hostElement;
_this.addEventListener(e, 'click', _this._click.bind(_this));
_this._filterChangedBnd = _this._filterChanged.bind(_this);
// apply options
_this.initialize(options);
return _this;
}
Object.defineProperty(FilterPanel.prototype, "placeholder", {
/**
* Gets or sets a string to display in the control when it contains no groups.
*/
get: function () {
return this._divPH.textContent;
},
set: function (value) {
this._divPH.textContent = value;
},
enumerable: true,
configurable: true
});
Object.defineProperty(FilterPanel.prototype, "filter", {
/**
* Gets or sets the @see:FlexGridFilter that is connected to this @see:FilterPanel.
*/
get: function () {
return this._filter;
},
set: function (value) {
value = wijmo.asType(value, filter_1.FlexGridFilter, true);
if (value != this._filter) {
if (this._filter) {
this._filter.filterChanged.removeHandler(this._filterChangedBnd);
this._filter.filterApplied.removeHandler(this._filterChangedBnd);
}
this._filter = value;
if (this._filter) {
this._filter.filterChanged.addHandler(this._filterChangedBnd);
this._filter.filterApplied.addHandler(this._filterChangedBnd);
}
}
},
enumerable: true,
configurable: true
});
// ** overrides
/**
* Updates the panel to show the current groups.
*/
FilterPanel.prototype.refresh = function () {
_super.prototype.refresh.call(this);
// clear div/state
this._divMarkers.innerHTML = '';
// populate
if (this._filter) {
// build array of filter markers
var g = this._filter.grid, markers = [];
for (var i = 0; i < g.columns.length; i++) {
var cf = this._filter.getColumnFilter(i, false);
if (cf && cf.isActive) {
var marker = this._createFilterMarker(cf);
markers.push(marker);
}
}
// populate if we have markers
if (markers.length > 0) {
// add 'clear all filters' marker
var clearAll = this._createMarker('Clear All Filters', true);
clearAll.classList.add('wj-remove-all');
this._divMarkers.appendChild(clearAll);
// add regular markers
for (var i = 0; i < markers.length; i++) {
this._divMarkers.appendChild(markers[i]);
}
}
}
// show placeholder or markers
if (this._divMarkers.children.length > 0) {
this._divPH.style.display = 'none';
this._divMarkers.style.display = '';
}
else {
this._divPH.style.display = '';
this._divMarkers.style.display = 'none';
}
};
// ** event handlers
// remove filter on click
FilterPanel.prototype._click = function (e) {
var target = e.target;
if (target.classList.contains('wj-remove')) {
var marker = wijmo.closest(target, '.wj-filtermarker'), filter = marker ? marker['filter'] : null;
if (filter instanceof filter_1.ColumnFilter) {
filter.clear();
this._filter.apply();
}
else {
this._filter.clear();
}
}
};
// refresh markers when filter changes
FilterPanel.prototype._filterChanged = function () {
this.refresh();
};
// ** implementation
// checks whether a format represents a time (and not just a date)
FilterPanel.prototype._isTimeFormat = function (fmt) {
if (!fmt)
return false;
fmt = wijmo.culture.Globalize.calendar.patterns[fmt] || fmt;
return /[Hmst]+/.test(fmt); // TFS 109409
};
// creates a marker
FilterPanel.prototype._createMarker = function (hdr, removeButton) {
// create the marker element
var marker = document.createElement('div');
marker.className = 'wj-cell wj-header wj-groupmarker wj-filtermarker';
wijmo.setCss(marker, {
display: 'inline-block',
position: 'static',
});
// apply content
marker.textContent = hdr;
// add remove button before the text
if (removeButton) {
var btn = document.createElement('span');
btn.className = 'wj-remove';
wijmo.setCss(btn, {
fontWeight: 'bold',
cursor: 'pointer',
padding: 12,
paddingLeft: 0
});
btn.innerHTML = '×';
marker.insertBefore(btn, marker.firstChild);
}
// all done
return marker;
};
// creates a marker to represent a ColumnFilter
FilterPanel.prototype._createFilterMarker = function (cf) {
var hdr = this._getFilterHeader(cf), marker = this._createMarker(hdr, true);
marker['filter'] = cf;
return marker;
};
// gets the header to show in a ColumnFilter marker
FilterPanel.prototype._getFilterHeader = function (cf) {
if (cf.conditionFilter.isActive) {
return this._getConditionFilterHeader(cf);
}
else if (cf.valueFilter.isActive) {
return this._getValueFilterHeader(cf);
}
else {
throw '** should have at least one active filter';
}
};
// gets the header for condition filters
FilterPanel.prototype._getConditionFilterHeader = function (cf) {
var f = cf.conditionFilter, c1 = this._getConditionHeader(cf, f.condition1), c2 = this._getConditionHeader(cf, f.condition2);
if (c1 && c2) {
var culture = wijmo.culture.FlexGridFilter, andOr = f.and ? culture.and : culture.or;
return c1 + ' ' + andOr.toLowerCase() + ' ' + c2;
}
if (c1) {
return c1;
}
if (c2) {
return c2;
}
throw '** should have at least one active condition';
};
FilterPanel.prototype._getConditionHeader = function (cf, c) {
var hdr = null;
if (c.isActive) {
// get operator list based on column data type
var col = cf.column, list = wijmo.culture.FlexGridFilter.stringOperators;
if (col.dataType == wijmo.DataType.Date && !this._isTimeFormat(col.format)) {
list = wijmo.culture.FlexGridFilter.dateOperators;
}
else if (col.dataType == wijmo.DataType.Number && !col.dataMap) {
list = wijmo.culture.FlexGridFilter.numberOperators;
}
else if (col.dataType == wijmo.DataType.Boolean && !col.dataMap) {
list = wijmo.culture.FlexGridFilter.booleanOperators;
}
// get operator name
hdr = '';
for (var i = 0; i < list.length; i++) {
if (list[i].op == c.operator) {
hdr = list[i].name.toLowerCase();
break;
}
}
// add operator value
if (wijmo.isString(c.value)) {
hdr += ' "' + c.value + '"';
}
else {
hdr += ' ' + wijmo.Globalize.format(c.value, col.format);
}
}
return hdr;
};
// gets the header for value filters
FilterPanel.prototype._getValueFilterHeader = function (cf) {
var hdr = null, f = cf.valueFilter;
if (f.isActive) {
hdr = '"' + Object.keys(f.showValues).join(' & ') + '"';
}
return hdr;
};
return FilterPanel;
}(wijmo.Control));
/**
* Gets or sets the template used to instantiate @see:FilterPanel controls.
*/
FilterPanel.controlTemplate = '<div style="cursor:default;overflow:hidden;height:100%;width:100%;min-height:1em;">' +
'<div wj-part="div-ph"></div>' +
'<div wj-part="div-markers"></div>' +
'</div>';
filter_1.FilterPanel = FilterPanel;
})(filter = grid.filter || (grid.filter = {}));
})(grid = wijmo.grid || (wijmo.grid = {}));
})(wijmo || (wijmo = {}));
|
tt-arcade/fba-pi | src/intf/input/inp_keys.cpp | #include <cstddef>
#include "string.h"
#include "inp_keys.h"
// Maps a human-readable input name (as read from config files) to a code.
struct Input {
 const char *name;
 const int code;
};
// Base offsets for joystick-mapped inputs of players 2-4. The low byte of an
// entry selects a direction (0x00=left, 0x01=right, 0x02=up, 0x03=down) or a
// button (0x80 and up) on that player's stick.
#define P2_JOY 0x4000
#define P3_JOY 0x4100
#define P4_JOY 0x4200
// Name -> code lookup table, terminated by a NULL-name sentinel entry.
// Player 1 is keyboard-mapped (FBK_* scan codes); players 2-4 use the
// P*_JOY offsets above. Note some names alias the same code (e.g. the
// "JAB".."ROUNDHOUSE" fighting-game names overlap "BUTTON 1".."BUTTON 4").
static const Input allInputs[] = {
 // Generic
 { "P1 START", FBK_1, },
 { "P2 START", FBK_2, },
 { "P1 COIN", FBK_5, },
 { "P2 COIN", FBK_6, },
 { "TEST", FBK_F2, },
 { "SERVICE", FBK_9, },
 { "SELECT1", FBK_3, },
 { "SELECT2", FBK_4, },
 { "RESET", FBK_F3, },
 { "QUIT", FBK_ESCAPE, },
 // Keyboard
 { "P1 UP", FBK_UPARROW, },
 { "P1 LEFT", FBK_LEFTARROW, },
 { "P1 RIGHT", FBK_RIGHTARROW, },
 { "P1 DOWN", FBK_DOWNARROW, },
 { "P1 JAB", FBK_A, },
 { "P1 STRONG", FBK_S, },
 { "P1 FIERCE", FBK_D, },
 { "P1 SHORT", FBK_Z, },
 { "P1 FORWARD", FBK_X, },
 { "P1 ROUNDHOUSE", FBK_C, },
 { "P1 BUTTON 1", FBK_Z, },
 { "P1 BUTTON 2", FBK_X, },
 { "P1 BUTTON 3", FBK_C, },
 { "P1 BUTTON 4", FBK_V, },
 // Joystick
 { "P2 UP", P2_JOY | 0x02, },
 { "P2 LEFT", P2_JOY | 0x00, },
 { "P2 RIGHT", P2_JOY | 0x01, },
 { "P2 DOWN", P2_JOY | 0x03, },
 { "P2 BUTTON 1", P2_JOY | 0x80, },
 { "P2 BUTTON 2", P2_JOY | 0x81, },
 { "P2 BUTTON 3", P2_JOY | 0x82, },
 { "P2 BUTTON 4", P2_JOY | 0x83, },
 { "P2 JAB", P2_JOY | 0x80, },
 { "P2 STRONG", P2_JOY | 0x81, },
 { "P2 FIERCE", P2_JOY | 0x82, },
 { "P2 SHORT", P2_JOY | 0x83, },
 { "P2 FORWARD", P2_JOY | 0x84, },
 { "P2 ROUNDHOUSE", P2_JOY | 0x85, },
 { "P3 UP", P3_JOY | 0x02, },
 { "P3 LEFT", P3_JOY | 0x00, },
 { "P3 RIGHT", P3_JOY | 0x01, },
 { "P3 DOWN", P3_JOY | 0x03, },
 { "P3 BUTTON 1", P3_JOY | 0x80, },
 { "P3 BUTTON 2", P3_JOY | 0x81, },
 { "P3 BUTTON 3", P3_JOY | 0x82, },
 { "P3 BUTTON 4", P3_JOY | 0x83, },
 { "P3 JAB", P3_JOY | 0x80, },
 { "P3 STRONG", P3_JOY | 0x81, },
 { "P3 FIERCE", P3_JOY | 0x82, },
 { "P3 SHORT", P3_JOY | 0x83, },
 { "P3 FORWARD", P3_JOY | 0x84, },
 { "P3 ROUNDHOUSE", P3_JOY | 0x85, },
 { "P4 UP", P4_JOY | 0x02, },
 { "P4 LEFT", P4_JOY | 0x00, },
 { "P4 RIGHT", P4_JOY | 0x01, },
 { "P4 DOWN", P4_JOY | 0x03, },
 { "P4 BUTTON 1", P4_JOY | 0x80, },
 { "P4 BUTTON 2", P4_JOY | 0x81, },
 { "P4 BUTTON 3", P4_JOY | 0x82, },
 { "P4 BUTTON 4", P4_JOY | 0x83, },
 { "P4 JAB", P4_JOY | 0x80, },
 { "P4 STRONG", P4_JOY | 0x81, },
 { "P4 FIERCE", P4_JOY | 0x82, },
 { "P4 SHORT", P4_JOY | 0x83, },
 { "P4 FORWARD", P4_JOY | 0x84, },
 { "P4 ROUNDHOUSE", P4_JOY | 0x85, },
 { NULL, 0, },
};
int InputFindCode(const char *keystring)
{
 // Case-insensitive lookup of an input name in the global table above.
 // Returns the matching input code, or -1 when the name is unknown.
 const struct Input *entry = allInputs;
 while (entry->name != NULL) {
  if (strcasecmp(entry->name, keystring) == 0) {
   return entry->code;
  }
  entry++;
 }
 return -1;
}
|
vigneshwarrvenkat/Salud-EndUser-App | DropDownViewCell.h | <filename>DropDownViewCell.h
//
// DropDownViewCell.h
// Salud Juicery ios
//
// Created by Sal <NAME> (Vigneshwarr) on 10/23/15.
// Copyright (c) 2015 Salud Juice Team. All rights reserved.
//
#import <UIKit/UIKit.h>
// Table-view cell subclass used to render one row of the drop-down list.
// Adds no API beyond UITableViewCell; it exists for styling and cell reuse.
@interface DropDownViewCell : UITableViewCell
@end
plamenborachev/My_JS_Advanced_October2018 | 07.Classes-and-Members-Lab/P05-Point-Distance/pointClass.js | <gh_stars>1-10
// Simple 2D point with a helper for measuring Euclidean distance.
class Point {
    constructor(x, y) {
        this.x = x;
        this.y = y;
    }

    // Straight-line (Euclidean) distance between points a and b.
    static distance(a, b) {
        return Math.hypot(a.x - b.x, a.y - b.y);
    }
}
module.exports = {Point};
uw-it-aca/service-endorsement | endorsement/util/persistent_messages.py | # Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
from persistent_message.models import Message
import hashlib
def get_persistent_messages(tags, params):
    """Collect active persistent messages grouped by severity level.

    Args:
        tags: tag filter forwarded to ``Message.objects.active_messages``.
        params: context passed to each message's ``render`` method.

    Returns:
        Dict mapping the lower-cased level name (e.g. ``"info"``) to a list
        of ``{'message': rendered_text, 'hash': md5_hexdigest}`` entries,
        where the hash identifies the rendered message text.
    """
    grouped = {}
    for message in Message.objects.active_messages(tags=tags):
        level = message.get_level_display().lower()
        rendered = message.render(params)
        grouped.setdefault(level, []).append({
            'message': rendered,
            'hash': hashlib.md5(rendered.encode()).hexdigest()
        })
    return grouped
|
lexctk/SylAlexCenter | src/fr/sorbonne_u/datacenter/hardware/tests/TestsComputer.java | <reponame>lexctk/SylAlexCenter
package fr.sorbonne_u.datacenter.hardware.tests;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import fr.sorbonne_u.components.AbstractComponent;
import fr.sorbonne_u.components.connectors.DataConnector;
import fr.sorbonne_u.components.cvm.AbstractCVM;
import fr.sorbonne_u.datacenter.hardware.computers.Computer;
import fr.sorbonne_u.datacenter.hardware.computers.Computer.AllocatedCore;
import fr.sorbonne_u.datacenter.hardware.computers.connectors.ComputerServicesConnector;
import fr.sorbonne_u.datacenter.hardware.computers.ports.ComputerDynamicStateDataOutboundPort;
import fr.sorbonne_u.datacenter.hardware.computers.ports.ComputerServicesOutboundPort;
import fr.sorbonne_u.datacenter.hardware.computers.ports.ComputerStaticStateDataOutboundPort;
import fr.sorbonne_u.datacenter.hardware.processors.Processor;
import fr.sorbonne_u.datacenter.hardware.processors.Processor.ProcessorPortTypes;
import fr.sorbonne_u.datacenter.hardware.processors.connectors.ProcessorManagementConnector;
import fr.sorbonne_u.datacenter.hardware.processors.connectors.ProcessorServicesConnector;
import fr.sorbonne_u.datacenter.hardware.processors.ports.ProcessorManagementOutboundPort;
import fr.sorbonne_u.datacenter.hardware.processors.ports.ProcessorServicesOutboundPort;
import fr.sorbonne_u.datacenter.software.applicationvm.interfaces.TaskI;
import fr.sorbonne_u.datacenter.software.interfaces.RequestI;
/**
* The class <code>TestsComputer</code> deploys a <code>Computer</code>
* component connected to a <code>ComputerMonitor</code> component and then
* execute one of two test scenarios on the simulated computer.
*
* <p>
* <strong>Description</strong>
* </p>
*
* The two scenarios create a computer with one processor having two cores with
* two levels of admissible frequencies. They then execute two tasks, one on
* each core and respectively raise or lower the frequency of the first core to
* test the dynamic adaptation of the task duration. In parallel, the computer
* monitor starts the notification of the dynamic state of the computer by
* requesting 25 pushes at the rate of one each second.
*
* <p>
* <strong>Invariant</strong>
* </p>
*
* <pre>
* invariant true
* </pre>
*
* <p>
* Created on : April 15, 2015
* </p>
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
*/
public class TestsComputer extends AbstractCVM {
 // Well-known URIs used to wire the Computer's three inbound ports to the
 // matching outbound ports created during deployment.
 private static final String ComputerServicesInboundPortURI = "cs-ibp";
 private static final String ComputerServicesOutboundPortURI = "cs-obp";
 private static final String ComputerStaticStateDataInboundPortURI = "css-dip";
 private static final String ComputerStaticStateDataOutboundPortURI = "css-dop";
 private static final String ComputerDynamicStateDataInboundPortURI = "cds-dip";
 private static final String ComputerDynamicStateDataOutboundPortURI = "cds-dop";
 // Outbound port used by the test scenario to allocate cores on the computer.
 private ComputerServicesOutboundPort csPort;
 // Monitor component receiving static/dynamic state pushes from the computer.
 private ComputerMonitor cm;
 private TestsComputer() throws Exception {
  super();
 }
 @Override
 public void deploy() throws Exception {
  Processor.DEBUG = true;
  // Simulated hardware: one computer, one processor, two cores, each core
  // supporting 1500 MHz and 3000 MHz with the corresponding instruction rates.
  String computerURI = "computer0";
  int numberOfProcessors = 1;
  int numberOfCores = 2;
  Set<Integer> admissibleFrequencies = new HashSet<>();
  admissibleFrequencies.add(1500);
  admissibleFrequencies.add(3000);
  Map<Integer, Integer> processingPower = new HashMap<>();
  processingPower.put(1500, 1500000);
  processingPower.put(3000, 3000000);
  Computer c = new Computer(
    computerURI,
    admissibleFrequencies,
    processingPower,
    1500,
    1500,
    numberOfProcessors,
    numberOfCores,
    ComputerServicesInboundPortURI,
    ComputerStaticStateDataInboundPortURI,
    ComputerDynamicStateDataInboundPortURI
  );
  c.toggleTracing();
  c.toggleLogging();
  this.addDeployedComponent(c);
  // Outbound services port owned by an anonymous throwaway component,
  // connected to the computer's services inbound port.
  this.csPort = new ComputerServicesOutboundPort(ComputerServicesOutboundPortURI, new AbstractComponent(0, 0) {});
  this.csPort.publishPort();
  this.csPort.doConnection(ComputerServicesInboundPortURI, ComputerServicesConnector.class.getCanonicalName());
  // Monitor component plus the two data connections (static and dynamic
  // state) that push computer state notifications to it.
  this.cm = new ComputerMonitor(computerURI, true, ComputerStaticStateDataOutboundPortURI,
    ComputerDynamicStateDataOutboundPortURI);
  cm.toggleTracing();
  cm.toggleLogging();
  this.addDeployedComponent(cm);
  ComputerStaticStateDataOutboundPort cssdop = new ComputerStaticStateDataOutboundPort(ComputerStaticStateDataOutboundPortURI, c, computerURI);
  cssdop.publishPort();
  cssdop.doConnection(ComputerStaticStateDataInboundPortURI, DataConnector.class.getCanonicalName());
  ComputerDynamicStateDataOutboundPort cdsdop = new ComputerDynamicStateDataOutboundPort(ComputerDynamicStateDataOutboundPortURI, c, computerURI);
  cdsdop.publishPort();
  cdsdop.doConnection(ComputerDynamicStateDataInboundPortURI, DataConnector.class.getCanonicalName());
  super.deploy();
 }
 @Override
 public void start() throws Exception {
  super.start();
 }
 @Override
 public void shutdown() throws Exception {
  // Disconnect the services port before shutting the assembly down.
  this.csPort.doDisconnection();
  super.shutdown();
 }
 // Allocates two cores, runs one task on each, then raises the frequency of
 // the first core mid-run to exercise dynamic adaptation of task duration.
 private void testScenario() throws Exception {
  AllocatedCore[] ac = this.csPort.allocateCores(2);
  final String processorServicesInboundPortURI = ac[0].processorInboundPortURI.get(ProcessorPortTypes.SERVICES);
  final String processorManagementInboundPortURI = ac[0].processorInboundPortURI
    .get(ProcessorPortTypes.MANAGEMENT);
  ProcessorServicesOutboundPort psPort = new ProcessorServicesOutboundPort(new AbstractComponent(0, 0) {});
  psPort.publishPort();
  psPort.doConnection(processorServicesInboundPortURI, ProcessorServicesConnector.class.getCanonicalName());
  ProcessorManagementOutboundPort pmPort = new ProcessorManagementOutboundPort(new AbstractComponent(0, 0) {});
  pmPort.publishPort();
  pmPort.doConnection(processorManagementInboundPortURI, ProcessorManagementConnector.class.getCanonicalName());
  System.out.println("starting task-001 on core 0");
  psPort.executeTaskOnCore(new TaskI() {
   private static final long serialVersionUID = 1L;
   @Override
   public RequestI getRequest() {
    return new RequestI() {
     private static final long serialVersionUID = 1L;
     @Override
     public long getPredictedNumberOfInstructions() {
      return 15000000000L;
     }
     @Override
     public String getRequestURI() {
      return "r0";
     }
    };
   }
   @Override
   public String getTaskURI() {
    return "task-001";
   }
  }, ac[0].coreNo);
  System.out.println("starting task-002 on core 1");
  psPort.executeTaskOnCore(new TaskI() {
   private static final long serialVersionUID = 1L;
   @Override
   public RequestI getRequest() {
    return new RequestI() {
     private static final long serialVersionUID = 1L;
     @Override
     public long getPredictedNumberOfInstructions() {
      return 30000000000L;
     }
     @Override
     public String getRequestURI() {
      return "r1";
     }
    };
   }
   @Override
   public String getTaskURI() {
    return "task-002";
   }
  }, ac[1].coreNo);
  // Test scenario 1
  Thread.sleep(5000L);
  pmPort.setCoreFrequency(0, 3000);
  // Test scenario 2
  // Thread.sleep(3000L) ;
  // pmPort.setCoreFrequency(0, 1500) ;
  psPort.doDisconnection();
  pmPort.doDisconnection();
  psPort.unpublishPort();
  pmPort.unpublishPort();
 }
 public static void main(String[] args) {
  // AbstractCVM.toggleDebugMode() ;
  try {
   final TestsComputer c = new TestsComputer();
   c.deploy();
   System.out.println("starting...");
   c.start();
   // Run the scenario on its own thread so the main thread can time out
   // the whole test after 25 seconds and shut the assembly down.
   new Thread(() -> {
    try {
     c.testScenario();
    } catch (Exception e) {
     throw new RuntimeException(e);
    }
   }).start();
   Thread.sleep(25000L);
   System.out.println("shutting down...");
   c.shutdown();
   System.out.println("ending...");
   System.exit(0);
  } catch (Exception e) {
   throw new RuntimeException(e);
  }
 }
}
|
m-lab/epoxy | storage/datastore_test.go | // Copyright 2016 ePoxy Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//////////////////////////////////////////////////////////////////////////////
package storage
import (
"context"
"fmt"
"reflect"
"testing"
"cloud.google.com/go/datastore"
"github.com/m-lab/epoxy/datastorex"
)
// fakeDatastoreClient implements the datastoreClient interface for testing.
// Every operation should be successful.
type fakeDatastoreClient struct {
	host *Host
}

// Get reads the Host value from f.host and copies it to dst.
func (f *fakeDatastoreClient) Get(ctx context.Context, key *datastore.Key, dst interface{}) error {
	// Copy the host from f.host into dst.
	h, ok := dst.(*Host)
	if !ok {
		return fmt.Errorf("type assertion failed: got %T; want *Host", dst)
	}
	*h = *f.host
	return nil
}

// Put reads the Host value from src and copies it to f.host.
func (f *fakeDatastoreClient) Put(ctx context.Context, key *datastore.Key, src interface{}) (*datastore.Key, error) {
	// Copy the host from src into f.host.
	h, ok := src.(*Host)
	if !ok {
		return nil, fmt.Errorf("type assertion failed: got %T; want *Host", src)
	}
	*f.host = *h
	return nil, nil
}

// GetAll appends the single fake host to the list pointed to by dst,
// ignoring the query.
func (f *fakeDatastoreClient) GetAll(ctx context.Context, q *datastore.Query, dst interface{}) ([]*datastore.Key, error) {
	// Extract the pointer to a list of *Host, and append f.host to the list.
	hosts, ok := dst.(*[]*Host)
	if !ok {
		return nil, fmt.Errorf("type assertion failed: got %T; want *[]*Host", dst)
	}
	*hosts = append(*hosts, f.host)
	return nil, nil
}

// errDatastoreClient implements a datastoreClient interface where every call fails with an error.
// The error returned is defined in errDatastoreClient.err.
type errDatastoreClient struct {
	err error
}

// Get always fails with f.err.
func (f *errDatastoreClient) Get(ctx context.Context, key *datastore.Key, dst interface{}) error {
	return f.err
}

// Put always fails with f.err.
func (f *errDatastoreClient) Put(ctx context.Context, key *datastore.Key, src interface{}) (*datastore.Key, error) {
	return nil, f.err
}

// GetAll always fails with f.err.
func (f *errDatastoreClient) GetAll(ctx context.Context, q *datastore.Query, dst interface{}) ([]*datastore.Key, error) {
	return nil, f.err
}
// TestNewDatastoreClient verifies that a config built by NewDatastoreConfig
// round-trips a Host record through Load using the fake client.
func TestNewDatastoreClient(t *testing.T) {
	h := Host{
		Name: "mlab1.iad1t.measurement-lab.org",
	}
	f := &fakeDatastoreClient{&h}
	c := NewDatastoreConfig(f)
	h2, err := c.Load("mlab1.iad1t.measurement-lab.org")
	if err != nil {
		t.Fatal(err)
	}
	if h.Name != h2.Name {
		t.Errorf("Load for NewDatastoreConfig failed; want %q, got %q", h.Name, h2.Name)
	}
}
// TestDatastore exercises Save, Load and List on a DatastoreConfig backed by
// the always-successful fake client, checking each result against the record.
func TestDatastore(t *testing.T) {
	// NB: we store a partial Host record for brevity.
	h := Host{
		Name:     "mlab1.iad1t.measurement-lab.org",
		IPv4Addr: "192.168.127.12",
		Boot: datastorex.Map{
			Stage1IPXE: "https://example.com/path/stage1to2/stage1to2.ipxe",
		},
		CurrentSessionIDs: SessionIDs{
			Stage2ID: "01234",
		},
	}
	// Declare the fake datastore client outside the function below so we can access member elements.
	f := &fakeDatastoreClient{&h}
	c := &DatastoreConfig{
		Client:    f,
		Kind:      entityKind,
		Namespace: namespace,
	}
	// Store host record.
	err := c.Save(&h)
	if err != nil {
		t.Fatalf("Failed to save host: %s", err)
	}
	if !reflect.DeepEqual(&h, f.host) {
		t.Fatalf("Host records does not match: got %#v; want %#v\n", f.host, &h)
	}
	// Retrieve host record.
	h2, err := c.Load("mlab1.iad1t.measurement-lab.org")
	if err != nil {
		t.Fatalf("Failed to load host: %s", err)
	}
	if !reflect.DeepEqual(&h, h2) {
		t.Fatalf("Host records does not match: got %#v; want %#v\n", h2, &h)
	}
	// GetAll all hosts.
	hosts, err := c.List()
	if err != nil {
		t.Fatalf("Failed to list hosts: %s", err)
	}
	if len(hosts) != 1 {
		t.Fatalf("Failed to list hosts: got %d; want 1\n", len(hosts))
	}
}
// TestDatastoreFailures verifies that Save, Load and List propagate the
// underlying client error unchanged, using the always-failing client.
func TestDatastoreFailures(t *testing.T) {
	// NB: we store a partial Host record for brevity.
	h := Host{
		Name:     "mlab1.iad1t.measurement-lab.org",
		IPv4Addr: "192.168.127.12",
		Boot: datastorex.Map{
			Stage1IPXE: "https://example.com/path/stage1to2/stage1to2.ipxe",
		},
		CurrentSessionIDs: SessionIDs{
			Stage2ID: "01234",
		},
	}
	// Declare the fake datastore client outside the function below so we can access member elements.
	f := &errDatastoreClient{fmt.Errorf("Fake failure")}
	c := &DatastoreConfig{
		Client:    f,
		Kind:      entityKind,
		Namespace: namespace,
	}
	// Store host record.
	err := c.Save(&h)
	if err != f.err {
		t.Fatalf("Saved without error: got %q; want %q\n", err, f.err)
	}
	// Retrieve host record.
	_, err = c.Load("mlab1.iad1t.measurement-lab.org")
	if err != f.err {
		t.Fatalf("Load without error: got %q; want %q\n", err, f.err)
	}
	// GetAll all hosts.
	_, err = c.List()
	if err != f.err {
		t.Fatalf("List without error: got %q; want %q\n", err, f.err)
	}
}
|
gaaf/gotransip | ipaddress/ipaddress.go | package ipaddress
import (
"net"
)
// IPAddress struct for an IPAddress
type IPAddress struct {
	// The IP address
	Address net.IP `json:"address"`
	// The TransIP DNS resolvers you can use
	DNSResolvers []net.IP `json:"dnsResolvers,omitempty"`
	// Gateway address for this IP
	Gateway net.IP `json:"gateway,omitempty"`
	// Reverse DNS, also known as the PTR record
	ReverseDNS string `json:"reverseDns"`
	// Subnet mask
	SubnetMask SubnetMask `json:"subnetMask,omitempty"`
}

// IPAddressesWrapper struct wraps an IPAddress struct,
// this is mainly used in other subpackages that need to unmarshal a ipAddresses: [] server response
type IPAddressesWrapper struct {
	// array of IP Addresses
	IPAddresses []IPAddress `json:"ipAddresses,omitempty"`
}
|
livehybrid/addonfactory-ucc-generator | splunk_add_on_ucc_framework/UCC-UI-lib/bower_components/SplunkWebCore/search_mrsparkle/exposed/js/contrib/jg_lib/utils/ObjectUtil.js | /*!
* Copyright (c) 2007-2016 <NAME>
*
* Released under the MIT license:
* http://opensource.org/licenses/MIT
*/
define(function(require, exports, module)
{
    var Class = require("../Class");

    // Static helpers for plain objects. All iteration is restricted to own,
    // enumerable properties via Object.prototype.hasOwnProperty.
    return Class(module.id, function(ObjectUtil)
    {

        // Private Static Properties

        var _hasOwnProperty = Object.prototype.hasOwnProperty;

        // Public Static Methods

        // Copies the own properties of each source argument onto obj
        // (later sources win) and returns obj.
        ObjectUtil.extend = function(obj, source)
        {
            var p;
            for (var i = 1, l = arguments.length; i < l; i++)
            {
                source = arguments[i];
                for (p in source)
                {
                    if (_hasOwnProperty.call(source, p))
                        obj[p] = source[p];
                }
            }
            return obj;
        };

        // Returns obj[key] when key is an own property; undefined otherwise.
        ObjectUtil.get = function(obj, key)
        {
            return _hasOwnProperty.call(obj, key) ? obj[key] : void(0);
        };

        // True when key is an own property of obj.
        ObjectUtil.has = function(obj, key)
        {
            return _hasOwnProperty.call(obj, key);
        };

        // Returns the own, enumerable property names of obj.
        ObjectUtil.keys = function(obj)
        {
            var keys = [];
            for (var key in obj)
            {
                if (_hasOwnProperty.call(obj, key))
                    keys.push(key);
            }
            return keys;
        };

        // Returns the values of obj's own, enumerable properties.
        ObjectUtil.values = function(obj)
        {
            var values = [];
            for (var key in obj)
            {
                if (_hasOwnProperty.call(obj, key))
                    values.push(obj[key]);
            }
            return values;
        };

        // Returns [key, value] pairs for obj's own, enumerable properties.
        ObjectUtil.pairs = function(obj)
        {
            var pairs = [];
            for (var key in obj)
            {
                if (_hasOwnProperty.call(obj, key))
                    pairs.push([ key, obj[key] ]);
            }
            return pairs;
        };

        // True when obj has no own, enumerable properties.
        ObjectUtil.isEmpty = function(obj)
        {
            for (var key in obj)
            {
                if (_hasOwnProperty.call(obj, key))
                    return false;
            }
            return true;
        };

    });
});
|
janfb/sbibm | sbibm/algorithms/pytorch/__init__.py | from sbibm.algorithms.pytorch.baseline_grid import run as baseline_grid
from sbibm.algorithms.pytorch.baseline_posterior import run as baseline_posterior
from sbibm.algorithms.pytorch.baseline_prior import run as baseline_prior
from sbibm.algorithms.pytorch.baseline_rejection import run as baseline_rejection
from sbibm.algorithms.pytorch.baseline_runtime import run as baseline_runtime
from sbibm.algorithms.pytorch.baseline_sir import run as baseline_sir
|
Bhpsngum/Uranus-Starblast-Prototypes | systems/hangars/hangars.js | <reponame>Bhpsngum/Uranus-Starblast-Prototypes
// Manager for the hangar parking slots: every ship flagged isInHangar is
// assigned to one slot and pinned to that slot's position each tick.
// NOTE(review): slots are created later in this file with occupiedBy: -1,
// but tick() resets occupiedBy to null every frame, so the -1 value is only
// visible before the first tick.
sp.hangarsManager = {
	hangars: [],
	// Reset a ship's hangar bookkeeping so it holds no slot.
	init:function(ship){
		//TODO: each ship should be provided with a global unique identifier
		ship.custom.hangarSys = {
			hangar: null,
		}
	},
	// Returns true when the ship's recorded hangar index refers to an
	// existing, unoccupied slot; otherwise resets the ship's hangar data
	// and returns false.
	validateHangar:function(ship){
		if(ship.custom.hangarSys!=null){
			var h = ship.custom.hangarSys.hangar;
			var data = ship.custom.hangarSys;
			// occupiedBy is compared against null to match the per-tick
			// reset in tick(), not the -1 used when slots are created.
			if(
				h>=0 &&
				h<this.hangars.length &&
				this.hangars[h] != null &&
				this.hangars[h].occupiedBy == null
			)
				return true;
		}
		//if the hangar is invalid(e.g. in use) - we just reset everything for the ship
		this.init(ship);
		return false;
	},
	// Per-frame update: rebuild the slot occupancy map, hand free slots to
	// slot-less docked ships, then pin every docked ship to its slot.
	tick:function(){
		//update all hangars occupiedBy
		//clear
		var emptyHangars = {};
		for(var i = 0; i<this.hangars.length; i++){
			this.hangars[i].occupiedBy = null;
			emptyHangars[i] = this.hangars[i];
		}
		//iterate and assign
		for(var i = 0; i<game.ships.length; i++){
			var ship = game.ships[i];
			if(!ship.custom.isInHangar)continue;
			//if this ship is assigned a valid hangar
			if(this.validateHangar(ship)){
				var h = ship.custom.hangarSys.hangar;
				this.hangars[h].occupiedBy = i;
				emptyHangars[h] = null;
			}
		}
		//now all hangars are occupiedBy, except for ones that aren't being used
		//iterate over all ships again, and assign those that aren't to empty hangars
		for(var i = 0; i<game.ships.length; i++){
			var ship = game.ships[i];
			if(!ship.custom.isInHangar)continue; //TODO: fill own ships list?
			var data = ship.custom.hangarSys;
			//if this ship hasn't been assigned yet,
			if(data.hangar==null) {
				var h = null;
				// NOTE(review): for-in yields string keys; if every slot is
				// taken, h ends up as the last visited key (a string), not
				// null, so the "no hangars available" branch never fires.
				for(var h in emptyHangars){//there's gotta be a better way
					if(emptyHangars[h]!=null){
						break;
					}
				}
				if(h==null){//no hangars available
					//fucking panic
					//TODO: don't panic
				}
				data.hangar = h;
			}
			//by now, hopefully, all ships are properly set up. handle them:
			var hangar = this.hangars[data.hangar];
			// Pin the ship at the slot position, cancel its velocity,
			// top up its stats and keep it idle.
			ship.set({x:hangar.x, y:hangar.y, vx: 0, vy: 0, stats: 33333333,
				//type: 400+(ship.custom.selectedShip+1),
				idle: true
			});
		}
	},
	// Game-loop entry point; calls through sp so `this` binding is irrelevant.
	tickHook: function(){
		sp.hangarsManager.tick();
	}
}
// Hangar grid layout: 3 columns x 16 rows of slots on the right side of the
// map. `ms`, `sp` and `GPOTypes` are defined elsewhere in this file.
var hangarsXPos = ms*4.7;
var hangarsSideWallXPos = hangarsXPos-155
for(var x = 0; x<3; x++){
	for(var y = 0; y<16; y++){
		var xx = (hangarsXPos-90)+70*x;
		var yy = -240+(y/16)*480;
		sp.hangarsManager.hangars.push({
			GPO:GPOTypes[5].create(xx,yy,0),
			occupiedBy: -1, // NOTE(review): tick() uses null, not -1, for "free"
			x:xx+4,
			y:yy-4
		});
	}
}
// Template for a static physics body used as an invisible wall segment
// around the hangar area (placed repeatedly below via game.setObject).
var barrier = {
	id: "cube",
	physics: {
		mass: 650,
		// 50-point radial collision outline; body never moves (fixed: true)
		shape: [2.682,2.723,2.806,2.958,3.169,3.474,3.678,3.672,3.308,3.048,2.878,2.759,2.697,2.697,2.759,2.878,3.048,3.308,3.672,3.678,3.474,3.169,2.958,2.806,2.723,2.682,2.723,2.806,2.958,3.169,3.474,3.678,3.672,3.307,3.054,2.878,2.761,2.698,2.698,2.761,2.878,3.054,3.307,3.672,3.678,3.474,3.169,2.958,2.806,2.723],
		fixed: true
	}
} ;
// Visual side walls for the hangar bay (left/right), plus physics barriers
// tiled along both walls every `step` units.
GPOTypes[6].create(hangarsSideWallXPos,0,1,240)
GPOTypes[6].create(-hangarsSideWallXPos,0,1,240)
//GPOTypes[6].create(hangarsXPos,240,115/2,1)
//GPOTypes[6].create(hangarsXPos,-240,115/2,1)
var step = 20
// Right-hand wall barrier segments.
for(var y = -ms*5; y<ms*5; y+=step){
	var yy = y//-240+(y/16)*480;
	game.setObject({
		id:"barrierL"+y,
		type:barrier,
		position:{x:hangarsSideWallXPos,y:yy,z:-3},
		scale:{x:1,y:6,z:10},
		rotation: {x:0,y:0,z:0}
	}) ;
}
// Mirrored left-hand wall barrier segments.
for(var y = -ms*5; y<ms*5; y+=step){
	var yy = y//-240+(y/16)*480;
	game.setObject({
		id:"barrierR"+y,
		type:barrier,
		position:{x:-hangarsSideWallXPos,y:yy,z:-3},
		scale:{x:1,y:6,z:10},
		rotation: {x:0,y:0,z:0}
	}) ;
}
/*
for(var x = -1; x<=1; x++){
var xx = hangarsXPos+x*70;
game.setObject({
id:"barrierT"+x,
type:barrier,
position:{x:xx,y:280,z:-3},
scale:{x:6,y:1,z:10},
rotation: {x:0,y:0,z:0}
}) ;
}
for(var x = -1; x<=1; x++){
var xx = hangarsXPos+x*70;
game.setObject({
id:"barrierB"+x,
type:barrier,
position:{x:xx,y:-280,z:-3},
scale:{x:6,y:1,z:10},
rotation: {x:0,y:0,z:0}
}) ;
}*/
|
caffeinate/gwt-map-scratch | src/uk/co/plogic/gwt/lib/map/overlay/Points.java | package uk.co.plogic.gwt.lib.map.overlay;
import java.util.HashMap;
import uk.co.plogic.gwt.lib.events.MapViewChangedEvent;
import uk.co.plogic.gwt.lib.events.MapViewChangedEventHandler;
import uk.co.plogic.gwt.lib.map.MapUtils;
import uk.co.plogic.gwt.lib.map.markers.IconMarker;
import uk.co.plogic.gwt.lib.map.markers.AbstractBaseMarker.UserInteraction;
import uk.co.plogic.gwt.lib.map.overlay.resources.OverlayImageResource;
import uk.co.plogic.gwt.lib.utils.AttributeDictionary;
import uk.co.plogic.gwt.lib.utils.StringUtils;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.shared.HandlerManager;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.HTML;
import com.google.gwt.user.client.ui.Image;
import com.google.maps.gwt.client.LatLng;
import com.google.maps.gwt.client.Point;
/**
 * Map overlay that manages a set of icon markers keyed by id and shows a
 * simple info box when a marker is clicked. The info box is hidden whenever
 * the map view changes.
 */
public class Points extends AbstractOverlay {

	//ArrayList<IconMarker> markers = new ArrayList<IconMarker>();
	// Markers indexed by their id so addPoint can replace an existing marker.
	protected HashMap<String, IconMarker> markers =
			new HashMap<String, IconMarker>();
	// Lazily created panel that hosts the info box (see annotateMarker).
	protected FlowPanel info_marker;
	// HTML template rendered into the info box; null disables annotation.
	protected String markerTemplate;
	OverlayImageResource images;

	public Points(HandlerManager eventBus) {
		super(eventBus);
		images = GWT.create(OverlayImageResource.class);
		// Hide any open info box when the map is panned/zoomed, because its
		// pixel position would no longer match the marker.
		eventBus.addHandler(MapViewChangedEvent.TYPE, new MapViewChangedEventHandler() {
			@Override
			public void onMapViewChangedEvent(MapViewChangedEvent event) {
				if( info_marker != null )
					info_marker.setVisible(false);
			}
		});
	}

	/**
	 * Adds (or replaces) a marker with the given id at the given position.
	 */
	public void addPoint(String id, LatLng position, String title) {
		IconMarker mapMarker = new IconMarker(eventBus, id, null, position, title);
		mapMarker.setMap(gMap);
		mapMarker.setOverlay(this);
		// Replacing an existing id: remove the old marker from the map first.
		if( markers.containsKey(id) )
			markers.get(id).remove();
		markers.put(id, mapMarker);
	}

	@Override
	public void clearAllMarkers() {
		for(IconMarker m : markers.values()) {
			m.remove();
		}
		markers.clear();
	}

	/**
	 * Handles a user interaction forwarded from a marker; only CLICK opens
	 * the info box. Unknown marker ids are ignored.
	 */
	@Override
	public void userInteractionWithMarker(UserInteraction interactionType, String markerId, LatLng latLng) {
		logger.finer("userInteraction for markerId:"+markerId+" in layer:"+getOverlayId());
		if( ! markers.containsKey(markerId) )
			return;

		IconMarker targetMarker = markers.get(markerId);
		// lock marker as selected
		if( interactionType == UserInteraction.CLICK ) {
			annotateMarker(targetMarker, latLng);
		}
	}

	/**
	 * show an info window.
	 *
	 * There is a lot of duplication between this and Shapes.annotateMarker.
	 * But putting it somewhere shared proved hard because of focusOnMarker
	 * and lockedFocusMarker.
	 *
	 * Passing null for any argument (or having no template set) hides the
	 * info box instead of showing it.
	 *
	 * @param targetMarker marker whose title is rendered into the template
	 * @param latLng       anchor position for the info box
	 */
	protected void annotateMarker(IconMarker targetMarker, LatLng latLng) {

		if( info_marker == null ) {
			final String mname = "marker_info_box";
			info_marker = mapAdapter.createMapOverlayPanel(mname, mname);
		}

		if( targetMarker == null || latLng == null || markerTemplate == null ) {
			info_marker.setVisible(false);
			return;
		}

		AttributeDictionary markerData = new AttributeDictionary();
		markerData.set("title", targetMarker.getTitle());
		// +1 em so the box is slightly wider than the title text.
		int titleLen = targetMarker.getTitle().length()+1;
		String builtHtml = StringUtils.renderHtml(markerTemplate, markerData);
		Point p = MapUtils.LatLngToPixel(gMap, latLng);

		HTML h = new HTML(builtHtml);
		h.setStyleName("marker_info_box_simple_copy");

		info_marker.clear();

		// Close button hides the box and clears the whole overlay.
		Image closeButton = new Image(images.close());
		closeButton.setStyleName("marker_info_box_close");
		closeButton.addClickHandler(new com.google.gwt.event.dom.client.ClickHandler() {
			@Override
			public void onClick(ClickEvent event) {
				info_marker.setVisible(false);
				clearAllMarkers();
				hide();
			}
		});
		info_marker.add(closeButton);
		info_marker.add(h);

		// Position the box just above the clicked point, sized in ems.
		double offsetX = p.getX();
		double offsetY = p.getY()-10;
		info_marker.getElement().setAttribute(
				"style",
				"left: "+offsetX+"px;top: "+offsetY+"px;width:"+titleLen+"em;"
		);
		info_marker.setVisible(true);
	}

	public void setInfoMarkerTemplate(String template) {
		markerTemplate = template;
	}
}
|
Secure-Labs/nextgen | tests/mutate/unit/tests.c | /*
* Copyright (c) 2017, <NAME>, Minneapolis, MN
*
* Permission to use, copy, modify, and/or distribute this software for any purpose
* with or without fee is hereby granted, provided that the above copyright notice
* and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
* REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
* CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
* WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
* OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include "unity.h"
#include "io/io.h"
#include "crypto/crypto.h"
#include "crypto/random.h"
#include "mutate/mutate.h"
#include "memory/memory.h"
#include "syscall/syscall.h"
#include <string.h>
#include <stdlib.h>
/**
 * Verify that mutate_buffer() succeeds and actually changes the buffer
 * contents. Fixes a leak in the original test: the malloc'd buffer was
 * never freed.
 */
static void test_mutate_buffer(void)
{
    int32_t rtrn = 0;
    const char *str = "hfd94hf497grh49fh";
    size_t len = strlen(str);

    /* Note: buffer is deliberately not NUL terminated; all comparisons
       below are bounded by len. */
    char *buffer = malloc(len);
    TEST_ASSERT_NOT_NULL(buffer);

    memcpy(buffer, str, len);
    TEST_ASSERT(strncmp(buffer, str, len) == 0);

    /* mutate_buffer may reallocate, so it takes the buffer by reference. */
    rtrn = mutate_buffer((void **)&buffer, len);
    TEST_ASSERT(rtrn == 0);

    /* After mutation the contents must differ from the original string. */
    TEST_ASSERT(strncmp(buffer, str, len) != 0);

    free(buffer);

    return;
}
/*
 * Wire up the dependency-injection context needed by the crypto and mutate
 * modules before any test runs: console output, default allocator, then the
 * default random generator.
 */
static void setup_tests(void)
{
    struct dependency_context *ctx = NULL;
    struct output_writter *output = NULL;

    output = get_console_writter();
    TEST_ASSERT_NOT_NULL(output);

    struct memory_allocator *allocator = NULL;

    allocator = get_default_allocator();
    TEST_ASSERT_NOT_NULL(allocator);

    /* NULL terminates the vararg dependency list. */
    ctx = create_dependency_ctx(create_dependency(output, OUTPUT),
                                create_dependency(allocator, ALLOCATOR),
                                NULL);
    /* Crypto deps must be injected before the random generator is fetched. */
    inject_crypto_deps(ctx);

    struct random_generator *random_gen = NULL;

    random_gen = get_default_random_generator();
    TEST_ASSERT_NOT_NULL(random_gen);

    add_dep(ctx, create_dependency(random_gen, RANDOM_GEN));

    inject_mutate_deps(ctx);
}
/* Test entry point: inject dependencies, then run the single test case. */
int main(void)
{
    setup_tests();

    test_mutate_buffer();

    return (0);
}
janyman/wish-c99 | src/wish_connection_mgr.c | /**
* Copyright (C) 2018, ControlThings Oy Ab
* Copyright (C) 2018, <NAME>
* Copyright (C) 2018, <NAME>
* Copyright (C) 2018, <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* @license Apache-2.0
*/
#include <stdint.h>
#include "wish_port_config.h"
#include "wish_core.h"
#include "wish_utils.h"
#include "wish_identity.h"
#include "wish_debug.h"
#include "wish_connection.h"
#include "bson.h"
#include "bson_visit.h"
#include "wish_connection_mgr.h"
#include "string.h"
/**
 * Allocate and zero the core's connection pool and start the periodic
 * liveliness check (every 1 time unit).
 *
 * Fix: the original dereferenced the wish_platform_malloc() result without
 * checking for NULL; on allocation failure memset() would crash. We now log
 * and bail out early, leaving connection_pool NULL.
 */
void wish_connections_init(wish_core_t* core) {
    const size_t pool_size = sizeof(wish_connection_t) * WISH_CONTEXT_POOL_SZ;

    core->connection_pool = wish_platform_malloc(pool_size);
    if (core->connection_pool == NULL) {
        WISHDEBUG(LOG_CRITICAL, "Failed to allocate connection pool");
        return;
    }
    memset(core->connection_pool, 0, pool_size);

    /* Connection ids start at 1; 0 is reserved as "no connection". */
    core->next_conn_id = 1;

    wish_core_time_set_interval(core, &check_connection_liveliness, NULL, 1);
}
/*
 * Walk the local identity database and open a TCP connection from the first
 * local identity (uid_list[0]) to every other identity that we are not
 * already connected to, unless that identity is flagged "do not connect" or
 * "banned". Each transport URL of an identity is parsed and tried.
 *
 * NOTE(review): `i` is initialized to 0 and never changed, so the
 * `if (i == j)` guard only skips j == 0 (connecting uid_list[0] to itself).
 */
void wish_connections_check(wish_core_t* core) {
    int num_uids_in_db = wish_get_num_uid_entries();
    wish_uid_list_elem_t uid_list[num_uids_in_db];
    int num_uids = wish_load_uid_list(uid_list, num_uids_in_db);

    int i = 0;
    int j;
    for (j = 0; j < num_uids; j++) {
        if (i == j) { continue; }
        /* Skip identities we already have a live connection to. */
        if( wish_core_is_connected_luid_ruid(core, uid_list[0].uid, uid_list[j].uid) ) { continue; }

        wish_identity_t id;

        if (wish_identity_load(uid_list[j].uid, &id) != RET_SUCCESS) {
            WISHDEBUG(LOG_CRITICAL, "Failed loading identity");
            wish_identity_destroy(&id);
            /* NOTE(review): a single failed load aborts the whole scan. */
            return;
        }

        /* Check if we should connect, meta: { connect: false } */
        if (wish_identity_get_meta_connect(&id) == false) {
            WISHDEBUG(LOG_CRITICAL, "check connections: will not connect, %s flagged as 'do not connect'", id.alias);
            wish_identity_destroy(&id);
            continue;
        }

        /* Check if we should connect, permissions: { banned: true } */
        if (wish_identity_is_banned(&id) == true) {
            WISHDEBUG(LOG_CRITICAL, "check connections, will not connect, %s is flagged as 'banned'", id.alias);
            wish_identity_destroy(&id);
            continue;
        }

        /* Try every non-empty transport URL of this identity. */
        for (int cnt = 0; cnt < WISH_MAX_TRANSPORTS; cnt++) {
            int url_len = strnlen(id.transports[cnt], WISH_MAX_TRANSPORT_LEN);
            if (url_len > 0) {
                char* url = id.transports[cnt];
                //WISHDEBUG(LOG_CRITICAL, "  Should connect %02x %02x > %02x %02x to %s", uid_list[0].uid[0], uid_list[0].uid[1], uid_list[j].uid[0], uid_list[j].uid[1], url);

                wish_ip_addr_t ip;
                uint16_t port;
                int ret = wish_parse_transport_port(url, url_len, &port);
                if (ret) {
                    WISHDEBUG(LOG_CRITICAL, "Could not parse transport port");
                }
                else {
                    ret = wish_parse_transport_ip(url, url_len, &ip);
                    if (ret) {
                        WISHDEBUG(LOG_CRITICAL, "Could not parse transport ip");
                    }
                    else {
                        /* Parsing of IP and port OK: go ahead with connecting */
                        wish_connections_connect_tcp(core, uid_list[0].uid, uid_list[j].uid, &ip, port);
                    }
                }
            }
        }
        wish_identity_destroy(&id);
    }
}
/* This function will check the connections and send a 'ping' if they
 * have not received anything lately.
 *
 * Runs periodically (registered in wish_connections_init). Per connection:
 *  - CONNECTED: send a ping after PING_INTERVAL of silence; close the
 *    connection if a sent ping gets no reply within PING_TIMEOUT.
 *  - IN_MAKING: close if the handshake has not progressed within
 *    CONNECTION_SETUP_TIMEOUT.
 *  - CLOSING: force-close a connection stuck in the closing state.
 *  - FREE: nothing to do.
 */
void check_connection_liveliness(wish_core_t* core, void* ctx) {
    //WISHDEBUG(LOG_CRITICAL, "check_connection_liveliness");
    int i = 0;
    for (i = 0; i < WISH_CONTEXT_POOL_SZ; i++) {
        wish_connection_t* connection = &(core->connection_pool[i]);

        switch (connection->context_state) {
        case WISH_CONTEXT_CONNECTED:
            /* We have found a connected context we must examine */
            /* Ping only when the line has been quiet for PING_INTERVAL and
             * no ping is currently outstanding (sent after last input). */
            if ((core->core_time > (connection->latest_input_timestamp + PING_INTERVAL))
                    && (connection->ping_sent_timestamp <= connection->latest_input_timestamp))
            {
                WISHDEBUG(LOG_DEBUG, "Pinging connection %d", i);

                /* Enqueue a ping message */
                const size_t ping_buffer_sz = 128;
                uint8_t ping_buffer[ping_buffer_sz];
                bson ping;
                bson_init_buffer(&ping, ping_buffer, ping_buffer_sz);
                bson_append_bool(&ping, "ping", true);
                bson_finish(&ping);

                wish_core_send_message(core, connection, bson_data(&ping), bson_size(&ping));
                connection->ping_sent_timestamp = core->core_time;
            }

            /* An outstanding ping with no input for PING_TIMEOUT means the
             * peer is gone: kill the connection. */
            if (core->core_time > (connection->latest_input_timestamp + PING_TIMEOUT) &&
                    (connection->ping_sent_timestamp > connection->latest_input_timestamp)) {
                WISHDEBUG(LOG_CRITICAL, "Connection ping: Killing connection because of inactivity");
                wish_close_connection(core, connection);
            }
            break;
        case WISH_CONTEXT_IN_MAKING: {
            if (core->core_time > (connection->latest_input_timestamp + CONNECTION_SETUP_TIMEOUT)) {
/* WISH_CORE_DEBUG builds log a much more detailed diagnostic line. */
#ifndef WISH_CORE_DEBUG
                WISHDEBUG(LOG_CRITICAL, "Ping timeout. Closing connection. (luid: %02x %02x, ruid: %02x %02x)",
                        connection->luid[0], connection->luid[1], connection->ruid[0], connection->ruid[1]);
#else
                WISHDEBUG(LOG_CRITICAL, "Ping timeout. Closing connection. (luid: %02x %02x, ruid: %02x %02x), tp: %d, ps: %d, b_in: %i, b_out: %i, rb: %d, expect: %d, time: %d, core-time: %d (%s, %s)",
                        connection->luid[0], connection->luid[1], connection->ruid[0], connection->ruid[1],
                        connection->curr_transport_state, connection->curr_protocol_state, connection->bytes_in, connection->bytes_out, ring_buffer_length(&connection->rx_ringbuf), connection->expect_bytes,
                        connection->latest_input_timestamp, wish_time_get_relative(core), connection->via_relay ? "relayed" : "direct",
                        connection->friend_req_connection ? "friendReq" : "normal");
#endif
                wish_close_connection(core, connection);
            }
            break;
        }
        case WISH_CONTEXT_CLOSING:
            WISHDEBUG(LOG_CRITICAL, "Connection ping: Found context in closing state! Forcibly closing it.");
            wish_close_connection(core, connection);
            break;
        case WISH_CONTEXT_FREE:
            /* Obviously we don't ping unused contexts! */
            break;
        }
    }
}
/*
 * Open a TCP connection from local identity `luid` to remote identity `ruid`
 * at the given ip/port.
 *
 * Both identities are loaded only to verify they exist (and are destroyed
 * again immediately). Returns RET_FAIL when either identity cannot be
 * loaded; RET_SUCCESS otherwise — including when no free connection slot
 * was available (connection == NULL), in which case nothing is opened.
 */
return_t wish_connections_connect_tcp(wish_core_t* core, uint8_t *luid, uint8_t *ruid, wish_ip_addr_t *ip, uint16_t port) {
    wish_identity_t lu;
    wish_identity_t ru;

    if ( RET_SUCCESS != wish_identity_load(luid, &lu)
            || RET_SUCCESS != wish_identity_load(ruid, &ru) )
    {
        /* Destroy both: one of them may have been partially loaded. */
        wish_identity_destroy(&lu);
        wish_identity_destroy(&ru);
        return RET_FAIL;
    }

    wish_identity_destroy(&lu);
    wish_identity_destroy(&ru);

    wish_connection_t* connection = wish_connection_init(core, luid, ruid);

    if (connection != NULL) {
        //WISHDEBUG(LOG_CRITICAL, "Connection attempt: %s > %s (%u.%u.%u.%u:%hu)", lu.alias, ru.alias, ip->addr[0], ip->addr[1], ip->addr[2], ip->addr[3], port);
        wish_open_connection(core, connection, ip, port, false);
    }

    return RET_SUCCESS;
}
/*
 * Close every other CONNECTED pool entry that duplicates the given
 * connection's (luid, ruid, rhid) triple. No-op unless the given
 * connection itself is in the CONNECTED state.
 */
void wish_close_parallel_connections(wish_core_t *core, void *_connection) {
    wish_connection_t *connection = (wish_connection_t *) _connection;
    int slot;

    if (connection->context_state != WISH_CONTEXT_CONNECTED) {
        return;
    }

    for (slot = 0; slot < WISH_CONTEXT_POOL_SZ; slot++) {
        wish_connection_t *candidate = &core->connection_pool[slot];

        /* Never close the connection we were given. */
        if (candidate == connection) {
            continue;
        }

        /* Only duplicates of the full endpoint triple qualify. */
        if (memcmp(candidate->luid, connection->luid, WISH_ID_LEN) != 0
                || memcmp(candidate->ruid, connection->ruid, WISH_ID_LEN) != 0
                || memcmp(candidate->rhid, connection->rhid, WISH_WHID_LEN) != 0) {
            continue;
        }

        if (candidate->context_state == WISH_CONTEXT_CONNECTED) {
            wish_close_connection(core, candidate);
        }
    }
}
npocmaka/Windows-Server-2003 | multimedia/directx/dplay/dvoice/common/dndbg.h | <gh_stars>10-100
/*==========================================================================
*
* Copyright (C) 1999 - 2001 Microsoft Corporation. All Rights Reserved.
*
* File: dndbg.h
* Content: debug support functions for DirectNet
*
* History:
* Date By Reason
* ==== == ======
* 05-20-99 aarono Created
* 07-16-99 johnkan Added DEBUG_ONLY, DBG_CASSERT, fixed DPFERR to take an argument
* 02-17-00 rodtoll Added Memory / String validation routines
* 05-23-00 RichGr IA64: Changed some DWORDs to DWORD_PTRs to make va_arg work OK.
* 07-27-00 masonb Rewrite to make sub-component stuff work, improve perf
* 08/28/2000 masonb Voice Merge: Part of header guard was missing (#define _DNDBG_H_)
* 10/25/2001 vanceo Use NT build friendly BUGBUG, TODO, plus add PRINTVALUE.
*
***************************************************************************/
#ifndef _DNDBG_H_
#define _DNDBG_H_

#ifdef __cplusplus
extern "C" {
#endif // __cplusplus

// DEBUG_BREAK()
// Break into the debugger; available only in debug (DBG) or DPINST builds.
#if defined(DBG) || defined(DPINST)
#define DEBUG_BREAK()   DebugBreak()
#endif // defined(DBG) || defined(DPINST)
//==================================================================================
// Useful macros based on some DNet code (which was taken from code by ToddLa)
//==================================================================================
//
// Macros that generate compile time messages. Use these with #pragma:
//
// #pragma TODO(vanceo, "Fix this later")
// #pragma BUGBUG(vanceo, "Busted!")
// #pragma PRINTVALUE(DPERR_SOMETHING)
//
// To turn them off, define TODO_OFF, BUGBUG_OFF, PRINTVALUE_OFF in your project
// preprocessor defines.
//
//
// If we're building under VC, (as denoted by the preprocessor define
// DPNBUILD_ENV_NT), these expand to look like:
//
// D:\directory\file.cpp(101) : BUGBUG: vanceo: Busted!
//
// in your output window, and you should be able to double click on it to jump
// directly to that location (line 101 of D:\directory\file.cpp).
//
// If we're building under the NT build environment, these expand to look like:
//
// BUGBUG: vanceo: D:\directory\file.cpp(101) : Busted!
//
// because (at least right now) the build process thinks that a failure occurred if
// a message beginning with a filename and line number is printed. It used to work
// just fine, but who knows.
//
// Message ordering differs per build environment (see the comment block
// above): VC wants "file(line) :" first so the message is double-clickable;
// the NT build treats a leading "file(line) :" as an error, so the tag
// comes first there.
#ifdef DPNBUILD_ENV_NT
#define __TODO(user, msgstr, n)		message("TODO: " #user ": " __FILE__ "(" #n ") : " msgstr)
#define __BUGBUG(user, msgstr, n)		message("BUGBUG: " #user ": " __FILE__ "(" #n ") : " msgstr)
#define __PRINTVALUE(itemnamestr, itemvaluestr, n)		message("PRINTVALUE: " __FILE__ "(" #n ") : " itemnamestr " = " itemvaluestr)
#else // ! DPNBUILD_ENV_NT
#define __TODO(user, msgstr, n)		message(__FILE__ "(" #n ") : TODO: " #user ": " msgstr)
#define __BUGBUG(user, msgstr, n)		message(__FILE__ "(" #n ") : BUGBUG: " #user ": " msgstr)
#define __PRINTVALUE(itemnamestr, itemvaluestr, n)		message(__FILE__ "(" #n ") : PRINTVALUE: " itemnamestr " = " itemvaluestr)
#endif // ! DPNBUILD_ENV_NT

// Indirection layer so __LINE__ is expanded before stringization.
#define _TODO(user, msgstr, n)		__TODO(user, msgstr, n)
#define _BUGBUG(user, msgstr, n)	__BUGBUG(user, msgstr, n)
#define _PRINTVALUE(itemstr, item, n)	__PRINTVALUE(itemstr, #item, n)

// Public macros; each can be disabled with the matching *_OFF define.
#ifdef TODO_OFF
#define TODO(user, msgstr)
#else
#define TODO(user, msgstr)			_TODO(user, msgstr, __LINE__)
#endif // TODO_OFF

#ifdef BUGBUG_OFF
#define BUGBUG(user, msgstr)
#else
#define BUGBUG(user, msgstr)		_BUGBUG(user, msgstr, __LINE__)
#endif // BUGBUG_OFF

#ifdef PRINTVALUE_OFF
#define PRINTVALUE(item)
#else
#define PRINTVALUE(item)			_PRINTVALUE(#item, item, __LINE__)
#endif // PRINTVALUE_OFF
//========================
// Debug Logging support
//========================
/*=============================================================================
Usage:
In code, you can use DPF to print to the log or the debug windows of the
running application. The format of DPF (debug printf) is as follows:
DPFX(DPFPREP,level, string *fmt, arg1, arg2, ...);
level specifies how important this debug printf is. The standard convention
for debug levels is as follows. This is no way strictly enforced for
personal use, but by the time the code is checked in, it should be as close
to this as possible...
DPF_ERRORLEVEL: Error useful for application developers.
DPF_WARNINGLEVEL: Warning useful for application developers.
DPF_ENTRYLEVEL: API Entered
DPF_APIPARAM: API parameters, API return values
DPF_LOCKS: Driver conversation
DPF_INFOLEVEL: Deeper program flow notifications
DPF_STRUCTUREDUMP: Dump structures
DPF_TRACELEVEL: Trace messages
When printing a critical error, you can use:
DPERR( "String" );
which will print a string at debug level zero.
In order to cause the code to stop and break in. You can use ASSERT() or
DEBUG_BREAK(). In order for ASSERT to break in, you must have
BreakOnAssert set in the win.ini file section (see osindep.cpp).
=============================================================================*/
// Debug verbosity levels: lower value = more important. See the usage
// comment block above for the conventions attached to each level.
#define DPF_ERRORLEVEL 0
#define DPF_WARNINGLEVEL 1
#define DPF_ENTRYLEVEL 2
#define DPF_APIPARAM 3
#define DPF_LOCKS 4
#define DPF_INFOLEVEL 5
#define DPF_STRUCTUREDUMP 6
#define DPF_TRACELEVEL 9

// For Voice
// Same levels under DVF_* names for the DirectPlay Voice component.
#define DVF_ERRORLEVEL 0
#define DVF_WARNINGLEVEL 1
#define DVF_ENTRYLEVEL 2
#define DVF_APIPARAM 3
#define DVF_LOCKS 4
#define DVF_INFOLEVEL 5
#define DVF_STRUCTUREDUMP 6
#define DVF_TRACELEVEL 9

// Sub-component ids used to tag debug output by DirectNet module.
#define DN_SUBCOMP_GLOBAL 0
#define DN_SUBCOMP_CORE 1
#define DN_SUBCOMP_ADDR 2
#define DN_SUBCOMP_LOBBY 3
#define DN_SUBCOMP_PROTOCOL 4
#define DN_SUBCOMP_VOICE 5
#define DN_SUBCOMP_DPNSVR 6
#define DN_SUBCOMP_WSOCK 7
#define DN_SUBCOMP_MODEM 8
#define DN_SUBCOMP_COMMON 9
#define DN_SUBCOMP_NATHELP 10
#define DN_SUBCOMP_TOOLS 11
#define DN_SUBCOMP_THREADPOOL 12
#ifdef DBG

// Implemented in the debug support .cpp; each caller site supplies
// file/line/function via DPFPREP.
extern void DebugPrintfX(LPCTSTR szFile, DWORD dwLineNumber,LPCTSTR szFnName, DWORD dwSubComp, DWORD dwDetail, ...);
extern void _DNAssert(LPCTSTR szFile, DWORD dwLineNumber, LPCTSTR szFnName, DWORD dwSubComp, LPCTSTR szCondition, DWORD dwLevel);

// DPFPREP expands to the standard first four DebugPrintfX arguments; each
// translation unit must define DPF_MODNAME and DPF_SUBCOMP before use.
#define DPFX DebugPrintfX
#define DPFPREP _T(__FILE__),__LINE__,_T(DPF_MODNAME), DPF_SUBCOMP
#define DPFERR(a) DebugPrintfX(DPFPREP, DPF_ERRORLEVEL, a )

#ifdef DPNBUILD_USEASSUME
// __assume builds: the condition becomes an optimizer hint instead of a
// runtime check, so DNASSERTX requires level > 1 at compile time.
#define DNASSERT(condition) __assume(condition)
#define DNASSERTX(condition, level) DBG_CASSERT(level > 1); if (!(condition)) _DNAssert(DPFPREP, _T(#condition), level)
#else // ! DPNBUILD_USEASSUME
#define DNASSERT(condition) if (!(condition)) _DNAssert(DPFPREP, _T(#condition), 1)
#define DNASSERTX(condition, level) if (!(condition)) _DNAssert(DPFPREP, _T(#condition), level)
#endif // ! DPNBUILD_USEASSUME

// Compile-time assert: fails to compile when exp is 0 (duplicate case label).
#define DBG_CASSERT(exp) switch (0) case 0: case exp:

#define DEBUG_ONLY(arg) arg
#define DPF_RETURN(a) DPFX(DPFPREP,DPF_APIPARAM,"Returning: 0x%lx",a); return a;
#define DPF_ENTER() DPFX(DPFPREP,DPF_TRACELEVEL, "Enter");
#define DPF_EXIT() DPFX(DPFPREP,DPF_TRACELEVEL, "Exit");

#else // NOT DBG

// Retail builds: all debug macros compile away to nothing. The warning is
// disabled because DPFX() is invoked with arguments it does not declare.
// C4002: too many actual parameters for macro 'identifier'
#pragma warning(disable:4002)

#define DPFX()
#define DPFERR(a)

#ifdef DPNBUILD_USEASSUME
#define DNASSERT(condition) __assume(condition)
#define DNASSERTX(condition, level)
#else // ! DPNBUILD_USEASSUME
#define DNASSERT(condition)
#define DNASSERTX(condition, level)
#endif // ! DPNBUILD_USEASSUME

#define DBG_CASSERT(exp)
#define DEBUG_ONLY(arg)
#define DPF_RETURN(a) return a;
#define DPF_ENTER()
#define DPF_EXIT()

#endif // DBG

#ifdef __cplusplus
} //extern "C"
#endif // __cplusplus

#endif // _DNDBG_H_
|
datasalt/pangool | core/src/test/java/com/datasalt/pangool/tuplemr/mapred/lib/input/TestCascadingTupleInputFormat.java | <gh_stars>1-10
package com.datasalt.pangool.tuplemr.mapred.lib.input;
import static org.junit.Assert.*;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.nio.charset.Charset;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.junit.Test;
import com.datasalt.pangool.io.ITuple;
import com.datasalt.pangool.tuplemr.MapOnlyJobBuilder;
import com.datasalt.pangool.tuplemr.mapred.MapOnlyMapper;
import com.datasalt.pangool.tuplemr.mapred.lib.output.HadoopOutputFormat;
import com.datasalt.pangool.utils.HadoopUtils;
import com.datasalt.pangool.utils.test.AbstractHadoopTestLibrary;
import com.google.common.io.Files;
@SuppressWarnings("serial")
public class TestCascadingTupleInputFormat extends AbstractHadoopTestLibrary implements Serializable {
public final static String OUTPUT = "out-" + TestCascadingTupleInputFormat.class.getName();
@Test
public void test() throws Exception {
MapOnlyJobBuilder builder = new MapOnlyJobBuilder(getConf());
// Enable Cascading serialization in Hadoop config.
CascadingTupleInputFormat.setSerializations(getConf());
// Instantiate InputFormat
InputFormat<ITuple, NullWritable> iF = new CascadingTupleInputFormat("logs", "day", "month", "year",
"count", "metric", "value");
builder.addInput(new Path("src/test/resources/cascading-binary"), iF,
new MapOnlyMapper<ITuple, NullWritable, Text, NullWritable>() {
@Override
protected void map(ITuple key, NullWritable value, Context context) throws IOException,
InterruptedException {
context.write(new Text(key.toString()), NullWritable.get());
}
});
builder.setOutput(new Path(OUTPUT), new HadoopOutputFormat(TextOutputFormat.class), Text.class,
NullWritable.class);
Job job = builder.createJob();
try {
assertRun(job);
} finally {
builder.cleanUpInstanceFiles();
}
String expectedOutput = "{\"day\":20,\"month\":10,\"year\":2012,\"count\":97,\"metric\":\"ALL\",\"value\":\"\"}\n"
+ "{\"day\":21,\"month\":10,\"year\":2012,\"count\":717,\"metric\":\"ALL\",\"value\":\"\"}\n"
+ "{\"day\":22,\"month\":10,\"year\":2012,\"count\":186,\"metric\":\"ALL\",\"value\":\"\"}";
assertEquals(expectedOutput, Files.toString(new File(OUTPUT, "part-m-00000"), Charset.defaultCharset()).trim());
HadoopUtils.deleteIfExists(FileSystem.get(getConf()), new Path(OUTPUT));
}
}
|
msztylko/CS-programming-with-a-purpose | w03-arrays/extra/MostLikelyRoll.java | public class MostLikelyRoll {
public static void main(String[] args) {
int SIDES = 6;
int TARGET = 12;
int trials = Integer.parseInt(args[0]); // number of trials
int[] freq = new int[TARGET + SIDES + 1];
for (int t = 1; t <= trials; t++) {
int sum = 0;
while (sum <= TARGET) {
int die = 1 + (int) (Math.random() * SIDES);
sum += die;
}
freq[sum]++;
}
for (int i = TARGET + 1; i <= TARGET + SIDES; i++) {
double fraction = 1.0 * freq[i] / trials;
System.out.println(i + ": " + fraction);
}
}
}
|
Kashif-Rabbani/ODIN | MetadataFrontend/public/js/client_view_wrapper.js | <filename>MetadataFrontend/public/js/client_view_wrapper.js
/**
* Created by snadal on 07/06/16.
*/
// Returns the value of the query-string parameter `name` from the current
// page URL ("" when absent). '+' is decoded as a space.
function getParameterByName(name) {
    // Escape "[" and "]" so the name can be embedded in a RegExp safely.
    var escaped = name.replace(/[\[]/, "\\[").replace(/[\]]/, "\\]");
    var matcher = new RegExp("[\\?&]" + escaped + "=([^&#]*)");
    var match = matcher.exec(location.search);
    if (match === null) {
        return "";
    }
    return decodeURIComponent(match[1].replace(/\+/g, " "));
}
// On DOM ready: fetch the wrapper identified by ?wrapperID=..., fill the
// read-only form fields, then fetch its data source to decide which
// type-specific query field to reveal and populate.
$(function() {

    $.get("/wrapper/"+getParameterByName("wrapperID"), function(data) {
        var wrapper = (data);

        $("#id").val(wrapper.wrapperID);
        $("#iri").val(wrapper.iri);
        $("#name").val(wrapper.name);
        $("#dataSource").val(wrapper.dataSourceID);

        // One read-only input per wrapper attribute.
        _.each(wrapper.attributes,function(attribute) {
            $('#controls').append($('<input class="form-control" type="text" required="required" readonly="">').val(attribute.name));
        });

        $.get("/dataSource/"+wrapper.dataSourceID, function(ds) {
            // Note: "avro" and "parquet" intentionally share the Spark SQL form.
            switch (ds.type) {
                case "avro":
                    $("#sparksqlQueryForm").show();
                    $("#sparksqlQuery").val(wrapper.query);
                    break;
                case "mongodb":
                    $("#mongodbQueryForm").show();
                    $("#mongodbQuery").val(wrapper.query);
                    break;
                case "neo4j":
                    $("#cypherQueryForm").show();
                    $("#cypherQuery").val(wrapper.query);
                    break;
                case "parquet":
                    $("#sparksqlQueryForm").show();
                    $("#sparksqlQuery").val(wrapper.query);
                    break;
                case "plaintext":
                    // Plain text sources store a field separator, not a query.
                    $("#fileseparatorForm").show();
                    $("#fileseparator").val(wrapper.query);
                    break;
                case "restapi":
                    $("#restapiQueryForm").show();
                    $("#restapiQuery").val(wrapper.query);
                    break;
                case "sqldatabase":
                    $("#sqlQueryForm").show();
                    $("#sqlQuery").val(wrapper.query);
                    break;
            }
        });
    });
});
|
AJAkimana/todo-app | __tests__/todo.js | <reponame>AJAkimana/todo-app
import chai from 'chai';
import chaiHttp from 'chai-http';
import app from '../app';
import { Todo } from '../models';
const { expect } = chai;
chai.use(chaiHttp);
const todoDb = new Todo();
let todoId = null;
const mockedTodo = { title: 'test', description: 'test', priority: 'LOW' };
// Tests for the TODO endpoints. Fixes: the original `.end()` callbacks never
// invoked mocha's `done`, so each `it` completed before the HTTP response
// arrived and assertion failures inside the callbacks were swallowed. Each
// test now takes `done` and passes any transport error to it. The
// `try { ... } catch (e) { throw e; }` wrappers in before/after were no-ops
// and have been removed.
describe('TODO feature', () => {
  // Seed one todo directly through the model; its id is used by the tests.
  before(async () => {
    const newTodo = await todoDb.create(mockedTodo);
    todoId = newTodo.id;
  });

  it('Should return all todos', (done) => {
    chai
      .request(app)
      .get('/api/todos')
      .end((err, res) => {
        expect(res.status).to.equal(200);
        done(err);
      });
  });

  it('Should return a todo detail', (done) => {
    chai
      .request(app)
      .get(`/api/todos/${todoId}`)
      .end((err, res) => {
        expect(res.status).to.equal(200);
        done(err);
      });
  });

  it('Should return 401 when create to do with no permission', (done) => {
    chai
      .request(app)
      .post(`/api/todos`)
      .send(mockedTodo)
      .end((err, res) => {
        expect(res.status).to.equal(401);
        done(err);
      });
  });

  // Remove the seeded todo so tests stay repeatable.
  after(async () => {
    await todoDb.delete(todoId);
  });
});
|
Machiry/checkedc-clang | mlir/include/mlir/Dialect/OpenMP/OpenMPDialect.h | <filename>mlir/include/mlir/Dialect/OpenMP/OpenMPDialect.h
//===- OpenMPDialect.h - MLIR Dialect for OpenMP ----------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file declares the OpenMP dialect in MLIR.
//
//===----------------------------------------------------------------------===//
#ifndef MLIR_DIALECT_OPENMP_OPENMPDIALECT_H_
#define MLIR_DIALECT_OPENMP_OPENMPDIALECT_H_
#include "mlir/Dialect/LLVMIR/LLVMTypes.h"
#include "mlir/IR/Dialect.h"
#include "mlir/IR/OpDefinition.h"
#include "mlir/Interfaces/ControlFlowInterfaces.h"
#include "mlir/Interfaces/SideEffectInterfaces.h"
#include "mlir/Dialect/OpenMP/OpenMPOpsDialect.h.inc"
#include "mlir/Dialect/OpenMP/OpenMPOpsEnums.h.inc"
#define GET_OP_CLASSES
#include "mlir/Dialect/OpenMP/OpenMPOps.h.inc"
#endif // MLIR_DIALECT_OPENMP_OPENMPDIALECT_H_
|
geoff5802/liftie | test/resorts/sugarbowl.js | <reponame>geoff5802/liftie
const lifts = require('../lifts');
// Fixture for the Sugar Bowl resort: runs the shared lifts() test helper
// against the stored 'html' status page and checks that each named lift is
// parsed into the expected state (open / closed / scheduled).
lifts('sugarbowl', 'html', {
  'Lincoln Express': 'scheduled',
  'Judah Express': 'scheduled',
  'Disney Express': 'scheduled',
  'Summit Chair': 'closed',
  'Christmas Tree Express': 'scheduled',
  'Crow\'s Peak': 'scheduled',
  'Jerome Hill Express': 'scheduled',
  'Village Tow': 'scheduled',
  'White Pine (Beginner)': 'scheduled',
  'Flume Carpet': 'scheduled',
  'Nob Hill (Beginner)': 'scheduled',
  'Gondola': 'open',
  'Village Kids Carpet': 'scheduled'
});
|
Thanasis17m/RentRoom-website | src/servlets/InsertNewMessageServlet.java | package servlets;
import java.io.IOException;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import dao.UserDAOImpl;
import dao.UserDAO;
import javax.servlet.http.HttpSession;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
* Servlet implementation class InsertNewMessageServlet
*/
@WebServlet("/htmlCssFiles/insertNewMessage")
public class InsertNewMessageServlet extends HttpServlet {
	private static final long serialVersionUID = 1L;

	/**
	 * Delegates to {@link #doPost} so the message insert behaves the same
	 * for either HTTP verb.
	 *
	 * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
	 */
	protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
		doPost(request,response);
	}

	/**
	 * Inserts a chat message from the logged-in user to the currently
	 * viewed host, stamped with the server's current time, then forwards
	 * the request to the index page.
	 *
	 * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
	 */
	protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
		RequestDispatcher disp = getServletContext().getRequestDispatcher("/htmlCssFiles/index.jsp");
		UserDAO dao = new UserDAOImpl(false);
		HttpSession session = request.getSession();
		// Session attributes are set elsewhere (presumably at login and when
		// opening a host's page). NOTE(review): either attribute may be null
		// if the session expired — confirm how insertMessage handles nulls.
		String sender =(String) session.getAttribute("username");
		String receiver =(String) session.getAttribute("host_name");
		String message = request.getParameter("message");
		DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
		Date dateTime = new Date();
		dao.insertMessage(message, sender, receiver, dateFormat.format(dateTime));
		disp.forward(request, response);
	}
}
|
AsahiOS/gate | usr/src/cmd/scsi/sestopo/common/sestopo.c | <filename>usr/src/cmd/scsi/sestopo/common/sestopo.c
/*
* CDDL HEADER START
*
* The contents of this file are subject to the terms of the
* Common Development and Distribution License (the "License").
* You may not use this file except in compliance with the License.
*
* You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
* or http://www.opensolaris.org/os/licensing.
* See the License for the specific language governing permissions
* and limitations under the License.
*
* When distributing Covered Code, include this CDDL HEADER in each
* file and include the License file at usr/src/OPENSOLARIS.LICENSE.
* If applicable, add the following below this CDDL HEADER, with the
* fields enclosed by brackets "[]" replaced with your own identifying
* information: Portions Copyright [yyyy] [name of copyright owner]
*
* CDDL HEADER END
*/
/*
* Copyright 2008 Sun Microsystems, Inc. All rights reserved.
* Use is subject to license terms.
*/
#pragma ident "%Z%%M% %I% %E% SMI"
#include <libnvpair.h>
#include <stdio.h>
#include <unistd.h>
#include <scsi/libses.h>
static void fatal(int, const char *, ...) __NORETURN;
/*
 * Print a printf(3C)-style message to stderr, append a newline, flush,
 * and terminate the process immediately with the given exit status.
 */
static void
fatal(int err, const char *fmt, ...)
{
	va_list args;

	va_start(args, fmt);
	(void) vfprintf(stderr, fmt, args);
	va_end(args);

	(void) fprintf(stderr, "\n");
	(void) fflush(stderr);

	/* _exit(2): skip atexit handlers; we are aborting, not unwinding. */
	_exit(err);
}
/*ARGSUSED*/
/*
 * ses_walk() callback: print each node's type, the human-readable element
 * type name for element/aggregate nodes, and a full dump of the node's
 * property nvlist.  Always continues the walk.
 */
static ses_walk_action_t
node(ses_node_t *np, void *arg)
{
	ses_node_type_t type;
	nvlist_t *props;
	char *t;

	type = ses_node_type(np);
	(void) printf("Node Type: %d\n", type);

	if ((props = ses_node_props(np)) == NULL) {
		(void) printf("No properties\n");
		return (SES_WALK_ACTION_CONTINUE);
	}

	if (type == SES_NODE_ELEMENT || type == SES_NODE_AGGREGATE) {
		/*
		 * Only the element type *name* is printed.  The previous
		 * lookup of SES_PROP_ELEMENT_TYPE stored a value that was
		 * never read (dead store) and has been removed.
		 */
		if (nvlist_lookup_string(props, LIBSES_PROP_ELEMENT_TYPE_NAME,
		    &t) != 0)
			t = NULL;
		(void) printf("Element Type: %s\n", t ? t : "<unknown>");
	}

	nvlist_print(stdout, props);

	return (SES_WALK_ACTION_CONTINUE);
}
/*
 * Open the SES enclosure device named on the command line, hold a snapshot
 * of its current state, and dump every node in the snapshot via node().
 */
int
main(int argc, char *argv[])
{
	ses_target_t *target;
	ses_snap_t *snap;

	if (argc != 2)
		fatal(1, "Usage: %s <device>", argv[0]);

	target = ses_open(LIBSES_VERSION, argv[1]);
	if (target == NULL)
		fatal(-1, "failed to open %s: %s", argv[1], ses_errmsg());

	snap = ses_snap_hold(target);
	(void) ses_walk(snap, node, NULL);
	ses_snap_rele(snap);

	ses_close(target);
	return (0);
}
|
dedep/bonobo | app/db/row/mapper/BaseRowMapper.scala | <filename>app/db/row/mapper/BaseRowMapper.scala<gh_stars>0
package db.row.mapper
import db.row.model.BaseRow
/**
 * Maps a domain entity of type `A` into its database-row representation.
 */
abstract class BaseRowMapper[A] {
  /** Builds the row wrapper for the given entity. */
  def fromEntity(entity: A): BaseRow[A]
}
|
raylia-w/workspace | UML Lab Shop Example/src/com/yattasolutions/umllab/examples/shop/Human.java | package com.yattasolutions.umllab.examples.shop;
import javax.persistence.Entity;
/**
 * Minimal person abstraction for the shop example: mutable name, gender and
 * age accessors.
 *
 * NOTE(review): JPA's {@code @Entity} normally applies to concrete classes,
 * not interfaces — confirm this interface is actually mapped as intended.
 */
@Entity
public interface Human {
	/** @return the person's age */
	public int getAge();
	/** @return the person's gender */
	public String getGender();
	/** @return the person's name */
	public String getName();
	/** Sets the person's age. */
	public void setAge(int value);
	/** Sets the person's gender. */
	public void setGender(String value);
	/** Sets the person's name. */
	public void setName(String value);
}
|
yumin/SMTK | smtk/bridge/cgm/operators/CreateEdge.cxx | //=========================================================================
// Copyright (c) Kitware, Inc.
// All rights reserved.
// See LICENSE.txt for details.
//
// This software is distributed WITHOUT ANY WARRANTY; without even
// the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
// PURPOSE. See the above copyright notice for more information.
//=========================================================================
#include "smtk/bridge/cgm/operators/CreateEdge.h"
#include "smtk/bridge/cgm/Session.h"
#include "smtk/bridge/cgm/CAUUID.h"
#include "smtk/bridge/cgm/Engines.h"
#include "smtk/bridge/cgm/TDUUID.h"
#include "smtk/io/Logger.h"
#include "smtk/model/Vertex.h"
#include "smtk/attribute/Attribute.h"
#include "smtk/attribute/DoubleItem.h"
#include "smtk/attribute/IntItem.h"
#include "smtk/attribute/ModelEntityItem.h"
#include "smtk/attribute/StringItem.h"
#include "CGMApp.hpp"
#include "DagType.hpp"
#include "CubitAttribManager.hpp"
#include "CubitCompat.hpp"
#include "CubitDefines.h"
#include "DLIList.hpp"
#include "InitCGMA.hpp"
#include "GeometryModifyTool.hpp"
#include "GeometryQueryEngine.hpp"
#include "GeometryQueryTool.hpp"
#include "RefEntity.hpp"
#include "RefEntityFactory.hpp"
#include "RefGroup.hpp"
#include "RefVertex.hpp"
#include "RefEdge.hpp"
#include "smtk/bridge/cgm/CreateEdge_xml.h"
namespace smtk {
namespace bridge {
namespace cgm {
// Creates a CGM edge between the two vertices associated with this operator.
// The "point" attribute supplies the curve-type-specific intermediate point,
// "curve type" selects the CGM geometry type, and "color" is applied to the
// new edge. Returns OPERATION_FAILED on any validation or CGM failure.
smtk::model::OperatorResult CreateEdge::operateInternal()
{
  // Exactly two associated vertices are required: the edge endpoints.
  smtk::model::Vertices vertices =
    this->associatedEntitiesAs<smtk::model::Vertices>();
  if (vertices.size() != 2)
    {
    smtkInfoMacro(log(), "Expected 2 vertices, got " << vertices.size() << ".");
    return this->createResult(smtk::model::OPERATION_FAILED);
    }

  // Pull the operator's input items from its attribute specification.
  smtk::attribute::DoubleItem::Ptr pointItem =
    this->findDouble("point");
  smtk::attribute::IntItem::Ptr curveTypeItem =
    this->findInt("curve type");
  smtk::attribute::IntItem::Ptr colorItem =
    this->findInt("color");
  int color = colorItem->value();
  CubitVector point(
    pointItem->value(0),
    pointItem->value(1),
    pointItem->value(2));
  // The discrete index is mapped through the item's definition so the enum
  // value matches CGM's GeometryType, not the raw combo-box index.
  GeometryType curveType = static_cast<GeometryType>(
    curveTypeItem->concreteDefinition()->discreteValue(
      curveTypeItem->discreteIndex()));
  // Whitelist the curve types CGM's make_RefEdge supports; the meaning of
  // `point` differs per type (see per-case comments below).
  switch (curveType)
    {
  case STRAIGHT_CURVE_TYPE: // intermediate_point_ptr is not used
  case PARABOLA_CURVE_TYPE: // intermediate_point_ptr is the tip of the parabola
  case HYPERBOLA_CURVE_TYPE: // intermediate_point_ptr is the center of its two foci
  case ELLIPSE_CURVE_TYPE:
    // intermediate_point_ptr is the center of the ellipse
    // the two points are vertices, one gives the major radius,
    // the other point gives the minor radius.
  case ARC_CURVE_TYPE: // arc passes three points
    break;
  default:
    smtkInfoMacro(log(), "Bad curve type " << curveType << ".");
    return this->createResult(smtk::model::OPERATION_FAILED);
    }

  // Resolve the SMTK vertices to their underlying CGM RefVertex instances.
  RefVertex* v0 = this->cgmEntityAs<RefVertex*>(vertices[0]);
  RefVertex* v1 = this->cgmEntityAs<RefVertex*>(vertices[1]);
  if (!v0 || !v1)
    {
    smtkInfoMacro(log(), "One or more vertices were invalid " << v0 << ", " << v1 << ".");
    return this->createResult(smtk::model::OPERATION_FAILED);
    }

  RefEdge* cgmEdge = GeometryModifyTool::instance()->make_RefEdge(curveType, v0, v1, &point);
  if (!cgmEdge)
    {
    smtkInfoMacro(log(), "Failed to create edge.");
    return this->createResult(smtk::model::OPERATION_FAILED);
    }

  // Assign color to match vertex API that requires a color.
  cgmEdge->color(color);

  // Report the new edge back to SMTK as a CREATED entity.
  smtk::model::OperatorResult result = this->createResult(
    smtk::model::OPERATION_SUCCEEDED);
  DLIList<RefEdge*> cgmEdgesOut;
  cgmEdgesOut.push(cgmEdge);
  this->addEntitiesToResult(cgmEdgesOut, result, CREATED);
  // Nothing to expunge.

  return result;
}
} // namespace cgm
} //namespace bridge
} // namespace smtk
// Registers CreateEdge with SMTK's operator system under the user-visible
// name "create edge", bound to the CGM session and its XML description.
smtkImplementsModelOperator(
  SMTKCGMSESSION_EXPORT,
  smtk::bridge::cgm::CreateEdge,
  cgm_create_edge,
  "create edge",
  CreateEdge_xml,
  smtk::bridge::cgm::Session);
|
HeyLey/catboost | util/string/pcdata.h | #pragma once
#include <util/generic/fwd.h>
/// Converts text into HTML: special HTML characters («<», «>», ...) are
/// replaced with entities. The qAmp flag presumably controls whether «&» is
/// escaped too — TODO confirm against the implementation.
TString EncodeHtmlPcdata(const TStringBuf str, bool qAmp = true);

/// Same as EncodeHtmlPcdata(), but appends the encoded text to strout.
void EncodeHtmlPcdataAppend(const TStringBuf str, TString& strout);

/// Reverse of EncodeHtmlPcdata(): decodes HTML entities back to plain text.
TString DecodeHtmlPcdata(const TString& sz);
|
cstom4994/SourceEngineRebuild | src/public/movieobjects/dmsmdserializer.h | //========= Copyright Valve Corporation, All rights reserved. ============//
//
// Read SMD and create DMX data
//
//=============================================================================
#ifndef DMSMDSERIALIZER_H
#define DMSMDSERIALIZER_H
#if defined( _WIN32 )
#pragma once
#endif
// Valve includes
#include "datamodel/idatamodel.h"
#include "tier1/utlbuffer.h"
#include "tier1/utlstring.h"
#include "tier1/utlvector.h"
//-----------------------------------------------------------------------------
// Forward declarations
//-----------------------------------------------------------------------------
class CDmeDag;
class CDmeMesh;
class CPolygonData;
//-----------------------------------------------------------------------------
// Serialization class for SMD files
//-----------------------------------------------------------------------------
class CDmSmdSerializer : public IDmSerializer
{
public:
	enum Axis_t
	{
		X_AXIS = 0,
		Y_AXIS = 1,
		Z_AXIS = 2
	};

	// NOTE: the mem-init list below now follows declaration order
	// (m_bOptImportSkeleton, m_bOptAutoStripPrefix, m_bOptAnimation,
	// m_flFrameRate) — members are always initialized in declaration order,
	// so the previous out-of-order list triggered -Wreorder without
	// changing behavior.
	CDmSmdSerializer()
	: m_bOptImportSkeleton( true )
	, m_bOptAutoStripPrefix( false )
	, m_bOptAnimation( false )
	, m_flFrameRate( 30.0f )
	{
		SetUpAxis( Z_AXIS );
	}

	// Inherited from IDMSerializer
	virtual const char *GetName() const { return "smd"; }
	virtual const char *GetDescription() const { return "VALVe SMD"; }
	virtual bool IsBinaryFormat() const { return false; }
	virtual bool StoresVersionInFile() const { return true; }
	virtual int GetCurrentVersion() const { return 1; }
	virtual bool Serialize( CUtlBuffer &buf, CDmElement *pRoot ) { return false; }	// No DMX -> SMD support
	virtual bool Unserialize(
		CUtlBuffer &utlBuf,
		const char *pszEncodingName,
		int nEncodingVersion,
		const char *pszSourceFormatName,
		int nSourceFormatVersion,
		DmFileId_t nDmFileId,
		DmConflictResolution_t nDmConflictResolution,
		CDmElement **ppDmRoot );

	// Methods used for importing (only should return non-NULL for serializers that return false from StoresVersionInFile)
	virtual const char *GetImportedFormat() const { return NULL; }
	virtual int GetImportedVersion() const { return 1; }

	// CDmSmdSerializer
	CDmElement *ReadSMD( const char *pszFilename, CDmeMesh **ppDmeMeshCreated = NULL );

	void SetUpAxis( Axis_t nUpAxis );
	Axis_t GetUpAxis() const { return m_nUpAxis; }

	void SetIsAnimation( bool bOptAnimation ) { m_bOptAnimation = bOptAnimation; }
	bool IsReadAnimation() const { return m_bOptAnimation; }

	void SetFrameRate( float flFrameRate ) { m_flFrameRate = MAX( 0.1f, flFrameRate ); }	// Don't allow 0 or negative frame rate
	float GetFrameRate() const { return m_flFrameRate; }

	//-----------------------------------------------------------------------------
	// One joint parsed from the SMD "nodes" section.
	//-----------------------------------------------------------------------------
	struct SmdJoint_t
	{
		int m_nId;			// The id parsed from the SMD file
		int m_nActualId;	// The actual node id which is created after sorting and creating all joints in order with no gaps in numbering, corresponds to joitnIndex in DmeModel
		CUtlString m_sName;
		int m_nParentId;
		int m_nLineNumber;
		CDmeDag *m_pDmeDag;

		SmdJoint_t()
		: m_nId( -1 )
		, m_nActualId( -1 )
		, m_nParentId( -1 )
		, m_nLineNumber( -1 )
		, m_pDmeDag( NULL )
		{}
	};

	//-----------------------------------------------------------------------------
	// Maps SMD joint id -> joint record.
	//-----------------------------------------------------------------------------
	typedef CUtlMap< int, SmdJoint_t > SmdJointMap_t;

protected:
	void ParserGetNodeName( const char *pszBuf, CUtlString &sName ) const;

	bool ParserHandleSkeletonLine(
		const char *pszBuf,
		CUtlString &sName,
		int &nId,
		int &nParentId ) const;

	// BUG FIX: this in-class declaration was qualified as
	// CDmSmdSerializer::ReadSMD, which is ill-formed C++ ("extra
	// qualification on member declaration"; accepted only as an MSVC
	// extension). Only an out-of-class definition takes the qualifier.
	CDmElement *ReadSMD(
		CUtlBuffer &inUtlBuf,
		DmFileId_t nDmFileId,
		const char *pszFilename,
		CDmeMesh **ppDmeMeshCreated );

	//-----------------------------------------------------------------------------
	// Per-node bookkeeping used while building the DmeModel skeleton.
	//-----------------------------------------------------------------------------
	class CNodeData
	{
	public:
		CNodeData()
		: m_nParentIndex( -1 )
		, m_bSkinned( false )
		, m_nInfluenceIndex( 0 )
		, m_pDmeDag( NULL )
		{
		}

		bool Valid() const
		{
			return m_pDmeDag != NULL;
		}

		void Reset()
		{
			m_pDmeDag = NULL;
		}

		int m_nParentIndex;
		bool m_bSkinned;
		int m_nInfluenceIndex;
		CDmeDag *m_pDmeDag;
		CUtlVector< Vector > m_positions;
	};

	void FixNodeName( CUtlString &sName ) const;

	void ParserSetJoint(
		const SmdJointMap_t &smdJointMap,
		int nFrame, int nId,
		const Vector &vPosition, const RadianEuler &eRadianEulerXYZ,
		const char *pszFilename, int nLineNumber );

	Axis_t m_nUpAxis;		// 0 == X, 1 == Y, 2 == Z
	matrix3x4_t m_mAdj;		// Matrix to adjust for SMD source orientation to DMX Y up
	matrix3x4_t m_mAdjNormal;	// Matrix to adjust normals, inverse transpose of m_mAdj

public:
	bool m_bOptImportSkeleton;
	bool m_bOptAutoStripPrefix;
	bool m_bOptAnimation;
	float m_flFrameRate;

	CUtlString m_sNodeDelPrefix;
	CUtlString m_sNodeAddPrefix;
};
#endif // DMSMDSERIALIZER_H |
btwiuse/naiveproxy | src/net/quic/quic_http_utils_test.cc | <filename>src/net/quic/quic_http_utils_test.cc
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "net/quic/quic_http_utils.h"
#include <stdint.h>
#include <limits>
#include "net/third_party/quiche/src/quic/platform/api/quic_endian.h"
#include "net/third_party/quiche/src/spdy/core/spdy_alt_svc_wire_format.h"
#include "testing/gtest/include/gtest/gtest.h"
using quic::ParsedQuicVersion;
using quic::PROTOCOL_QUIC_CRYPTO;
namespace net {
namespace test {
// Request priorities map onto QUIC priorities in strictly descending order:
// HIGHEST -> 0 through IDLE -> 4.
TEST(QuicHttpUtilsTest, ConvertRequestPriorityToQuicPriority) {
  EXPECT_EQ(0u, ConvertRequestPriorityToQuicPriority(HIGHEST));
  EXPECT_EQ(1u, ConvertRequestPriorityToQuicPriority(MEDIUM));
  EXPECT_EQ(2u, ConvertRequestPriorityToQuicPriority(LOW));
  EXPECT_EQ(3u, ConvertRequestPriorityToQuicPriority(LOWEST));
  EXPECT_EQ(4u, ConvertRequestPriorityToQuicPriority(IDLE));
}
// The inverse mapping of the test above; any out-of-range QUIC priority
// must degrade gracefully to IDLE rather than crash.
TEST(QuicHttpUtilsTest, ConvertQuicPriorityToRequestPriority) {
  EXPECT_EQ(HIGHEST, ConvertQuicPriorityToRequestPriority(0));
  EXPECT_EQ(MEDIUM, ConvertQuicPriorityToRequestPriority(1));
  EXPECT_EQ(LOW, ConvertQuicPriorityToRequestPriority(2));
  EXPECT_EQ(LOWEST, ConvertQuicPriorityToRequestPriority(3));
  EXPECT_EQ(IDLE, ConvertQuicPriorityToRequestPriority(4));
  // These are invalid values, but we should still handle them
  // gracefully. TODO(rtenneti): should we test for all possible values of
  // uint32_t?
  // NOTE(review): the loop bound below is uint8_t's max (255), not
  // uint32_t's as the TODO suggests — confirm which range was intended.
  for (int i = 5; i < std::numeric_limits<uint8_t>::max(); ++i) {
    EXPECT_EQ(IDLE, ConvertQuicPriorityToRequestPriority(i));
  }
}
// Only the intersection of the Alt-Svc advertised versions and the locally
// supported versions should survive filtering (here: QUIC_VERSION_46).
TEST(QuicHttpUtilsTest, FilterSupportedAltSvcVersions) {
  quic::ParsedQuicVersionVector supported_versions = {
      ParsedQuicVersion(PROTOCOL_QUIC_CRYPTO, quic::QUIC_VERSION_46),
      ParsedQuicVersion(PROTOCOL_QUIC_CRYPTO, quic::QUIC_VERSION_39),
  };
  std::vector<uint32_t> alt_svc_versions_google = {quic::QUIC_VERSION_46,
                                                   quic::QUIC_VERSION_43};
  // IETF-style Alt-Svc entries carry version labels rather than raw version
  // numbers; declared here for symmetry with the Google-style list.
  std::vector<uint32_t> alt_svc_versions_ietf = {
      QuicVersionToQuicVersionLabel(quic::QUIC_VERSION_46),
      QuicVersionToQuicVersionLabel(quic::QUIC_VERSION_43)};
  quic::ParsedQuicVersionVector supported_alt_svc_versions = {
      ParsedQuicVersion(PROTOCOL_QUIC_CRYPTO, quic::QUIC_VERSION_46)};
  spdy::SpdyAltSvcWireFormat::AlternativeService altsvc;
  altsvc.protocol_id = "quic";
  altsvc.version = alt_svc_versions_google;
  EXPECT_EQ(supported_alt_svc_versions,
            FilterSupportedAltSvcVersions(altsvc, supported_versions));
}
} // namespace test
} // namespace net
|
stachu540/twitch4j | rest-helix/src/main/java/com/github/twitch4j/helix/domain/TeamUser.java | <reponame>stachu540/twitch4j<gh_stars>100-1000
package com.github.twitch4j.helix.domain;
import lombok.AccessLevel;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.Setter;
/**
 * A single member of a Twitch Team as exposed by the Helix API domain model.
 * Setters are private (populated by deserialization); consumers read via the
 * Lombok-generated getters.
 */
@Data
@Setter(AccessLevel.PRIVATE)
@NoArgsConstructor
public class TeamUser {

    /**
     * User ID of a Team member.
     */
    private String userId;

    /**
     * Display name of a Team member.
     */
    private String userName;

    /**
     * Login of a Team member.
     */
    private String userLogin;

}
|
mgscreativa/meteor-browser-policy-issue | imports/modules/server/rate-limit.js | <filename>imports/modules/server/rate-limit.js
import _ from 'lodash';
import { Meteor } from 'meteor/meteor';
import { DDPRateLimiter } from 'meteor/ddp-rate-limiter';
// Extracts the `name` property from each Meteor method definition.
const fetchMethodNames = methods => _.map(methods, 'name');
// Registers a server-side DDP rate-limit rule for the given methods:
// at most `limit` invocations per `timeRange` (milliseconds), applied to
// every connection.
const assignLimits = ({ methods, limit, timeRange }) => {
  const methodNames = fetchMethodNames(methods);
  if (Meteor.isServer) {
    DDPRateLimiter.addRule(
      {
        // Match any invocation of one of the listed method names.
        name(name) {
          return _.includes(methodNames, name);
        },
        // `true` means the rule applies regardless of connection.
        connectionId() {
          return true;
        },
      },
      limit,
      timeRange,
    );
  }
};
// Public entry point. `options` is { methods, limit, timeRange }; see
// assignLimits above for the semantics of each field.
export default function rateLimit(options) {
  return assignLimits(options);
}
|
zhangkn/iOS14Header | System/Library/PrivateFrameworks/Settings/GeneralSettingsUI.framework/PSGDeviceNameEditingController.h | /*
* This header is generated by classdump-dyld 1.0
* on Sunday, September 27, 2020 at 12:26:16 PM Mountain Standard Time
* Operating System: Version 14.0 (Build 18A373)
* Image Source: /System/Library/PrivateFrameworks/Settings/GeneralSettingsUI.framework/GeneralSettingsUI
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>.
*/
#import <Preferences/PSListController.h>
@class NSString;
// Settings list controller for editing the device's name. Header was
// produced by classdump-dyld, so method semantics below are inferred from
// names only — verify against the binary before relying on them.
@interface PSGDeviceNameEditingController : PSListController {

	id _effectiveSettingsChangedNotificationObserver;
	NSString* _originalDeviceName;   // name at edit start, for change detection (presumed)

}

@property (nonatomic,retain) id effectiveSettingsChangedNotificationObserver;              //@synthesize effectiveSettingsChangedNotificationObserver=_effectiveSettingsChangedNotificationObserver - In the implementation block
@property (nonatomic,retain) NSString * originalDeviceName;                                //@synthesize originalDeviceName=_originalDeviceName - In the implementation block
-(void)viewWillDisappear:(BOOL)arg1 ;
-(BOOL)shouldSelectResponderOnAppearance;
-(void)viewDidDisappear:(BOOL)arg1 ;
-(void)viewDidAppear:(BOOL)arg1 ;
-(void)suspend;
-(void)viewWillAppear:(BOOL)arg1 ;
-(id)specifiers;
-(BOOL)canBeShownFromSuspendedState;
-(id)tableView:(id)arg1 cellForRowAtIndexPath:(id)arg2 ;
-(id)deviceName:(id)arg1 ;
-(void)setEffectiveSettingsChangedNotificationObserver:(id)arg1 ;
-(id)effectiveSettingsChangedNotificationObserver;
-(id)_editedDeviceName;
-(NSString *)originalDeviceName;
-(void)setOriginalDeviceName:(NSString *)arg1 ;
@end
|
win32kbase/CAFED00D | src/main/java/me/coley/cafedude/classfile/attribute/AnnotationDefaultAttribute.java | <reponame>win32kbase/CAFED00D
package me.coley.cafedude.classfile.attribute;
import me.coley.cafedude.classfile.annotation.ElementValue;
import java.util.Set;
/**
 * The {@code AnnotationDefault} attribute, which records the default value of
 * an annotation field <i>(Which are technically methods, but I digress)</i>.
 *
 * @author <NAME>
 */
public class AnnotationDefaultAttribute extends Attribute {
	private final ElementValue elementValue;

	/**
	 * @param nameIndex
	 * 		Name index in constant pool.
	 * @param elementValue
	 * 		Value of the annotation type element represented by the {@code method_info} structure
	 * 		enclosing this attribute.
	 */
	public AnnotationDefaultAttribute(int nameIndex, ElementValue elementValue) {
		super(nameIndex);
		this.elementValue = elementValue;
	}

	/**
	 * @return Value of the annotation type element represented by the {@code method_info} structure
	 * enclosing this attribute.
	 */
	public ElementValue getElementValue() {
		return elementValue;
	}

	@Override
	public Set<Integer> cpAccesses() {
		// Combine the attribute's own pool accesses with the element value's.
		Set<Integer> indices = super.cpAccesses();
		indices.addAll(elementValue.cpAccesses());
		return indices;
	}

	@Override
	public int computeInternalLength() {
		// The attribute payload is exactly the serialized element value.
		return elementValue.computeLength();
	}
}
|
DeepDiver1975/msgraph.go | beta/EasAuthenticationMethodEnum.go | // Code generated by msgraph-generate.go DO NOT EDIT.
package msgraph
// EasAuthenticationMethod enumerates Exchange ActiveSync authentication
// methods. NOTE(review): this file is generated ("DO NOT EDIT"); comment
// changes here will be lost on regeneration.
type EasAuthenticationMethod int

const (
	// EasAuthenticationMethodVUsernameAndPassword undocumented
	EasAuthenticationMethodVUsernameAndPassword EasAuthenticationMethod = 0
	// EasAuthenticationMethodVCertificate undocumented
	EasAuthenticationMethodVCertificate EasAuthenticationMethod = 1
	// EasAuthenticationMethodVDerivedCredential undocumented
	EasAuthenticationMethodVDerivedCredential EasAuthenticationMethod = 2
)
// EasAuthenticationMethodPUsernameAndPassword returns a pointer to
// EasAuthenticationMethodVUsernameAndPassword, for populating optional
// (pointer-typed) enum fields in request structs.
func EasAuthenticationMethodPUsernameAndPassword() *EasAuthenticationMethod {
	v := EasAuthenticationMethodVUsernameAndPassword
	return &v
}
// EasAuthenticationMethodPCertificate returns a pointer to
// EasAuthenticationMethodVCertificate, for populating optional
// (pointer-typed) enum fields in request structs.
func EasAuthenticationMethodPCertificate() *EasAuthenticationMethod {
	v := EasAuthenticationMethodVCertificate
	return &v
}
// EasAuthenticationMethodPDerivedCredential returns a pointer to
// EasAuthenticationMethodVDerivedCredential, for populating optional
// (pointer-typed) enum fields in request structs.
func EasAuthenticationMethodPDerivedCredential() *EasAuthenticationMethod {
	v := EasAuthenticationMethodVDerivedCredential
	return &v
}
|
therealmmk/aws-toolkit-eclipse | bundles/com.amazonaws.eclipse.dynamodb/src/com/amazonaws/eclipse/explorer/dynamodb/CreateTableFirstPage.java | <filename>bundles/com.amazonaws.eclipse.dynamodb/src/com/amazonaws/eclipse/explorer/dynamodb/CreateTableFirstPage.java<gh_stars>100-1000
/*
* Copyright 2013 Amazon Technologies, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://aws.amazon.com/apache2.0
*
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.eclipse.explorer.dynamodb;
import org.eclipse.core.databinding.AggregateValidationStatus;
import org.eclipse.core.databinding.DataBindingContext;
import org.eclipse.core.databinding.beans.PojoObservables;
import org.eclipse.core.databinding.observable.ChangeEvent;
import org.eclipse.core.databinding.observable.IChangeListener;
import org.eclipse.core.databinding.observable.value.IObservableValue;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.jface.databinding.swt.SWTObservables;
import org.eclipse.jface.layout.GridDataFactory;
import org.eclipse.jface.layout.GridLayoutFactory;
import org.eclipse.jface.wizard.WizardPage;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.graphics.FontData;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Combo;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Text;
import com.amazonaws.eclipse.core.AwsToolkitCore;
import com.amazonaws.eclipse.core.ui.WebLinkListener;
import com.amazonaws.eclipse.databinding.ChainValidator;
import com.amazonaws.eclipse.databinding.NotEmptyValidator;
import com.amazonaws.eclipse.databinding.RangeValidator;
public class CreateTableFirstPage extends WizardPage {
private final String OK_MESSAGE = "Configure new DynamoDB table";
private boolean usesRangeKey = false;
private Font italicFont;
private IObservableValue tableName;
private IObservableValue hashKeyName;
private IObservableValue hashKeyType;
private IObservableValue enableRangeKey;
private IObservableValue rangeKeyName;
private IObservableValue rangeKeyType;
private IObservableValue readCapacity;
private IObservableValue writeCapacity;
private final DataBindingContext bindingContext = new DataBindingContext();
@Override
public void dispose() {
    // SWT fonts are OS resources and must be released explicitly; the
    // italic font is only created when createControl() has run.
    if (italicFont != null)
        italicFont.dispose();
    super.dispose();
}
private static final long CAPACITY_UNIT_MINIMUM = 1;
private static final String[] DATA_TYPE_STRINGS = new String[] { "String", "Number", "Binary" };
/**
 * Creates the first wizard page and binds each observable field to the
 * matching property of the wizard's shared data model, so widget edits
 * flow directly into the model.
 *
 * @param wizard the owning wizard, which supplies the data model
 */
CreateTableFirstPage(CreateTableWizard wizard) {
    super("Configure table");
    setMessage(OK_MESSAGE);
    setImageDescriptor(AwsToolkitCore.getDefault().getImageRegistry().getDescriptor(AwsToolkitCore.IMAGE_AWS_LOGO));
    tableName = PojoObservables.observeValue(wizard.getDataModel(), "tableName");
    hashKeyName = PojoObservables.observeValue(wizard.getDataModel(), "hashKeyName");
    hashKeyType = PojoObservables.observeValue(wizard.getDataModel(), "hashKeyType");
    enableRangeKey = PojoObservables.observeValue(wizard.getDataModel(), "enableRangeKey");
    rangeKeyName = PojoObservables.observeValue(wizard.getDataModel(), "rangeKeyName");
    rangeKeyType = PojoObservables.observeValue(wizard.getDataModel(), "rangeKeyType");
    readCapacity = PojoObservables.observeValue(wizard.getDataModel(), "readCapacity");
    writeCapacity = PojoObservables.observeValue(wizard.getDataModel(), "writeCapacity");
}
@Override
public void createControl(Composite parent) {
Composite comp = new Composite(parent, SWT.NONE);
GridDataFactory.fillDefaults().grab(true, true).applyTo(comp);
GridLayoutFactory.fillDefaults().numColumns(2).applyTo(comp);
// Table name
Label tableNameLabel = new Label(comp, SWT.READ_ONLY);
tableNameLabel.setText("Table Name:");
final Text tableNameText = CreateTablePageUtil.newTextField(comp);
bindingContext.bindValue(SWTObservables.observeText(tableNameText, SWT.Modify), tableName);
ChainValidator<String> tableNameValidationStatusProvider = new ChainValidator<>(tableName, new NotEmptyValidator("Please provide a table name"));
bindingContext.addValidationStatusProvider(tableNameValidationStatusProvider);
// Hash key
Group hashKeyGroup = CreateTablePageUtil.newGroup(comp, "Hash Key", 2);
new Label(hashKeyGroup, SWT.READ_ONLY).setText("Hash Key Name:");
final Text hashKeyText = CreateTablePageUtil.newTextField(hashKeyGroup);
bindingContext.bindValue(SWTObservables.observeText(hashKeyText, SWT.Modify), hashKeyName);
ChainValidator<String> hashKeyNameValidationStatusProvider = new ChainValidator<>(hashKeyName, new NotEmptyValidator("Please provide an attribute name for the hash key"));
bindingContext.addValidationStatusProvider(hashKeyNameValidationStatusProvider);
new Label(hashKeyGroup, SWT.READ_ONLY).setText("Hash Key Type:");
final Combo hashKeyTypeCombo = new Combo(hashKeyGroup, SWT.DROP_DOWN | SWT.READ_ONLY);
hashKeyTypeCombo.setItems(DATA_TYPE_STRINGS);
bindingContext.bindValue(SWTObservables.observeSelection(hashKeyTypeCombo), hashKeyType);
hashKeyTypeCombo.select(0);
// Range key
Group rangeKeyGroup = CreateTablePageUtil.newGroup(comp, "Range Key", 2);
final Button enableRangeKeyButton = new Button(rangeKeyGroup, SWT.CHECK);
enableRangeKeyButton.setText("Enable Range Key");
GridDataFactory.fillDefaults().span(2, 1).applyTo(enableRangeKeyButton);
bindingContext.bindValue(SWTObservables.observeSelection(enableRangeKeyButton), enableRangeKey);
final Label rangeKeyAttributeLabel = new Label(rangeKeyGroup, SWT.READ_ONLY);
rangeKeyAttributeLabel.setText("Range Key Name:");
final Text rangeKeyText = CreateTablePageUtil.newTextField(rangeKeyGroup);
bindingContext.bindValue(SWTObservables.observeText(rangeKeyText, SWT.Modify), rangeKeyName);
ChainValidator<String> rangeKeyNameValidationStatusProvider = new ChainValidator<>(rangeKeyName, enableRangeKey, new NotEmptyValidator(
"Please provide an attribute name for the range key"));
bindingContext.addValidationStatusProvider(rangeKeyNameValidationStatusProvider);
final Label rangeKeyTypeLabel = new Label(rangeKeyGroup, SWT.READ_ONLY);
rangeKeyTypeLabel.setText("Range Key Type:");
final Combo rangeKeyTypeCombo = new Combo(rangeKeyGroup, SWT.DROP_DOWN | SWT.READ_ONLY);
rangeKeyTypeCombo.setItems(DATA_TYPE_STRINGS);
bindingContext.bindValue(SWTObservables.observeSelection(rangeKeyTypeCombo), rangeKeyType);
rangeKeyTypeCombo.select(0);
enableRangeKeyButton.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
usesRangeKey = enableRangeKeyButton.getSelection();
rangeKeyAttributeLabel.setEnabled(usesRangeKey);
rangeKeyText.setEnabled(usesRangeKey);
rangeKeyTypeLabel.setEnabled(usesRangeKey);
rangeKeyTypeCombo.setEnabled(usesRangeKey);
}
});
enableRangeKeyButton.setSelection(false);
rangeKeyAttributeLabel.setEnabled(usesRangeKey);
rangeKeyText.setEnabled(usesRangeKey);
rangeKeyTypeLabel.setEnabled(usesRangeKey);
rangeKeyTypeCombo.setEnabled(usesRangeKey);
FontData[] fontData = tableNameLabel.getFont().getFontData();
for (FontData fd : fontData) {
fd.setStyle(SWT.ITALIC);
}
italicFont = new Font(Display.getDefault(), fontData);
// Table throughput
Group throughputGroup = CreateTablePageUtil.newGroup(comp, "Table Throughput", 3);
new Label(throughputGroup, SWT.READ_ONLY).setText("Read Capacity Units:");
final Text readCapacityText = CreateTablePageUtil.newTextField(throughputGroup);
readCapacityText.setText("" + CAPACITY_UNIT_MINIMUM);
bindingContext.bindValue(SWTObservables.observeText(readCapacityText, SWT.Modify), readCapacity);
ChainValidator<Long> readCapacityValidationStatusProvider = new ChainValidator<>(
readCapacity, new RangeValidator(
"Please enter a read capacity of " + CAPACITY_UNIT_MINIMUM + " or more.", CAPACITY_UNIT_MINIMUM,
Long.MAX_VALUE));
bindingContext.addValidationStatusProvider(readCapacityValidationStatusProvider);
Label minimumReadCapacityLabel = new Label(throughputGroup, SWT.READ_ONLY);
minimumReadCapacityLabel.setText("(Minimum capacity " + CAPACITY_UNIT_MINIMUM + ")");
minimumReadCapacityLabel.setFont(italicFont);
new Label(throughputGroup, SWT.READ_ONLY).setText("Write Capacity Units:");
final Text writeCapacityText = CreateTablePageUtil.newTextField(throughputGroup);
writeCapacityText.setText("" + CAPACITY_UNIT_MINIMUM);
Label minimumWriteCapacityLabel = new Label(throughputGroup, SWT.READ_ONLY);
minimumWriteCapacityLabel.setText("(Minimum capacity " + CAPACITY_UNIT_MINIMUM + ")");
minimumWriteCapacityLabel.setFont(italicFont);
bindingContext.bindValue(SWTObservables.observeText(writeCapacityText, SWT.Modify), writeCapacity);
ChainValidator<Long> writeCapacityValidationStatusProvider = new ChainValidator<>(
writeCapacity, new RangeValidator(
"Please enter a write capacity of " + CAPACITY_UNIT_MINIMUM + " or more.", CAPACITY_UNIT_MINIMUM,
Long.MAX_VALUE));
bindingContext.addValidationStatusProvider(writeCapacityValidationStatusProvider);
final Label throughputCapacityLabel = new Label(throughputGroup, SWT.WRAP);
throughputCapacityLabel
.setText("Amazon DynamoDB will reserve the necessary machine resources to meet your throughput needs based on the read and write capacity specified with consistent, low-latency performance. You pay a flat, hourly rate based on the capacity you reserve.");
GridData gridData = new GridData(SWT.FILL, SWT.TOP, true, false);
gridData.horizontalSpan = 3;
gridData.widthHint = 200;
throughputCapacityLabel.setLayoutData(gridData);
throughputCapacityLabel.setFont(italicFont);
// Help info
String pricingLinkText = "<a href=\"" + "http://aws.amazon.com/dynamodb/#pricing" + "\">" + "More information on Amazon DynamoDB pricing</a>. ";
CreateTablePageUtil.newLink(new WebLinkListener(), pricingLinkText, throughputGroup);
// Finally provide aggregate status reporting for the entire wizard page
final AggregateValidationStatus aggregateValidationStatus = new AggregateValidationStatus(bindingContext, AggregateValidationStatus.MAX_SEVERITY);
aggregateValidationStatus.addChangeListener(new IChangeListener() {
@Override
public void handleChange(ChangeEvent event) {
Object value = aggregateValidationStatus.getValue();
if (value instanceof IStatus == false)
return;
IStatus status = (IStatus) value;
if (status.isOK()) {
setErrorMessage(null);
setMessage(OK_MESSAGE, Status.OK);
} else if (status.getSeverity() == Status.WARNING) {
setErrorMessage(null);
setMessage(status.getMessage(), Status.WARNING);
} else if (status.getSeverity() == Status.ERROR) {
setErrorMessage(status.getMessage());
}
setPageComplete(status.isOK());
}
});
setPageComplete(false);
setControl(comp);
}
}
|
stonewesley/bk-bcs | bcs-k8s/bcs-k8s-watch/pkg/kubefed/apis/core/typeconfig/interface.go | /*
* Tencent is pleased to support the open source community by making Blueking Container Service available.
* Copyright (C) 2019 THL A29 Limited, a Tencent company. All rights reserved.
* Licensed under the MIT License (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
* http://opensource.org/licenses/MIT
* Unless required by applicable law or agreed to in writing, software distributed under
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package typeconfig
import (
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
// Interface defines how to interact with a FederatedTypeConfig
type Interface interface {
	// GetObjectMeta returns the object metadata of the
	// FederatedTypeConfig resource itself.
	GetObjectMeta() metav1.ObjectMeta
	// GetTargetType returns the API resource this config targets.
	GetTargetType() metav1.APIResource
	// GetNamespaced reports whether the target type is namespace-scoped.
	GetNamespaced() bool
	// GetPropagationEnabled reports whether propagation is enabled
	// for this type.
	GetPropagationEnabled() bool
	// GetFederatedType returns the federated API resource for this config.
	GetFederatedType() metav1.APIResource
	// GetStatusType returns the status API resource; presumably nil when
	// no status resource is configured — TODO confirm with implementations.
	GetStatusType() *metav1.APIResource
	// GetStatusEnabled reports whether status handling is enabled.
	GetStatusEnabled() bool
	// GetFederatedNamespaced reports whether the federated type is
	// namespace-scoped.
	GetFederatedNamespaced() bool
	// IsNamespace reports whether the target type is the Namespace resource.
	IsNamespace() bool
}
|
TheRealHaui/jetty.project | jetty-util/src/test/java/org/eclipse/jetty/util/ScannerTest.java | <reponame>TheRealHaui/jetty.project
//
// ========================================================================
// Copyright (c) 1995-2019 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
//
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
//
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
//
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
//
package org.eclipse.jetty.util;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.condition.OS.WINDOWS;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

import org.eclipse.jetty.toolchain.test.FS;
import org.eclipse.jetty.toolchain.test.MavenTestingUtils;
import org.eclipse.jetty.util.Scanner.Notification;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.DisabledIfSystemProperty;
import org.junit.jupiter.api.condition.DisabledOnOs;
/**
 * Tests for {@link Scanner}: verifies that discrete ADDED/CHANGED/REMOVED
 * notifications are only reported once a file has been stable across two
 * consecutive scans.
 *
 * Fix over the original: null-checks on polled events now use
 * {@code assertNotNull}/{@code assertNull} instead of
 * {@code assertTrue(event != null)} so a failure reports a meaningful message.
 */
public class ScannerTest
{
    static File _directory;
    static Scanner _scanner;
    // Discrete per-file notifications captured by the listener below.
    static BlockingQueue<Event> _queue = new LinkedBlockingQueue<>();
    // Batched filename lists captured by the bulk listener.
    static BlockingQueue<List<String>> _bulk = new LinkedBlockingQueue<>();

    @BeforeAll
    public static void setUpBeforeClass() throws Exception
    {
        File testDir = MavenTestingUtils.getTargetTestingDir(ScannerTest.class.getSimpleName());
        FS.ensureEmpty(testDir);

        // Use full path, pointing to a real directory (for FileSystems that are
        // case-insensitive, like Windows and OSX). This is only needed for the
        // various comparisons below to make sense.
        _directory = testDir.toPath().toRealPath().toFile();

        _scanner = new Scanner();
        _scanner.addScanDir(_directory);
        _scanner.setScanInterval(0);
        _scanner.addListener(new Scanner.DiscreteListener()
        {
            @Override
            public void fileRemoved(String filename) throws Exception
            {
                _queue.add(new Event(filename, Notification.REMOVED));
            }

            @Override
            public void fileChanged(String filename) throws Exception
            {
                _queue.add(new Event(filename, Notification.CHANGED));
            }

            @Override
            public void fileAdded(String filename) throws Exception
            {
                _queue.add(new Event(filename, Notification.ADDED));
            }
        });
        _scanner.addListener(new Scanner.BulkListener()
        {
            @Override
            public void filesChanged(List<String> filenames) throws Exception
            {
                _bulk.add(filenames);
            }
        });
        _scanner.start();
        _scanner.scan();

        // An empty directory must produce no notifications.
        assertTrue(_queue.isEmpty());
        assertTrue(_bulk.isEmpty());
    }

    @AfterAll
    public static void tearDownAfterClass() throws Exception
    {
        _scanner.stop();
        IO.delete(_directory);
    }

    /** Pairs a filename with the notification the scanner reported for it. */
    static class Event
    {
        String _filename;
        Scanner.Notification _notification;

        public Event(String filename, Notification notification)
        {
            _filename = filename;
            _notification = notification;
        }
    }

    @Test
    @DisabledOnOs(WINDOWS) // TODO: needs review
    @DisabledIfSystemProperty(named = "env", matches = "ci") // TODO: SLOW, needs review
    public void testAddedChangeRemove() throws Exception
    {
        touch("a0");

        // takes 2 scans to notice a0 and check that it is stable
        _scanner.scan();
        _scanner.scan();
        Event event = _queue.poll();
        assertNotNull(event, "Event should not be null");
        assertEquals(_directory + "/a0", event._filename);
        assertEquals(Notification.ADDED, event._notification);

        // add 3 more files
        Thread.sleep(1100); // make sure time in seconds changes
        touch("a1");
        touch("a2");
        touch("a3");

        // not stable after 1 scan so should not be seen yet.
        _scanner.scan();
        event = _queue.poll();
        assertNull(event);

        // Keep a2 unstable and remove a3 before it stabilized
        Thread.sleep(1100); // make sure time in seconds changes
        touch("a2");
        delete("a3");

        // only a1 is stable so it should be seen.
        _scanner.scan();
        event = _queue.poll();
        assertNotNull(event);
        assertEquals(_directory + "/a1", event._filename);
        assertEquals(Notification.ADDED, event._notification);
        assertTrue(_queue.isEmpty());

        // Now a2 is stable
        _scanner.scan();
        event = _queue.poll();
        assertNotNull(event);
        assertEquals(_directory + "/a2", event._filename);
        assertEquals(Notification.ADDED, event._notification);
        assertTrue(_queue.isEmpty());

        // We never see a3 as it was deleted before it stabilized

        // touch a1 and a2
        Thread.sleep(1100); // make sure time in seconds changes
        touch("a1");
        touch("a2");

        // not stable after 1 scan so nothing should be seen yet.
        _scanner.scan();
        event = _queue.poll();
        assertNull(event);

        // Keep a2 unstable
        Thread.sleep(1100); // make sure time in seconds changes
        touch("a2");

        // only a1 is stable so it should be seen.
        _scanner.scan();
        event = _queue.poll();
        assertNotNull(event);
        assertEquals(_directory + "/a1", event._filename);
        assertEquals(Notification.CHANGED, event._notification);
        assertTrue(_queue.isEmpty());

        // Now a2 is stable
        _scanner.scan();
        event = _queue.poll();
        assertNotNull(event);
        assertEquals(_directory + "/a2", event._filename);
        assertEquals(Notification.CHANGED, event._notification);
        assertTrue(_queue.isEmpty());

        // delete a1 and a2
        delete("a1");
        delete("a2");

        // not stable after 1 scan so nothing should be seen yet.
        _scanner.scan();
        event = _queue.poll();
        assertNull(event);

        // re-add a2
        touch("a2");

        // only a1 is stable so it should be seen.
        _scanner.scan();
        event = _queue.poll();
        assertNotNull(event);
        assertEquals(_directory + "/a1", event._filename);
        assertEquals(Notification.REMOVED, event._notification);
        assertTrue(_queue.isEmpty());

        // Now a2 is stable and is a changed file rather than a remove
        _scanner.scan();
        event = _queue.poll();
        assertNotNull(event);
        assertEquals(_directory + "/a2", event._filename);
        assertEquals(Notification.CHANGED, event._notification);
        assertTrue(_queue.isEmpty());
    }

    @Test
    @DisabledOnOs(WINDOWS) // TODO: needs review
    public void testSizeChange() throws Exception
    {
        touch("tsc0");
        _scanner.scan();
        _scanner.scan();

        // takes 2 scans to notice tsc0 and check that it is stable.
        // This syncs us with the scan.
        Event event = _queue.poll();
        assertNotNull(event);
        assertEquals(_directory + "/tsc0", event._filename);
        assertEquals(Notification.ADDED, event._notification);

        // Create a new file by writing to it.
        long now = TimeUnit.NANOSECONDS.toMillis(System.nanoTime());
        File file = new File(_directory, "st");
        try (OutputStream out = new FileOutputStream(file, true))
        {
            out.write('x');
            out.flush();
            file.setLastModified(now);

            // Not stable yet so no notification.
            _scanner.scan();
            event = _queue.poll();
            assertNull(event);

            // Modify size only
            out.write('x');
            out.flush();
            file.setLastModified(now);

            // Still not stable yet so no notification.
            _scanner.scan();
            event = _queue.poll();
            assertNull(event);

            // now stable so finally see the ADDED
            _scanner.scan();
            event = _queue.poll();
            assertNotNull(event);
            assertEquals(_directory + "/st", event._filename);
            assertEquals(Notification.ADDED, event._notification);

            // Modify size only
            out.write('x');
            out.flush();
            file.setLastModified(now);

            // Still not stable yet so no notification.
            _scanner.scan();
            event = _queue.poll();
            assertNull(event);

            // now stable so finally see the CHANGED
            _scanner.scan();
            event = _queue.poll();
            assertNotNull(event);
            assertEquals(_directory + "/st", event._filename);
            assertEquals(Notification.CHANGED, event._notification);
        }
    }

    /** Deletes the named file from the scan directory if it exists. */
    private void delete(String string) throws IOException
    {
        File file = new File(_directory, string);
        if (file.exists())
            IO.delete(file);
    }

    /** Creates the named file, or bumps its modification time if it exists. */
    private void touch(String string) throws IOException
    {
        File file = new File(_directory, string);
        if (file.exists())
            file.setLastModified(TimeUnit.NANOSECONDS.toMillis(System.nanoTime()));
        else
            file.createNewFile();
    }
}
|
MaritimeCloud/Sandbox | mc-identityregistry-core/src/main/java/net/maritimecloud/identityregistry/repositories/CertificateRepository.java | <filename>mc-identityregistry-core/src/main/java/net/maritimecloud/identityregistry/repositories/CertificateRepository.java
/* Copyright 2016 Danish Maritime Authority.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.maritimecloud.identityregistry.repositories;
import java.util.List;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;
import net.maritimecloud.identityregistry.model.Certificate;
import net.maritimecloud.identityregistry.model.Device;
import net.maritimecloud.identityregistry.model.Vessel;
import net.maritimecloud.identityregistry.model.User;
/**
 * Spring Data repository for {@link Certificate} entities, keyed by certificate id.
 *
 * NOTE(review): the derived-query method names use lowercase property prefixes
 * ("findByvessel" etc.); renaming them to conventional camelCase would break
 * existing callers, so they are documented as-is.
 */
public interface CertificateRepository extends CrudRepository<Certificate, Long> {
    /** Returns all certificates associated with the given vessel. */
    List<Certificate> findByvessel(Vessel vessel);

    /** Returns all certificates associated with the given device. */
    List<Certificate> findBydevice(Device device);

    /** Returns all certificates associated with the given user. */
    List<Certificate> findByuser(User user);

    /**
     * Returns certificates flagged as revoked (revoked = 1) whose validity
     * window [start, end] includes the current date — presumably the set
     * needed for CRL generation; verify against callers.
     */
    @Query("SELECT c FROM Certificate c WHERE c.revoked=1 AND CURDATE() BETWEEN c.start AND c.end")
    List<Certificate> findRevoked();
}
|
ayompedb/culper | api/account_test.go | package api
import (
"database/sql"
"fmt"
"testing"
"time"
)
// TestBasicAuthentication verifies that a hashed password matches its
// original plaintext and that a different plaintext is rejected.
//
// Bug fix: the original compared against the exact same literal as the
// stored password in the "incorrect password" check, so the mismatch
// path could never be exercised.
func TestBasicAuthentication(t *testing.T) {
	username := fmt.Sprintf("user-%v", time.Now().Unix())
	password := "<PASSWORD>"

	account := &Account{
		Username:    username,
		Firstname:   "Admin",
		Lastname:    "Last",
		FormType:    "SF86",
		FormVersion: "2017-07",
	}

	membership := &BasicAuthMembership{
		AccountID: account.ID,
		Account:   account,
	}
	membership.HashPassword(password)

	if matched := membership.PasswordMatch(password); !matched {
		t.Fatal("Expected password to match")
	}

	// A string guaranteed to differ from the stored password.
	if matched := membership.PasswordMatch("wrong-" + password); matched {
		t.Fatal("Expected incorrect password")
	}
}
// TestValidFormType exercises Account.FormTypeIsKnown against a table of
// known-good and known-bad form type/version combinations.
func TestValidFormType(t *testing.T) {
	account := &Account{
		Username:    "<EMAIL>",
		Email:       sql.NullString{},
		FormType:    "SF86",
		FormVersion: "2017-07",
	}

	cases := []struct {
		sftype    string
		sfversion string
		valid     bool
	}{
		{"SF86", "2017-07", true},
		{"SF 86", "2017-07", false},
		{"SF86", "2036-11", false},
		{"SF85", "2017-12-draft7", true},
		{"SF85", "2017-07", false},
	}

	for _, tc := range cases {
		account.FormType = tc.sftype
		account.FormVersion = tc.sfversion
		if tc.valid != account.FormTypeIsKnown() {
			helperWord := "should"
			if !tc.valid {
				helperWord += " not"
			}
			t.Logf("%s + %s %s be valid", tc.sftype, tc.sfversion, helperWord)
			t.Fail()
		}
	}
}
// TestAccountUsernameValidation verifies that validate rejects an account
// with an empty username.
func TestAccountUsernameValidation(t *testing.T) {
	account := &Account{
		Username:    "",
		Email:       sql.NullString{},
		FormType:    "SF86",
		FormVersion: "2017-07",
	}

	// t.Errorf already marks the test failed; the original's extra
	// t.Fail() call was redundant and has been removed.
	if err := account.validate(); err == nil {
		t.Errorf("expected a Missing Username error")
	}
}
// TestAccountFormTypeValidation verifies that validate rejects an account
// whose form type is not a known form.
func TestAccountFormTypeValidation(t *testing.T) {
	account := &Account{
		Username:    "<EMAIL>",
		Email:       sql.NullString{},
		FormType:    "Dogs",
		FormVersion: "2017-07",
	}

	if err := account.validate(); err == nil {
		t.Errorf("expected a Known Form error")
		t.Fail()
	}
}
// TestGetAccountID verifies that GetID returns the ID the account was
// constructed with.
func TestGetAccountID(t *testing.T) {
	const wantID = 148958398

	account := &Account{
		Username:    "<EMAIL>",
		Email:       sql.NullString{},
		FormType:    "Dogs",
		FormVersion: "2017-07",
		ID:          wantID,
	}

	if account.GetID() != wantID {
		t.Errorf("incorrect account ID")
		t.Fail()
	}
}
// TestSetAccountID verifies that SetID overwrites a previously assigned ID.
func TestSetAccountID(t *testing.T) {
	const newID = 148958398

	account := &Account{
		Username:    "<EMAIL>",
		Email:       sql.NullString{},
		FormType:    "Dogs",
		FormVersion: "2017-07",
		ID:          1835833,
	}

	account.SetID(newID)
	if account.ID != newID {
		t.Errorf("account ID set failure")
		t.Fail()
	}
}
// TestSubmitAccount verifies that an account already in the submitted
// state cannot be submitted again.
func TestSubmitAccount(t *testing.T) {
	submitted := &Account{
		Username:    "<EMAIL>",
		Email:       sql.NullString{},
		FormType:    "Dogs",
		FormVersion: "2017-07",
		Status:      StatusSubmitted,
	}

	if submitted.CanSubmit() {
		t.Errorf("Expect to be unable to submit account")
		t.Fail()
	}
}
// TestUnsubmitAccount verifies that Unsubmit resets the account status
// back to incomplete.
func TestUnsubmitAccount(t *testing.T) {
	acct := &Account{
		Username:    "<EMAIL>",
		Email:       sql.NullString{},
		FormType:    "Dogs",
		FormVersion: "2017-07",
	}

	acct.Unsubmit()
	if acct.Status != StatusIncomplete {
		t.Errorf("Expect to be have set account status to incomplete")
		t.Fail()
	}
}
// TestDefaultFormVersion verifies that the default version for SF86 is
// "2017-07".
func TestDefaultFormVersion(t *testing.T) {
	form, err := DefaultFormVersion("SF86")
	if err != nil {
		t.Log("Error finding default form: ", err)
		t.Fail()
	}
	if form != "2017-07" {
		// Fixed message: the expected value is the version "2017-07",
		// not the form name "SF86" as the original message claimed.
		t.Errorf("Returned wrong form version, expected 2017-07, got %v.", form)
	}
}
// TestDefaultFormVersionFail verifies that an unknown form type yields an
// error. (Fixed typo in the log message: "fine" -> "find".)
func TestDefaultFormVersionFail(t *testing.T) {
	_, err := DefaultFormVersion("SF101")
	if err == nil {
		t.Log("Expected to find an error, did not receive one")
		t.Fail()
	}
}
|
repir/repirapps | src2/main/java/util/mvIndex.java | <reponame>repir/repirapps<gh_stars>0
package util;
import org.apache.hadoop.fs.FileSystem;
import io.github.repir.Repository.Repository;
import io.github.htools.search.ByteSearch;
import io.github.htools.io.Datafile;
import io.github.htools.io.HDFSPath;
import io.github.repir.MapReduceTools.RRConfiguration;
import io.github.htools.lib.Log;
import java.io.IOException;
/**
 * Command-line tool that renames (moves) an existing repository index on
 * HDFS: the base directory is moved, prefixed files inside it are renamed,
 * and the configuration file is rewritten with the new prefix.
 *
 * @author <NAME>
 */
public class mvIndex {

    public static Log log = new Log(mvIndex.class);

    public static void main(String[] args) throws IOException {
        // "newindex" is the configuration key holding the destination name.
        Repository repository = new Repository(args, "newindex");
        RRConfiguration conf = repository.getConf();
        String newname = conf.get("newindex");
        // NOTE(review): fs is never used below — candidate for removal.
        FileSystem fs = repository.getFS();
        // Build a second Repository handle pointing at the new name.
        Repository newrepository = new Repository(conf);
        newrepository.changeName(newname);
        // Refuse to clobber an existing repository at the destination.
        if (newrepository.exists()) {
            log.fatal("Directory %s exists, please remove", newrepository.getBaseDir().toString());
        }
        HDFSPath sourcedir = repository.getBaseDir();
        HDFSPath destdir = newrepository.getBaseDir();
        // Move the whole tree, then rename prefixed files within it.
        sourcedir.move(destdir);
        mv(destdir, repository.getPrefix(), newrepository.getPrefix());
        newrepository.writeConfiguration();
        // Rewrite the original config file, replacing whole-word occurrences
        // of the old prefix (regex uses lookaround to avoid partial matches).
        Datafile fsfilein = RRConfiguration.configfile(args[0]);
        String content = fsfilein.readAsString();
        ByteSearch needle = ByteSearch.create("(?<=\\W)" + repository.getPrefix() + "($|(?=\\W))");
        String content1 = needle.replaceAll(content, newrepository.getPrefix());
        fsfilein.close();
        // Write the rewritten config under a path carrying the new prefix.
        Datafile fsfileout = new Datafile(fsfilein.getCanonicalPath().replaceAll(repository.getPrefix(), newrepository.getPrefix()));
        fsfileout.printf("%s", content1);
        fsfileout.close();
    }

    /**
     * Recursively renames files matching "{sourceprefix}*" to
     * "{destprefix}*" in dir and every subdirectory.
     */
    public static void mv(HDFSPath dir, String sourceprefix, String destprefix) throws IOException {
        dir.move(dir, sourceprefix + "*", destprefix + "*");
        for (HDFSPath d : dir.getDirs()) {
            mv(d, sourceprefix, destprefix);
        }
    }
}
|
bobbrow/cpp-docs | docs/parallel/concrt/codesnippet/CPP/how-to-use-a-message-block-filter_3.cpp | // primes-filter.cpp
// compile with: /EHsc
#include <agents.h>
#include <algorithm>
#include <iostream>
#include <random>
using namespace concurrency;
using namespace std;
// Determines whether the input value is prime.
// Improvement over the original O(n) scan: trial division only needs to
// test odd divisors up to sqrt(n) — any factor above the square root is
// paired with one below it.
bool is_prime(unsigned long n)
{
    if (n < 2)
        return false;
    if (n % 2 == 0)
        return n == 2;   // 2 is the only even prime
    for (unsigned long i = 3; i * i <= n; i += 2)
    {
        if ((n % i) == 0)
            return false;
    }
    return true;
}
// Illustrates usage of a message buffer that does not use filtering.
void count_primes(unsigned long random_seed)
{
// Holds prime numbers.
vector<unsigned long> primes;
// Adds numbers that are prime to the vector object.
transformer<unsigned long, unsigned long> t([&primes](unsigned long n) -> unsigned long
{
if (is_prime(n))
{
primes.push_back(n);
}
return n;
});
// Send random values to the message buffer.
mt19937 generator(random_seed);
for (int i = 0; i < 20; ++i)
{
send(t, static_cast<unsigned long>(generator()%10000));
}
// Receive from the message buffer the same number of times
// to ensure that the message buffer has processed each message.
for (int i = 0; i < 20; ++i)
{
receive(t);
}
// Print the prime numbers to the console.
wcout << L"The following numbers are prime: " << endl;
for(unsigned long prime : primes)
{
wcout << prime << endl;
}
}
// Illustrates usage of a message buffer that uses filtering.
void count_primes_filter(unsigned long random_seed)
{
// Accepts numbers that are prime.
transformer<unsigned long, unsigned long> t([](unsigned long n) -> unsigned long
{
// The filter function guarantees that the input value is prime.
// Return the input value.
return n;
},
nullptr,
[](unsigned long n) -> bool
{
// Filter only values that are prime.
return is_prime(n);
});
// Send random values to the message buffer.
mt19937 generator(random_seed);
size_t prime_count = 0;
for (int i = 0; i < 20; ++i)
{
if (send(t, static_cast<unsigned long>(generator()%10000)))
{
++prime_count;
}
}
// Print the prime numbers to the console.
wcout << L"The following numbers are prime: " << endl;
while (prime_count-- > 0)
{
wcout << receive(t) << endl;
}
}
// Runs both demonstrations with the same seed so their prime output is
// identical: 9973 9349 9241 8893 1297 7127 8647 3229.
int wmain()
{
    constexpr unsigned long random_seed = 99714;

    wcout << L"Without filtering:" << endl;
    count_primes(random_seed);

    wcout << L"With filtering:" << endl;
    count_primes_filter(random_seed);
}
Tom-CaoZH/xdoj_2021 | 2022/134.c | <filename>2022/134.c
#include<stdio.h>
/*
 * Reads a positive integer from stdin and prints its decimal digits in
 * descending order, each followed by a space.
 *
 * Fixes over the original: the digit buffer is a local instead of a
 * needless global, the stray double semicolon is gone, and a failed
 * scanf no longer leaves n uninitialized (undefined behavior).
 */
int main(void) {
    int num[10]; /* a 32-bit int has at most 10 decimal digits */
    int n;
    if (scanf("%d", &n) != 1) {
        return 1; /* no valid integer on stdin */
    }

    /* Extract decimal digits, least significant first. */
    int count = 0;
    while (n > 0) {
        num[count++] = n % 10;
        n /= 10;
    }

    /* Selection sort into descending order; at most 10 elements. */
    for (int i = 0; i < count; ++i) {
        for (int j = i + 1; j < count; ++j) {
            if (num[i] < num[j]) {
                int tmp = num[i];
                num[i] = num[j];
                num[j] = tmp;
            }
        }
    }

    for (int i = 0; i < count; ++i) {
        printf("%d ", num[i]);
    }
    return 0;
}
MikeOwino/blade | packages/blade-old/src/molecules/BottomSheet/__tests__/BottomSheet.native.test.js | import React from 'react';
import BottomSheet from '../BottomSheet';
import { renderWithTheme } from '../../../_helpers/testing';
import View from '../../../atoms/View';
import Text from '../../../atoms/Text';
// Device names used as sample row content throughout the tests.
const list = [
  'Apple', 'Asus', 'Blackberry', 'Honor', 'HTC', 'Huawei', 'Lava', 'Lenovo',
  'LG', 'Motorola', 'Nexus', 'Nokia', 'OnePlus', 'Oppo', 'Panasonic', 'Pixel',
  'Realme', 'Samsung', 'Sony', 'Toshiba', 'Vivo', 'Xiaomi',
];

// Two identically-populated sections for the SectionList test cases.
export const sections = ['Section1', 'Section2'].map((title) => ({
  title,
  data: list,
}));
// Suppress console.error output for the duration of the suite (several
// tests below intentionally trigger render errors), then restore all
// mocks when the suite finishes.
beforeAll(() => {
  jest.spyOn(console, 'error').mockImplementation();
});

afterAll(() => {
  jest.restoreAllMocks();
});
// Snapshot and error-contract tests for <BottomSheet />. Code left
// untouched: every rendering test is a snapshot test, so any change to
// the JSX or strings would change observable behavior.
describe('<BottomSheet />', () => {
  // Fresh mock per test so call expectations never leak between cases.
  let mockOnClose;

  beforeEach(() => {
    mockOnClose = jest.fn();
  });

  it('renders default BottomSheet', () => {
    const { container } = renderWithTheme(<BottomSheet visible={true} onClose={mockOnClose} />);
    expect(container).toMatchSnapshot();
  });

  it('renders BottomSheet with Header, Footer and Content Defined', () => {
    const { container } = renderWithTheme(
      <BottomSheet visible={true} onClose={mockOnClose}>
        <BottomSheet.Header>
          <View>
            <Text>Header</Text>
          </View>
        </BottomSheet.Header>
        <BottomSheet.Content>
          <View>
            <Text>Bottomsheet content</Text>
          </View>
        </BottomSheet.Content>
        <BottomSheet.Footer>
          <View>
            <Text>Footer</Text>
          </View>
        </BottomSheet.Footer>
      </BottomSheet>,
    );
    expect(container).toMatchSnapshot();
  });

  it('renders bottomsheet when initialHeight prop is passed', () => {
    const { container } = renderWithTheme(
      <BottomSheet visible={true} initialHeight={300} onClose={() => {}}>
        <BottomSheet.Header>
          <View>
            <Text>Header</Text>
          </View>
        </BottomSheet.Header>
        <BottomSheet.Content>
          <View>
            <Text>Bottomsheet content</Text>
          </View>
        </BottomSheet.Content>
        <BottomSheet.Footer>
          <View>
            <Text>Footer</Text>
          </View>
        </BottomSheet.Footer>
      </BottomSheet>,
    );
    expect(container).toMatchSnapshot();
  });

  it('renders linear-gradient view at bottom when content height is more than open bottomsheet height', () => {
    // Expand the 22-entry device list into {id, name} rows so the content
    // overflows the open sheet height.
    const data = new Array(list.length).fill({}).map((item, index) => ({
      id: index,
      name: list[index],
    }));
    const { container } = renderWithTheme(
      <BottomSheet visible={true} onClose={mockOnClose}>
        <BottomSheet.Header>
          <View>
            <Text>Header</Text>
          </View>
        </BottomSheet.Header>
        <BottomSheet.Content>
          {data.map((item) => (
            <View key={item.id}>
              <Text>{item.name}</Text>
            </View>
          ))}
        </BottomSheet.Content>
        <BottomSheet.Footer>
          <View>
            <Text>Footer</Text>
          </View>
        </BottomSheet.Footer>
      </BottomSheet>,
    );
    expect(container).toMatchSnapshot();
  });

  it('renders non-scrollable bottomsheet', () => {
    // A short list combined with adjustToContentHeight keeps the sheet
    // sized to its content (no internal scrolling).
    const nonScrollableList = [
      'Samsung',
      'Xiaomi',
      'OnePlus',
      'Apple',
      'Vivo',
      'Oppo',
      'Lenovo',
      'LG',
      'Nokia',
      'HTC',
    ];
    const data = new Array(nonScrollableList.length).fill({}).map((item, index) => ({
      id: index,
      name: nonScrollableList[index],
    }));
    const { container } = renderWithTheme(
      <BottomSheet visible={true} onClose={mockOnClose} adjustToContentHeight>
        <BottomSheet.Header>
          <View>
            <Text>Header</Text>
          </View>
        </BottomSheet.Header>
        <BottomSheet.Content>
          {data.map((item) => (
            <View key={item.id}>
              <Text>{item.name}</Text>
            </View>
          ))}
        </BottomSheet.Content>
        <BottomSheet.Footer>
          <View>
            <Text>Footer</Text>
          </View>
        </BottomSheet.Footer>
      </BottomSheet>,
    );
    expect(container).toMatchSnapshot();
  });

  it('should throw error when multiple Headers are passed to bottomsheet', () => {
    expect(() =>
      renderWithTheme(
        <BottomSheet visible={true} onClose={mockOnClose}>
          <BottomSheet.Header>
            <View>
              <Text>Header1</Text>
            </View>
          </BottomSheet.Header>
          <BottomSheet.Header>
            <View>
              <Text>Header2</Text>
            </View>
          </BottomSheet.Header>
          <BottomSheet.Content>
            <View>
              <Text>Bottomsheet content</Text>
            </View>
          </BottomSheet.Content>
          <BottomSheet.Footer>
            <View>
              <Text>Footer</Text>
            </View>
          </BottomSheet.Footer>
        </BottomSheet>,
      ),
    ).toThrow('expected to have single `BottomSheet.Header` but found 2');
  });

  it('should throw error when multiple Footer are passed to bottomsheet', () => {
    expect(() =>
      renderWithTheme(
        <BottomSheet visible={true} onClose={mockOnClose}>
          <BottomSheet.Header>
            <View>
              <Text>Header1</Text>
            </View>
          </BottomSheet.Header>
          <BottomSheet.Content>
            <View>
              <Text>Bottomsheet content</Text>
            </View>
          </BottomSheet.Content>
          <BottomSheet.Footer>
            <View>
              <Text>Footer1</Text>
            </View>
          </BottomSheet.Footer>
          <BottomSheet.Footer>
            <View>
              <Text>Footer2</Text>
            </View>
          </BottomSheet.Footer>
        </BottomSheet>,
      ),
    ).toThrow('expected to have single `BottomSheet.Footer` but found 2');
  });

  it('should throw error when onClose method callback is not passed', () => {
    // NOTE(review): this tree also contains two footers; the onClose
    // validation evidently fires before the footer-count check — verify.
    expect(() =>
      renderWithTheme(
        <BottomSheet visible={true}>
          <BottomSheet.Header>
            <View>
              <Text>Header1</Text>
            </View>
          </BottomSheet.Header>
          <BottomSheet.Content>
            <View>
              <Text>Bottomsheet content</Text>
            </View>
          </BottomSheet.Content>
          <BottomSheet.Footer>
            <View>
              <Text>Footer1</Text>
            </View>
          </BottomSheet.Footer>
          <BottomSheet.Footer>
            <View>
              <Text>Footer2</Text>
            </View>
          </BottomSheet.Footer>
        </BottomSheet>,
      ),
    ).toThrow('expected onClose prop for `BottomSheet`');
  });

  it('renders BottomSheet with SectionList', () => {
    const { container } = renderWithTheme(
      <BottomSheet visible={true} initialHeight={300} onClose={() => {}}>
        <BottomSheet.SectionList
          sections={sections}
          renderItem={({ item }) => (
            <View>
              <Text>{item}</Text>
            </View>
          )}
          renderSectionHeader={({ section }) => (
            <View>
              <Text>{section.title}</Text>
            </View>
          )}
          keyExtractor={(item) => item}
        />
      </BottomSheet>,
    );
    expect(container).toMatchSnapshot();
  });

  it('renders BottomSheet with SectionList and Footer', () => {
    const { container } = renderWithTheme(
      <BottomSheet visible={true} initialHeight={300} onClose={() => {}}>
        <BottomSheet.SectionList
          sections={sections}
          renderItem={({ item }) => (
            <View>
              <Text>{item}</Text>
            </View>
          )}
          renderSectionHeader={({ section }) => (
            <View>
              <Text>{section.title}</Text>
            </View>
          )}
          keyExtractor={(item) => item}
        />
        <BottomSheet.Footer>
          <View>
            <Text>Footer1</Text>
          </View>
        </BottomSheet.Footer>
      </BottomSheet>,
    );
    expect(container).toMatchSnapshot();
  });

  it('should throw error when both Content and SectionList are provided', () => {
    expect(() =>
      renderWithTheme(
        <BottomSheet visible={true} initialHeight={300} onClose={() => {}}>
          <BottomSheet.SectionList
            sections={sections}
            renderItem={({ item }) => (
              <View>
                <Text>{item}</Text>
              </View>
            )}
            renderSectionHeader={({ section }) => (
              <View>
                <Text>{section.title}</Text>
              </View>
            )}
            keyExtractor={(item) => item}
          />
          <BottomSheet.Content>
            <View>
              <Text>Bottomsheet content</Text>
            </View>
          </BottomSheet.Content>
        </BottomSheet>,
      ),
    ).toThrow(
      'expected to have one of `BottomSheet.Content or BottomSheet.SectionList` but found both',
    );
  });

  it('should throw error when multiple SectionList are provided', () => {
    expect(() =>
      renderWithTheme(
        <BottomSheet visible={true} initialHeight={300} onClose={() => {}}>
          <BottomSheet.SectionList
            sections={sections}
            renderItem={({ item }) => (
              <View>
                <Text>{item}</Text>
              </View>
            )}
            renderSectionHeader={({ section }) => (
              <View>
                <Text>{section.title}</Text>
              </View>
            )}
            keyExtractor={(item) => item}
          />
          <BottomSheet.SectionList
            sections={sections}
            renderItem={({ item }) => (
              <View>
                <Text>{item}</Text>
              </View>
            )}
            renderSectionHeader={({ section }) => (
              <View>
                <Text>{section.title}</Text>
              </View>
            )}
            keyExtractor={(item) => item}
          />
        </BottomSheet>,
      ),
      // NOTE(review): the doubled "but found but found 2" matches the
      // component's current error string — confirm whether intentional.
    ).toThrow('expected to have single `BottomSheet.SectionList` but found but found 2');
  });
});
|
merj4/merj | server/seeder.js | <reponame>merj4/merj<filename>server/seeder.js
var db = require('./db.js');

// Run `node seeder.js` to populate the database with sample data.

// Model handles for each table defined in db.js.
var User = db.User;
var Event = db.Event;
// var Message = db.Message;
var EventParticipant = db.EventParticipant;

// Crude startup delay: wait 2000ms so the tables above have been created
// before seeding begins (no readiness signal is available here).
setTimeout(function() {
  seedData();
}, 2000);
//add data to database
var seedData = function() {
// Drop it like it's hot
console.log('Dropping and re-creating tables');
Event.sync({force: true})
.then(() => User.sync({force: true}))
// .then(() => Message.sync({force: true}))
.then(() => EventParticipant.sync({force: true}))
// Insert default events
.then(() => Event.create({
location: "Santa Barbara, CA",
date: "Jul. 11, 2017",
title: "Sandy Barbie",
time: "2:00 pm",
category: "Outdoor",
description: "Come out to the beach for some Australian BBQ",
image: "http://www.unique-canvas.com/media/images/popup/meer-und-traumstraende-fotografie--fotomotiv-beach-party--816406.jpg"
}))
.then(() => Event.create({
location: "Lake Tahoe, CA",
date: "Apr. 30, 2017",
title: "A Sunday Hike",
time: "10:00 am",
category: "Outdoor",
description: "Take a hike to the lakeside and enjoy company, food and sand castles",
image: "https://s-media-cache-ak0.pinimg.com/originals/62/f0/48/62f04865010d469b7f44dac815d916df.jpg"
}))
.then(() => Event.create({
location: "Berkeley, CA",
date: "May. 6, 2017",
title: "Berkeley Babes",
time: "4:00 pm",
category: "Outdoor",
description: "Get rekt. Hype. Bonfire. Beach. Drinks. Food.",
image: "http://media.new.mensxp.com/media/content/2015/May/wildestbeachpartydestinationsintheworldforsinglemen0_1431689937_980x457.jpg"
}))
.then(() => Event.create({
location: "Santa Monica, CA",
date: "May. 5, 2017",
title: "Arcade Game Night",
time: "8:00 pm",
category: "Outdoor",
description: "Release your inner child and let loose on your favorite arcade games!",
image: "http://i.huffpost.com/gen/1556614/images/o-SANTA-MONICA-facebook.jpg"
}))
.then(() => Event.create({
location: "San Francisco, CA",
date: "May. 5, 2017",
title: "Cinco de San Fun-cisco",
time: "5:00 pm",
category: "Outdoor",
description: "Celebrate cerveza, tequila and burritos!",
image: "http://cdn.sosueme.ie/wp-content/uploads/2016/01/1_best_party_beach_South_Beach_Miami_Florida-e1399908931396.jpg"
}))
.then(() => Event.create({
location: "Malibu, CA",
date: "June. 10, 2017",
title: "Be free, be young, be Malibu",
time: "3:00 pm",
category: "Outdoor",
description: "Be free, be young, be Malibu.",
image: "http://www.winterparty.com/sites/www.winterparty.com/files/styles/project_1126_470/public/BeachPartySlider1_0.jpg?itok=5eB_59TL"
}))
.then(() => Event.create({
location: "San Luis Obispo, CA",
date: "Aug. 10, 2017",
title: "Sandy in the S.L.O. Cali",
time: "3:00 pm",
category: "Outdoor",
description: "Get sandy with it",
image: "http://www.loadednightclub.co.uk/wp-content/uploads/sites/2/2015/05/4dadc304234cad1821315e885c04c01d.jpg"
}))
.then(() => Event.create({
location: "Oakland, CA",
date: "Aug. 10, 2017",
title: "Oakland First Fridays",
time: "6:00 pm",
category: "Outdoor",
description: "TGIF BABES.",
image: "https://bryanallo.files.wordpress.com/2013/04/9q1a6291_blog.jpg"
}))
.then(() => Event.create({
location: "San Diego, CA",
date: "May. 15, 2017",
title: "Spend the day drinking a mai tai",
time: "2:00 pm",
category: "Outdoor",
description: "Come out and play this Friday night. Meet new friends!",
image: "https://s-media-cache-ak0.pinimg.com/originals/55/25/fe/5525fed7f7af445653e3973b24be7559.jpg"
}))
.then(() => Event.create({
location: "Yosemite, CA",
date: "Aug. 10, 2017",
title: "Bouldering and Hot Springs",
time: "1:00 pm",
category: "Outdoor",
description: "Climb a rock. Don't get recked.",
image: "https://cdn0.tnwcdn.com/wp-content/blogs.dir/1/files/2014/10/yosemite.jpg"
}))
// Insert some users
.then(() => User.create({ username: "Test User", email: "<EMAIL>:", image: "http://www.cutestpaw.com/wp-content/uploads/2013/12/Most-Famous-Felines-001.jpg"}))
// Create joins
.then(() => EventParticipant.create({ UserId: 1, EventId: 1 }));
// .then(() => Language.create({ name: 'html', displayname: 'HTML' }))
// .then(() => Language.create({ name: 'javascript', displayname: 'Javascript' }))
// .then(() => Language.create({ name: 'json', displayname: 'JSON' })) //5
// .then(() => Language.create({ name: 'jsx', displayname: 'JSX' }))
// .then(() => Language.create({ name: 'markdown', displayname: 'Markdown' }))
// .then(() => Language.create({ name: 'text', displayname: 'Plain Text' }))
// .then(() => Language.create({ name: 'pgsql', displayname: 'PostgreSQL' }))
// .then(() => Language.create({ name: 'python', displayname: 'Python' }))
// .then(() => Language.create({ name: 'sass', displayname: 'Sass' })) //10
// .then(() => Language.create({ name: 'scss', displayname: 'SCSS' }))
// .then(() => Language.create({ name: 'sql', displayname: 'SQL' }))
// .then(() => Language.create({ name: 'typescript', displayname: 'Typescript' }))
// .then(() => Language.create({ name: 'xml', displayname: 'XML' })); //15
// Insert dummy snippets and code samples directly after each snippet
// .then(() =>
// Snippet.create({
// title: "Welcome!",
// snippet: JSON.stringify("var x = \"hello\";\n\nvar print = function () {\n console.log(x);\n};"),
// "shortDescription": "Dummy shortDescription 1",
// explanation: JSON.stringify("Sample Explanation"),
// "TopicId": 3,
// "LanguageId": 4
// }).then(function (snippet) {
// SnippetTag.create({ SnippetId: snippet.id, TagId: 7 });
// }))
// .then(() =>
// CodeSample.create({
// "codeSample": JSON.stringify("Test code sample 1"),
// "SnippetId": 1
// }));
}; |
PrettyCoolWeb/PrettCoolWeb | src/Pages/Dashboards/Shop/ArticleMeta.js | import { join } from 'path'
import React from 'react'
import { Link } from 'react-navi'
import { formatDate } from './utils/formats'
import styles from './ArticleMeta.module.css'
function ArticleMeta({ blogRoot, meta, readingTime }) {
let readingTimeElement
if (readingTime) {
let minutes = Math.max(Math.round(readingTime.minutes), 1)
let cups = Math.round(minutes / 5);
readingTimeElement =
<React.Fragment>
{' '}•{' '}
<span className={styles.readingTime}>
{new Array(cups || 1).fill('☕️').join('')} {minutes} min read
</span>
</React.Fragment>
}
return (
<small className={styles.ArticleMeta}>
<time dateTime={meta.date.toUTCString()}>{formatDate(meta.date)}</time>
{
meta.tags &&
meta.tags.length &&
<>
{' '}•{' '}
<ul className={styles.tags}>
{meta.tags.map(tag =>
<li key={tag}>
<Link href={join(blogRoot, 'tags', tag)}>{tag}</Link>
</li>
)}
</ul>
</>
}
{readingTimeElement || null}
</small>
)
}
export default ArticleMeta |
talitadeoa/CEV-Exercicios-Python | Exercicios-mundo-2/desafio057.py | #Um programa que lê o sexo de uma pessoa e só aceita os valores M ou F
sexo = (input('Qual seu sexo? [M/F] ')).upper().strip()[0]
while sexo not in 'MF':
sexo = (input('Dados inválidos, responda novamente [M/F] ')).upper().strip()[0]
if sexo == 'F':
sexo = 'feminino'
else:
sexo = 'masculino'
print(f'Sexo {sexo} registrado com sucesso') |
maurizioabba/rose | tests/CompileTests/ElsaTestCases/t0080.cc | <reponame>maurizioabba/rose
// cc.in80
// reproduce ASTTypeId ambiguity
// Helper declarations referenced by the ambiguous declaration below.
int f(int);
int g(int);

int func(int param)
{
  // unambiguous
  //int y(f(param));

  // ambiguous: deliberately exercises the declaration-vs-expression
  // ASTTypeId ambiguity in the parser — do not "fix" this line.
  int x(f(g(param)));
}

// different way of seeing it: with x/y/z as type names, the declaration of h
// parses its argument as a function type, not a call expression
typedef int x;
typedef int y;
typedef int z;

int h(int /*anon*/(x /*anon*/(y /*anon*/)));
|
deepti1408/CodingInterview | java/merge.java | <reponame>deepti1408/CodingInterview<gh_stars>10-100
class Solution {
public void merge(int[] nums1, int m, int[] nums2, int n) {
int i = m-1, j=n-1, k = m+n-1;
while(i >=0 && j >=0)
{
if(nums1[i] > nums2[j])
nums1[k--] = nums1[i--];
else
nums1[k--] = nums2[j--];
}
while(j >= 0)
nums1[k--] = nums2[j--];
}
}
|
FJplant/AntIDE | src/com/antsoft/ant/wizard/generalwizard/FireEventPanel.java | <gh_stars>10-100
/*
* $Id: FireEventPanel.java,v 1.2 1999/08/19 05:25:07 multipia Exp $
* Ant ( JDK wrapper Java IDE )
* Version 1.0
* Copyright (c) 1998-1999 Antsoft Co. All rights reserved.
* This program and source file is protected by Korea and international
* Copyright laws.
*
* $Revision: 1.2 $
*/
package com.antsoft.ant.wizard.generalwizard;
import java.awt.*;
import javax.swing.*;
import javax.swing.border.*;
import java.awt.event.*;
import java.util.*;
import com.antsoft.ant.util.*;
/**
 * Panel that lets the user pick which AWT and Swing event categories to fire,
 * shown as a scrollable column of check boxes.
 */
public class FireEventPanel extends JPanel {

  JScrollPane eventPane;
  BlackTitledBorder border = new BlackTitledBorder("Select events to fire");
  //JLabel lbl1 = new JLabel("Select events to fire.");
  JCheckBox action = new JCheckBox("Action");
  JCheckBox adjustment = new JCheckBox("Adjustment");
  JCheckBox component = new JCheckBox("Component");
  JCheckBox container = new JCheckBox("Container");
  JCheckBox focus = new JCheckBox("Focus");
  JCheckBox item = new JCheckBox("Item");
  JCheckBox key = new JCheckBox("Key");
  JCheckBox mouse = new JCheckBox("Mouse");
  JCheckBox mouseMotion = new JCheckBox("MouseMotion");
  JCheckBox text = new JCheckBox("Text");
  JCheckBox window = new JCheckBox("Window");
  JCheckBox ancestor = new JCheckBox("Ancestor");
  JCheckBox caret = new JCheckBox("Caret");
  JCheckBox cellEditor = new JCheckBox("CellEditor");
  JCheckBox change = new JCheckBox("Change");
  JCheckBox document = new JCheckBox("Document");
  JCheckBox hyperlink = new JCheckBox("Hyperlink");
  JCheckBox internalF = new JCheckBox("InternalFrame");
  JCheckBox listData = new JCheckBox("ListData");
  JCheckBox listS = new JCheckBox("ListSelection");
  JCheckBox menu = new JCheckBox("Menu");
  JCheckBox popupMenu = new JCheckBox("PopupMenu");
  JCheckBox tableCM = new JCheckBox("TableColumnModel");
  JCheckBox tableM = new JCheckBox("TableModel");
  JCheckBox treeE = new JCheckBox("TreeExpansion");
  JCheckBox treeM = new JCheckBox("TreeModel");
  JCheckBox treeS = new JCheckBox("TreeSelection");
  JCheckBox undo = new JCheckBox("UndoableEdit");

  public FireEventPanel() {
    try {
      jbInit();
    }
    catch (Exception ex) {
      ex.printStackTrace();
    }
  }

  /** Builds the component tree: a titled panel around a scrollable check-box column. */
  void jbInit() throws Exception {
    JLabel lbl2 = new JLabel("AWT Events(java.awt.event.*)");
    JLabel lbl3 = new JLabel("Swing Events(javax.swing.event.*)" );

    Box box = Box.createVerticalBox();
    box.add(lbl2);
    // AWT event categories, in display order.
    addCheckBoxes(box, new JCheckBox[] {
      action, adjustment, component, container, focus, item,
      key, mouse, mouseMotion, text, window
    });

    box.add(lbl3);
    // Swing event categories, in display order.
    addCheckBoxes(box, new JCheckBox[] {
      ancestor, caret, cellEditor, change, document, hyperlink,
      internalF, listData, listS, menu, popupMenu, tableCM,
      tableM, treeE, treeM, treeS, undo
    });

    eventPane = new JScrollPane(box);
    eventPane.setPreferredSize( new Dimension(330,185) );
    eventPane.createVerticalScrollBar();
    eventPane.setRowHeaderView( new JLabel(" ") );

    JPanel p3 = new JPanel();
    p3.setLayout(new BorderLayout());
    p3.setBackground(Color.white);
    p3.add(eventPane,BorderLayout.CENTER);

    JPanel p2 = new JPanel();
    p2.setLayout( new FlowLayout(FlowLayout.CENTER) );
    p2.setBorder(border);
    p2.add(p3);

    setLayout( new GridLayout(1,1) );
    add(p2);
  }

  /** Paints each check box white and appends it to the column, preserving order. */
  private void addCheckBoxes(Box box, JCheckBox[] checkBoxes) {
    for (int i = 0; i < checkBoxes.length; i++) {
      checkBoxes[i].setBackground(Color.white);
      box.add(checkBoxes[i]);
    }
  }
}
|
ilya-markevich/node-mapper | tests/unit/baseMapInstance.js | 'use strict';
require('should');
const sinon = require('sinon');
const TypeWrapper = require('../../src/typeWrapper');
const MapInstance = require('../../src/mapInstances/base');
const testData = require('./data/mapInstance');
// Asserts the invariants every freshly-constructed map instance must satisfy:
// wrapped source/dest types, an empty mapping table, the given convention,
// and no convert callback.
function checkInitialState(instance, expectedConvention) {
  (instance.sourceType instanceof TypeWrapper).should.be.eql(true);
  (instance.destType instanceof TypeWrapper).should.be.eql(true);
  instance.mapInfo.should.have.property('size', 0);
  instance.should.have.property('convention', expectedConvention);
  (instance.convertCb === null).should.be.eql(true);
}
// Unit coverage for the base map instance: construction, per-field mapping,
// ignoring fields, path-based mapping, and the convert callback.
describe('Base Map Instance', () => {
  describe('Initial state', () => {
    it('should set correct initial state with passed callback', () => {
      const { sourceType, destType, convention } = testData;
      // The configuration callback must be invoked exactly once during construction.
      const configCb = sinon.mock().once();
      const mapInstance = new MapInstance(convention, sourceType, destType, configCb);

      checkInitialState(mapInstance, convention);
      configCb.verify();
    });

    it('should set correct initial state without passed callback', () => {
      const { sourceType, destType, convention } = testData;
      const mapInstance = new MapInstance(convention, sourceType, destType);

      checkInitialState(mapInstance, convention);
    });
  });

  describe('#mapField', () => {
    it('should set mapping for field', () => {
      const {
        convention, sourceType, destType, configCb, mapFieldName, mapFieldValue
      } = testData;
      const mapInstance = new MapInstance(convention, sourceType, destType, configCb);

      mapInstance.mapField(mapFieldName, mapFieldValue);
      mapInstance.mapInfo.get(mapFieldName).should.be.eql(mapFieldValue);
    });
  });

  describe('#ignoreField', () => {
    it('should set mapping to ignore the field', () => {
      const { convention, sourceType, destType, configCb, fieldToIgnore } = testData;
      const mapInstance = new MapInstance(convention, sourceType, destType, configCb);

      mapInstance.ignoreField(fieldToIgnore);
      // Ignored fields are recorded as an explicit null mapping.
      (mapInstance.mapInfo.get(fieldToIgnore) === null).should.be.eql(true);
    });
  });

  describe('#mapFieldByPath', () => {
    it('should set field mapping by path', () => {
      const {
        convention, sourceType, destType, configCb, mapFieldName, fieldPath
      } = testData;
      const mapInstance = new MapInstance(convention, sourceType, destType, configCb);

      mapInstance.mapFieldByPath(mapFieldName, fieldPath);
      // Path mappings are stored as accessor functions rather than plain values.
      (typeof mapInstance.mapInfo.get(mapFieldName)).should.be.eql('function');
    });
  });

  describe('#convert', () => {
    it('should set convert function for map instance', () => {
      const { convention, sourceType, destType, configCb, convertCb } = testData;
      const mapInstance = new MapInstance(convention, sourceType, destType, configCb);

      mapInstance.convert(convertCb);
      mapInstance.convertCb.should.be.eql(convertCb);
    });
  });
});
RCXcrafter/Materialis | src/main/java/com/rcx/materialis/util/ColorizerModifierRecipe.java | <reponame>RCXcrafter/Materialis
package com.rcx.materialis.util;
import java.util.List;
import javax.annotation.Nullable;
import com.google.common.collect.ImmutableList;
import com.google.gson.JsonObject;
import com.rcx.materialis.MaterialisResources;
import com.rcx.materialis.modifiers.ColorizedModifier;
import net.minecraft.item.Item;
import net.minecraft.item.crafting.IRecipeSerializer;
import net.minecraft.item.crafting.Ingredient;
import net.minecraft.network.PacketBuffer;
import net.minecraft.util.ResourceLocation;
import net.minecraftforge.fml.ModList;
import slimeknights.mantle.recipe.SizedIngredient;
import slimeknights.mantle.util.JsonHelper;
import slimeknights.tconstruct.library.modifiers.ModifierEntry;
import slimeknights.tconstruct.library.recipe.modifiers.ModifierMatch;
import slimeknights.tconstruct.library.recipe.modifiers.adding.AbstractModifierRecipe;
import slimeknights.tconstruct.library.recipe.modifiers.adding.ModifierRecipe;
import slimeknights.tconstruct.library.recipe.tinkerstation.ITinkerStationInventory;
import slimeknights.tconstruct.library.recipe.tinkerstation.ValidatedResult;
import slimeknights.tconstruct.library.tools.SlotType.SlotCount;
import slimeknights.tconstruct.library.tools.nbt.ModDataNBT;
import slimeknights.tconstruct.library.tools.nbt.ToolStack;
import vazkii.psi.api.cad.ICADColorizer;
/**
 * A Tinker Station modifier recipe that, in addition to applying the modifier,
 * captures the Psi colorizer item consumed as an input and stores its NBT on
 * the tool's persistent data so the modifier can tint the tool accordingly.
 * Psi is an optional dependency; the colorizer step is skipped when absent.
 */
public class ColorizerModifierRecipe extends ModifierRecipe {

	// Whether the Psi mod is present; evaluated once at construction time.
	boolean enabled = ModList.get().isLoaded("psi");
	// Local copy of the recipe inputs, kept for network serialization (writeSafe).
	private final List<SizedIngredient> inputs;

	public ColorizerModifierRecipe(ResourceLocation id, List<SizedIngredient> inputs, Ingredient toolRequirement, ModifierMatch requirements, String requirementsError, ModifierEntry result, int maxLevel, @Nullable SlotCount slots) {
		super(id, inputs, toolRequirement, requirements, requirementsError, result, maxLevel, slots);
		this.inputs = inputs;
	}

	@Override
	public ValidatedResult getValidatedResult(ITinkerStationInventory inv) {
		ToolStack tool = ToolStack.from(inv.getTinkerableStack());

		// common errors
		ValidatedResult commonError = validatePrerequisites(tool);
		if (commonError.hasError()) {
			return commonError;
		}

		// consume slots (work on a copy so failures do not mutate the input tool)
		tool = tool.copy();
		ModDataNBT persistentData = tool.getPersistentData();
		SlotCount slots = getSlots();
		if (slots != null) {
			persistentData.addSlots(slots.getType(), -slots.getCount());
		}

		// add modifier
		tool.addModifier(result.getModifier(), result.getLevel());

		// ensure no modifier problems
		ValidatedResult toolValidation = tool.validate();
		if (toolValidation.hasError()) {
			return toolValidation;
		}

		// add colorizer information: store the first colorizer item found among
		// the inputs so ColorizedModifier can read it back later
		if (enabled) {
			for (int i = 0; i < inv.getInputCount(); i++) {
				Item item = inv.getInput(i).getItem();
				if (item instanceof ICADColorizer) {
					persistentData.put(ColorizedModifier.COLORIZER, inv.getInput(i).serializeNBT());
					break;
				}
			}
		}

		return ValidatedResult.success(tool.createStack());
	}

	@Override
	public IRecipeSerializer<?> getSerializer() {
		return MaterialisResources.colorizerModifierSerializer.get();
	}

	/** JSON/packet (de)serializer; mirrors the parent serializer but round-trips the input list. */
	public static class Serializer extends AbstractModifierRecipe.Serializer<ColorizerModifierRecipe> {

		@Override
		public ColorizerModifierRecipe read(ResourceLocation id, JsonObject json, Ingredient toolRequirement, ModifierMatch requirements,
				String requirementsError, ModifierEntry result, int maxLevel, @Nullable SlotCount slots) {
			List<SizedIngredient> ingredients = JsonHelper.parseList(json, "inputs", SizedIngredient::deserialize);
			return new ColorizerModifierRecipe(id, ingredients, toolRequirement, requirements, requirementsError, result, maxLevel, slots);
		}

		@Override
		public ColorizerModifierRecipe read(ResourceLocation id, PacketBuffer buffer, Ingredient toolRequirement, ModifierMatch requirements, String requirementsError, ModifierEntry result, int maxLevel, @Nullable SlotCount slots) {
			// read the sized-ingredient list back in the order written by writeSafe
			int size = buffer.readVarInt();
			ImmutableList.Builder<SizedIngredient> builder = ImmutableList.builder();
			for (int i = 0; i < size; i++) {
				builder.add(SizedIngredient.read(buffer));
			}
			return new ColorizerModifierRecipe(id, builder.build(), toolRequirement, requirements, requirementsError, result, maxLevel, slots);
		}

		@Override
		@Deprecated
		public ColorizerModifierRecipe read(ResourceLocation id, JsonObject json, Ingredient toolRequirement, ModifierMatch requirements, String requirementsError, ModifierEntry result, int maxLevel, int upgradeSlots, int abilitySlots) {
			// legacy slot-count overload, never used by this recipe type
			throw new UnsupportedOperationException();
		}

		@Override
		@Deprecated
		public ColorizerModifierRecipe read(ResourceLocation id, PacketBuffer buffer, Ingredient toolRequirement, ModifierMatch requirements, String requirementsError, ModifierEntry result, int maxLevel, int upgradeSlots, int abilitySlots) {
			// legacy slot-count overload, never used by this recipe type
			throw new UnsupportedOperationException();
		}

		@Override
		protected void writeSafe(PacketBuffer buffer, ColorizerModifierRecipe recipe) {
			super.writeSafe(buffer, recipe);
			// append the input list so the client can reconstruct the recipe
			buffer.writeVarInt(recipe.inputs.size());
			for (SizedIngredient ingredient : recipe.inputs) {
				ingredient.write(buffer);
			}
		}

		@Override
		public ColorizerModifierRecipe fromJson(ResourceLocation id, JsonObject json) {
			return super.read(id, json);
		}
	}
}
|
rofl0r/pspsdk | src/libcglue/socket.c | <reponame>rofl0r/pspsdk
/*
* PSP Software Development Kit - http://www.pspdev.org
* -----------------------------------------------------------------------
* Licensed under the BSD license, see LICENSE in PSPSDK root for details.
*
* socket.c - Socket wrappers to provide similar functions to normal unix
*
* Copyright (c) 2005 <NAME> <<EMAIL>>
* Copyright (c) 2005 <NAME> <<EMAIL>>
*
*/
#include <string.h>
#include <fcntl.h>
#include <errno.h>
#include <sys/socket.h>
#include <sys/select.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <psptypes.h>
#include <pspnet_inet.h>
#include "fdman.h"
#ifdef F_socket
/* Create a socket through the sce network stack and register it in the local
   descriptor table so it works with the generic close()/read()/write() glue.
   Returns the descriptor-table index, or -1 with errno set. */
int socket(int domain, int type, int protocol)
{
	int sock, scesock;

	scesock = sceNetInetSocket(domain, type, protocol);
	if(scesock < 0) {
		errno = sceNetInetGetErrno();
		return -1;
	}

	sock = __fdman_get_new_descriptor();
	if( sock != -1 ) {
		__descriptormap[sock]->descriptor = scesock;
		__descriptormap[sock]->type = __DESCRIPTOR_TYPE_SOCKET;
	}
	else {
		/* Descriptor table exhausted; release the underlying sce socket. */
		sceNetInetClose(scesock);
		errno = ENOENT;
		return -1;
	}

	return sock;
}
#endif
/* These are glue routines that are called from _close(), _read(), and
_write(). They are here so that any program that uses socket() will pull
them in and have expanded socket capability. */
#ifdef F___socket_close
/* Close glue invoked from _close(): releases the descriptor-table slot and
   only closes the underlying sce socket when this is the last reference. */
int __socket_close(int sock)
{
	int ret = 0;

	if (__descriptormap[sock]->ref_count == 1) {
		ret = sceNetInetClose(__descriptormap[sock]->descriptor);
	}

	__fdman_release_descriptor(sock);

	if(ret < 0)
	{
		/* sceNetInetClose failed; surface the sce error as errno. */
		errno = sceNetInetGetErrno();
		return -1;
	}

	return 0;
}
#endif
#ifdef F_accept
/* Accept a pending connection on listening socket 's'. Returns a new
   descriptor-table index, or -1 with errno set. */
int accept(int s, struct sockaddr *addr, socklen_t *addrlen)
{
	int newscesock, newsock;

	if (!__IS_FD_OF_TYPE(s, __DESCRIPTOR_TYPE_SOCKET)) {
		errno = EBADF;
		return -1;
	}

	newscesock = sceNetInetAccept(__descriptormap[s]->descriptor, addr, addrlen);
	if( newscesock < 0 ) {
		/* Report the real failure cause from the sce stack (previously this
		   path overwrote it with a generic ENOENT, hiding e.g. EAGAIN on
		   non-blocking sockets). */
		errno = sceNetInetGetErrno();
		return -1;
	}

	newsock = __fdman_get_new_descriptor();
	if ( newsock == -1 ) {
		/* Descriptor table exhausted; release the accepted sce socket.
		   ENOENT kept for consistency with socket(). */
		sceNetInetClose(newscesock);
		errno = ENOENT;
		return -1;
	}

	__descriptormap[newsock]->descriptor = newscesock;
	__descriptormap[newsock]->type = __DESCRIPTOR_TYPE_SOCKET;

	return newsock;
}
#endif
#ifdef F_bind
/* Bind a local address to socket 's'. Returns 0 on success, or -1 with
   errno set from the sce network stack. */
int bind(int s, const struct sockaddr *my_addr, socklen_t addrlen)
{
	if (!__IS_FD_OF_TYPE(s, __DESCRIPTOR_TYPE_SOCKET)) {
		errno = EBADF;
		return -1;
	}

	if (sceNetInetBind(__descriptormap[s]->descriptor, my_addr, addrlen) < 0) {
		errno = sceNetInetGetErrno();
		return -1;
	}

	return 0;
}
#endif
#ifdef F_connect
/* Connect socket 's' to a remote address. Returns 0 on success, or -1 with
   errno set from the sce network stack. */
int connect(int s, const struct sockaddr *serv_addr, socklen_t addrlen)
{
	if (!__IS_FD_OF_TYPE(s, __DESCRIPTOR_TYPE_SOCKET)) {
		errno = EBADF;
		return -1;
	}

	if (sceNetInetConnect(__descriptormap[s]->descriptor, serv_addr, addrlen) < 0) {
		errno = sceNetInetGetErrno();
		return -1;
	}

	return 0;
}
#endif
#ifdef F_listen
/* Mark socket 's' as passive with the given backlog. Returns 0 on success,
   or -1 with errno set from the sce network stack. */
int listen(int s, int backlog)
{
	if (!__IS_FD_OF_TYPE(s, __DESCRIPTOR_TYPE_SOCKET)) {
		errno = EBADF;
		return -1;
	}

	if (sceNetInetListen(__descriptormap[s]->descriptor, backlog) < 0) {
		errno = sceNetInetGetErrno();
		return -1;
	}

	return 0;
}
#endif
#ifdef F_recv
/* Receive up to 'len' bytes from socket 's'. Returns the number of bytes
   received, or -1 with errno set from the sce network stack. */
ssize_t recv(int s, void *buf, size_t len, int flags)
{
	int received;

	if (!__IS_FD_OF_TYPE(s, __DESCRIPTOR_TYPE_SOCKET)) {
		errno = EBADF;
		return -1;
	}

	received = sceNetInetRecv(__descriptormap[s]->descriptor, buf, len, flags);
	if (received < 0) {
		errno = sceNetInetGetErrno();
		return -1;
	}

	return received;
}
#endif
#ifdef F_recvfrom
/* Receive a datagram and record the sender's address. Returns the number of
   bytes received, or -1 with errno set from the sce network stack. */
ssize_t recvfrom(int s, void *buf, size_t len, int flags, struct sockaddr *from, socklen_t *fromlen)
{
	int received;

	if (!__IS_FD_OF_TYPE(s, __DESCRIPTOR_TYPE_SOCKET)) {
		errno = EBADF;
		return -1;
	}

	received = sceNetInetRecvfrom(__descriptormap[s]->descriptor, buf, len, flags, from, fromlen);
	if (received < 0) {
		errno = sceNetInetGetErrno();
		return -1;
	}

	return received;
}
#endif
#ifdef F_send
/* Send up to 'len' bytes on socket 's'. Returns the number of bytes sent,
   or -1 with errno set from the sce network stack. */
ssize_t send(int s, const void *buf, size_t len, int flags)
{
	int sent;

	if (!__IS_FD_OF_TYPE(s, __DESCRIPTOR_TYPE_SOCKET)) {
		errno = EBADF;
		return -1;
	}

	sent = sceNetInetSend(__descriptormap[s]->descriptor, buf, len, flags);
	if (sent < 0) {
		errno = sceNetInetGetErrno();
		return -1;
	}

	return sent;
}
#endif
#ifdef F_sendto
/* Send a datagram to the given destination address. Returns the number of
   bytes sent, or -1 with errno set from the sce network stack. */
ssize_t sendto(int s, const void *buf, size_t len, int flags, const struct sockaddr *to, socklen_t tolen)
{
	int sent;

	if (!__IS_FD_OF_TYPE(s, __DESCRIPTOR_TYPE_SOCKET)) {
		errno = EBADF;
		return -1;
	}

	sent = sceNetInetSendto(__descriptormap[s]->descriptor, buf, len, flags, to, tolen);
	if (sent < 0) {
		errno = sceNetInetGetErrno();
		return -1;
	}

	return sent;
}
#endif
#ifdef F_getsockopt
/* Query a socket option into *optval/*optlen. Returns 0 on success, or -1
   with errno set from the sce network stack. */
int getsockopt(int s, int level, int optname, void *optval, socklen_t *optlen)
{
	if (!__IS_FD_OF_TYPE(s, __DESCRIPTOR_TYPE_SOCKET)) {
		errno = EBADF;
		return -1;
	}

	if (sceNetInetGetsockopt(__descriptormap[s]->descriptor, level, optname, optval, optlen) < 0) {
		errno = sceNetInetGetErrno();
		return -1;
	}

	return 0;
}
#endif
#ifdef F_setsockopt
/* Set a socket option. Additionally mirrors the SO_NONBLOCK state into the
   descriptor's O_NONBLOCK flag so descriptor-level queries stay consistent.
   Returns 0 on success, or -1 with errno set from the sce network stack. */
int setsockopt(int s, int level, int optname, const void *optval, socklen_t optlen)
{
	int ret;

	if (!__IS_FD_OF_TYPE(s, __DESCRIPTOR_TYPE_SOCKET)) {
		errno = EBADF;
		return -1;
	}

	ret = sceNetInetSetsockopt(__descriptormap[s]->descriptor, level, optname, optval, optlen);
	if(ret < 0)
	{
		errno = sceNetInetGetErrno();
		return -1;
	}

	if ( (level == SOL_SOCKET) && (optname == SO_NONBLOCK) ) {
		/* Boolean socket options treat any non-zero value as "enabled";
		   previously only the exact value 1 flipped the flag on. */
		if (*((const int *)optval) != 0) {
			__descriptormap[s]->flags |= O_NONBLOCK;
		}
		else {
			__descriptormap[s]->flags &= ~O_NONBLOCK;
		}
	}

	return 0;
}
#endif
#ifdef F_shutdown
/* Shut down part or all of a full-duplex connection. Returns 0 on success,
   or -1 with errno set from the sce network stack. */
int shutdown(int s, int how)
{
	if (!__IS_FD_OF_TYPE(s, __DESCRIPTOR_TYPE_SOCKET)) {
		errno = EBADF;
		return -1;
	}

	if (sceNetInetShutdown(__descriptormap[s]->descriptor, how) < 0) {
		errno = sceNetInetGetErrno();
		return -1;
	}

	return 0;
}
#endif
#ifdef F_getpeername
/* Fetch the address of the peer connected to socket 's'. Returns 0 on
   success, or -1 with errno set from the sce network stack. */
int getpeername(int s, struct sockaddr *name, socklen_t *namelen)
{
	if (!__IS_FD_OF_TYPE(s, __DESCRIPTOR_TYPE_SOCKET)) {
		errno = EBADF;
		return -1;
	}

	if (sceNetInetGetpeername(__descriptormap[s]->descriptor, name, namelen) < 0) {
		errno = sceNetInetGetErrno();
		return -1;
	}

	return 0;
}
#endif
#ifdef F_getsockname
/* Fetch the local address bound to socket 's'. Returns 0 on success, or -1
   with errno set from the sce network stack. */
int getsockname(int s, struct sockaddr *name, socklen_t *namelen)
{
	if (!__IS_FD_OF_TYPE(s, __DESCRIPTOR_TYPE_SOCKET)) {
		errno = EBADF;
		return -1;
	}

	if (sceNetInetGetsockname(__descriptormap[s]->descriptor, name, namelen) < 0) {
		errno = sceNetInetGetErrno();
		return -1;
	}

	return 0;
}
#endif
#ifdef F_inet_ntoa
/* Convert an IPv4 address to dotted-decimal text. Uses a static buffer, as
   the classic inet_ntoa() interface requires; not reentrant. */
char *inet_ntoa(struct in_addr in)
{
	static char ip_addr[INET_ADDRSTRLEN+1];

	if(sceNetInetInetNtop(AF_INET, &in, ip_addr, INET_ADDRSTRLEN) == NULL)
	{
		/* Conversion failed; return a recognisable placeholder instead. */
		strcpy(ip_addr, "Invalid");
	}

	return ip_addr;
}
#endif
#ifdef F_sendmsg
/* Gather-send a message on socket 's'. Returns the number of bytes sent,
   or -1 with errno set from the sce network stack.
   (Previously returned 0 on success, discarding the byte count callers
   rely on to detect partial sends.) */
ssize_t sendmsg(int s, const struct msghdr *msg, int flags)
{
	int ret;

	if (!__IS_FD_OF_TYPE(s, __DESCRIPTOR_TYPE_SOCKET)) {
		errno = EBADF;
		return -1;
	}

	ret = sceNetInetSendmsg(__descriptormap[s]->descriptor, msg, flags);
	if(ret < 0)
	{
		errno = sceNetInetGetErrno();
		return -1;
	}

	return ret;
}
#endif
#ifdef F_recvmsg
/* Scatter-receive a message on socket 's'. Returns the number of bytes
   received, or -1 with errno set from the sce network stack.
   (Previously returned 0 on success, discarding the byte count — callers
   could not tell how much data arrived.) */
ssize_t recvmsg(int s, struct msghdr *msg, int flags)
{
	int ret;

	if (!__IS_FD_OF_TYPE(s, __DESCRIPTOR_TYPE_SOCKET)) {
		errno = EBADF;
		return -1;
	}

	ret = sceNetInetRecvmsg(__descriptormap[s]->descriptor, msg, flags);
	if(ret < 0)
	{
		errno = sceNetInetGetErrno();
		return -1;
	}

	return ret;
}
#endif
FlaviuVadan/sick-fits | frontend/components/DeleteItem.js | import React, { Component } from 'react';
import { Mutation } from 'react-apollo';
import { ALL_ITEMS_QUERY, DELETE_ITEM_MUTATION } from '../queries/queries';
class DeleteItem extends Component {
  /**
   * Apollo cache updater, run after the delete mutation completes, so the
   * client-side item list matches the server without a refetch.
   * @param cache - Apollo cache access
   * @param payload - deleted item, returned by Apollo
   */
  update = (cache, payload) => {
    // manually update cache on client side so it matches server
    // read the cached list of all items
    const data = cache.readQuery({ query: ALL_ITEMS_QUERY });
    // filter deleted item out of page cache
    data.items = data.items.filter(item => item.id !== payload.data.deleteItem.id);
    // place items back
    cache.writeQuery({ query: ALL_ITEMS_QUERY, data });
  };

  render() {
    return (
      <Mutation mutation={DELETE_ITEM_MUTATION} variables={{ id: this.props.id }} update={this.update}>
        {(deleteItem, { error }) => (
          <button onClick={() => {
            // Confirm with the user before firing the mutation.
            if (confirm('Are you sure you want to delete this?')) {
              // deleteItem is a Promise, can catch and display errors from it accordingly
              deleteItem().catch(err => {
                alert(err.message);
              });
            }
          }}>{this.props.children}</button>
        )}
      </Mutation>
    );
  }
}
export default DeleteItem; |
luxe/CodeLang-compiler | source/code/programs/transcompilers/enum_cpp/main.cc | <filename>source/code/programs/transcompilers/enum_cpp/main.cc
#include <iostream>
#include <string>
#include "code/programs/transcompilers/enum_cpp/enum_cpp_maker.hpp"
#include "code/programs/transcompilers/enum_cpp/enum.hpp"
#include "code/programs/transcompilers/enum_cpp/program_options/program_options_creator.hpp"
int main(int argc, char** argv){
//get program options
auto po = Program_Options_Creator::Create(argc,argv);
//build enum
Enum e;
e.name = po.Enum_Name();
e.values = po.Enum_Values();
//store it
std::vector<Enum> enums;
enums.emplace_back(e);
auto path_name = po.Enum_Name();
if (!po.Output_Path().empty()){
path_name = po.Output_Path() + "/" + path_name;
}
//build it
Enum_Cpp_Maker::Create_Enums_Header(path_name,enums);
Enum_Cpp_Maker::Create_Enums_Source(path_name,enums);
}
|
gmbarroso/pilot | packages/cockpit/scripts/test.js | <filename>packages/cockpit/scripts/test.js<gh_stars>1-10
const path = require('path')
const createJestConfig = require('../config/jest/createJestConfig')

// Do this as the first thing so that any code reading it knows the right env.
// (NOTE(review): `jest` itself is deliberately required further down, after
// these are set; `path`/`createJestConfig` above are assumed not to read the
// env at require time — verify if that module changes.)
process.env.BABEL_ENV = 'test'
process.env.NODE_ENV = 'test'

// Makes the script crash on unhandled rejections instead of silently
// ignoring them. In the future, promise rejections that are not handled will
// terminate the Node.js process with a non-zero exit code.
process.on('unhandledRejection', (err) => {
  throw err
})

const jest = require('jest') // eslint-disable-line
const argv = process.argv.slice(2)

// Watch unless on CI or in coverage mode
if (!process.env.CI && argv.indexOf('--coverage') < 0) {
  argv.push('--watch')
}

// Build the shared Jest config relative to the package root and hand it to
// the Jest CLI as an inline JSON string.
argv.push(
  '--config',
  JSON.stringify(createJestConfig(
    relativePath => path.resolve(__dirname, '..', relativePath),
    path.resolve(__dirname, '..')
  ))
)
jest.run(argv)
|
nchandrappa/cassandra-java-driver | driver-core/src/test/java/com/datastax/driver/core/HostConnectionPoolMultiTest.java | /*
* Copyright DataStax, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datastax.driver.core;
import static com.datastax.driver.core.Assertions.assertThat;
import static com.datastax.driver.core.HostDistance.LOCAL;
import static com.datastax.driver.core.TestUtils.nonDebouncingQueryOptions;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.scassandra.http.client.ClosedConnectionReport.CloseType.CLOSE;
import com.datastax.driver.core.policies.ConstantReconnectionPolicy;
import com.google.common.util.concurrent.Uninterruptibles;
import java.net.InetSocketAddress;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
/**
 * Integration tests for connection-pool behaviour against a two-node
 * Scassandra (stubbed Cassandra) cluster: host state on pool-init failure and
 * control-connection replacement.
 */
public class HostConnectionPoolMultiTest {
  private ScassandraCluster scassandra;
  private Cluster cluster;

  @BeforeMethod(groups = {"short", "long"})
  private void setUp() {
    scassandra = ScassandraCluster.builder().withNodes(2).build();
    scassandra.init();
  }

  @AfterMethod(
      groups = {"short", "long"},
      alwaysRun = true)
  private void tearDown() {
    if (cluster != null) {
      cluster.close();
    }
    scassandra.stop();
  }

  /** Builds and connects a Cluster pointed at node 1 with the given core/max pool sizes. */
  private void createCluster(int core, int max) {
    PoolingOptions poolingOptions = new PoolingOptions().setConnectionsPerHost(LOCAL, core, max);
    SocketOptions socketOptions = new SocketOptions().setReadTimeoutMillis(1000);
    cluster =
        Cluster.builder()
            .addContactPoints(scassandra.address(1).getAddress())
            .withPort(scassandra.getBinaryPort())
            .withQueryOptions(nonDebouncingQueryOptions())
            .withPoolingOptions(poolingOptions)
            .withSocketOptions(socketOptions)
            .withReconnectionPolicy(new ConstantReconnectionPolicy(1000))
            .build();
    cluster.connect();
  }

  /**
   * Ensures that if all connections fail to a host on pool init that the host is marked down.
   *
   * @jira_ticket JAVA-544
   * @test_category connection:connection_pool
   * @since 2.0.11
   */
  @Test(groups = "short")
  public void should_mark_host_down_if_all_connections_fail_on_init() {
    // Prevent any connections on node 2.
    scassandra.node(2).currentClient().disableListener();
    createCluster(8, 8);

    // Node 2 should be in a down state while node 1 stays up.
    assertThat(cluster).host(2).goesDownWithin(10, SECONDS);
    assertThat(cluster).host(1).isUp();

    // Node 2 should come up as soon as it is able to reconnect.
    scassandra.node(2).currentClient().enableListener();
    assertThat(cluster).host(2).comesUpWithin(2, SECONDS);
  }

  /**
   * Ensures that if the control connection goes down, but the Host bound the control connection
   * still has an up pool, the Host should remain up and the Control Connection should be replaced.
   *
   * @jira_ticket JAVA-544
   * @test_category connection:connection_pool
   * @since 2.0.11
   */
  @Test(groups = "short")
  public void should_replace_control_connection_if_it_goes_down_but_host_remains_up() {
    createCluster(1, 2);

    // Ensure control connection is on node 1.
    assertThat(cluster).usesControlHost(1);

    // Identify the socket associated with the control connection.
    Connection controlConnection = cluster.manager.controlConnection.connectionRef.get();
    InetSocketAddress controlSocket = (InetSocketAddress) controlConnection.channel.localAddress();

    // Close the control connection (server side) without touching the pool.
    scassandra.node(1).currentClient().closeConnection(CLOSE, controlSocket);

    // Sleep reconnect interval * 2 to allow time to reconnect.
    Uninterruptibles.sleepUninterruptibly(2, SECONDS);

    // Ensure the control connection was replaced and host 1 remains up.
    assertThat(cluster).hasOpenControlConnection().host(1).isUp();
    assertThat(cluster.manager.controlConnection.connectionRef.get())
        .isNotEqualTo(controlConnection);
  }
}
|
noisecode3/DPF | distrho/extra/ScopedPointer.hpp | /*
* DISTRHO Plugin Framework (DPF)
* Copyright (C) 2012-2016 <NAME> <<EMAIL>>
*
* Permission to use, copy, modify, and/or distribute this software for any purpose with
* or without fee is hereby granted, provided that the above copyright notice and this
* permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
* TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN
* NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL
* DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
* IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
* CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#ifndef DISTRHO_SCOPED_POINTER_HPP_INCLUDED
#define DISTRHO_SCOPED_POINTER_HPP_INCLUDED
#include "../DistrhoUtils.hpp"
#include <algorithm>
START_NAMESPACE_DISTRHO
// -----------------------------------------------------------------------
// The following code was based from juce-core ScopedPointer class
// Copyright (C) 2013 Raw Material Software Ltd.
//==============================================================================
/**
This class holds a pointer which is automatically deleted when this object goes
out of scope.
Once a pointer has been passed to a ScopedPointer, it will make sure that the pointer
gets deleted when the ScopedPointer is deleted. Using the ScopedPointer on the stack or
as member variables is a good way to use RAII to avoid accidentally leaking dynamically
created objects.
A ScopedPointer can be used in pretty much the same way that you'd use a normal pointer
to an object. If you use the assignment operator to assign a different object to a
ScopedPointer, the old one will be automatically deleted.
A const ScopedPointer is guaranteed not to lose ownership of its object or change the
object to which it points during its lifetime. This means that making a copy of a const
ScopedPointer is impossible, as that would involve the new copy taking ownership from the
old one.
If you need to get a pointer out of a ScopedPointer without it being deleted, you
can use the release() method.
Something to note is the main difference between this class and the std::auto_ptr class,
which is that ScopedPointer provides a cast-to-object operator, wheras std::auto_ptr
requires that you always call get() to retrieve the pointer. The advantages of providing
the cast is that you don't need to call get(), so can use the ScopedPointer in pretty much
exactly the same way as a raw pointer. The disadvantage is that the compiler is free to
use the cast in unexpected and sometimes dangerous ways - in particular, it becomes difficult
to return a ScopedPointer as the result of a function. To avoid this causing errors,
ScopedPointer contains an overloaded constructor that should cause a syntax error in these
circumstances, but it does mean that instead of returning a ScopedPointer from a function,
you'd need to return a raw pointer (or use a std::auto_ptr instead).
*/
template<class ObjectType>
class ScopedPointer
{
public:
    //==============================================================================
    /** Creates a ScopedPointer containing a null pointer. */
    ScopedPointer() noexcept
        : object(nullptr) {}

    /** Creates a ScopedPointer that owns the specified object. */
    ScopedPointer(ObjectType* const objectToTakePossessionOf) noexcept
        : object(objectToTakePossessionOf) {}

    /** Creates a ScopedPointer that takes its pointer from another ScopedPointer.
        Because a pointer can only belong to one ScopedPointer, this transfers
        the pointer from the other object to this one, and the other object is reset to
        be a null pointer.
    */
    ScopedPointer(ScopedPointer& objectToTransferFrom) noexcept
        : object(objectToTransferFrom.object)
    {
        objectToTransferFrom.object = nullptr;
    }

    /** Destructor.
        This will delete the object that this ScopedPointer currently refers to.
    */
    ~ScopedPointer()
    {
        // Deleting a null pointer is a well-defined no-op, so no check is needed.
        delete object;
    }

    /** Changes this ScopedPointer to point to a new object.
        Because a pointer can only belong to one ScopedPointer, this transfers
        the pointer from the other object to this one, and the other object is reset to
        be a null pointer.
        If this ScopedPointer already points to an object, that object
        will first be deleted.
    */
    ScopedPointer& operator=(ScopedPointer& objectToTransferFrom)
    {
        if (this != objectToTransferFrom.getAddress())
        {
            // Two ScopedPointers should never be able to refer to the same object - if
            // this happens, you must have done something dodgy!
            // (On assertion failure both pointers are left unchanged.)
            DISTRHO_SAFE_ASSERT_RETURN(object == nullptr || object != objectToTransferFrom.object, *this);

            // Take ownership first, then delete the previous object, so the
            // transfer is complete even if the deletion has side effects.
            ObjectType* const oldObject = object;
            object = objectToTransferFrom.object;
            objectToTransferFrom.object = nullptr;
            delete oldObject;
        }

        return *this;
    }

    /** Changes this ScopedPointer to point to a new object.
        If this ScopedPointer already points to an object, that object
        will first be deleted.
        The pointer that you pass in may be a nullptr.
    */
    ScopedPointer& operator=(ObjectType* const newObjectToTakePossessionOf)
    {
        // Self-assignment of the same raw pointer is a no-op (avoids deleting
        // the object we are about to own).
        if (object != newObjectToTakePossessionOf)
        {
            ObjectType* const oldObject = object;
            object = newObjectToTakePossessionOf;
            delete oldObject;
        }

        return *this;
    }

    //==============================================================================
    /** Returns the object that this ScopedPointer refers to. */
    operator ObjectType*() const noexcept { return object; }

    /** Returns the object that this ScopedPointer refers to. */
    ObjectType* get() const noexcept { return object; }

    /** Returns the object that this ScopedPointer refers to. */
    ObjectType& getObject() const noexcept { return *object; }

    /** Returns the object that this ScopedPointer refers to. */
    ObjectType& operator*() const noexcept { return *object; }

    /** Lets you access methods and properties of the object that this ScopedPointer refers to. */
    ObjectType* operator->() const noexcept { return object; }

    //==============================================================================
    /** Removes the current object from this ScopedPointer without deleting it.
        This will return the current object, and set the ScopedPointer to a null pointer.
    */
    ObjectType* release() noexcept { ObjectType* const o = object; object = nullptr; return o; }

    //==============================================================================
    /** Swaps this object with that of another ScopedPointer.
        The two objects simply exchange their pointers.
    */
    void swapWith(ScopedPointer<ObjectType>& other) noexcept
    {
        // Two ScopedPointers should never be able to refer to the same object - if
        // this happens, you must have done something dodgy!
        DISTRHO_SAFE_ASSERT_RETURN(object != other.object || this == other.getAddress() || object == nullptr,);

        std::swap(object, other.object);
    }

private:
    //==============================================================================
    // The raw pointer currently owned by this instance; null when empty.
    ObjectType* object;

    // (Required as an alternative to the overloaded & operator).
    const ScopedPointer* getAddress() const noexcept { return this; }

#ifndef _MSC_VER  // (MSVC can't deal with multiple copy constructors)
    /* The copy constructors are private to stop people accidentally copying a const ScopedPointer
       (the compiler would let you do so by implicitly casting the source to its raw object pointer).
       A side effect of this is that in a compiler that doesn't support C++11, you may hit an
       error when you write something like this:
          ScopedPointer<MyClass> m = new MyClass();  // Compile error: copy constructor is private.
       Even though the compiler would normally ignore the assignment here, it can't do so when the
       copy constructor is private. It's very easy to fix though - just write it like this:
          ScopedPointer<MyClass> m (new MyClass());  // Compiles OK
       It's probably best to use the latter form when writing your object declarations anyway, as
       this is a better representation of the code that you actually want the compiler to produce.
    */
# ifdef DISTRHO_PROPER_CPP11_SUPPORT
    ScopedPointer(const ScopedPointer&) = delete;
    ScopedPointer& operator=(const ScopedPointer&) = delete;
# else
    ScopedPointer(const ScopedPointer&);
    ScopedPointer& operator=(const ScopedPointer&);
# endif
#endif
};
//==============================================================================
/** Compares a ScopedPointer with another pointer.
This can be handy for checking whether this is a null pointer.
*/
template<class ObjectType>
bool operator==(const ScopedPointer<ObjectType>& pointer1, ObjectType* const pointer2) noexcept
{
    // Compare the managed pointer (retrieved via get()) with the raw pointer.
    return pointer1.get() == pointer2;
}
/** Compares a ScopedPointer with another pointer.
This can be handy for checking whether this is a null pointer.
*/
template<class ObjectType>
bool operator!=(const ScopedPointer<ObjectType>& pointer1, ObjectType* const pointer2) noexcept
{
    // Negated form of the equality check above, using get() instead of the cast.
    return pointer1.get() != pointer2;
}
// -----------------------------------------------------------------------
END_NAMESPACE_DISTRHO
#endif // DISTRHO_SCOPED_POINTER_HPP_INCLUDED
|
DiegoVallely/tsuru | provision/juju/writer_test.go | // Copyright 2013 tsuru authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package juju
import (
"launchpad.net/gocheck"
"net/http/httptest"
)
// TestfilterOutputWithPythonWarnings checks that timestamped juju log lines
// and Python UserWarning noise are stripped, leaving only the command output.
func (s *S) TestfilterOutputWithPythonWarnings(c *gocheck.C) {
	output := []byte(`2012-11-28 16:00:35,615 WARNING Ubuntu Cloud Image lookups encrypted but not authenticated
2012-11-28 16:00:35,616 INFO Connecting to environment...
/usr/local/lib/python2.7/dist-packages/txAWS-0.2.3-py2.7.egg/txaws/client/base.py:208: UserWarning: The client attribute on BaseQuery is deprecated and will go away in future release.
warnings.warn('The client attribute on BaseQuery is deprecated and'
2012-11-28 16:00:36,787 INFO Connected to environment.
2012-11-28 16:00:37,110 INFO Connecting to machine 23 at 10.19.2.195
pre-restart:
- python manage.py dbmigrate
- python manage.py collectstatic --noinput)
`)
	expected := []byte(`pre-restart:
- python manage.py dbmigrate
- python manage.py collectstatic --noinput)
`)
	got := filterOutput(output)
	c.Assert(string(got), gocheck.Equals, string(expected))
}

// TestfilterOutputWithJujuLog checks that juju INFO/WARNING log lines and the
// Python DeprecationWarning preamble are removed while real output survives.
func (s *S) TestfilterOutputWithJujuLog(c *gocheck.C) {
	output := []byte(`/usr/lib/python2.6/site-packages/juju/providers/ec2/files.py:8: DeprecationWarning: the sha module is deprecated; use the hashlib module instead
import sha
2012-06-05 17:26:15,881 WARNING ssl-hostname-verification is disabled for this environment
2012-06-05 17:26:15,881 WARNING EC2 API calls not using secure transport
2012-06-05 17:26:15,881 WARNING S3 API calls not using secure transport
2012-06-05 17:26:15,881 WARNING Ubuntu Cloud Image lookups encrypted but not authenticated
2012-06-05 17:26:15,891 INFO Connecting to environment...
2012-06-05 17:26:16,657 INFO Connected to environment.
2012-06-05 17:26:16,860 INFO Connecting to machine 0 at 10.170.0.191
; generated by /sbin/dhclient-script
search novalocal
nameserver 192.168.1.1`)
	expected := []byte(`; generated by /sbin/dhclient-script
search novalocal
nameserver 192.168.1.1`)
	got := filterOutput(output)
	c.Assert(string(got), gocheck.Equals, string(expected))
}

// TestfilterOutputWithoutJujuLog checks filtering when only the Python
// DeprecationWarning preamble (no juju log lines) precedes the output.
func (s *S) TestfilterOutputWithoutJujuLog(c *gocheck.C) {
	output := []byte(`/usr/lib/python2.6/site-packages/juju/providers/ec2/files.py:8: DeprecationWarning: the sha module is deprecated; use the hashlib module instead
import sha
; generated by /sbin/dhclient-script
search novalocal
nameserver 192.168.1.1`)
	expected := []byte(`; generated by /sbin/dhclient-script
search novalocal
nameserver 192.168.1.1`)
	got := filterOutput(output)
	c.Assert(string(got), gocheck.Equals, string(expected))
}

// TestFiterOutputWithSshWarning checks that the ssh "Permanently added ...
// (ECDSA)" known-hosts warning is filtered out along with juju log lines.
// NOTE(review): "Fiter" is a typo in the test name; renaming would change
// the test identity, so it is left as-is.
func (s *S) TestFiterOutputWithSshWarning(c *gocheck.C) {
	output := []byte(`2012-06-20 16:54:09,922 WARNING ssl-hostname-verification is disabled for this environment
2012-06-20 16:54:09,922 WARNING EC2 API calls not using secure transport
2012-06-20 16:54:09,922 WARNING S3 API calls not using secure transport
2012-06-20 16:54:09,922 WARNING Ubuntu Cloud Image lookups encrypted but not authenticated
2012-06-20 16:54:09,924 INFO Connecting to environment...
2012-06-20 16:54:10,549 INFO Connected to environment.
2012-06-20 16:54:10,664 INFO Connecting to machine 3 at 10.170.0.166
Warning: Permanently added '10.170.0.121' (ECDSA) to the list of known hosts.
total 0`)
	expected := []byte("total 0")
	got := filterOutput(output)
	c.Assert(string(got), gocheck.Equals, string(expected))
}

// TestfilterOutputWithoutJujuLogAndWarnings checks that input with nothing
// to filter passes through unchanged.
func (s *S) TestfilterOutputWithoutJujuLogAndWarnings(c *gocheck.C) {
	output := []byte(`; generated by /sbin/dhclient-script
search novalocal
nameserver 192.168.1.1`)
	expected := []byte(`; generated by /sbin/dhclient-script
search novalocal
nameserver 192.168.1.1`)
	got := filterOutput(output)
	c.Assert(string(got), gocheck.Equals, string(expected))
}

// TestfilterOutputRSA checks that the ssh known-hosts warning is also
// filtered when the key type is RSA (not just ECDSA).
func (s *S) TestfilterOutputRSA(c *gocheck.C) {
	output := []byte(`/usr/lib/python2.6/site-packages/juju/providers/ec2/files.py:8: DeprecationWarning: the sha module is deprecated; use the hashlib module instead
import sha
2012-08-22 14:39:18,211 WARNING ssl-hostname-verification is disabled for this environment
2012-08-22 14:39:18,211 WARNING EC2 API calls not using secure transport
2012-08-22 14:39:18,212 WARNING S3 API calls not using secure transport
2012-08-22 14:39:18,212 WARNING Ubuntu Cloud Image lookups encrypted but not authenticated
2012-08-22 14:39:18,222 INFO Connecting to environment...
2012-08-22 14:39:18,854 INFO Connected to environment.
2012-08-22 14:39:18,989 INFO Connecting to machine 4 at 10.170.1.193
Warning: Permanently added '10.170.1.193' (RSA) to the list of known hosts.
Last login: Wed Aug 15 16:08:40 2012 from 10.170.1.239`)
	expected := []byte("Last login: Wed Aug 15 16:08:40 2012 from 10.170.1.239")
	got := filterOutput(output)
	c.Assert(string(got), gocheck.Equals, string(expected))
}
// TestWriter checks that data written through Writer reaches the underlying
// ResponseWriter unchanged when no filtering applies.
func (s *S) TestWriter(c *gocheck.C) {
	recorder := httptest.NewRecorder()
	writer := Writer{recorder}
	data := []byte("ble")
	_, err := writer.Write(data)
	c.Assert(err, gocheck.IsNil)
	c.Assert(recorder.Body.Bytes(), gocheck.DeepEquals, data)
}

// TestWriterShouldReturnTheDataSize checks that Write reports the number of
// bytes it was given.
func (s *S) TestWriterShouldReturnTheDataSize(c *gocheck.C) {
	recorder := httptest.NewRecorder()
	writer := Writer{recorder}
	data := []byte("ble")
	n, err := writer.Write(data)
	c.Assert(err, gocheck.IsNil)
	c.Assert(n, gocheck.Equals, len(data))
}

// TestWriterShouldNotFilterWhenTheContentTypeIsntText checks that non-text
// responses (e.g. application/xml) are passed through without filtering.
func (s *S) TestWriterShouldNotFilterWhenTheContentTypeIsntText(c *gocheck.C) {
	recorder := httptest.NewRecorder()
	recorder.Header().Set("Content-Type", "application/xml")
	writer := Writer{recorder}
	data := []byte("2012-11-28 16:00:35,615 WARNING Ubuntu Cloud Image lookups encrypted but not authenticated")
	_, err := writer.Write(data)
	c.Assert(err, gocheck.IsNil)
	c.Assert(recorder.Body.Bytes(), gocheck.DeepEquals, data)
}

// TestWriterShouldFilterWhenTheContentTypeIsText checks that text responses
// are filtered: a juju warning-only payload produces an empty body.
func (s *S) TestWriterShouldFilterWhenTheContentTypeIsText(c *gocheck.C) {
	recorder := httptest.NewRecorder()
	recorder.Header().Set("Content-Type", "text")
	writer := Writer{recorder}
	data := []byte("2012-11-28 16:00:35,615 WARNING Ubuntu Cloud Image lookups encrypted but not authenticated")
	_, err := writer.Write(data)
	c.Assert(err, gocheck.IsNil)
	c.Assert(len(recorder.Body.Bytes()), gocheck.Equals, 0)
}

// TestWriterShouldReturnTheOriginalLength checks that even when the body is
// entirely filtered out, Write still reports the original input length
// (as required by the io.Writer contract for successful writes).
func (s *S) TestWriterShouldReturnTheOriginalLength(c *gocheck.C) {
	recorder := httptest.NewRecorder()
	recorder.Header().Set("Content-Type", "text")
	writer := Writer{recorder}
	data := []byte("2012-11-28 16:00:35,615 WARNING Ubuntu Cloud Image lookups encrypted but not authenticated")
	expected := len(data)
	n, err := writer.Write(data)
	c.Assert(err, gocheck.IsNil)
	c.Assert(len(recorder.Body.Bytes()), gocheck.Equals, 0)
	c.Assert(n, gocheck.Equals, expected)
}
|
madang01/gitcodda | codda2/project/sample_base/server_build/src/main/java/kr/pe/codda/impl/message/BoardListRes/BoardListResDecoder.java | <reponame>madang01/gitcodda
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kr.pe.codda.impl.message.BoardListRes;
import kr.pe.codda.common.exception.BodyFormatException;
import kr.pe.codda.common.message.AbstractMessage;
import kr.pe.codda.common.message.codec.AbstractMessageDecoder;
import kr.pe.codda.common.protocol.SingleItemDecoderIF;
/**
* BoardListRes message decoder
* @author <NAME>
*
*/
public final class BoardListResDecoder extends AbstractMessageDecoder {

    // NOTE(review): this class follows the repetitive shape of Codda's
    // generated message codecs — presumably it is generator output, so keep
    // any edits mechanical to stay in sync with the message definition.

    /**
     * Decodes the wire-format middle object into a {@link BoardListRes}:
     * first the scalar header fields, then the repeated "board" records
     * (the repetition count comes from the "cnt" field).
     */
    @Override
    protected AbstractMessage decodeBody(SingleItemDecoderIF singleItemDecoder, Object receivedMiddleObject) throws BodyFormatException {
        BoardListRes boardListRes = new BoardListRes();

        // The path stack tracks the current position inside the message,
        // used by the decoder for error reporting.
        java.util.LinkedList<String> pathStack = new java.util.LinkedList<String>();
        pathStack.push("BoardListRes");

        // ---- scalar header fields -------------------------------------------
        boardListRes.setBoardID((Short)
        singleItemDecoder.getValue(pathStack.peek()
            , "boardID" // itemName
            , kr.pe.codda.common.type.SingleItemType.UNSIGNED_BYTE // itemType
            , -1 // itemSize
            , null // nativeItemCharset
            , receivedMiddleObject));

        boardListRes.setBoardName((String)
        singleItemDecoder.getValue(pathStack.peek()
            , "boardName" // itemName
            , kr.pe.codda.common.type.SingleItemType.UB_PASCAL_STRING // itemType
            , -1 // itemSize
            , null // nativeItemCharset
            , receivedMiddleObject));

        boardListRes.setBoardListType((Byte)
        singleItemDecoder.getValue(pathStack.peek()
            , "boardListType" // itemName
            , kr.pe.codda.common.type.SingleItemType.BYTE // itemType
            , -1 // itemSize
            , null // nativeItemCharset
            , receivedMiddleObject));

        boardListRes.setBoardWritePermissionType((Byte)
        singleItemDecoder.getValue(pathStack.peek()
            , "boardWritePermissionType" // itemName
            , kr.pe.codda.common.type.SingleItemType.BYTE // itemType
            , -1 // itemSize
            , null // nativeItemCharset
            , receivedMiddleObject));

        boardListRes.setPageNo((Integer)
        singleItemDecoder.getValue(pathStack.peek()
            , "pageNo" // itemName
            , kr.pe.codda.common.type.SingleItemType.UNSIGNED_SHORT // itemType
            , -1 // itemSize
            , null // nativeItemCharset
            , receivedMiddleObject));

        boardListRes.setPageSize((Integer)
        singleItemDecoder.getValue(pathStack.peek()
            , "pageSize" // itemName
            , kr.pe.codda.common.type.SingleItemType.UNSIGNED_SHORT // itemType
            , -1 // itemSize
            , null // nativeItemCharset
            , receivedMiddleObject));

        boardListRes.setTotal((Long)
        singleItemDecoder.getValue(pathStack.peek()
            , "total" // itemName
            , kr.pe.codda.common.type.SingleItemType.UNSIGNED_INTEGER // itemType
            , -1 // itemSize
            , null // nativeItemCharset
            , receivedMiddleObject));

        boardListRes.setCnt((Integer)
        singleItemDecoder.getValue(pathStack.peek()
            , "cnt" // itemName
            , kr.pe.codda.common.type.SingleItemType.INTEGER // itemType
            , -1 // itemSize
            , null // nativeItemCharset
            , receivedMiddleObject));

        // ---- repeated "board" array; element count is the "cnt" field -------
        int board$2ListSize = boardListRes.getCnt();
        if (board$2ListSize < 0) {
            String errorMessage = new StringBuilder("the var board$2ListSize is less than zero").toString();
            throw new kr.pe.codda.common.exception.BodyFormatException(errorMessage);
        }

        Object board$2ArrayMiddleObject = singleItemDecoder.getArrayMiddleObject(pathStack.peek(), "board", board$2ListSize, receivedMiddleObject);
        java.util.List<BoardListRes.Board> board$2List = new java.util.ArrayList<BoardListRes.Board>();
        for (int i2=0; i2 < board$2ListSize; i2++) {
            // Push the element path (e.g. "BoardListRes.Board[3]") for diagnostics.
            pathStack.push(new StringBuilder(pathStack.peek()).append(".").append("Board").append("[").append(i2).append("]").toString());
            Object board$2MiddleWritableObject= singleItemDecoder.getMiddleObjectFromArrayMiddleObject(pathStack.peek(), board$2ArrayMiddleObject, i2);
            BoardListRes.Board board$2 = new BoardListRes.Board();
            board$2List.add(board$2);

            board$2.setBoardNo((Long)
            singleItemDecoder.getValue(pathStack.peek()
                , "boardNo" // itemName
                , kr.pe.codda.common.type.SingleItemType.UNSIGNED_INTEGER // itemType
                , -1 // itemSize
                , null // nativeItemCharset
                , board$2MiddleWritableObject));

            board$2.setGroupNo((Long)
            singleItemDecoder.getValue(pathStack.peek()
                , "groupNo" // itemName
                , kr.pe.codda.common.type.SingleItemType.UNSIGNED_INTEGER // itemType
                , -1 // itemSize
                , null // nativeItemCharset
                , board$2MiddleWritableObject));

            board$2.setGroupSeq((Integer)
            singleItemDecoder.getValue(pathStack.peek()
                , "groupSeq" // itemName
                , kr.pe.codda.common.type.SingleItemType.UNSIGNED_SHORT // itemType
                , -1 // itemSize
                , null // nativeItemCharset
                , board$2MiddleWritableObject));

            board$2.setParentNo((Long)
            singleItemDecoder.getValue(pathStack.peek()
                , "parentNo" // itemName
                , kr.pe.codda.common.type.SingleItemType.UNSIGNED_INTEGER // itemType
                , -1 // itemSize
                , null // nativeItemCharset
                , board$2MiddleWritableObject));

            board$2.setDepth((Short)
            singleItemDecoder.getValue(pathStack.peek()
                , "depth" // itemName
                , kr.pe.codda.common.type.SingleItemType.UNSIGNED_BYTE // itemType
                , -1 // itemSize
                , null // nativeItemCharset
                , board$2MiddleWritableObject));

            board$2.setWriterID((String)
            singleItemDecoder.getValue(pathStack.peek()
                , "writerID" // itemName
                , kr.pe.codda.common.type.SingleItemType.UB_PASCAL_STRING // itemType
                , -1 // itemSize
                , null // nativeItemCharset
                , board$2MiddleWritableObject));

            board$2.setWriterNickname((String)
            singleItemDecoder.getValue(pathStack.peek()
                , "writerNickname" // itemName
                , kr.pe.codda.common.type.SingleItemType.UB_PASCAL_STRING // itemType
                , -1 // itemSize
                , null // nativeItemCharset
                , board$2MiddleWritableObject));

            board$2.setViewCount((Integer)
            singleItemDecoder.getValue(pathStack.peek()
                , "viewCount" // itemName
                , kr.pe.codda.common.type.SingleItemType.INTEGER // itemType
                , -1 // itemSize
                , null // nativeItemCharset
                , board$2MiddleWritableObject));

            board$2.setBoardSate((Byte)
            singleItemDecoder.getValue(pathStack.peek()
                , "boardSate" // itemName
                , kr.pe.codda.common.type.SingleItemType.BYTE // itemType
                , -1 // itemSize
                , null // nativeItemCharset
                , board$2MiddleWritableObject));

            board$2.setRegisteredDate((java.sql.Timestamp)
            singleItemDecoder.getValue(pathStack.peek()
                , "registeredDate" // itemName
                , kr.pe.codda.common.type.SingleItemType.JAVA_SQL_TIMESTAMP // itemType
                , -1 // itemSize
                , null // nativeItemCharset
                , board$2MiddleWritableObject));

            board$2.setVotes((Integer)
            singleItemDecoder.getValue(pathStack.peek()
                , "votes" // itemName
                , kr.pe.codda.common.type.SingleItemType.INTEGER // itemType
                , -1 // itemSize
                , null // nativeItemCharset
                , board$2MiddleWritableObject));

            board$2.setSubject((String)
            singleItemDecoder.getValue(pathStack.peek()
                , "subject" // itemName
                , kr.pe.codda.common.type.SingleItemType.UB_PASCAL_STRING // itemType
                , -1 // itemSize
                , null // nativeItemCharset
                , board$2MiddleWritableObject));

            board$2.setLastModifiedDate((java.sql.Timestamp)
            singleItemDecoder.getValue(pathStack.peek()
                , "lastModifiedDate" // itemName
                , kr.pe.codda.common.type.SingleItemType.JAVA_SQL_TIMESTAMP // itemType
                , -1 // itemSize
                , null // nativeItemCharset
                , board$2MiddleWritableObject));

            pathStack.pop();
        }

        boardListRes.setBoardList(board$2List);

        pathStack.pop();

        return boardListRes;
    }
}
FilipRy/versu-android | app/src/main/java/com/filip/versu/view/fragment/CommentFragment.java | package com.filip.versu.view.fragment;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import com.filip.versu.R;
import com.filip.versu.model.dto.CommentDTO;
import com.filip.versu.model.dto.PostDTO;
import com.filip.versu.model.dto.UserDTO;
import com.filip.versu.model.view.CommentsFeedViewModel;
import com.filip.versu.view.adapter.AbsBaseEntityRecyclerViewAdapter;
import com.filip.versu.view.adapter.CommentRecyclerViewAdapter;
import com.filip.versu.view.viewmodel.CommentViewModel;
import com.filip.versu.view.viewmodel.callback.ICommentViewModel.ICommentViewCallback;
import java.util.ArrayList;
/**
 * Fragment that displays the paged list of comments of a single post and
 * lets the user add a new comment.
 */
public class CommentFragment extends AbsRefreshablePageableFragment<ICommentViewCallback, CommentsFeedViewModel, CommentViewModel> implements ICommentViewCallback {

    public static final String TAG = "CommentFragment";
    // Bundle key under which the post whose comments are shown is stored.
    public static final String POST_KEY = "POST_KEY";

    // Callback for propagating feedback-action changes back to the posts feed.
    private AbsPostsFeedFragment.INotifyFeedbackActionChange notifyFeedbackActionChange;
    // Button that loads the next page of comments on demand.
    private Button loadMoreComments;
    // Once true the "load more" button stays hidden, even after later
    // showProgressBarAtBottom(false) calls.
    private boolean hideLoadMoreBtn;

    /**
     * Creates a fragment instance showing the comments of the given post.
     *
     * @param postDTO the post whose comments should be displayed
     */
    public static CommentFragment newInstance(PostDTO postDTO) {
        CommentFragment commentFragment = new CommentFragment();
        Bundle bundle = new Bundle();
        bundle.putSerializable(POST_KEY, postDTO);
        commentFragment.setArguments(bundle);
        return commentFragment;
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        return inflater.inflate(R.layout.fragment_comment, container, false);
    }

    @Override
    public void onViewCreated(View view, Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        // The ImageView acts as the "send comment" control: clicking it submits
        // the text currently in the edit field and clears the field.
        ImageView commentView = (ImageView) view.findViewById(R.id.imageViewLayout);
        final EditText commentText = (EditText) view.findViewById(R.id.editTextLayout);
        commentView.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                String content = commentText.getText().toString();
                getViewModel().createCommentTask(content);
                commentText.setText("");
            }
        });
        PostDTO postDTO = (PostDTO) getArguments().getSerializable(POST_KEY);
        getViewModel().setDependencies(postDTO);
        getViewModel().setRecyclerViewAdapterCallback(notifyFeedbackActionChange);
        // Show cached comments first; presumably the backend refresh follows
        // elsewhere in the view-model — TODO confirm.
        getViewModel().requestItemsFromInternalStorage(getActivity().getApplicationContext());
    }

    /**
     * Instead of endless scrolling, this fragment uses an explicit
     * "load more" button that requests the next page from the backend.
     */
    @Override
    public void initializeEndlessScrolling(View parentView) {
        loadMoreComments = (Button) parentView.findViewById(R.id.load_more_btn);
        loadMoreComments.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                getViewModel().requestNextPageFromBackend();
            }
        });
    }

    @Override
    public void hideLoadMoreBtn() {
        hideLoadMoreBtn = true;// when showProgressBarAtBottom(false) is called afterwards, the button won't be shown anymore
        loadMoreComments.setVisibility(View.GONE);
    }

    @Override
    public void showProgressBarAtBottom(boolean show) {
        super.showProgressBarAtBottom(show);
        // While loading, hide the "load more" button; restore it afterwards
        // unless it has been permanently hidden via hideLoadMoreBtn().
        if(show) {
            loadMoreComments.setVisibility(View.GONE);
        } else {
            if (!hideLoadMoreBtn) {
                loadMoreComments.setVisibility(View.VISIBLE);
            }
        }
    }

    @Override
    public AbsBaseEntityRecyclerViewAdapter<CommentDTO> createRecyclerViewAdapter() {
        CommentRecyclerViewAdapter commentRecyclerViewAdapter = new CommentRecyclerViewAdapter(new ArrayList<CommentDTO>(), getActivity().getApplicationContext(), getViewModel());
        return commentRecyclerViewAdapter;
    }

    @Override
    public void setModelView() {
        setModelView(this);
    }

    @Override
    public void setRecyclerViewAdapterCallback(AbsPostsFeedFragment.INotifyFeedbackActionChange notifyFeedbackActionChangeCallback) {
        this.notifyFeedbackActionChange = notifyFeedbackActionChangeCallback;
    }

    @Override
    public void scrollToLastItem() {
        int size = super.recyclerViewAdapter.getItemCount();
        super.recyclerView.scrollToPosition(size - 1);
        // TODO: find a better solution — resetting the hide flag here is a hack.
        hideLoadMoreBtn = false;
    }

    @Nullable
    @Override
    public Class<CommentViewModel> getViewModelClass() {
        return CommentViewModel.class;
    }

    @Override
    public void displayProfileOfUserFragment(UserDTO userDTO) {
        profileDisplayer.displayUserProfile(userDTO, getActivity());
    }

    @Override
    public void addItemToViewAdapter(CommentDTO item) {
        recyclerViewAdapter.addItem(item);
    }

    @Override
    public void addItemToViewAdapter(CommentDTO item, int position) {
        recyclerViewAdapter.addItemToPosition(item, position);
    }

    @Override
    public void removeItemFromViewAdapter(CommentDTO item) {
        recyclerViewAdapter.removeItem(item);
    }
}
|
JianpingZeng/xcc | xcc/java/backend/target/TargetSubtarget.java | package backend.target;
/*
* Extremely Compiler Collection
* Copyright (c) 2015-2020, <NAME>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
import backend.codegen.dagisel.SDep;
import backend.codegen.dagisel.SUnit;
import backend.mc.MCSubtargetInfo;
/**
* @author <NAME>
* @version 0.4
*/
/**
 * Base class describing a concrete target's subtarget configuration.
 * Most query methods return neutral defaults (false / 0) and are meant
 * to be overridden by target-specific subclasses.
 */
public abstract class TargetSubtarget extends MCSubtargetInfo {
    // Target-specific instruction information; populated by subclasses.
    protected TargetInstrInfo instrInfo;
    // Target-specific register information; populated by subclasses.
    protected TargetRegisterInfo regInfo;

    /** Extra latency for special addresses; 0 by default. */
    public int getSpecialAddressLatency() {
        return 0;
    }

    /** Whether the target platform is Darwin; false unless overridden. */
    public boolean isTargetDarwin() {
        return false;
    }

    /** Whether the target uses the ELF object format; false unless overridden. */
    public boolean isTargetELF() {
        return false;
    }

    /** Whether the target is Cygwin/MinGW; false unless overridden. */
    public boolean isTargetCygMing() {
        return false;
    }

    /** Whether the target platform is Windows; false unless overridden. */
    public boolean isTargetWindows() {
        return false;
    }

    /** COFF is used on both Cygwin/MinGW and Windows targets. */
    public boolean isTargetCOFF() {
        return isTargetCygMing() || isTargetWindows();
    }

    /** Hook allowing targets to adjust a scheduling dependency; no-op by default. */
    public void adjustSchedDependency(SUnit opSU, SUnit su, SDep dep) {
    }

    /** Hardware mode index; 0 by default. */
    public int getHwMode() {
        return 0;
    }

    /** Returns the target's register information (may be null until set by a subclass). */
    public TargetRegisterInfo getRegisterInfo() {
        return regInfo;
    }

    /** Returns the target's instruction information (may be null until set by a subclass). */
    public TargetInstrInfo getInstrInfo() {
        return instrInfo;
    }

    public abstract TargetFrameLowering getFrameLowering();

    public abstract TargetLowering getTargetLowering();

    /**
     * Parses the sub-features string specified by subtarget options.
     * This function should by overrided by any tablegen generated sub class.
     * @param fs
     * @param cpu
     */
    public abstract void parseSubtargetFeatures(String fs, String cpu);

    /** Whether this is a 64-bit subtarget; false unless overridden. */
    public boolean is64Bit() {
        return false;
    }
}
|
powerfuler/spring-boot-student | spring-boot-student-log/src/main/java/com/xiaolyuh/annotation/EnableAspectJLog.java | <filename>spring-boot-student-log/src/main/java/com/xiaolyuh/annotation/EnableAspectJLog.java
package com.xiaolyuh.annotation;
import com.xiaolyuh.core.TrackConfig;
import org.springframework.context.annotation.EnableAspectJAutoProxy;
import org.springframework.context.annotation.Import;
import java.lang.annotation.*;
/**
 * Enables AspectJ-based method tracking/logging for the annotated Spring
 * configuration class.
 *
 * <p>Turns on AspectJ auto-proxying and imports {@link TrackConfig}, which
 * registers the tracking aspect beans. Apply to a {@code @Configuration}
 * class to activate the logging aspect application-wide.</p>
 */
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Documented
@EnableAspectJAutoProxy
@Import({TrackConfig.class})
public @interface EnableAspectJLog {
}
|
jhwoodward/nompl | src/interpreter/parserUtils.js | <gh_stars>1-10
var _ = require('lodash');
var api = {
  // Extract an integer value token. Tokens carrying the inline "==" marker
  // are plain values; tokens without it are phrase-level values.
  parseValue: s => {
    const value = parseInt(/-?[\d]{1,5}/.exec(s)[0], 10);
    const phrase = s.indexOf('==') === -1;
    // now using ==V inline
    return phrase ? { value, phrase } : { value };
  },
  // Return the text between matching brackets. startIndex is assumed to sit
  // just inside the opening bracket; returns undefined if never balanced.
  getBracketed: (s, startIndex, open, close) => {
    open = open || '(';
    close = close || ')';
    let depth = 1;
    let pos = startIndex;
    while (depth > 0 && pos < s.length) {
      const ch = s.charAt(pos);
      if (ch === open) {
        depth++;
      }
      if (ch === close) {
        depth--;
      }
      pos++;
    }
    return depth === 0 ? s.substring(startIndex, pos - 1) : undefined;
  },
  // Parse a pitch character (a-g or x-z, either case) plus octave-jump marks.
  parsePitch: s => {
    const letter = /[a-gA-Gx-zX-Z]/.exec(s)[0];
    const jumps = s.match(/!/g);
    return api.strip({
      char: letter,
      down: letter === letter.toLowerCase(),
      up: letter === letter.toUpperCase(),
      octJump: jumps ? jumps.length : false
    });
  },
  // Drop undefined / false / null / empty-object members from an object.
  strip: obj => {
    const cleaned = {};
    for (const key in obj) {
      const val = obj[key];
      if (val !== undefined
        && val !== false
        && val !== null
        && !(typeof val === 'object' && _.isEmpty(val))
      ) {
        cleaned[key] = val;
      }
    }
    return cleaned;
  },
  // Parse an "N:" octave marker (0-4, optionally negated); false if absent.
  parseOctave: s => {
    const marker = /\-?[0-4]:/.exec(s);
    if (!marker) {
      return false;
    }
    return {
      octave: parseInt(marker[0].replace(':', ''), 10) + 5
    };
  },
  // Parse a single note token: letter, accidental (+/-), octave jumps (!).
  parseNote: s => {
    const jumps = s.match(/!/g);
    const letter = /[A-Ga-g]/.exec(s)[0];
    const note = {
      flat: s.indexOf('-') > -1,
      sharp: s.indexOf('+') > -1,
      char: letter.toUpperCase(),
      accidental: 0,
      down: letter === letter.toLowerCase(),
      up: letter === letter.toUpperCase(),
      octJump: jumps ? jumps.length : false
    };
    note.string = note.char + (note.flat ? 'b' : note.sharp ? '#' : '');
    if (note.sharp) {
      note.accidental++;
    }
    if (note.flat) {
      note.accidental--;
    }
    return api.strip(note);
  },
  // Parse every note token (optionally accidental-prefixed letter) in s.
  parseNotes: s => {
    const re = /(?:[+-]?[A-Ga-g])/g;
    const parsed = [];
    let hit;
    while ((hit = re.exec(s)) !== null) {
      parsed.push(api.parseNote(hit[0]));
    }
    return parsed;
  }
};
module.exports = api;
|
IShostak/KindaExpressKing | src/main/java/com/softserve/itacademy/kek/configuration/WebMvcConfig.java | package com.softserve.itacademy.kek.configuration;
import java.util.HashMap;
import java.util.Map;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.MediaType;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.web.servlet.config.annotation.ContentNegotiationConfigurer;
import org.springframework.web.servlet.config.annotation.DefaultServletHandlerConfigurer;
import org.springframework.web.servlet.config.annotation.EnableWebMvc;
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
@Configuration
@EnableWebMvc
@EnableScheduling
@ComponentScan(basePackages = {"com.softserve.itacademy.kek", "com.softserve.itacademy.kek.security", "com.softserve.itacademy.kek.controller"})
public class WebMvcConfig implements WebMvcConfigurer {

    /**
     * Serves the Swagger UI page and its webjar assets from the classpath
     * resources bundled with the swagger distribution.
     */
    @Override
    public void addResourceHandlers(ResourceHandlerRegistry registry) {
        registry.addResourceHandler("/webjars/**").addResourceLocations("classpath:/META-INF/resources/webjars/");
        registry.addResourceHandler("swagger-ui.html").addResourceLocations("classpath:/META-INF/resources/");
    }

    /**
     * Routes otherwise-unhandled requests to the container's default servlet.
     */
    @Override
    public void configureDefaultServletHandling(DefaultServletHandlerConfigurer configurer) {
        configurer.enable("DefaultController");
    }

    /**
     * Makes UTF-8 JSON the default response content type when the client
     * does not negotiate one explicitly.
     */
    @Override
    public void configureContentNegotiation(ContentNegotiationConfigurer configurer) {
        final Map<String, String> charsetParams = new HashMap<>();
        charsetParams.put("charset", "UTF-8");
        configurer.defaultContentType(new MediaType(MediaType.APPLICATION_JSON, charsetParams));
    }
}
|
asiekierka/OpenComputers | src/main/scala/li/cil/oc/util/mods/ComputerCraft16.scala | package li.cil.oc.util.mods
import dan200.computercraft.api.ComputerCraftAPI
import dan200.computercraft.api.filesystem.{IMount, IWritableMount}
import dan200.computercraft.api.lua.ILuaContext
import dan200.computercraft.api.media.IMedia
import dan200.computercraft.api.peripheral.{IComputerAccess, IPeripheral, IPeripheralProvider}
import li.cil.oc
import li.cil.oc.common.tileentity.{ComputerWrapper, Router}
import li.cil.oc.server.fs.{CC16FileSystem, CC16WritableFileSystem}
import net.minecraft.item.ItemStack
import net.minecraft.world.World
import scala.collection.mutable
/**
 * Integration glue for ComputerCraft 1.6: exposes OpenComputers routers as
 * CC peripherals and adapts CC disk media / mounts to OC file systems.
 */
object ComputerCraft16 {
  /** Registers a peripheral provider that wraps Router tile entities. */
  def init() {
    ComputerCraftAPI.registerPeripheralProvider(new IPeripheralProvider {
      // Only Router tile entities are exposed; anything else yields no peripheral.
      override def getPeripheral(world: World, x: Int, y: Int, z: Int, side: Int) = world.getBlockTileEntity(x, y, z) match {
        case router: Router => new RouterPeripheral(router)
        case _ => null
      }
    })
  }

  /** True when the stack's item implements ComputerCraft's IMedia (i.e. is a disk). */
  def isDisk(stack: ItemStack) = stack.getItem.isInstanceOf[IMedia]

  /** Wraps a CC disk's data mount as an OC file system; null for non-disks. */
  def createDiskMount(stack: ItemStack, world: World) =
    if (isDisk(stack)) oc.api.FileSystem.fromComputerCraft(stack.getItem.asInstanceOf[IMedia].createDataMount(stack, world)) else null

  /**
   * Adapts a CC mount to the matching OC file-system wrapper.
   * Writable mounts are checked first since IWritableMount extends IMount.
   */
  def createFileSystem(mount: AnyRef) = Option(mount) collect {
    case rw: IWritableMount => new CC16WritableFileSystem(rw)
    case ro: IMount => new CC16FileSystem(ro)
  }

  /** Presents an OC Router to ComputerCraft computers as a peripheral. */
  class RouterPeripheral(val router: Router) extends IPeripheral {
    override def getType = router.getType

    /** Tracks the attaching computer and gives it an empty open-port set. */
    override def attach(computer: IComputerAccess) {
      router.computers += computer -> new ComputerWrapper {
        override def id = computer.getID
        override def attachmentName = computer.getAttachmentName
        override def queueEvent(name: String, args: Array[AnyRef]) = computer.queueEvent(name, args)
      }
      router.openPorts += computer -> mutable.Set.empty
    }

    /** Forgets the computer and its open ports on detach. */
    override def detach(computer: IComputerAccess) {
      router.computers -= computer
      router.openPorts -= computer
    }

    override def getMethodNames = router.getMethodNames

    /** Delegates Lua method calls straight to the wrapped router. */
    override def callMethod(computer: IComputerAccess, context: ILuaContext, method: Int, arguments: Array[AnyRef]) =
      router.callMethod(computer, computer.getID, computer.getAttachmentName, method, arguments)

    // Two peripherals are equal iff they wrap the same router instance.
    override def equals(other: IPeripheral) = other match {
      case rp: RouterPeripheral => rp.router == router
      case _ => false
    }
  }
}
|
rombie/contrail-test | scripts/scale/control-node/bgp_scale.py | from __future__ import print_function
import time
import sys
import os
import re
import signal
from netaddr import *
from datetime import datetime, timedelta
from pytz import timezone
import pytz
import subprocess
import traceback
#
# Contrail libs
#
from commands import Command
from cn_introspect_bgp import ControlNodeInspect
from ssh_interactive_commnds import *
#
# Used to kill any residual bgp_stress_test processes including zombies
#
BGP_STRESS = None
def log_print(line, fd=''):
    """Print a timestamped message to stdout and mirror it to a log file.

    The trailing digits of the log file's name (if any) become the run id,
    so concurrent scale runs can be told apart in interleaved console output.

    Fix: the default fd='' previously crashed on fd.name / fd.flush(); a
    missing or falsy fd now means "console only".

    line -- message text
    fd   -- optional open file object (expects .name, .write, .flush)
    """
    run_id = 0
    #
    # Get id val from logfilename (safe when fd has no .name)
    #
    log_name = getattr(fd, 'name', '')
    suffix = re.search(r'\d+$', log_name)
    if suffix:
        run_id = suffix.group()
    msg = "{0} scale{1} {2}".format(datetime.now(), run_id, line)
    print (msg)
    if fd:
        print (msg, file=fd)
        fd.flush()
# end log_print
def open_logfile(fname):
    """Open fname for writing, creating the local ./log directory first.

    Fixes: Python-3-incompatible `except Exception, e` clause, and the abort
    message's %s placeholder which was never actually formatted (fname was
    passed as a second positional argument to print instead).

    Exits the process on failure, as running out of file descriptors is
    fatal for a scaling run.
    """
    #
    # Create logdir if needed
    #
    if not os.path.exists('log'):
        os.mkdir('log')
    #
    # Open file
    #
    try:
        fd = open(fname, 'w')
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception:
        print (
            'ABORT: Failed to open file:%s, since this is a scaling script, check that the number of file descriptors (max files) has not been exceeded:[lsof -n, and ulimit -n]' % fname)
        sys.exit()
    return fd
# end open_logfile
def get_ssh_fd(usr, pw, ip, fd):
    """Connect to a device over ssh and return the command executer handle.

    An ip of 0 means "no such device configured" and yields None.
    """
    if ip == 0:
        return None
    executer = remoteCmdExecuter()
    executer.execConnect(ip, usr, pw)
    return executer
# end get_ssh_fd
def get_cn_ispec(ip):
    """Build a ControlNodeInspect introspect helper bound to the given IP."""
    return ControlNodeInspect(ip)
# end get_cn_ispec
def check_if_swapping(result, who, fd):
    '''Check if system is swapping.

    `result` is expected to be captured swap-counter output; absence of the
    literal " 0 kB" marker is treated as evidence of swapping.

    Fixes: replaced the no-op bare `result` statement and the blind
    `except: pass` (which silently mapped a None input to False) with an
    explicit guard; empty/None output now explicitly means "no data" and
    returns False instead of relying on an exception path.
    '''
    if not result:
        # No output captured (command failed / nothing polled): not evidence
        # of swapping, so report False rather than warn.
        return False
    if re.search(' 0 kB', result):
        return False
    log_print(
        "WARNING: system swapping:{0} -> reboot device".format(who), fd)
    return True
# end check_if_swapping
def check_for_crash(result, who, fd):
    '''Scan captured `ls`-style output for core files; log a warning and
    return True when any are present, otherwise return False.
    '''
    crashed = bool(re.search('core', result))
    if crashed:
        log_print(
            "WARNING: crash found on: {0} crash files:\n{1}".format(who, result), fd)
    return crashed
# end check_for_crash
def get_instance_name(ninstances, ri_name, index, ri_domain):
    """Return (short_name, fully_qualified_name) for routing instance `index`.

    bgp_stress_test iterates over instances itself, so the short name is
    always the bare base name; the qualified name repeats "<base><index>"
    as both the project and instance components under the domain.
    """
    leaf = "%s%s" % (ri_name, index)
    qualified = "%s:%s:%s" % (ri_domain, leaf, leaf)
    return (ri_name, qualified)
# end get_instance_name
def get_total_prefix_expectations(ninstances, import_targets_per_instance, nagents, nroutes, overlapped_prefixes):
    """Compute expected prefix counts for the scale run.

    Returns (prefixes_per_instance, vpn_prefixes) where the per-instance
    count is what each routing instance should see on the control node and
    vpn_prefixes is the expected bgp.l3vpn table size on the router.

    With overlapped_prefixes set, every agent announces the same prefix set
    (an explicit xmpp prefix was supplied), so the agent count drops out of
    the per-instance total.
    """
    # Instances may be grouped by shared import targets (only when there is
    # more than one instance).
    ntargets = 1
    if ninstances > 1 and import_targets_per_instance > 1:
        ntargets = int(ninstances / import_targets_per_instance)
    base = nroutes if overlapped_prefixes else nagents * nroutes
    prefixes_per_instance = base * ntargets
    # Total vpn prefixes, as seen in the router's bgp.l3vpn table.
    vpn_prefixes = ninstances * nagents * nroutes
    return (prefixes_per_instance, vpn_prefixes)
# end get_total_prefix_expectations
def bgp_scale_mock_agent(cn_usr, cn_pw, rt_usr, rt_pw, cn_ip, rt_ip, rt_ip2, xmpp_src, ri_domain, ri_name, ninstances, import_targets_per_instance, family, nh, test_id, nagents, nroutes, oper, sleep_time, logfile_name_bgp_stress, logfile_name_results, timeout_minutes_poll_prefixes, background, xmpp_prefix, xmpp_prefix_large_option, skip_krt_check, report_stats_during_bgp_scale, report_cpu_only_at_peak_bgp_scale, skip_rtr_check, bgp_env, no_verify_routes, logging):
    '''Performs bgp stress test.

    Drives one run of the external bgp_stress_test binary against control
    node cn_ip: brings up `nagents` mock xmpp agents per instance, announces
    `nroutes` prefixes each, and times prefix install/delete via introspect
    polling (and optionally the router rt_ip).

    Key parameters (others are passed straight through to helpers):
      oper     -- operation string; matched case-insensitively for 'add',
                  'del', 'hold' and 'agents_only' substrings
      xmpp_prefix -- optional start prefix; when set, all agents announce
                  the SAME prefix set (changes the expected totals)
      logging  -- extra bgp_stress_test flags; if it contains
                  "send-trigger", prefix announcements wait until the named
                  trigger file is touched
      skip_rtr_check / skip_krt_check -- disable router-side verification

    Returns 0 for agents_only runs, otherwise None; progress and timings
    are written to logfile_name_results.
    '''
    #
    # For exception handler since it runs as a separate process
    #
    global BGP_STRESS
    #
    # Open logfile for this function/results
    #
    fd = open_logfile(logfile_name_results)
    #
    # Get fd/handles (router handles are skipped entirely when router
    # verification is disabled)
    #
    if skip_rtr_check != 0:
        rt_self = None
        rt_self2 = None
    else:
        rt_self = get_ssh_fd(rt_usr, rt_pw, rt_ip, fd)
        rt_self2 = get_ssh_fd(rt_usr, rt_pw, rt_ip2, fd)
    cn_self = get_cn_ispec(cn_ip)
    #
    # Get control node ssh fd only if not running in the background - bug w ssh blocking on multiprocessing
    #
    # cnshell_self = 0
    # if not background:
    cnshell_self = get_ssh_fd(cn_usr, cn_pw, cn_ip, fd)
    #
    # xmpp_prefix is optional, if it is 0, do not use it
    #
    overlapped_prefixes = 0
    if not xmpp_prefix:
        xmpp_start_prefix = ''
        omsg = ''
    else:
        xmpp_start_prefix = "--xmpp-prefix=%s" % (xmpp_prefix)
        #
        # If there is an xmpp_start_prefix, then all agents will get the same set of
        # prefixes. And in this case, the number of expected prefixes changes..
        #
        overlapped_prefixes = 1
        omsg = "Overlapping prefixes/agent (xmpp_prefix provided)"
    #
    # xmpp-prefix-large option is an optional parameter
    #
    if xmpp_prefix_large_option == 0:
        xmpp_prefix_large = ''
    else:
        xmpp_prefix_large = "--xmpp-prefix-format-large"
    #
    # Derive total prefixes expected for each agent vs total
    #
    prefixes_per_instance, vpn_prefixes = get_total_prefix_expectations(
        ninstances, import_targets_per_instance, nagents, nroutes, overlapped_prefixes)
    #
    # Normalize the name: first word of oper, truncated to 3 chars, lowered
    # ("Delete prefixes" -> "del")
    #
    op = (oper.rsplit()[0][:3]).lower()
    #
    # Get localhost IP
    #
    localhost_ip = get_localhost_ip()
    #
    # process ID
    #
    pid = os.getpid()
    #
    # Record test title
    #
    msg = get_msg_ninst_x_agents_x_nroutes(ninstances, nagents, nroutes)
    log_print("INFO: BGP Stress Test PID:%s" % pid, fd)
    log_print(
        "INFO: BGP Stress Test - CN:{0} family:{1} Operation:{2} ninst X nagent X nroutes = {5}x{3}x{4} NumImportTargetsPerRinstance:{6}".format(cn_ip,
            family, oper, nagents, nroutes, ninstances, import_targets_per_instance), fd)
    #
    # Logfile name is passed to the bgp_stress call
    #
    logfile_name = "--log-file=%s" % (logfile_name_bgp_stress)
    #
    # Derive instance name if only one instance, otherwise use base name (polling and bgp_stress iterates over the names)
    #
    instance_name, full_instance_name = get_instance_name(
        ninstances, ri_name, 1, ri_domain)
    #
    # Check if "--routes-send-trigger" paramter is set. Retrieve the associated
    # file name if so. The trigger file name gets this run's pid appended so
    # parallel runs do not trip each other's triggers.
    #
    #import pdb; pdb.set_trace ()
    try:
        trigger_file = re.search(
            'send-trigger(\s+|=)(.*)$', logging, re.IGNORECASE)
        if trigger_file != None:
            trigger_file = trigger_file.group(2)
            new_trigger_file = trigger_file + str(pid)
            logging = re.sub(trigger_file, new_trigger_file, logging)
            log_print("DEBUG: found trigger file: %s new_trigger_file:%s" %
                      (trigger_file, new_trigger_file), fd)
    except:
        trigger_file = 0
    #
    # Command to instantiate bgp_stress_test
    #
    bgp_stress_test_command = '%s ./bgp_stress_test --no-multicast --xmpp-port=5269 --xmpp-server=%s --xmpp-source=%s --ninstances=%s --instance-name=%s --test-id=%s --nagents=%s --nroutes=%s --xmpp-nexthop=%s %s %s %s %s' % (
        bgp_env, cn_ip, xmpp_src, ninstances, instance_name, test_id, nagents, nroutes, nh, xmpp_start_prefix, xmpp_prefix_large, logging, logfile_name)
    #
    # Get stats before test run
    #
    if report_stats_during_bgp_scale:
        report_stats(cn_self, rt_self, cnshell_self, cn_ip, rt_ip,
                     "Stats Before Test Run {0}".format(oper), report_cpu_only_at_peak_bgp_scale, fd)
    #
    # Log expected values and bgp_stress_test command
    #
    msg = "ninst X nagent X nroutes = {0}x{1}x{2}".format(
        ninstances, nagents, nroutes)
    log_print(
        "INFO: BGP Stress Test - {0} Total Prefixes Expected/all-instances-cn:{1} Expected/instance-cn:{2} VPN Prefixes-rtr:{3}".format(omsg,
            prefixes_per_instance * ninstances, prefixes_per_instance, vpn_prefixes), fd)
    log_print("INFO: %s" % bgp_stress_test_command, fd)
    #
    # Delete prefixes if operation requested is a delete
    #
    (rc, out, err) = (0, 0, 0)
    if re.search('del', oper, re.IGNORECASE):
        #
        # Poll prefix delete time (no bgp_stress_test process is started for
        # a pure delete run -- the prefixes are expected to age out)
        #
        del_start_time = datetime.now()
        get_prefix_install_or_delete_time(
            cn_self, rt_self, cn_ip, rt_ip, ri_domain, instance_name, ninstances, prefixes_per_instance, vpn_prefixes,
            op, family, nagents, nroutes, timeout_minutes_poll_prefixes, skip_krt_check, skip_rtr_check, no_verify_routes, xmpp_src, del_start_time, fd)
    #
    # Install prefixes
    #
    elif re.search('add', oper, re.IGNORECASE):
        #
        # Start bgp_stress_test in the background
        #
        log_print("INFO: Starting bgp_stress on localhost %s" %
                  (localhost_ip), fd)
        BGP_STRESS = Command(bgp_stress_test_command)
        BGP_STRESS.start()
        bgp_start_time = datetime.now()
        log_print("INFO: notable_event started bgp_stress at timestamp: %s" %
                  str(bgp_start_time), fd)
        #
        # Time how long it takes for peers to come up - abort gracefully if timed out
        #
        tdelta, timestamp_all_peers_up, timestamp_at_least_one_peer_up = get_agent_bringup_time(
            cn_self, xmpp_src, full_instance_name, nagents, "xmpp", op, False, bgp_start_time, fd)
        #
        # If a trigger file is indicated, routes will not start until the file
        # is touched. Start "route_add" timer if so.
        #
        if trigger_file and len(trigger_file):
            log_print(
                "INFO: trigger file found: %s sleeping for %s seconds before starting prefix announcements" %
                (new_trigger_file, sleep_time), fd)
            time.sleep(sleep_time)
            cmd = 'touch %s' % new_trigger_file
            try:
                return_val = subprocess.check_output(
                    cmd, stderr=subprocess.STDOUT, shell=True)
                log_print("DEBUG: executed cmd:%s" % cmd, fd)
            except:
                log_print("ERROR: problem executing cmd:%s" % cmd, fd)
            timestamp_trigger_prefix_announcements = datetime.now()
            log_print(
                "INFO: notable_event prefix adds triggered at timestamp: %s" %
                str(timestamp_trigger_prefix_announcements), fd)
            timestamp_start_prefix_announcement = timestamp_trigger_prefix_announcements
        else:
            # Without a trigger, announcements begin as soon as the first
            # peer establishes.
            timestamp_start_prefix_announcement = timestamp_at_least_one_peer_up
        #
        # Hold if this is an agent only test with no prefixes
        #
        if re.search('agents_only', oper, re.IGNORECASE):
            #
            # Get stats after test run
            #
            if not background:
                report_stats(cn_self, rt_self, cnshell_self, cn_ip, rt_ip,
                             "Stats After Install - Peak period {0}".format(oper), report_cpu_only_at_peak_bgp_scale, fd)
            log_print(
                "INFO: sleeping a long time %s seconds or until ctrl-c..." %
                sleep_time, fd)
            time.sleep(sleep_time)
            #
            # Gracefully terminate BGP session with control node,
            # then kill bgp_tress_test
            #
            kill_bgp_stress_python_call('bgp_stress_test', 'python', fd)
            rc, out, err = BGP_STRESS.stop()
            BGP_STRESS = False
            return 0
        #
        # Get prefix install time (polls introspect)
        #
        get_prefix_install_or_delete_time(
            cn_self, rt_self, cn_ip, rt_ip, ri_domain, instance_name, ninstances, prefixes_per_instance, vpn_prefixes, op, family,
            nagents, nroutes, timeout_minutes_poll_prefixes, skip_krt_check, skip_rtr_check, no_verify_routes, xmpp_src, timestamp_start_prefix_announcement, fd)
        #
        # Perform post-install tasks such as stats reporting and sleeping
        #
        post_install_tasks(
            cnshell_self, cn_self, rt_self, cn_ip, rt_ip, oper, sleep_time,
            msg, background, report_stats_during_bgp_scale, report_cpu_only_at_peak_bgp_scale, fd)
        #
        # Terminate BGP session with control node by t stopping bgp_stress python
        # child first, grab timestamp (routes stop at that point), then stop bgp_stress
        #
        log_print(
            "DEBUG: stopping route announcements/agents.. stopping python first", fd)
        kill_bgp_stress_python_call('bgp_stress_test', 'python', fd)
        del_start_time = datetime.now()
        log_print(
            "INFO: notable_event stopping prefix announcements at timestamp: %s" %
            (str(del_start_time)), fd)
        #
        # Stop bgp_stress
        #
        rc, out, err = BGP_STRESS.stop()
        log_print("DEBUG: after stop attempt for bgp_stress..", fd)
        BGP_STRESS = False
        #
        # Get prefix delete time (polls introspect)
        #
        get_prefix_install_or_delete_time(
            cn_self, rt_self, cn_ip, rt_ip, ri_domain, instance_name, ninstances, prefixes_per_instance, vpn_prefixes,
            "del", family, nagents, nroutes, timeout_minutes_poll_prefixes, skip_krt_check, skip_rtr_check, no_verify_routes, xmpp_src, del_start_time, fd)
    #
    # Get stats after test run
    #
    if report_stats_during_bgp_scale:
        report_stats(cn_self, rt_self, cnshell_self, cn_ip, rt_ip,
                     "Stats After Test Run {0}".format(oper), report_cpu_only_at_peak_bgp_scale, fd)
    #
    # Print logfile info
    #
    log_print("INFO: Log file: %s" % fd.name, fd)
    return
# end bgp_scale_mock_agent
def kill_bgp_stress_python_call(child_name, child_of_child_name, fd, retry=2, delay=5):
    """SIGKILL the python grandchild of this process's bgp_stress_test child.

    Stopping the python child first lets the bgp_stress_test process bring
    its BGP sessions down gracefully before it is itself stopped.

    Fixes: Python-3-incompatible `except ..., error` clause; non-raw '\\d+'
    regex literals; the initial `ps` lookup is now guarded -- previously a
    CalledProcessError (grep matching nothing) escaped to the caller.

    child_name          -- child process name to match (e.g. 'bgp_stress_test')
    child_of_child_name -- grandchild process name to match (e.g. 'python')
    retry/delay         -- lookup retries and sleep between them (seconds)
    """
    #
    # Parent is this process
    #
    parent_pid = os.getpid()
    #
    # Get corresponding child process (bgp_stress_test) pid (noting that many
    # may be running from other parents, hence the parent-pid filter)
    #
    try:
        child_ps_line = subprocess.check_output(
            'ps -efww | \grep %s | grep " %s " | grep -v grep' %
            (child_name, parent_pid), stderr=subprocess.STDOUT, shell=True)
    except subprocess.CalledProcessError:
        # grep found no matching child -- nothing to kill.
        log_print("WARNING: no %s child found for parent pid:%s" %
                  (child_name, parent_pid), fd)
        return
    log_print(
        "DEBUG: bgp_stress ps line:%s that matched this child:%s and parent pid:%s" %
        (child_ps_line, child_name, parent_pid), fd)
    bgp_stress_test_pid = 0
    pid_match = re.search(r'\d+', child_ps_line)
    if pid_match:
        bgp_stress_test_pid = int(pid_match.group())
        log_print("DEBUG: bgp_stress_test pid found:%s" %
                  bgp_stress_test_pid, fd)
    while bgp_stress_test_pid:
        cmd = 'ps -efww | grep %s | grep " %s " | grep -v %s | grep -v grep' % (
            child_of_child_name, bgp_stress_test_pid, child_name)
        #
        # Search for the child_of_the_child (python), where the pid matches it's parent,
        # bgp_stress_test pid, but precludes the child itself (bgp_stress_test name)
        #
        try:
            child_of_child_ps_line = subprocess.check_output(
                cmd, stderr=subprocess.STDOUT, shell=True)
            python_pid = int(re.search(r'\d+', child_of_child_ps_line).group())
            log_print(
                "DEBUG: found python pid:%s ps of bgp_stress_test child python:%s" %
                (python_pid, child_of_child_ps_line), fd)
            #
            # Send SIGKILL to python child of bgp_stress_test
            #
            try:
                os.kill(python_pid, signal.SIGKILL)
                log_print("DEBUG: SIGKILL %s pid:%s" %
                          (child_of_child_name, python_pid), fd)
                break
            except OSError:
                # Already gone -- loop once more; the next ps lookup will
                # fail and take the retry path below.
                pass
        except subprocess.CalledProcessError as error:
            retry -= 1
            log_print("WARNING: command %s failed: %s" %
                      (cmd, error.output), fd)
            #
            # Retry
            #
            if retry > 0:
                log_print("WARNING: command %s failed: %s" %
                          (cmd, error.output), fd)
                #
                # Check if bgp_stress is defunct (possibly due to assert on hold-time expire), if so send SIGKILL
                #
                if re.search('defunct', child_ps_line, re.IGNORECASE):
                    log_print(
                        "ERROR: bgp_stress_test pid:%s state is defunct (check if it crashed), no child python process, sending it SIGKILL line:%s" %
                        (bgp_stress_test_pid, child_ps_line), fd)
                    if type(bgp_stress_test_pid) == int:
                        try:
                            os.kill(bgp_stress_test_pid, signal.SIGKILL)
                            log_print("INFO: sending SIGKILL %s pid:%s" %
                                      (child_name, bgp_stress_test_pid), fd)
                        except OSError:
                            pass
                    break
                log_print(
                    "WARNING: retry:%d Command %s after sleeping %d seconds" %
                    (retry, cmd, delay), fd)
                time.sleep(delay)
                continue
            else:
                log_print(
                    "ERROR: problem bringing down python (called by bgp_stress_test pid:%s) move'n on.." %
                    bgp_stress_test_pid, fd)
                break
# end kill_bgp_stress_python_call
def _cleanup_fds(rt_self, cn_self, fd):
if rt_self.close:
rt_self.close()
if cn_self.close:
cn_self.close()
if fd:
fd.close()
# end _cleanup_fds
def post_install_tasks(cnshell_self, cn_self, rt_self, cn_ip, rt_ip, oper, sleep_time, test_info, background, report_stats_during_bgp_scale, report_cpu_only_at_peak_bgp_scale, fd):
    """Report peak-period stats (if enabled) and hold for sleep_time seconds
    after prefix installation completes.
    """
    # Peak-period stats snapshot, gated by the reporting flag.
    # if not background and report_stats_during_bgp_scale != 0:
    if report_stats_during_bgp_scale:
        report_stats(cn_self, rt_self, cnshell_self, cn_ip, rt_ip,
                     "Stats After Install - Peak period {0}".format(oper), report_cpu_only_at_peak_bgp_scale, fd)
    # A "hold" operation sleeps indefinitely (until ctrl-c); anything else
    # just pauses between phases. sleep_time is chosen by the caller.
    if re.search('hold', oper, re.IGNORECASE):
        hold_msg = "CTL-C to get out, or wait a really long time..."
    else:
        hold_msg = "%s %s test" % (test_info, oper)
    log_print(
        "INFO: sleeping for %s seconds after prefix installation of: %s" %
        (sleep_time, hold_msg), fd)
    time.sleep(sleep_time)
    return
# end post_install_tasks
def get_agent_bringup_time(self, ip, full_instance_name, num_peers, encoding, oper, log_details, start_time, fd):
    """Time how long the xmpp agent peers take to establish; abort the whole
    bgp_stress run (kill the child, close the log, exit) if they never do.

    Returns (tdelta, timestamp_all_up, timestamp_first_up).
    """
    log_print(
        "INFO: polling for %s agent peers to come up instance:%s oper:%s at:%s" %
        (num_peers, full_instance_name, oper, str(start_time)), fd)
    # Only a single instance name is polled here.
    timings = get_time_bringup_or_teardown_peers(
        self, ip, full_instance_name, num_peers, encoding, oper, log_details, start_time, fd)
    tdelta, timestamp_done, timestamp_at_least_one_peer_up = timings
    if tdelta == 'TimeoutWaitingPeersToComeUp':
        # Peers never came up: tear down this bgp_stress_test instance and bail.
        log_print(
            "ERROR: agent peers not coming up, aborting this bgp_stress_test result:%s " %
            tdelta, fd)
        kill_bgp_stress_python_call('bgp_stress_test', 'python', fd)
        rc, out, err = BGP_STRESS.stop()
        fd.close()
        sys.exit()
    return (tdelta, timestamp_done, timestamp_at_least_one_peer_up)
# end get_agent_bringup_time
def get_agent_teardown_time(self, ip, instance_full_name, num_peers, encoding, oper, log_details, start_time, fd):
    """Time how long the xmpp agent peers take to go down.

    Fix: the log line referenced the undefined name `full_instance_name`
    (the parameter is `instance_full_name`), raising NameError on every call.

    NOTE(review): get_time_bringup_or_teardown_peers returns a 3-tuple on
    its success path, so the 2-value unpack below would raise ValueError on
    success -- confirm against callers before changing the shape here.
    """
    log_print("INFO: Waiting for {0} agent peers to go dn.. instance:{1}".format(
        num_peers, instance_full_name), fd)
    tdelta, timestamp_done = get_time_bringup_or_teardown_peers(
        self, ip, instance_full_name, num_peers, encoding, oper, log_details, start_time, fd)
    return (tdelta, timestamp_done)
# end get_agent_teardown_time
def get_time_bringup_or_teardown_peers(self, ip, instance_full_name, num_peers, encoding, oper, log_details, start_time, fd):
    """Poll control-node introspect until all num_peers xmpp peers are up
    ('add' in oper) or down ('del' in oper), timing the transition.

    Returns:
      add success: (delta_time, timestamp_done, timestamp_first_peer_up)
      add timeout: ('TimeoutWaitingPeersToComeUp', 0, 0)
      del success: (delta_time, timestamp_done, 0)
      del timeout: ('TimeoutWaitingPeersToComeUp', time_chk)  # legacy 2-tuple,
                   kept because callers compare tdelta to the sentinel string
      invalid oper: None

    Fixes:
      * the "peers down" log call passed the timestamp as a third positional
        argument to log_print instead of %-formatting it (TypeError);
      * the delete path now int()-coerces the introspect 'count' field, as
        the add path already did -- a string '0' never equals integer 0, so
        the delete loop could only ever exit via timeout;
      * delta_time / timestamp_done / timestamp_at_least_one_peer_up are
        pre-initialized so the final return cannot hit a NameError when
        num_peers == 0 or on the delete path (which never set the third).
    """
    sleeptime_between_introspect_polls = 1
    at_least_one_peer_up_noted = 0
    time_chk = 0
    # Defensive defaults for the final return (see docstring fixes).
    delta_time = timedelta(0)
    timestamp_done = start_time
    timestamp_at_least_one_peer_up = 0
    if re.search('del', oper, re.IGNORECASE):
        peers_up = num_peers
        max_time = 15  # min
        while peers_up > 0:
            peers_up = int(self.get_cn_bgp_neighbor_stats_element(
                'count', 'xmpp', 'up', instance_full_name))
            if (peers_up == 0):
                delta_time, timestamp_done = get_delta_time(start_time)
                log_print("INFO: notable_event peers down at timestamp: %s" %
                          str(timestamp_done), fd)
                continue
            #
            # Check timeout val
            #
            time_chk, time_now = get_delta_time(start_time, 'minutes')
            if time_chk >= max_time:
                log_print(
                    "ERROR: timeout waiting for peers to go down... waited:%s minutes" %
                    time_chk, fd)
                return ('TimeoutWaitingPeersToComeUp', time_chk)
            time.sleep(sleeptime_between_introspect_polls)
    elif re.search('add', oper, re.IGNORECASE):
        peers_up = 0
        max_time = 15  # min
        waited = 1
        while peers_up < num_peers:
            peers_up = int(self.get_cn_bgp_neighbor_stats_element(
                'count', 'xmpp', 'up', instance_full_name))
            #
            # Record at least one or more peers up - prefix adds have started
            #
            if peers_up > 0 and at_least_one_peer_up_noted == 0:
                at_least_one_peer_up_noted = 1
                t1, timestamp_at_least_one_peer_up = get_delta_time(
                    start_time)
                log_print(
                    "INFO: notable_event at least one peer up at timestamp: %s" %
                    str(timestamp_at_least_one_peer_up), fd)
            #
            # Grab the add timestamp as soon as peers are up
            #
            if (peers_up == num_peers):
                delta_time, timestamp_done = get_delta_time(start_time)
                log_print("INFO: notable_event peers up at timestamp: %s" %
                          str(timestamp_done), fd)
                continue
            #
            # Sleep longer if already waited 2 min.. Just to avoid hammering the DUT
            #
            if waited > 120:
                log_print(
                    "DEBUG: sleeping %s sec in get_time_bringup_or_teardown_peers oper: %s peers_up:%s (out of: %s) timeout_in:%sm" %
                    (sleeptime_between_introspect_polls * 4, oper, peers_up, num_peers, max_time - time_chk), fd)
                time.sleep(sleeptime_between_introspect_polls * 4)
            #
            # Check timeout val
            #
            time_chk, time_now = get_delta_time(start_time, "minutes")
            if time_chk >= max_time:
                log_print(
                    "ERROR: timeout waiting for peers to come up... inst:%s waited:%s minutes tnow:%s" %
                    (instance_full_name, str(time_chk), str(time_now)), fd)
                return ('TimeoutWaitingPeersToComeUp', 0, 0)
            time.sleep(sleeptime_between_introspect_polls)
            waited += 1
    else:
        log_print(
            "ERROR: invalid operation in timing peer bringup/teardown:%s" %
            oper, fd)
        return None
    #
    # Log the time it took for peers to come up (or go down)
    #
    delta_time_str = timedelta_to_string(delta_time)
    log_print(
        "INFO: Elapsed time to %s %s peers:%ss (total peers found:%s)" %
        (oper, num_peers, delta_time_str, peers_up), fd)
    return (delta_time, timestamp_done, timestamp_at_least_one_peer_up)
# end get_time_bringup_or_teardown_peers
def get_time_diffs_seconds(t1, t2, decimal_places):
    """Return |t2 - t1| as float seconds with up to `decimal_places`
    fractional digits; 0 on type errors or when t2 precedes t1.

    Fix: the microseconds field is now zero-padded to six digits before
    truncation. Previously a delta of e.g. 5 microseconds rendered as ".5"
    (half a second) because str(5)[:3] == '5'.
    """
    return_val = 0
    if type(t1) == datetime and type(t2) == datetime:
        #
        # Check date is not in the past
        #
        delta_time = (t2 - t1)
        if delta_time.days < 0:
            log_print("ERROR: time diff results in a past date t1:%s t2:%s" %
                      (t1, t2))
            return 0
        micros = str(abs(delta_time).microseconds).zfill(6)
        return_val = float("%s.%s" % (str(abs(delta_time).seconds),
                                      micros[:decimal_places]))
    else:
        log_print(
            "ERROR: time1 or time2 not type datatime: t1 type:%s t2 type:%s" %
            (type(t1), type(t2)))
    return return_val
# end get_time_diffs_seconds
def timedelta_to_string(delta_time):
    """Convert a timedelta to a float of "seconds.milliseconds"; 0 when the
    argument is not a timedelta.

    Fix: microseconds are zero-padded to six digits before taking the first
    three (milliseconds). Previously e.g. 5000us rendered as ".500" instead
    of ".005".
    """
    return_val = 0
    if type(delta_time) == timedelta:
        micros = str(abs(delta_time).microseconds).zfill(6)
        return_val = float("%s.%s" %
                           (str(abs(delta_time).seconds), micros[:3]))
    else:
        log_print(
            "ERROR: delta_time wrong type, expecting timedelta type(%s) is:%s" %
            (delta_time, type(delta_time)))
    return return_val
# end timedelta_to_string
def get_time_units(num_bgp_peers, oper):
    """Pick the reporting time unit for an operation.

    Deletes are always reported in seconds; adds use microseconds for small
    peer counts (< 2500) and fall back to seconds otherwise.
    """
    if re.search('del', oper, re.IGNORECASE):
        return 'seconds'
    if num_bgp_peers < 2500:
        return 'microseconds'
    # default to seconds
    return 'seconds'
# end get_time_units
def get_kernel_routes_light(self):
    '''Use this commad for scale tests on juniper routers to get kernel routes
    It is much lighter weight than "show route forwarding-table summary.
    Must be run as root.
    '''
    # Run the lightweight counter command and pull the first number out of
    # the ROUTE line.
    output = self.execCmd('ifsmon -Id | grep ROUTE')
    route_count = re.search('\d+', output)
    return route_count.group()
# end get_kernel_routes_light
def get_localhost_ip():
    """Resolve this host's IP via the `resolveip` shell utility."""
    raw = subprocess.check_output(
        'resolveip -s $HOSTNAME', stderr=subprocess.STDOUT, shell=True)
    # Drop the trailing newline emitted by resolveip.
    return raw[:-1]
# end get_localhost_ip
def check_krt_queue_empty(self, oper, rt_prefixes, expected_prefixes, fd):
    '''This is a cli show command on the router. Only use this periodically
    during the test, it is cpu intense and slow.. Only issue when near
    the end of rib install. Otherwise return "WaitChk" without touching
    the router.
    '''
    # Skip the expensive CLI call until the router is within 500 prefixes
    # of the expected total for this operation.
    adding = re.search('add', oper, re.IGNORECASE)
    deleting = re.search('del', oper, re.IGNORECASE)
    if adding and rt_prefixes < (expected_prefixes - 500):
        return "WaitChk"
    if deleting and rt_prefixes > (expected_prefixes + 500):
        return "WaitChk"
    # Query the krt queue; any remaining 'gf' entries mean it is not empty.
    result = self.execCmd('cli -c "show krt queue | match %s | match gf"' % oper)
    if result is None or re.search('gf', result):
        return False
    return True
# end check_krt_queue_empty
def get_peer_states(self, xmpp_src, nagents, instance, pending_updates, fd):
    '''Log the introspect state of nagents sequential xmpp peers, starting
    at address xmpp_src.

    Fix: dropped the never-used `number_peers_up` local.

    self            -- control-node introspect helper
    pending_updates -- opaque value included in each log line
    '''
    #
    # Iterate through the peer address range, logging each peer's state
    #
    ip = IPAddress(xmpp_src)
    for i in range(nagents):
        status, peer_state = self.get_cn_bgp_neighbor_element(
            str(ip), "state")
        log_print(
            "INFO: instance:%s xmpp_peer:%s pending_updates:%s STATE:%s" %
            (instance, str(ip), pending_updates, peer_state), fd)
        ip += 1
    return
# end get_peer_states
def check_peers_up(self, ip_start, num_peers, encoding, oper, print_peer_status, fd):
    '''Count how many of num_peers sequential peers (starting at ip_start)
    are Established with the expected encoding; optionally log peers that
    are not up yet during an add.

    Fix: removed the t1/t2/t3 datetime.now() captures that were never read.

    Returns the number of established, encoding-matched peers.
    '''
    number_peers_up = 0
    ip = IPAddress(ip_start)
    for i in range(num_peers):
        status, peer_state = self.get_cn_bgp_neighbor_element(
            str(ip), "state")
        status, peer_encoding = self.get_cn_bgp_neighbor_element(
            str(ip), "encoding")
        #
        # Check if the peer is up with matching encoding
        #
        if status is True and re.match('Established', peer_state, re.IGNORECASE):
            if re.match(encoding, peer_encoding, re.IGNORECASE):
                number_peers_up += 1
        #
        # Optionally log peers not up yet
        #
        elif oper == 'add':
            if print_peer_status:
                log_print("INFO: Peer:{0} status:{1} state:{2} encoding_param:{3} encoding_found:{4}".format(
                    ip, status, peer_state, encoding, peer_encoding), fd)
        ip += 1
    log_print("INFO: Total peers established: {0} (out ot {1})".format(
        number_peers_up, num_peers), fd)
    return number_peers_up
# end check_peers_up
def check_peer_error(self, ip, fd):
    """Fetch a peer's last_error from introspect; log a warning when one is
    present and return it (None when the peer is clean or not found)."""
    found, last_error = self.get_cn_bgp_neighbor_element(
        str(ip), 'last_error')
    if found is True and last_error:
        # Pull the encoding only when we actually have something to report.
        _, peer_encoding = self.get_cn_bgp_neighbor_element(
            str(ip), 'encoding')
        log_print("WARNING: {0} peer:{1} error notification:{2} peer found?:{3}".format(
            peer_encoding, ip, last_error, found), fd)
    return last_error
# end check_peer_error
def check_peers_for_errors(self, ip_start, num_peers, print_logs, fd):
    '''Walk num_peers sequential peer addresses starting at ip_start and
    count those reporting a last-error notification; log a summary when any
    are found. Returns the error count.
    '''
    errored = 0
    ip = IPAddress(ip_start)
    for i in range(num_peers):
        last_err = check_peer_error(self, ip, fd)
        if last_err is not None:
            errored += 1
            if print_logs:
                log_print("INFO: Peer:{0} err:{1}".format(ip, last_err), fd)
        ip += 1
    # Summarize only when something actually went wrong.
    if errored:
        log_print("INFO: Total peers with errors logged:{0} (out of {1} checked)".format(
            errored, num_peers), fd)
    return errored
# end check_peers_for_errors
def get_shell_cmd_output(self, cmd, fd):
    '''Run a shell command on the node behind ``self`` and return its output.

    Thin wrapper around the connection object's execCmd(); ``fd`` is
    accepted for signature consistency with the other helpers but unused.
    '''
    output = self.execCmd(cmd)
    return output
# end get_shell_cmd_output
def get_rtr_dram_pct_utlization(self, fd):
    '''Fetch the router RE DRAM size and memory utilization percentage.

    Issues two 'show chassis routing-engine' CLI commands and scrapes the
    first integer out of each response.

    Returns:
        tuple: (memory_dram_size, memory_utilization) as ints.
    '''
    size_out = self.execCmd(
        'cli -c "show chassis routing-engine | display xml | match memory-dram-size"')
    util_out = self.execCmd(
        'cli -c "show chassis routing-engine | display xml | match memory-buffer-utilization"')
    dram_size = int(re.search(r'\d+', size_out).group())
    utilization = int(re.search(r'\d+', util_out).group())
    return (dram_size, utilization)
# end get_rtr_dram_pct_utlization
def get_rt_l3vpn_prefixes(self, instance_name, ninstances, nbr_ip, fd):
    '''This is a cli show command on the router.

    Scrapes 'show bgp neighbor' output for active-prefix-count values and
    returns a (l3vpn_prefix_count, instance_prefix_count) tuple.  Due to
    the early-return hack below, both tuple members are currently the
    bgp.l3vpn table count.
    '''
    if not nbr_ip:
        log_print("ERROR: Missing bgp neighbor IP address parameter", fd)
        return (0, 0)
    #
    # Get xml output of show bgp neighbor
    #
    active_prefixes_resp = self.execCmd(
        'cli -c "show bgp neighbor {0} | display xml | grep active-prefix-count"'.format(nbr_ip))
    #
    # Get out if no bgp neigbor present yet
    #
    if not active_prefixes_resp:
        return (0, 0)
    #
    # The first count is from the bgp.l3vpn.inet.0 table
    #
    var = active_prefixes_resp.splitlines()
    l3vpn_prefix_count = int(re.search('\d+', var[0]).group())
    #
    # TODO: this is a hack so that just the bgp.l3vpn table is counted
    #
    # NOTE(review): this early return makes everything below unreachable
    # (dead code); it is retained deliberately per the TODO above.
    return (l3vpn_prefix_count, l3vpn_prefix_count)
    #
    # Get xml output of instance names, note that this does not include the bgp.l3vpn table name
    #
    #cmd = 'cli -c "show bgp neighbor {0} | display xml | grep name | grep {1}"'.format(nbr_ip, instance_name)
    cmd = 'cli -c "show bgp neighbor {0} | display xml | grep name"'.format(nbr_ip)
    names_resp = self.execCmd(cmd)
    #
    # Get out if no instance info present yet
    #
    if not names_resp:
        log_print("WARNING: no bgp neighbor:{0} on router".format(nbr_ip), fd)
        return (0, 0)
    #
    # Iterate over the bgp neighbor instance names, skipping non-instance names
    #
    var_names = names_resp.splitlines()
    instance_prefix_count = 0
    index_prefixes = 1  # Skip first element, it is the bgp.l3vpn count
    for i in range(len(var_names)):
        #
        # "shouldn't" get here..
        #
        if index_prefixes >= len(var):
            break
        #
        # Extract element value
        #("DEBUG: i:%s, instance_prefix_count:%s, index_prefix:%s, var[index_prefix]:%s, var_names[i]:%s" %(i, instance_prefix_count, index_prefix, var[index_prefix], var_names[i]), fd)
        #
        element_val = re.search('\d+', var[index_prefixes])
        index_prefixes += 1
        #
        # Check for non-existant match before adding to tally
        #
        if element_val:
            #
            # Check instance name matches before tallying - TODO, not sure we care..
            #
            instance_prefix_count += int(element_val.group())
    #log_print ("DEBUG: time how long this call takes..".format(instance_prefix_count, fd))
    return (l3vpn_prefix_count, instance_prefix_count)
# end get_rt_l3vpn_prefixes
def get_cn_introspect_elements(self, ninstances, ri_domain, ri_name, family, xmpp_src, oper, nagents, time_chk, fd):
    # Sum route-table counters (prefixes, pending updates, markers and the
    # various path counts) across all routing instances on the control node.
    # Returns a 7-tuple of the aggregated counters.
    cn_prefixes = 0
    cn_pending_updates = 0
    cn_markers = 0
    cn_paths = 0
    cn_primary_paths = 0
    cn_secondary_paths = 0
    cn_infeasible_paths = 0
    status = 0
    peer_state = 0
    for i in range(ninstances):
        #
        # Get full instance name
        #
        instance_name, full_instance_name = get_instance_name(
            ninstances, ri_name, i + 1, ri_domain)
        #
        # Get control node active prefixes for this instance
        #
        #nprefixes = self.get_cn_routing_instance_bgp_active_paths (full_instance_name, family)
        status, nprefixes = self.get_cn_routing_instance_table_element(
            full_instance_name, family, 'prefixes')
        #
        # Get control node pending_updates for this instance
        #
        # paths, primary_paths, secondary_paths and infeasible_paths
        #
        status, pending_updates = self.get_cn_routing_instance_table_element(
            full_instance_name, family, 'pending_updates')
        status, markers = self.get_cn_routing_instance_table_element(
            full_instance_name, family, 'markers')
        status, paths = self.get_cn_routing_instance_table_element(
            full_instance_name, family, 'paths')
        status, primary_paths = self.get_cn_routing_instance_table_element(
            full_instance_name, family, 'primary_paths')
        status, secondary_paths = self.get_cn_routing_instance_table_element(
            full_instance_name, family, 'secondary_paths')
        status, infeasible_paths = self.get_cn_routing_instance_table_element(
            full_instance_name, family, 'infeasible_paths')
        #
        # Get xmpp status if there are pending updates for a long period of time - note that time_chk is in minutes
        #
        if oper == "add" and type(pending_updates) == int and pending_updates > 0 and time_chk > 1:
            get_peer_states(self, xmpp_src, nagents,
                            full_instance_name, pending_updates, fd)
        #
        # Get integer return values, or 0 (log errors)
        #
        cn_prefixes += check_introspect_return_values(nprefixes,
                                                      'prefixes', full_instance_name, fd)
        cn_pending_updates += check_introspect_return_values(pending_updates,
                                                             'pending_updates', full_instance_name, fd)
        cn_markers += check_introspect_return_values(markers,
                                                     'markers', full_instance_name, fd)
        # New:paths, primary_paths, secondary_paths and infeasible_paths
        cn_paths += check_introspect_return_values(paths,
                                                   'paths', full_instance_name, fd)
        cn_primary_paths += check_introspect_return_values(primary_paths,
                                                           'primary_paths', full_instance_name, fd)
        cn_secondary_paths += check_introspect_return_values(secondary_paths,
                                                             'secondary_paths', full_instance_name, fd)
        cn_infeasible_paths += check_introspect_return_values(
            infeasible_paths,
            'infeasible_paths', full_instance_name, fd)
    return (cn_prefixes, cn_pending_updates, cn_markers, cn_paths, cn_primary_paths, cn_secondary_paths, cn_infeasible_paths)
# end get_cn_introspect_elements
def check_introspect_return_values(val, element_name, instance, fd):
    """Coerce an introspect return value to an int for tallying.

    Introspect helpers may hand back an int (the real counter), a string
    (an error indication), or None (element missing).  Anything that is
    not an int counts as 0 so callers can keep summing.

    Args:
        val: value returned by an introspect lookup (int, str or None).
        element_name: name of the element queried (for logging).
        instance: routing-instance name the value came from (for logging).
        fd: log file descriptor (unused while logging is commented out).

    Returns:
        int: ``val`` when it is an int, otherwise 0.
    """
    return_val = 0
    if type(val) == str:
        # Error string from the introspect layer -- tally as 0.
        #log_print ("WARNING: rtn_val:{0} val_ype:{1} while retrieving {2} from instance: {3} chk if cn crashed..".format(val, type(val), element_name, instance), fd)
        return_val = 0
    elif val is None:
        # BUG FIX: the original compared ``type(val) == None``, which is
        # always False (type(None) is NoneType, not None), so None values
        # fell through to the int branch's elif chain untreated.
        #log_print ("WARNING: rtn_val:{0} val_ype:{1} while retrieving {2} from instance: {3} chk if cn crashed..".format(val, type(val), element_name, instance), fd)
        return_val = 0
    elif type(val) == int:
        return_val = val
    return return_val
# end check_introspect_return_values
def check_done_flags(cn_done, rt_done, skip_rtr_check, skip_krt_check, krt_clear, oper, timestamp_done_cn, timestamp_done_rt, fd):
    """Return True when control node, router and KRT checks all report done.

    Completion requires cn_done and rt_done, plus either the KRT check
    being skipped or the KRT queue being clear.  On completion the later
    of the two timestamps is logged as the overall finish time.
    """
    # Guard clause: any outstanding condition means we are not done yet.
    if not (cn_done and rt_done and (skip_krt_check != 0 or krt_clear)):
        return False
    #
    # The entire test is only done when both sides are done, so report
    # the later of the two timestamps.
    #
    timestamp_done = timestamp_done_cn
    if skip_rtr_check != 1 and timestamp_done_rt > timestamp_done_cn:
        timestamp_done = timestamp_done_rt
    log_print("INFO: notable_event finished prefix %s timestamp: %s" %
              (oper, str(timestamp_done)), fd)
    return True
# end check_done_flags
def get_prefix_install_or_delete_time(cn_self, rt_self, cn_ip, rt_ip, ri_domain, ri_name, ninstances, prefixes_per_instance, vpn_prefixes, oper, family, nagents, nroutes, timeout_minutes_poll_prefixes, skip_krt_check, skip_rtr_check, no_verify_routes, xmpp_src, start_time, fd):
    # Poll the control node (and optionally the router) until all expected
    # prefixes are installed (oper 'add') or removed (oper 'del'), logging
    # progress each iteration.  Returns 0 on success (or when verification
    # is skipped) and the string 'GetRouteTimeout' on timeout.
    #
    # Return if no_verify is set
    #
    if no_verify_routes:
        return 0
    #
    # Sometimes there are no prefixes, if so, just end it now..
    #
    if prefixes_per_instance == 0:
        return 0
    #
    # Each instances has the same number of expected prefixes
    #
    total_expected_prefixes = prefixes_per_instance * ninstances
    #
    # Loop until it the number of routes has been reached, or timeout if count does not change for <n> times
    #
    time_chk = 0
    cn_prefixes = 0
    cn_pending_updates = 0
    cn_markers = 0
    rt_prefixes = 0
    rt_vpn_prefixes = 0
    cn_delta_seconds = 0
    rt_delta_seconds = 0
    return_val = 0
    timestamp_done_cn = ''
    timestamp_done_rt = ''
    sleeptime_between_introspect_polls = 5
    #
    # Set expected prefixe count and timeout according to add or delete
    #
    if re.search('add', oper, re.IGNORECASE):
        expected_prefix_count = total_expected_prefixes
        max_time = timeout_minutes_poll_prefixes
    if re.search('del', oper, re.IGNORECASE):
        expected_prefix_count = 0
        max_time = timeout_minutes_poll_prefixes
    #
    # Check routes until either all are installed or all are deleted, depending on the oper
    #
    cn_done = False
    if skip_rtr_check != 0:
        # Router checks skipped: treat the router side as already done.
        rt_done = True
        krt_clear = True
    else:
        rt_done = False
        krt_clear = False
    while True:
        #
        # Iterate throuh the control node instance tables
        #
        if not cn_done:
            cn_prefixes, cn_pending_updates, cn_markers, cn_paths, cn_primary_paths, cn_secondary_paths, cn_infeasible_paths = get_cn_introspect_elements(
                cn_self, ninstances, ri_domain, ri_name, family, xmpp_src, oper, nagents, time_chk, fd)
        #
        # Iterate through the router instance tables
        #
        if not rt_done:
            if rt_self != None:
                rt_vpn_prefixes, rt_prefixes = get_rt_l3vpn_prefixes(
                    rt_self, ri_name, ninstances, cn_ip, fd)
        #
        # Check if control node is done, but only if not already done in previous loop iteration
        #
        if not cn_done:
            cn_done = check_if_done_polling_for_prefixes(
                oper, cn_prefixes, expected_prefix_count, "cn", cn_pending_updates, fd)
            #
            # Get delta times if done - only call this once per test
            #
            if cn_done:
                timestamp_done_cn = datetime.now()
                cn_delta_seconds = get_time_diffs_seconds(
                    start_time, timestamp_done_cn, decimal_places=2)
        #
        # Check if rotuer is done, note it's total is based on the bgp.l3vpn table.
        #
        if (rt_done == False or krt_clear == False):
            rt_done = check_if_done_polling_for_prefixes(
                oper, rt_vpn_prefixes, expected_prefix_count, "rt", cn_pending_updates, fd)
            krt_clear = skip_krt_check != 0 or check_krt_queue_empty(
                rt_self, oper, rt_vpn_prefixes, expected_prefix_count, fd)
            #log_print("DEBUG: prefixes: %s vpn_prefixes %s, expected: %s" %(rt_vpn_prefixes, vpn_prefixes, expected_prefix_count), fd)
            #
            # Get delta times if done - only call this once per test
            #
            if (rt_done == True and krt_clear == True):
                rt_delta_seconds, timestamp_done_rt = get_delta_time(
                    start_time, 'seconds')
        #
        # Timeout if we're spinning..
        #
        time_chk, time_now = get_delta_time(start_time, 'minutes')
        if time_chk >= max_time:
            log_print("ERROR: timeout waiting for route install, total expected prefixes:{0} cn had:{1} cn_pending_updates:{2} rtr had:{3} waited {4} min, expected_prefix_count {5}, rt_done {6}, cn_done {7}, krt_clear {8}".format(
                total_expected_prefixes, cn_prefixes, cn_pending_updates, rt_prefixes, max_time, expected_prefix_count, rt_done, cn_done, krt_clear), fd)
            return_val = 'GetRouteTimeout'
            break
        #
        # Control-node stats for log
        #
        msg1 = "INFO: pfxes:%s pending:%s marker:%s paths:%s primry:%s secondry:%s infeasbl:%s" % (
            cn_prefixes, cn_pending_updates, cn_markers, cn_paths, cn_primary_paths, cn_secondary_paths, cn_infeasible_paths)
        msg_last = "ri:%s op:%s cdone:%s timeout-in:%sm pfx_expected:%s" % (
            ri_name, oper, cn_done, max_time - time_chk, expected_prefix_count)
        #
        # Optionally append router stats for log
        #
        if skip_rtr_check == 0:
            msg1 = "%s rt_vpn:%s rt:%s rdone:%s" % (
                msg1, rt_vpn_prefixes, rt_prefixes, rt_done)
        #
        # Optionally append router krt stats for log
        #
        if skip_krt_check == 0:
            msg1 = "%s krt:%s" % (msg1, krt_clear)
        #
        # Log stats
        #
        log_print("%s %s" % (msg1, msg_last), fd)
        #
        # Check if we reached expected prefix values - use the greater timestamp for
        # overall completion time.
        #
        # Note:
        # - timetamps were already recorded for cn_done (and rt_done if applicable)
        # - this is called after either cn or rt is checked for done status (to keep timings accurate)
        #
        if check_done_flags(cn_done, rt_done, skip_rtr_check, skip_krt_check, krt_clear, oper, timestamp_done_cn, timestamp_done_rt, fd):
            break
        #
        # Wait a bit before continuing
        #
        #log_print ("DEBUG: sleeping for %s seconds inbetween introspect polling for prefix add :%s" % (sleeptime_between_introspect_polls, oper), fd)
        time.sleep(sleeptime_between_introspect_polls)
    # end while loop
    #
    # Unless it's a timeout, report delta time
    #
    if not re.search('timeout', str(return_val), re.IGNORECASE):
        report_delta_times(oper, total_expected_prefixes, cn_delta_seconds,
                           rt_delta_seconds, ninstances, nagents, nroutes, skip_rtr_check, fd)
    return return_val
# end get_prefix_install_or_delete_time
def report_stats(cn_self, rt_self, cnshell_self, cn_ip, rt_ip, msg, report_cpu_only_at_peak_bgp_scale, fd):
    # Log a broad snapshot of CPU, memory, file-descriptor and swap stats
    # for the localhost, the control node and (optionally) the router.
    # Returns (swap1, swap2) booleans for control-node/localhost swapping.
    # NOTE(review): when report_cpu_only_at_peak_bgp_scale is set this
    # returns None (bare return) rather than the usual 2-tuple -- confirm
    # callers tolerate that.
    local_ip = get_localhost_ip()
    pid = os.getpid()
    #
    # Beginning stats info
    #
    log_print(" ", fd)
    log_print(
        "============================ <Begin> {0} ===============================".format(msg), fd)
    #
    # Control Node cpu
    #
    result1 = get_shell_cmd_output(
        cnshell_self, 'cat /proc/stat | grep -i cpu', fd)
    result2 = get_shell_cmd_output(cnshell_self, 'top -b | head -15', fd)
    result3 = get_shell_cmd_output(cnshell_self, 'mpstat -P ALL', fd)
    log_print("ip:{0} Control Node CPU info (brief):".format(cn_ip), fd)
    log_print(
        "ip:{0} cat /proc/stat | grep -i cpu\n{1}".format(cn_ip, result1), fd)
    log_print("ip:{0} top -b | head -15\n{1}".format(cn_ip, result2), fd)
    log_print("ip:{0} mpstat -P ALL\n{1}".format(cn_ip, result3), fd)
    log_print(
        "============================ <End> {0} ===============================".format(msg), fd)
    # TODO - reorganize cpu call
    if report_cpu_only_at_peak_bgp_scale:
        return
    #
    # Localhost ulimit settings
    #
    result1 = subprocess.check_output(
        'ulimit -a', stderr=subprocess.STDOUT, shell=True)
    log_print("ip:{0} Localhost Ulimit Settings:".format(local_ip), fd)
    log_print("ip:{0} ulimit -a \n{1}".format(local_ip, result1), fd)
    #
    # Localhost memory info
    #
    mem_result1 = subprocess.check_output(
        'egrep "Mem|Cache|Swap" /proc/meminfo', stderr=subprocess.STDOUT, shell=True)
    mem_result2 = subprocess.check_output(
        'ps -e -orss=,args= | sort -b -k1,1n | pr -TW195 | sort -rn | head -n 20', stderr=subprocess.STDOUT, shell=True)
    mem_result3 = subprocess.check_output(
        'ps -e -ovsz=,args= | sort -b -k1,1n | pr -TW195 | sort -rn | head -n 20', stderr=subprocess.STDOUT, shell=True)
    mem_result4 = subprocess.check_output(
        'vmstat', stderr=subprocess.STDOUT, shell=True)
    mem_result5 = subprocess.check_output(
        'pmap {0} | grep -i total'.format(pid), stderr=subprocess.STDOUT, shell=True)
    log_print("ip:{0} Localhost Memory:".format(local_ip), fd)
    log_print(
        'ip:{0} egrep "Mem|Cache|Swap" /proc/meminfo\n{1}'.format(local_ip, mem_result1), fd)
    log_print(
        'ip:{0} ps -e -orss=,args= | sort -b -k1,1n \n{1}'.format(local_ip, mem_result2), fd)
    log_print(
        'ip:{0} ps -e -ovsz=,args= | sort -b -k1,1n \n{1}'.format(local_ip, mem_result3), fd)
    log_print('ip:{0} vmstat\n{1}'.format(local_ip, mem_result4), fd)
    log_print(
        'ip:{0} pmap {1} | grep -i total\n{2}'.format(local_ip, pid, mem_result5), fd)
    #
    # Localhost file descriptprs
    #
    result1 = subprocess.check_output(
        'lsof -n | wc -l', stderr=subprocess.STDOUT, shell=True)
    result2 = subprocess.check_output(
        'lsof -n | grep -i tcp | wc -l', stderr=subprocess.STDOUT, shell=True)
    log_print(
        "ip:{0} Localhost File Descriptors (lsof -n):".format(local_ip), fd)
    log_print("ip:{0} Total fds: {1}".format(local_ip, result1), fd)
    log_print("ip:{0} TCP fds: {1}".format(local_ip, result2), fd)
    #
    # Localhost cpu
    #
    result1 = subprocess.check_output(
        'cat /proc/stat | grep -i cpu', stderr=subprocess.STDOUT, shell=True)
    result2 = subprocess.check_output(
        'top -b | head -15', stderr=subprocess.STDOUT, shell=True)
    log_print("ip:{0} Localhost CPU info (brief):".format(local_ip), fd)
    # NOTE(review): the headings below appear swapped relative to the
    # results printed under them (result1 is /proc/stat, result2 is top).
    log_print("ip:{0} top -b | head -15".format(local_ip), fd)
    log_print(result1, fd)
    log_print("ip:{0} cat /proc/stat | grep -i cpu".format(local_ip), fd)
    log_print(result2, fd)
    if re.search('Before', msg, re.IGNORECASE):
        defs = "Field definitions for /proc/stat, in case you remembered to forget: \n- user: normal processes executing in user mode \n- nice: niced processes executing in user mode \n- system: processes executing in kernel mode \n- idle: twiddling thumbs \n- iowait: waiting for I/O to complete \n- irq: servicing interrupts \n- softirq: servicing softirqs \n- steal: involuntary wait \n- guest: running a normal guest \n- guest_nice: running a niced guest\n"
        log_print(defs, fd)
    #
    # Localhost crash info
    #
    #localhost_crash_info = subprocess.check_output('ls -lt /var/crashes; ls -lt /var/crash', stderr=subprocess.STDOUT, shell=True)
    #log_print ("ip:{0} Localhost Crash info:".format(local_ip), fd)
    #log_print ("ip:{0} ls -lt /var/crashes; ls -lt /var/crash\n{1}".format(cn_ip, localhost_crash_info), fd)
    #
    # Get router RE memory usage info
    #
    if rt_self != None:
        total, used = get_rtr_dram_pct_utlization(rt_self, fd)
        log_print('ip:{0} Router Memory:'.format(rt_ip), fd)
        log_print("ip:{0} DRAM: {1}".format(rt_ip, total), fd)
        log_print(
            "ip:{0} Memory utilization {1} percent".format(rt_ip, used), fd)
    #
    # Control node run command, including env variables
    #
    #result1 = cnshell_self.execCmd ('ps e `pidof control-node.optimized`')
    result1 = cnshell_self.execCmd('ps e `pidof control-node`')
    log_print(
        "ip:{0} Control Node ps info with env variables:".format(cn_ip), fd)
    log_print(
        "ip:{0} ps e `pidof control-node`\n{1}".format(cn_ip, result1), fd)
    #
    # Control node ulimit settings
    #
    result1 = get_shell_cmd_output(
        cnshell_self, 'cat /proc/`pidof control-node`/limits', fd)
    log_print("ip:{0} Control Node Ulimit Settings:".format(cn_ip), fd)
    log_print(
        "ip:{0} cat /proc/`pidof control-node`/limits\n{1}".format(cn_ip, result1), fd)
    #
    # Control Node memory info
    #
    mem_result1 = get_shell_cmd_output(
        cnshell_self, 'egrep "Mem|Cache|Swap" /proc/meminfo', fd)
    mem_result2 = get_shell_cmd_output(
        cnshell_self, 'ps -e -orss=,args= | sort -b -k1,1n | pr -TW195 | sort -rn | head -n 20', fd)
    mem_result3 = get_shell_cmd_output(
        cnshell_self, 'ps -e -ovsz=,args= | sort -b -k1,1n | pr -TW195 | sort -rn | head -n 20', fd)
    mem_result4 = get_shell_cmd_output(cnshell_self, 'vmstat', fd)
    mem_result5 = get_shell_cmd_output(
        cnshell_self, 'pmap `pidof control-node` | grep -i total', fd)
    log_print('ip:{0} Control Node Memory:'.format(cn_ip), fd)
    log_print(
        'ip:{0} egrep "Mem|Cache|Swap" /proc/meminfo\n{1}'.format(cn_ip, mem_result1), fd)
    log_print(
        'ip:{0} ps -e -orss=,args= | sort -b -k1,1n \n{1}'.format(cn_ip, mem_result2), fd)
    log_print(
        'ip:{0} ps -e -ovsz=,args= | sort -b -k1,1n \n{1}'.format(cn_ip, mem_result3), fd)
    log_print('ip:{0} vmstat\n{1}'.format(cn_ip, mem_result4), fd)
    log_print(
        'ip:{0} pmap `pidof control-node` | grep -i total\n{1}'.format(cn_ip, mem_result5), fd)
    #
    # Control Node file descriptprs
    #
    result1 = get_shell_cmd_output(cnshell_self, 'lsof -n | wc -l', fd)
    result2 = get_shell_cmd_output(
        cnshell_self, 'lsof -n | grep -i tcp | wc -l', fd)
    # result3 = get_shell_cmd_output (cnshell_self, 'lsof -i | wc -l', fd) #
    # can hang system...
    log_print(
        "ip:{0} Control Node File Descriptors (lsof -n):".format(cn_ip), fd)
    log_print("ip:{0} Total fds: {1}".format(cn_ip, result1), fd)
    log_print("ip:{0} TCP fds: {1}".format(cn_ip, result2), fd)
    result1 = get_shell_cmd_output(cnshell_self, 'netstat -vatn | wc -l', fd)
    log_print(
        'ip:{0} Control Node Open Ports and Established TCP Sessions "netstat -vatn | wc -l"\n{1}'.format(cn_ip, result1), fd)
    #
    # Control Node cpu
    #
    result1 = get_shell_cmd_output(
        cnshell_self, 'cat /proc/stat | grep -i cpu', fd)
    result2 = get_shell_cmd_output(cnshell_self, 'top -b | head -15', fd)
    result3 = get_shell_cmd_output(cnshell_self, 'mpstat -P ALL', fd)
    log_print("ip:{0} Control Node CPU info (brief):".format(cn_ip), fd)
    log_print(
        "ip:{0} cat /proc/stat | grep -i cpu\n{1}".format(cn_ip, result1), fd)
    log_print("ip:{0} top -b | head -15\n{1}".format(cn_ip, result2), fd)
    log_print("ip:{0} mpstat -P ALL\n{1}".format(cn_ip, result3), fd)
    #
    # Control node Crash info
    #
    #cn_crash_info = get_shell_cmd_output (cnshell_self, 'ls -lt /var/crashes; ls -lt /var/crash', fd)
    #log_print ("ip:{0} Localhost Crash info:".format(cn_ip), fd)
    #log_print ("ip:{0} ls -lt /var/crashes; ls -lt /var/crash\n{1}".format(cn_ip, cn_crash_info), fd)
    log_print(
        "============================ {0} <End> =======================".format(msg), fd)
    log_print(" ", fd)
    #
    # Get control node swap info
    #
    result = get_shell_cmd_output(
        cnshell_self, 'egrep "SwapCached" /proc/meminfo', fd)
    swap1 = check_if_swapping(result, "Control node:%s" % cn_ip, fd)
    #
    # Get localhost swap info
    #
    result = subprocess.check_output(
        'egrep "SwapCached" /proc/meminfo', stderr=subprocess.STDOUT, shell=True)
    swap2 = check_if_swapping(
        result, "Localhost:%s running bgp_stress_test code" % local_ip, fd)
    if swap1 or swap2:
        log_print(
            "WARNING: control node swap status:{0}, localhost swap status:{1}".format(swap1, swap2), fd)
    #
    # Check if crashing
    #
    #check_for_crash (localhost_crash_info, "Localhost:%s running bgp_stress_test code" %local_ip, fd)
    #check_for_crash (cn_crash_info, "Control node:%s" %cn_ip, fd)
    return (swap1, swap2)
# end report_stats
def check_if_done_polling_for_prefixes(oper, current_prefixes, expected_prefixes, who, pending_updates, fd):
    """Decide whether prefix polling has reached its target.

    For an 'add' operation we are done once the current count has reached
    the expected count; for a 'del' operation, once it has dropped to (or
    below) it.  The control node ('cn') is additionally held back while
    it still reports pending updates.
    """
    done = False
    if re.search('add', oper, re.IGNORECASE):
        done = current_prefixes >= expected_prefixes
    elif re.search('del', oper, re.IGNORECASE):
        done = current_prefixes <= expected_prefixes
    #
    # Control node is not done while updates are still pending.
    #
    if who == 'cn' and pending_updates:
        done = False
    #log_print ("DEBUG: pending_updates present:%s returning:%s oper:%s who:%s current:%s expected:%s" %(pending_updates, done, oper, who, current_prefixes, expected_prefixes), fd)
    return done
# end check_if_done_polling_for_prefixes
def report_delta_times(oper, expected_prefixes, cn_delta_seconds, rt_delta_seconds, ninstances, nagents, nroutes, skip_rtr_check, fd):
    # Log the elapsed time and derived prefixes/second rate for the control
    # node, plus the router when router checking is enabled.
    #
    # Compute the number of add/delete per second
    #
    cn_ips = get_ops_per_second(int(cn_delta_seconds), expected_prefixes)
    rt_ips = get_ops_per_second(int(rt_delta_seconds), expected_prefixes)
    tunit = 'seconds'
    msg = get_msg_ninst_x_agents_x_nroutes(ninstances, nagents, nroutes)
    if skip_rtr_check != 0:
        # Router skipped: report control-node numbers only.
        log_print(
            "INFO: Elapsed time to {0} {1} prefixes on_control_node:{2}{6} prefixes/{6}:{4} {7}".format(oper,
                expected_prefixes, cn_delta_seconds, rt_delta_seconds, cn_ips, rt_ips, tunit[:1], msg), fd)
    else:
        log_print(
            "INFO: Elapsed time to {0} {1} prefixes on_control_node:{2}{6}, and on_router:{3}{6} prefixes/{6}: ({4} and {5}) {7}".format(oper,
                expected_prefixes, cn_delta_seconds, rt_delta_seconds, cn_ips, rt_ips, tunit[:1], msg), fd)
    return
# end report_delta_times
def get_msg_ninst_x_agents_x_nroutes(ninstances, nagents, nroutes):
    """Build the 'ninst X nagent X nroutes' summary string used in logs."""
    return "ninst X nagent X nroutes = %sx%sx%s" % (ninstances, nagents, nroutes)
# end get_msg_ninst_x_agents_x_nroutes
def get_ops_per_second(delta_seconds, expected_prefixes):
    """Compute prefix installs (or deletes) per second.

    Returns the raw prefix count when the elapsed time is not positive,
    which avoids dividing by zero.
    """
    if delta_seconds <= 0:
        return expected_prefixes
    return expected_prefixes / delta_seconds
# end get_ops_per_second
def get_delta_time(t1, units=''):
    # Return (elapsed-since-t1, now).  The elapsed value is a timedelta
    # when units is empty, an int for 'minutes'/'seconds', or a string
    # "minutes.microseconds" for 'microseconds'.
    #
    # NOTE(review): `.seconds` ignores the timedelta's days component, so
    # deltas over 24h wrap -- confirm `.total_seconds()` is not needed.
    # NOTE(review): an unrecognized non-empty unit leaves return_val
    # unassigned and would raise UnboundLocalError.
    t2 = datetime.now()
    #
    # Use direct time diff
    #
    if not units:
        return_val = (t2 - t1)
    #
    # The has GOT to be a better way..
    #
    if units == 'minutes':
        return_val = int(((t2 - t1).seconds) / 60)
    elif units == 'seconds':
        return_val = (t2 - t1).seconds
    elif units == 'microseconds':
        return_val = "%s.%s" % (
            int(((t2 - t1).seconds) / 60), ((t2 - t1).microseconds))
    return (return_val, t2)
# end get_delta_time
def utc_to_tz(ut, time_zone=''):
    """Convert an introspect UTC timestamp string to a local-time datetime.

    Args:
        ut: timestamp string in '%Y-%b-%d %H:%M:%S.%f' format (the format
            introspect returns).
        time_zone: pytz timezone name; defaults to 'US/Pacific'.

    Returns:
        datetime: naive datetime expressed in ``time_zone``, or the string
        'InvalidDateFormat' when ``ut`` does not parse.
    """
    #
    # Check if we have a valid timestamp..
    #
    try:
        new_utc = datetime.strptime(ut, '%Y-%b-%d %H:%M:%S.%f')
    except ValueError:
        return 'InvalidDateFormat'
    #
    # Default to PST (if no timezone param)
    #
    if not time_zone:
        time_zone = 'US/Pacific'
    #
    # Attach utc timezone to the parsed timestamp
    #
    new_utc = new_utc.replace(tzinfo=pytz.utc)
    #
    # Convert timezone, use format: %Y-%m-%d %H:%M:%S.%f ex: 2013-06-01 18:53:30.308432
    #
    # BUG FIX: the original referenced an undefined name ``d`` here
    # (``d.astimezone(new_tz)``), raising NameError on every call; the
    # parsed timestamp ``new_utc`` is the value to convert.  The redundant
    # second strptime of ``ut`` was also dropped -- the try block above
    # already produced ``new_utc``.
    new_tz = timezone(time_zone)
    new_time = new_tz.normalize(
        new_utc.astimezone(new_tz)).strftime('%Y-%b-%d %H:%M:%S.%f')
    #
    # Convert back to type "datetime"
    #
    new_time = datetime.strptime(new_time, '%Y-%b-%d %H:%M:%S.%f')
    return new_time
# end utc_to_tz
def set_tcp_keepalives(who, fd, self=''):
    ''' NOT USED
    Set tcp values low so that if bgp_stress gets killed, the xmpp session go down
    sysctl -w net.ipv4.tcp_keepalive_time=30 net.ipv4.tcp_keepalive_probes=3 net.ipv4.tcp_keepalive_intvl=3
    old default values:
    net.ipv4.tcp_keepalive_intvl = 75
    net.ipv4.tcp_keepalive_probes = 9
    net.ipv4.tcp_keepalive_time = 7200
    '''
    #
    # Command to set tcp session timeout lower
    #
    # NOTE(review): the first assignment (the kernel defaults) is
    # immediately overwritten by the lowered values; it is kept only as a
    # reference for restoring the defaults.
    cmd = "sysctl -w net.ipv4.tcp_keepalive_intvl=75 net.ipv4.tcp_keepalive_probes=9 net.ipv4.tcp_keepalive_time=7200"
    cmd = "sysctl -w net.ipv4.tcp_keepalive_intvl=3 net.ipv4.tcp_keepalive_probes=3 net.ipv4.tcp_keepalive_time=30"
    if (who == 'localhost'):
        #
        # Instantiate command on localhost
        #
        result = subprocess.check_output(
            cmd, stderr=subprocess.STDOUT, shell=True)
    else:
        #
        # Instantiate command on remote node
        #
        result = get_shell_cmd_output(self, cmd, fd)
    log_print("INFO: Executing command on:%s cmd:%s" % (who, cmd), fd)
    return result
# end set_tcp_keepalives
def main():
    # Entry point stub; resolves the local IP address.
    local_server = get_localhost_ip()


# NOTE(review): this guard compares against '__bgp_scale_mock_agent__'
# rather than the conventional '__main__'; presumably the module is
# executed under a custom run name -- confirm.
if __name__ == '__bgp_scale_mock_agent__':
    try:
        bgp_scale_mock_agent()
    except Exception, msg:
        # print traceback.format_exc()
        # NOTE(review): ``fd`` is not defined at this scope; this handler
        # would raise NameError if reached -- confirm intended log target.
        log_print("WARNING: Hit exception in bgp.py after main..", fd)
    finally:
        # Make sure any spawned bgp_stress_test processes are cleaned up.
        if BGP_STRESS:
            kill_bgp_stress_python_call('bgp_stress_test', 'python', fd)
            BGP_STRESS.stop()
|
ysden123/poc | prometheus/manager/src/main/java/com/stulsoft/poc/prometheus/manager/package-info.java | /**
* Created by <NAME> 17 May 2018
*/
/**
* API for usage the Prometheus metrics
*
* @author <NAME>
*
*/
package com.stulsoft.poc.prometheus.manager; |
alibaba/fast-modeling-language | fastmodel-driver/fastmodel-driver-client/src/test/java/com/aliyun/fastmodel/driver/client/command/sample/SampleCommandFactory.java | package com.aliyun.fastmodel.driver.client.command.sample;
import java.util.Properties;
import com.aliyun.fastmodel.driver.client.command.CommandFactory;
import com.aliyun.fastmodel.driver.client.command.ExecuteCommand;
/**
* Desc:
*
* @author panguanjing
* @date 2022/4/30
*/
public class SampleCommandFactory implements CommandFactory {
    /**
     * Creates the sample command regardless of the requested command type.
     *
     * @param commandType the requested command identifier (ignored)
     * @param properties  driver connection properties (ignored)
     * @return a new {@link SampleExecuteCommand} instance
     */
    @Override
    public ExecuteCommand createStrategy(String commandType, Properties properties) {
        return new SampleExecuteCommand();
    }
}
|
MathiasBerwig/reactnd-project-readable | frontend/src/components/menu/SortPostsDropdown.js | /* eslint-disable react/jsx-filename-extension */
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import { connect } from 'react-redux';
import { Dropdown } from 'semantic-ui-react';
import { handleSavePreferenceOrderPosts as saveOrderByPref } from '../../actions/preferences';
const options = [
{ key: 1, text: 'Biggest Score', value: 'score' },
{ key: 2, text: 'Newest', value: 'newest' },
{ key: 3, text: 'Oldest', value: 'oldest' },
];
class SortPostsDropdown extends Component {
  // Persist the newly selected ordering as a user preference via redux.
  handleChange = (e, { value }) => {
    const { dispatch } = this.props;
    dispatch(saveOrderByPref(value));
  }

  // Render a simple dropdown pre-selected with the current orderBy pref.
  render() {
    const { orderBy } = this.props;
    return (
      <Dropdown
        item
        simple
        value={orderBy}
        options={options}
        text="Order posts by"
        onChange={this.handleChange}
      />
    );
  }
}
SortPostsDropdown.propTypes = {
dispatch: PropTypes.func.isRequired,
orderBy: PropTypes.string,
};
SortPostsDropdown.defaultProps = {
orderBy: 'score',
};
function mapStateToProps({ preferences = {} }) {
const { orderBy } = preferences;
return {
orderBy,
};
}
export default connect(mapStateToProps)(SortPostsDropdown);
|
ravi688/VulkanRenderer | source/renderer/vulkan/vulkan_logical_device.c | <gh_stars>0
#include <renderer/internal/vulkan/vulkan_defines.h>
#include <renderer/internal/vulkan/vulkan_logical_device.h>
#include <renderer/internal/vulkan/vulkan_physical_device.h>
#include <renderer/internal/vulkan/vulkan_result.h>
#include <renderer/assert.h>
#include <renderer/memory_allocator.h>
#ifdef GLOBAL_DEBUG
static void check_pre_condition(vulkan_logical_device_t* device);
#else
# define check_pre_condition(device)
#endif /* GLOBAL_DEBUG */
/* Allocates a vulkan_logical_device_t on the heap, zero-initialized. */
RENDERER_API vulkan_logical_device_t* vulkan_logical_device_new()
{
	vulkan_logical_device_t* device = heap_new(vulkan_logical_device_t);
	memset(device, 0, sizeof(vulkan_logical_device_t));
	return device;
}
/* Creates a VkDevice from the given physical device: deduplicates the
 * requested queue family indices, filters out unsupported extensions,
 * then calls vkCreateDevice.  Returns a heap-allocated logical device
 * wrapper; asserts (via vulkan_result_assert_success) on failure. */
RENDERER_API vulkan_logical_device_t* vulkan_logical_device_create(vulkan_physical_device_t* physical_device, vulkan_logical_device_create_info_t* device_create_info)
{
	assert(device_create_info != NULL);
	vulkan_logical_device_t* device = vulkan_logical_device_new();
	u32* family_indices = device_create_info->queue_family_indices;
	u32 family_index_count = device_create_info->queue_family_index_count;
	/* union (deduplication) of the queue family indices, complexity O(n) == linear
	 * NOTE(review): assumes family_indices != NULL whenever
	 * family_index_count > 0 -- confirm at call sites. */
	u32 max_index = (family_indices == NULL) ? 0 : family_indices[0];
	for(u32 i = 1; i < family_index_count; i++) // find the max index
		if(family_indices[i] > max_index) max_index = family_indices[i];
	u32 look_up[max_index + 1];
	memset(look_up, 0, sizeof(u32) * (max_index + 1)); // initialize the look up table with zeros
	u32 union_result[family_index_count]; // allocate for maximum possible elements in the final result
	u32 queue_family_count = 0;
	for(u32 i = 0; i < family_index_count; i++) // union operation
	{
		if(look_up[family_indices[i]] == 1)
			continue;
		look_up[family_indices[i]] = 1;
		union_result[queue_family_count] = family_indices[i];
		queue_family_count++;
	}
	// TODO: Make priorities configurable
	float priority = 1;
	VkDeviceQueueCreateInfo* queue_create_infos = heap_newv(VkDeviceQueueCreateInfo, queue_family_count);
	memset(queue_create_infos, 0, sizeof(VkDeviceQueueCreateInfo) * queue_family_count);
	for(u32 i = 0; i < queue_family_count; i++)
	{
		queue_create_infos[i].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
		/* BUG FIX: index the deduplicated list (union_result), not the raw
		 * input list; using family_indices[i] here could emit two queue
		 * create infos for the same family when duplicates were supplied,
		 * which vkCreateDevice rejects. */
		queue_create_infos[i].queueFamilyIndex = union_result[i];
		// TODO: Make queue count configurable
		queue_create_infos[i].queueCount = 1;
		queue_create_infos[i].pQueuePriorities = &priority;
	}
	log_msg("Queue count: %u\n", queue_family_count);
	/* Keep only the extensions the physical device actually supports. */
	const char* extensions[device_create_info->extension_count];
	u32 extension_count = 0;
	for(u32 i = 0; i < device_create_info->extension_count; i++)
	{
		if(!vulkan_physical_device_is_extension_supported(physical_device, device_create_info->extensions[i]))
		{
			LOG_WRN("Device extension \"%s\" isn't supported, ignored\n", device_create_info->extensions[i]);
			continue;
		}
		/* BUG FIX: copy the extension currently being examined (index i);
		 * the original read extensions[extension_count], which skips and
		 * duplicates entries once any extension has been rejected. */
		extensions[extension_count] = device_create_info->extensions[i];
		extension_count++;
	}
	VkDeviceCreateInfo create_info =
	{
		.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
		.queueCreateInfoCount = queue_family_count,
		.pQueueCreateInfos = queue_create_infos,
		.enabledExtensionCount = extension_count,
		.ppEnabledExtensionNames = (extension_count == 0) ? NULL : extensions,
		.pEnabledFeatures = device_create_info->features
	};
	VkResult result = vkCreateDevice(physical_device->handle, &create_info, NULL, &device->handle);
	vulkan_result_assert_success(result);
	heap_free(queue_create_infos);
	log_msg("Logical device created successfully\n");
	return device;
}
/* Destroys the underlying VkDevice; the wrapper struct itself is freed
 * separately by vulkan_logical_device_release_resources. */
RENDERER_API void vulkan_logical_device_destroy(vulkan_logical_device_t* device)
{
	vkDestroyDevice(device->handle, NULL);
	log_msg("Logical device destroyed successfully\n");
}
/* Frees the heap memory of the wrapper struct (call after _destroy). */
RENDERER_API void vulkan_logical_device_release_resources(vulkan_logical_device_t* device)
{
	heap_free(device);
}
/* Retrieves the VkQueue handle for (family_index, queue_index) from the
 * logical device.  Asserts on a NULL/uninitialized device in debug builds. */
RENDERER_API VkQueue vulkan_logical_device_get_queue(vulkan_logical_device_t* device, u32 family_index, u32 queue_index)
{
	check_pre_condition(device);
	VkQueue queue;
	vkGetDeviceQueue(device->handle, family_index, queue_index, &queue);
	/* BUG FIX: the original fell off the end of this non-void function
	 * (undefined behaviour); return the fetched queue handle. */
	return queue;
}
#ifdef GLOBAL_DEBUG
/* Debug-only sanity check: the device pointer and its VkDevice handle
 * must both be valid before use. */
static void check_pre_condition(vulkan_logical_device_t* device)
{
	assert(device != NULL);
	assert(device->handle != VK_NULL_HANDLE);
}
#endif /* GLOBAL_DEBUG */
|
damb/seiscomp3 | src/trunk/libs/seiscomp3/seismology/ttt.cpp | <gh_stars>10-100
/***************************************************************************
* Copyright (C) by <NAME> *
* *
* You can redistribute and/or modify this program under the *
* terms of the SeisComP Public License. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* SeisComP Public License for more details. *
***************************************************************************/
#include <seiscomp3/seismology/ttt.h>
#include <seiscomp3/math/geo.h>
#include <seiscomp3/core/interfacefactory.ipp>

#include <memory>
IMPLEMENT_INTERFACE_FACTORY(Seiscomp::TravelTimeTableInterface, SC_SYSTEM_CORE_API);
extern "C" {
#include "f2c.h"
void distaz2_(double *lat1, double *lon1, double *lat2, double *lon2, double *delta, double *azi1, double *azi2);
int elpcor_(const char *phid, real *del, real *z__, real *azi, real *ecolat, real *ecorr, int phid_len);
}
namespace Seiscomp {
bool ellipcorr(const std::string &phase, double lat1, double lon1, double lat2, double lon2, double depth, double &corr)
{
corr = 0.;
double delta, azi1, azi2;
Seiscomp::Math::Geo::delazi(lat1, lon1, lat2, lon2, &delta, &azi1, &azi2);
real staazi=azi1, stadel=delta, zfoc = depth, colat = 90. - lat1, ecorr=0;
if (phase=="P" || phase=="Pn" || phase=="Pg" || phase=="Pb" || phase=="Pdif" || phase=="Pdiff")
elpcor_("P ", &stadel, &zfoc, &staazi, &colat, &ecorr, 8);
else if (phase=="PcP")
elpcor_("PcP ", &stadel, &zfoc, &staazi, &colat, &ecorr, 8);
else if (phase=="PKPab")
elpcor_("PKPab ", &stadel, &zfoc, &staazi, &colat, &ecorr, 8);
else if (phase=="PKPbc")
elpcor_("PKPbc ", &stadel, &zfoc, &staazi, &colat, &ecorr, 8);
else if (phase=="PKPdf")
elpcor_("PKPdf ", &stadel, &zfoc, &staazi, &colat, &ecorr, 8);
else if (phase=="PKiKP")
elpcor_("PKiKP ", &stadel, &zfoc, &staazi, &colat, &ecorr, 8);
else if (phase=="S" || phase=="Sn" || phase=="Sg" || phase=="Sb" || phase=="Sdif" || phase=="Sdiff")
elpcor_("S ", &stadel, &zfoc, &staazi, &colat, &ecorr, 8);
else if (phase=="ScS")
elpcor_("ScS ", &stadel, &zfoc, &staazi, &colat, &ecorr, 8);
else if (phase=="SKSac")
elpcor_("SKSac ", &stadel, &zfoc, &staazi, &colat, &ecorr, 8);
else if (phase=="SKSdf")
elpcor_("SKSdf ", &stadel, &zfoc, &staazi, &colat, &ecorr, 8);
else if (phase=="ScP")
elpcor_("SKP ", &stadel, &zfoc, &staazi, &colat, &ecorr, 8);
else if (phase=="SKP")
elpcor_("ScP ", &stadel, &zfoc, &staazi, &colat, &ecorr, 8);
else return false;
corr = ecorr;
return true;
}
// Default constructor; members keep their class defaults (numeric members
// presumably uninitialized — declaration lives in the header, confirm there).
TravelTime::TravelTime() {}
// Constructs a fully populated travel-time entry in one step.
TravelTime::TravelTime(const std::string &_phase,
                       double _time, double _dtdd, double _dtdh, double _dddp,
                       double _takeoff)
: phase(_phase)
, time(_time)
, dtdd(_dtdd)
, dtdh(_dtdh)
, dddp(_dddp)
, takeoff(_takeoff) {}
// Two entries are equal only if every component matches exactly.
bool TravelTime::operator==(const TravelTime &other) const {
	if ( phase != other.phase ) return false;
	if ( time != other.time ) return false;
	if ( dtdd != other.dtdd ) return false;
	if ( dtdh != other.dtdh ) return false;
	if ( dddp != other.dddp ) return false;
	return takeoff == other.takeoff;
}
// Orders entries lexicographically by (phase, time, dtdd, dtdh, dddp, takeoff).
// Fix: the original AND-combined all member "<" comparisons, which is not a
// strict weak ordering (it is inconsistent with operator== and yields
// incomparable pairs), breaking sorted containers/algorithms that use it.
bool TravelTime::operator<(const TravelTime &other) const {
	if ( phase != other.phase ) return phase < other.phase;
	if ( time != other.time ) return time < other.time;
	if ( dtdd != other.dtdd ) return dtdd < other.dtdd;
	if ( dtdh != other.dtdh ) return dtdh < other.dtdh;
	if ( dddp != other.dddp ) return dddp < other.dddp;
	return takeoff < other.takeoff;
}
namespace {

// Comparison functor used by TravelTimeList::sortByTime: orders entries by
// arrival time only (strict weak ordering on the 'time' member).
struct TTpred {
	bool operator()(const TravelTime &t1, const TravelTime &t2)
	{
		return t1.time < t2.time;
	}
};

}
// Sorts the list in place by ascending arrival time (see TTpred above).
void TravelTimeList::sortByTime() {
	sort(TTpred());
}
// Finds the entry in 'list' matching the requested phase name, accepting
// common aliases depending on the epicentral distance list->delta (degrees):
// e.g. "P" matches Pn/Pb/Pg/Pdiff below 120 deg and PKP branches beyond.
// Returns NULL if nothing matches; the returned pointer refers into 'list'
// and is only valid while the list is alive.
const TravelTime *getPhase(const TravelTimeList *list, const std::string &phase) {
	TravelTimeList::const_iterator it;
	for ( it = list->begin(); it != list->end(); ++it ) {
		// direct match
		if ( (*it).phase == phase ) break;

		// a different leading letter cannot be an alias of this phase;
		// skip to the next list entry
		if ( (*it).phase[0] != phase[0] )
			continue;

		if ( phase == "P" ) {
			if ( list->delta < 120 ) {
				// regional/teleseismic direct-P aliases
				if ( (*it).phase == "Pn" ) break;
				if ( (*it).phase == "Pb" ) break;
				if ( (*it).phase == "Pg" ) break;
				if ( (*it).phase == "Pdiff" ) break;
			}
			else
				// beyond the P shadow zone only core phases remain
				if ( (*it).phase.substr(0,3) == "PKP" ) break;
		}
		else if ( phase == "pP" ) {
			if ( list->delta < 120 ) {
				if ( (*it).phase == "pPn" ) break;
				if ( (*it).phase == "pPb" ) break;
				if ( (*it).phase == "pPg" ) break;
				if ( (*it).phase == "pPdiff") break;
			}
			else {
				if ( (*it).phase.substr(0,4) == "pPKP" ) break;
			}
		}
		else if ( phase == "PKP" ) {
			// accept any of the three PKP branches at core distances
			if ( list->delta > 100 ) {
				if ( (*it).phase == "PKPab" ) break;
				if ( (*it).phase == "PKPbc" ) break;
				if ( (*it).phase == "PKPdf" ) break;
			}
		}
		else if ( phase == "PKKP" ) {
			if ( list->delta > 100 && list->delta < 130 ) {
				if ( (*it).phase == "PKKPab" ) break;
				if ( (*it).phase == "PKKPbc" ) break;
				if ( (*it).phase == "PKKPdf" ) break;
			}
		}
		else if ( phase == "SKP" ) {
			if ( list->delta > 115 && list->delta < 145 ) {
				if ( (*it).phase == "SKPab" ) break;
				if ( (*it).phase == "SKPbc" ) break;
				if ( (*it).phase == "SKPdf" ) break;
			}
		}
		else if ( phase == "PP" ) {
			if ( (*it).phase == "PnPn" ) break;
		}
		else if ( phase == "sP" ) {
			if ( list->delta < 120 ) {
				if ( (*it).phase == "sPn" ) break;
				if ( (*it).phase == "sPb" ) break;
				if ( (*it).phase == "sPg" ) break;
				if ( (*it).phase == "sPdiff") break;
			}
			else {
				if ( (*it).phase.substr(0,4)=="sPKP" ) break;
			}
		}
		else if ( phase == "S" ) {
			// S aliases are accepted at any distance (no delta gate here)
			if ( (*it).phase == "Sn" ) break;
			if ( (*it).phase == "Sb" ) break;
			if ( (*it).phase == "Sg" ) break;
			if ( (*it).phase == "S" ) break;
			if ( (*it).phase == "Sdiff") break;
			if ( (*it).phase.substr(0,3) == "SKS" ) break;
		}
	}

	if ( it == list->end() )
		return NULL;

	return &(*it);
}
// Returns the first P-type arrival in 'ttlist' (the list is assumed already
// time-sorted by the caller — confirm), accepting direct P, its regional
// aliases below 120 deg, or any PKP branch beyond. NULL if none found.
const TravelTime* firstArrivalP(const TravelTimeList *ttlist)
{
	TravelTimeList::const_iterator it;
	for (it = ttlist->begin(); it != ttlist->end(); ++it) {
		// direct match
		if ((*it).phase == "P")
			break;

		// only phases whose name starts with 'P' can qualify
		if ( (*it).phase[0] != 'P' ) continue;

		if ( ttlist->delta < 120 ) {
			if ( (*it).phase == "Pn" ) break;
			if ( (*it).phase == "Pb" ) break;
			if ( (*it).phase == "Pg" ) break;
			if ( (*it).phase == "Pdiff") break;
		}
		else {
			// beyond the shadow zone the first P energy is a core phase
			if ( (*it).phase.substr(0,3)=="PKP" ) break;
		}
	}

	if ( it == ttlist->end() )
		return NULL;

	return &(*it);
}
// Trivial constructor/destructor; concrete backends are created through the
// interface factory (see Create below).
TravelTimeTableInterface::TravelTimeTableInterface() {}
TravelTimeTableInterface::~TravelTimeTableInterface() {}
// Factory entry point: instantiates the backend registered under 'name'
// (e.g. "libtau"); returns NULL if no such backend is registered.
// Ownership of the returned object passes to the caller.
TravelTimeTableInterface *TravelTimeTableInterface::Create(const char *name) {
	return TravelTimeTableInterfaceFactory::Create(name);
}
// Computes the full travel-time list for the given geometry and extracts the
// entry for 'phase' (alias matching via getPhase). Throws NoPhaseError when
// no list or no matching phase is available.
TravelTime TravelTimeTableInterface::compute(const char *phase,
                                             double lat1, double lon1, double dep1,
                                             double lat2, double lon2, double alt2,
                                             int ellc) {
	// A unique_ptr owns the list so it is released on every exit path.
	// (The original carried two manual 'delete ttlist' calls; this is
	// equivalent but leak-safe even if copying the result ever throws.)
	std::unique_ptr<TravelTimeList> ttlist(compute(lat1, lon1, dep1, lat2, lon2, alt2, ellc));
	if ( ttlist == NULL )
		throw NoPhaseError();

	const TravelTime *tt = getPhase(ttlist.get(), phase);
	if ( tt == NULL )
		throw NoPhaseError();

	return *tt;
}
// Shared backend instance: all TravelTimeTable objects delegate to this one
// lazily-created interface (see the constructor below).
TravelTimeTableInterfacePtr TravelTimeTable::_interface;
// Lazily creates the shared backend on first construction; defaults to the
// "libtau" implementation. If the factory fails, _interface stays NULL and
// all delegating methods below fall back to their error paths.
TravelTimeTable::TravelTimeTable() {
	if ( !_interface )
		_interface = TravelTimeTableInterfaceFactory::Create("libtau");
}
// Forwards to the shared backend if one exists, otherwise reports failure.
bool TravelTimeTable::setModel(const std::string &model) {
	return _interface ? _interface->setModel(model) : false;
}
// Returns the backend's current model name, or a static empty string if no
// backend is available (the static keeps the reference valid after return).
const std::string &TravelTimeTable::model() const {
	static std::string empty;
	if ( !_interface )
		return empty;
	return _interface->model();
}
// Delegates the full-list computation to the shared backend;
// NULL when no backend could be created.
TravelTimeList *
TravelTimeTable::compute(double lat1, double lon1, double dep1,
                         double lat2, double lon2, double alt2,
                         int ellc) {
	if ( !_interface )
		return NULL;
	return _interface->compute(lat1, lon1, dep1, lat2, lon2, alt2, ellc);
}
// Delegates the single-phase computation to the shared backend;
// throws NoPhaseError when no backend could be created.
TravelTime
TravelTimeTable::compute(const char *phase,
                         double lat1, double lon1, double dep1,
                         double lat2, double lon2, double alt2,
                         int ellc) {
	if ( !_interface )
		throw NoPhaseError();
	return _interface->compute(phase, lat1, lon1, dep1, lat2, lon2, alt2, ellc);
}
// Delegates the first-arrival computation to the shared backend;
// throws NoPhaseError when no backend could be created.
TravelTime
TravelTimeTable::computeFirst(double lat1, double lon1, double dep1,
                              double lat2, double lon2, double alt2,
                              int ellc) {
	if ( !_interface )
		throw NoPhaseError();
	return _interface->computeFirst(lat1, lon1, dep1, lat2, lon2, alt2, ellc);
}
}
|
Aufree/ting | db/migrate/20141208045147_create_songs.rb | class CreateSongs < ActiveRecord::Migration
def change
create_table :songs do |t|
t.integer :s_id
t.string :title
t.string :artist
t.string :pic
t.text :content
t.integer :user_id
t.integer :comments_count, default: 0
t.timestamps
end
add_index :songs, [:user_id, :created_at]
end
end |
LeidosLabs/holeshot | analytics/log-mapreduce/src/main/java/com/leidoslabs/holeshot/analytics/simulateddata/GenFakeLogs.java | /*
* Licensed to Leidos, Inc. under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional information regarding copyright ownership.
* Leidos, Inc. licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.leidoslabs.holeshot.analytics.simulateddata;
import com.google.gson.JsonObject;
import com.leidoslabs.holeshot.elt.tileserver.TileRef;
import com.leidoslabs.holeshot.elt.tileserver.TileserverImage;
import org.locationtech.jts.geom.Coordinate;
import java.awt.*;
import java.awt.geom.Point2D;
import java.io.*;
import java.net.URL;
import java.util.List;
import java.util.Queue;
import java.util.*;
import java.util.function.Predicate;
import java.util.stream.Collectors;
/**
 * Generates simulated tile-access logs: picks deterministic pseudo-random
 * points of interest inside a bounding box, simulates "browsing" the tiles
 * around each point (neighbours plus the parent pyramid chain), and writes
 * one JSON line per visited tile.
 */
public class GenFakeLogs {

    private static final boolean FIXED_TSIS = false; //Determine images of interest in s3 or use fixed for testing.
    private static final String OUTPUT_DIR = System.getProperty("user.home") + File.separator + "dev"
            + File.separator + "emr-gen-logs";
    // Fixed seed keeps the generated points (and thus logs) reproducible.
    private static final Long seed = 3093854875328L;
    private final String endpoint = "https://tileserver.leidoslabs.com/tileserver";
    private Coordinate nw;  // north-west corner of the area of interest
    private Coordinate se;  // south-east corner of the area of interest
    private List<Coordinate> randCords;

    public GenFakeLogs(Coordinate nw, Coordinate se) {
        this.nw = nw;
        this.se = se;
        this.randCords = kRandom(nw, se, 5);
    }

    /*public void generateLog() {
    List<TileserverImage> validTsis = new ArrayList<TileserverImage>();
    if (!FIXED_TSIS) {
    ListObjectsIterator<String> itemsIt = ListObjectsIterator.listPrefixes("advanced-analytics-geo-tile-images", "");
    Stream<String> items = itemsIt.stream().filter(s -> !s.contains("XVIEWCHALLENGE"));
    Stream<String> imgAndTimestamp = items
    .flatMap(s -> ListObjectsIterator.listPrefixes("advanced-analytics-geo-tile-images", s).stream());
    List<TileserverImage> tsis = imgAndTimestamp
    .map(s -> {
    try {
    return new URL(String.join("/", endpoint, s + "metadata.json"));
    } catch (MalformedURLException e) {
    e.printStackTrace();
    }
    return null;
    }).map(s -> {
    try {
    return new TileserverImage(s);
    } catch (IOException | TypeNotPresentException e) {
    e.printStackTrace();
    }
    return null;
    })
    .collect(Collectors.toList());
    validTsis = tsis.stream().filter(i -> i != null && containedPoints(i).size() != 0).collect(Collectors.toList());
    }
    else {
    validTsis = getFixedTSIS();
    }
    validTsis.forEach(tsi -> browseImageAndSave(tsi));
    }*/

    /** Loads the fixed test image list (one metadata URL per line) from the classpath. */
    public List<TileserverImage> getFixedTSIS(){
        List<TileserverImage> tsis = new ArrayList<TileserverImage>();
        try (InputStream is = getClass().getClassLoader().getResourceAsStream("tsis_shayrat_fixed.txt")){
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(is))){
                String line = "";
                while ((line = reader.readLine()) != null){
                    tsis.add(new TileserverImage(new URL(line)));
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        return tsis;
    }

    /** Simulates browsing one image and writes the visited tiles to a log file. */
    public void browseImageAndSave(TileserverImage tsi) {
        String fName = String.format("fakelog-%s-%s-%d", tsi.getCollectionID(), tsi.getTimestamp(), new Date().getTime());
        String browseText = browseImage(tsi).stream().map(tile -> tileToJSONString(tile))
                .collect(Collectors.joining("\n"));
        try (PrintWriter out = new PrintWriter(String.format("%s%s%s", OUTPUT_DIR, File.separator, fName))){
            out.print(browseText);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * For every point of interest inside the image: visit the containing tile,
     * its 8 neighbours, and every ancestor up the pyramid (zoom-out behaviour).
     */
    public List<TileRef> browseImage(TileserverImage tsi) {
        List<TileRef> browsedTiles = new ArrayList<TileRef>();
        List<Coordinate> validPoints = containedPoints(tsi);
        for (Coordinate point: validPoints) {
            Point2D imagePoint = tsi.getCameraModel().worldToImage(new Coordinate(point.getX(), point.getY(), 0));
            int tileX = (int) (imagePoint.getX() / tsi.getTileWidth()),
                    tileY = (int) (imagePoint.getY() / tsi.getTileHeight());
            TileRef poiTile = new TileRef(tsi, 0, tileX, tileY);
            Queue<TileRef> browsing = new LinkedList<TileRef>();
            browsing.add(poiTile);
            browsing.addAll(adjacentTiles(tsi, poiTile));
            Set<TileRef> visited = new HashSet<TileRef>(browsing);
            while(!browsing.isEmpty()) {
                TileRef curTile = browsing.poll();
                browsedTiles.add(curTile);
                TileRef parent = curTile.getParentTile();
                if (parent != null && !visited.contains(parent)) {
                    browsing.add(parent);
                    visited.add(parent);
                }
            }
        }
        return browsedTiles;
    }

    /** Returns the up-to-8 in-bounds neighbours of {@code center} at its rset. */
    public List<TileRef> adjacentTiles(TileserverImage tsi, TileRef center) {
        int rset = center.getRset();
        Dimension dim = center.getRowsAndColumnsForRset(rset);
        int tx = center.getX(), ty = center.getY();
        List<TileRef> adjs = new ArrayList<TileRef>(
                Arrays.asList(new TileRef(tsi, rset, tx - 1, ty),
                        new TileRef(tsi, rset, tx - 1, ty - 1),
                        new TileRef(tsi, rset, tx, ty - 1),
                        new TileRef(tsi, rset, tx + 1, ty - 1),
                        new TileRef(tsi, rset, tx + 1, ty),
                        new TileRef(tsi, rset, tx + 1, ty + 1),
                        new TileRef(tsi, rset, tx, ty + 1),
                        new TileRef(tsi, rset, tx - 1, ty + 1)));
        Predicate<TileRef> inBounds = tf -> tf.getX() < dim.getWidth()
                && tf.getX() > 0
                && tf.getY() < dim.getHeight()
                && tf.getY() > 0;
        return adjs.stream().filter(inBounds).collect(Collectors.toList());
    }

    /** k deterministic pseudo-random (lon, lat) points inside the bounding box. */
    public List<Coordinate> kRandom(Coordinate tl, Coordinate br, int k) {
        ArrayList<Coordinate> result = new ArrayList<Coordinate>();
        Random rand1 = new Random(seed), rand2 = new Random(seed / 81 * 21);
        Iterator<Double> d1 = rand1.doubles(k).iterator(), d2 = rand2.doubles(k).iterator();
        while (d1.hasNext() && d2.hasNext()) {
            double lon = (d1.next() * (this.se.getX() - this.nw.getX())) + this.nw.getX();
            double lat = (d2.next() * (this.se.getY() - this.nw.getY())) + this.nw.getY();
            result.add(new Coordinate(lon, lat));
        }
        return result;
    }

    /** Points of interest that fall inside the image's bounding box. */
    public List<Coordinate> containedPoints(TileserverImage tsi) {
        double[] bounds = tsi.getTilePyramidDescriptor().getBoundingBox();
        Predicate<Coordinate> inBounds = c -> c.getX() >= bounds[0]
                && c.getX() <= bounds[2]
                && c.getY() >= bounds[1]
                && c.getY() <= bounds[3];
        return this.randCords.stream().filter(inBounds).collect(Collectors.toList());
    }

    /** Serializes one visited tile as a single JSON log line. */
    public static String tileToJSONString(TileRef tile) {
        JsonObject jo = new JsonObject();
        jo.addProperty("x", tile.getX());
        // Fix: "y" and "rSet" previously serialized tile.getX() (copy-paste bug).
        jo.addProperty("y", tile.getY());
        // FIXME(review): "imageID" also serialized tile.getX() in the original;
        // the correct accessor (e.g. the tile's collection id) is not visible
        // in this file — confirm TileRef's API and fix.
        jo.addProperty("imageID", tile.getX());
        jo.addProperty("rSet", tile.getRset());
        return jo.toString();
    }

    public static void main(String[] args) {
        // Rough Shayrat bounding box
        //new GenFakeLogs(new Coordinate(36.939168, 34.498387), new Coordinate(36.954446, 34.488855)).generateLog();
    }
}
|
enricodg/leaf-utilities | database/sql/integrations/gorm/postgresql/connection.go | <filename>database/sql/integrations/gorm/postgresql/connection.go
package leafGormPostgreSql
import (
"fmt"
"strings"
)
// DbConnection holds the parameters needed to build a PostgreSQL connection URI.
type DbConnection struct {
	Address  []string // host[:port] entries; joined with "," in URI()
	Username string
	Password string
	DbName   string
}
// URI renders the connection parameters as a postgres:// DSN with
// sslmode=disable, e.g. postgres://user:pw@host1,host2/dbname?sslmode=disable.
func (db DbConnection) URI() string {
	hostList := strings.Join(db.Address, ",")
	return fmt.Sprintf("postgres://%s:%s@%s/%s?sslmode=disable",
		db.Username, db.Password, hostList, db.DbName)
}
|
GeSup/Hands-on-JavaScript-for-Python-Developers | chapter-9/error-object/index-mitigated.js | const typoError = () => {
try {
cnosole.error('my fault')
} catch(e) {
console.error(e)
}
}
// Demo: fetching a nonexistent SWAPI endpoint to exercise promise error
// handling; failures propagate to the final .catch, which rethrows.
// NOTE(review): the try/catch only covers a synchronous throw from
// response.json(); a rejected promise from it would bypass the catch and
// land in .catch() instead — presumably intentional for the demo.
const fetchAttempt = () => {
  fetch("https://swapi.dev/api/undefined")
    .then((response) => {
      try {
        return response.json()
      } catch (e) {
        return response.error()
      }
    }).then((data) => {
      console.log(data)
    }).catch((error) => {
      throw new Error(error)
    })
}
// Run both demos on load.
typoError()
fetchAttempt()
|
mohad12211/skia | docs/examples/Bitmap_allocPixelsFlags.cpp | // Copyright 2019 Google LLC.
// Use of this source code is governed by a BSD-style license that can be found in the LICENSE file.
#include "tools/fiddle/examples.h"
// HASH=737e721c7d9e0f367d25521a1b823b9d
REG_FIDDLE(Bitmap_allocPixelsFlags, 256, 128, false, 0) {
// Demonstrates SkBitmap::allocPixelsFlags with kZeroPixels_AllocFlag:
// allocate a small zero-initialized N32 bitmap, draw text into it
// offscreen, then blit it twice at 6x scale.
void draw(SkCanvas* canvas) {
    SkBitmap bitmap;
    bitmap.allocPixelsFlags(SkImageInfo::MakeN32(44, 16, kPremul_SkAlphaType),
                            SkBitmap::kZeroPixels_AllocFlag);
    SkCanvas offscreen(bitmap);
    SkPaint paint;
    SkFont font;
    offscreen.drawString("!@#$%", 0, 12, font, paint);
    canvas->scale(6, 6);
    canvas->drawBitmap(bitmap, 0, 0);
    canvas->drawBitmap(bitmap, 8, 8);
}
}  // END FIDDLE
|
marmolak/gray386linux | src/linux-3.7.10/drivers/i2c/i2c-mux.c | <filename>src/linux-3.7.10/drivers/i2c/i2c-mux.c
/*
* Multiplexed I2C bus driver.
*
* Copyright (c) 2008-2009 <NAME> <<EMAIL>>
* Copyright (c) 2008-2009 Eurotech S.p.A. <<EMAIL>>
* Copyright (c) 2009-2010 NSN GmbH & Co KG <<EMAIL>>
*
* Simplifies access to complex multiplexed I2C bus topologies, by presenting
* each multiplexed bus segment as an additional I2C adapter.
* Supports multi-level mux'ing (mux behind a mux).
*
* Based on:
* i2c-virt.c from <NAME> <<EMAIL>>
* i2c-virtual.c from <NAME>, Copyright (c) 2004 Google, Inc.
* i2c-virtual.c from <NAME> <<EMAIL>>
*
* This file is licensed under the terms of the GNU General Public
* License version 2. This program is licensed "as is" without any
* warranty of any kind, whether express or implied.
*/
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/i2c.h>
#include <linux/i2c-mux.h>
#include <linux/of.h>
#include <linux/of_i2c.h>
/* multiplexer per channel data */
struct i2c_mux_priv {
	struct i2c_adapter adap;	/* child adapter exposed for this channel */
	struct i2c_algorithm algo;	/* per-channel algo wrapping the parent's */

	struct i2c_adapter *parent;	/* adapter the mux chip sits on */
	void *mux_priv;		/* the mux chip/device */
	u32 chan_id;		/* the channel id */

	/* callbacks that route the parent bus to/from this channel */
	int (*select)(struct i2c_adapter *, void *mux_priv, u32 chan_id);
	int (*deselect)(struct i2c_adapter *, void *mux_priv, u32 chan_id);
};
/* I2C transfer op for one mux channel: select the channel on the parent,
 * run the transfer on the parent adapter, then deselect (if provided).
 * Deselect runs even when select/transfer failed, restoring the mux. */
static int i2c_mux_master_xfer(struct i2c_adapter *adap,
			       struct i2c_msg msgs[], int num)
{
	struct i2c_mux_priv *priv = adap->algo_data;
	struct i2c_adapter *parent = priv->parent;
	int ret;

	/* Switch to the right mux port and perform the transfer. */

	ret = priv->select(parent, priv->mux_priv, priv->chan_id);
	if (ret >= 0)
		ret = parent->algo->master_xfer(parent, msgs, num);
	if (priv->deselect)
		priv->deselect(parent, priv->mux_priv, priv->chan_id);

	return ret;
}
/* SMBus counterpart of i2c_mux_master_xfer: select channel, forward the
 * SMBus transaction to the parent, then deselect (if provided). */
static int i2c_mux_smbus_xfer(struct i2c_adapter *adap,
			      u16 addr, unsigned short flags,
			      char read_write, u8 command,
			      int size, union i2c_smbus_data *data)
{
	struct i2c_mux_priv *priv = adap->algo_data;
	struct i2c_adapter *parent = priv->parent;
	int ret;

	/* Select the right mux port and perform the transfer. */

	ret = priv->select(parent, priv->mux_priv, priv->chan_id);
	if (ret >= 0)
		ret = parent->algo->smbus_xfer(parent, addr, flags,
					read_write, command, size, data);
	if (priv->deselect)
		priv->deselect(parent, priv->mux_priv, priv->chan_id);

	return ret;
}
/* A mux channel supports exactly what its parent adapter supports. */
static u32 i2c_mux_functionality(struct i2c_adapter *adap)
{
	struct i2c_mux_priv *priv = adap->algo_data;

	return priv->parent->algo->functionality(priv->parent);
}
/* Return all parent classes, merged.
 * Walks up the chain of mux adapters (i2c_parent_is_i2c_adapter returns
 * NULL at the root) OR-ing each ancestor's class bits together; used to
 * reject child classes that would duplicate device probing. */
static unsigned int i2c_mux_parent_classes(struct i2c_adapter *parent)
{
	unsigned int class = 0;

	do {
		class |= parent->class;
		parent = i2c_parent_is_i2c_adapter(parent);
	} while (parent);

	return class;
}
/*
 * i2c_add_mux_adapter - register one mux channel as a new I2C adapter
 * sitting behind @parent.
 * @mux_dev: device of the mux chip (used to populate of_node children)
 * @mux_priv: opaque cookie passed back to @select/@deselect
 * @force_nr: if non-zero, request this fixed adapter number
 * @chan_id: channel id handed to @select/@deselect
 * @class: adapter class for the new segment (rejected if an ancestor
 *         already carries any of the same bits)
 *
 * Returns the new adapter, or NULL on allocation/registration failure.
 */
struct i2c_adapter *i2c_add_mux_adapter(struct i2c_adapter *parent,
				struct device *mux_dev,
				void *mux_priv, u32 force_nr, u32 chan_id,
				unsigned int class,
				int (*select) (struct i2c_adapter *,
					       void *, u32),
				int (*deselect) (struct i2c_adapter *,
						 void *, u32))
{
	struct i2c_mux_priv *priv;
	int ret;

	priv = kzalloc(sizeof(struct i2c_mux_priv), GFP_KERNEL);
	if (!priv)
		return NULL;

	/* Set up private adapter data */
	priv->parent = parent;
	priv->mux_priv = mux_priv;
	priv->chan_id = chan_id;
	priv->select = select;
	priv->deselect = deselect;

	/* Need to do algo dynamically because we don't know ahead
	 * of time what sort of physical adapter we'll be dealing with.
	 */
	if (parent->algo->master_xfer)
		priv->algo.master_xfer = i2c_mux_master_xfer;
	if (parent->algo->smbus_xfer)
		priv->algo.smbus_xfer = i2c_mux_smbus_xfer;
	priv->algo.functionality = i2c_mux_functionality;

	/* Now fill out new adapter structure */
	snprintf(priv->adap.name, sizeof(priv->adap.name),
		 "i2c-%d-mux (chan_id %d)", i2c_adapter_id(parent), chan_id);
	priv->adap.owner = THIS_MODULE;
	priv->adap.algo = &priv->algo;
	priv->adap.algo_data = priv;
	priv->adap.dev.parent = &parent->dev;

	/* Sanity check on class */
	if (i2c_mux_parent_classes(parent) & class)
		dev_err(&parent->dev,
			"Segment %d behind mux can't share classes with ancestors\n",
			chan_id);
	else
		priv->adap.class = class;

	/*
	 * Try to populate the mux adapter's of_node, expands to
	 * nothing if !CONFIG_OF.
	 */
	if (mux_dev->of_node) {
		struct device_node *child;
		u32 reg;

		for_each_child_of_node(mux_dev->of_node, child) {
			/* Fix: "&reg" had been corrupted into the single
			 * character '®' (HTML-entity mangling), which does
			 * not compile; restore the address-of expression. */
			ret = of_property_read_u32(child, "reg", &reg);
			if (ret)
				continue;
			if (chan_id == reg) {
				priv->adap.dev.of_node = child;
				break;
			}
		}
	}

	if (force_nr) {
		priv->adap.nr = force_nr;
		ret = i2c_add_numbered_adapter(&priv->adap);
	} else {
		ret = i2c_add_adapter(&priv->adap);
	}
	if (ret < 0) {
		dev_err(&parent->dev,
			"failed to add mux-adapter (error=%d)\n",
			ret);
		kfree(priv);
		return NULL;
	}

	dev_info(&parent->dev, "Added multiplexed i2c bus %d\n",
		 i2c_adapter_id(&priv->adap));

	of_i2c_register_devices(&priv->adap);

	return &priv->adap;
}
EXPORT_SYMBOL_GPL(i2c_add_mux_adapter);
/* Unregisters a mux channel adapter created by i2c_add_mux_adapter and
 * frees its private data. Returns a negative errno if the adapter could
 * not be removed (in which case nothing is freed). */
int i2c_del_mux_adapter(struct i2c_adapter *adap)
{
	struct i2c_mux_priv *priv = adap->algo_data;
	int ret;

	ret = i2c_del_adapter(adap);
	if (ret < 0)
		return ret;
	kfree(priv);

	return 0;
}
EXPORT_SYMBOL_GPL(i2c_del_mux_adapter);
MODULE_AUTHOR("<NAME> <<EMAIL>>");
MODULE_DESCRIPTION("I2C driver for multiplexed I2C busses");
MODULE_LICENSE("GPL v2");
|
PrakharPipersania/LeetCode-Solutions | Questions Level-Wise/Easy/sum-of-digits-in-base-k.cpp | class Solution {
public:
int sumBase(int n, int k)
{
int sum=0;
while(n)
{
sum+=n%k;
n/=k;
}
return sum;
}
}; |
akaifi/bettermeans | app/helpers/attachments_helper.rb | # BetterMeans - Work 2.0
# Copyright (C) 2006-2011 See readme for details and license#
module AttachmentsHelper
  # Displays view/delete links to the attachments of the given object
  # Options:
  #   :author -- author names are not displayed if set to false
  def link_to_attachments(container, options = {})
    options.assert_valid_keys(:author)
    if container.attachments.any?
      options = {:deletable => container.attachments_deletable?, :author => true}.merge(options)
      render :partial => 'attachments/links', :locals => {:attachments => container.attachments, :options => options}
    end
  end

  # Same as link_to_attachments, but renders the attachments with the
  # attachments/table partial (tabular layout). Accepts the same options.
  def link_to_attachments_table(container, options = {})
    options.assert_valid_keys(:author)
    if container.attachments.any?
      options = {:deletable => container.attachments_deletable?, :author => true}.merge(options)
      render :partial => 'attachments/table', :locals => {:attachments => container.attachments, :options => options}
    end
  end

  # No-op pass-through; presumably a legacy encoding shim kept so existing
  # callers of to_utf8 keep working — confirm before removing.
  def to_utf8(str)
    str
  end
end
|
othuntgithub/AdditionalEnderItems | src/main/java/com/hidoni/additionalenderitems/network/Networking.java | package com.hidoni.additionalenderitems.network;
import com.hidoni.additionalenderitems.AdditionalEnderItems;
import net.minecraft.util.ResourceLocation;
import net.minecraft.world.chunk.Chunk;
import net.minecraftforge.fml.network.NetworkRegistry;
import net.minecraftforge.fml.network.PacketDistributor;
import net.minecraftforge.fml.network.simple.SimpleChannel;
/**
 * Central registry for the mod's network packets on a single SimpleChannel.
 */
public class Networking
{
    private static SimpleChannel INSTANCE;          // the mod's one network channel
    private static int ID;                          // next free packet discriminator
    private static final String PROTOCOL_VERSION = "1.0";

    // Hands out sequential packet ids starting at 0.
    private static int nextID()
    {
        return ID++;
    }

    /**
     * Creates the channel and registers every packet type (encoder, decoder,
     * handler). Both client and server must accept exactly PROTOCOL_VERSION.
     * Call once during mod setup, before any packet is sent.
     */
    public static void registerMessages()
    {
        INSTANCE = NetworkRegistry.newSimpleChannel(new ResourceLocation(AdditionalEnderItems.MOD_ID, "main"), () -> PROTOCOL_VERSION, PROTOCOL_VERSION::equals, PROTOCOL_VERSION::equals);
        INSTANCE.messageBuilder(PacketStopJukebox.class, nextID()).encoder(PacketStopJukebox::toBytes).decoder(PacketStopJukebox::new).consumer(PacketStopJukebox::handle).add();
        INSTANCE.messageBuilder(PacketStartJukebox.class, nextID()).encoder(PacketStartJukebox::toBytes).decoder(PacketStartJukebox::new).consumer(PacketStartJukebox::handle).add();
    }

    /** Broadcasts a packet to every connected client. */
    public static void sendAll(Object packet)
    {
        INSTANCE.send(PacketDistributor.ALL.noArg(), packet);
    }

    /** Sends a packet to every client currently tracking the given chunk. */
    public static void sendChunkTrackers(Object packet, Chunk chunkIn)
    {
        INSTANCE.send(PacketDistributor.TRACKING_CHUNK.with(() -> chunkIn), packet);
    }
}
|
holylovenia/undepress-mobile | app/src/main/java/com/hulahoop/mentalhealth/undepress/ResultActivity.java | <reponame>holylovenia/undepress-mobile<filename>app/src/main/java/com/hulahoop/mentalhealth/undepress/ResultActivity.java
package com.hulahoop.mentalhealth.undepress;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import com.hulahoop.mentalhealth.undepress.models.Symptoms;
import java.util.ArrayList;
/**
 * Shows the questionnaire result: how many of the 9 depression symptoms were
 * reported, lists them, and renders a "have"/"do not have" verdict.
 */
public class ResultActivity extends AppCompatActivity {

    private Button getBetterButton;
    private Symptoms symptoms;
    private TextView welcomeText;
    private TextView depressionVerdictText;
    private TextView numberOfDepressionSymptomsText;
    private TextView symptomsDetails;
    private SharedPreferences mPreferences;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_result);
        mPreferences = getSharedPreferences("authorization", MODE_PRIVATE);
        welcomeText = findViewById(R.id.welcome);
        welcomeText.setText("Welcome, " + mPreferences.getString("current_user_name", "Dear"));
        depressionVerdictText = findViewById(R.id.depression_verdict);
        numberOfDepressionSymptomsText = findViewById(R.id.number_of_symptoms);
        symptomsDetails = findViewById(R.id.symptoms_details);
        symptoms = (Symptoms) getIntent().getSerializableExtra("symptoms");
        ActionBar actionBar = getSupportActionBar();
        if (actionBar != null) {
            actionBar.hide();
        }
        if (symptoms.getSum().size() > 0) {
            // Fix: the original tested getSum().get(0) twice (copy-paste bug),
            // so the second "required" symptom was never examined. Check the
            // first two entries, guarding against lists shorter than two.
            // NOTE(review): this requires BOTH of the first two symptoms, as
            // the original's || of two (identical) == 0 tests implied; if the
            // intent is "at least one of the first two", relax to an OR of
            // the presence checks — confirm with the questionnaire rules.
            boolean requiredRulesFulfilled = symptoms.getSum().size() >= 2
                    && symptoms.getSum().get(0) != 0
                    && symptoms.getSum().get(1) != 0;
            // Count the reported symptoms and build the numbered detail list.
            int sum = 0;
            StringBuilder details = new StringBuilder();
            for (int i = 0; i < symptoms.getSum().size(); i++) {
                if (symptoms.getSum().get(i) == 1) {
                    sum++;
                    details.append(sum).append(". ").append(symptoms.getSymptomDescription().get(i)).append("\n");
                }
            }
            if (sum == 0) {
                details.append("You have no symptoms");
            }
            symptomsDetails.setText(details.toString().trim());
            numberOfDepressionSymptomsText.setText(sum + " of 9");
            // Verdict: required symptoms present AND at least 5 of 9 overall.
            if (requiredRulesFulfilled && sum >= 5) {
                depressionVerdictText.setText("have");
            } else {
                depressionVerdictText.setText("do not have");
            }
        } else {
            // No questionnaire data — go back to the main screen.
            Intent intent = new Intent(this, MainActivity.class);
            startActivity(intent);
            finish();
        }
        getBetterButton = findViewById(R.id.get_better_button);
        getBetterButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Jump straight to the "get better" tab of the main screen.
                Intent intent = new Intent(v.getContext(), MainActivity.class);
                intent.putExtra("getBetter", 2);
                startActivity(intent);
                finish();
            }
        });
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.