repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
lookfwd/thornode
x/thorchain/types/type_event.go
package types

import (
	"fmt"
	"strconv"

	"gitlab.com/thorchain/thornode/common"
	"gitlab.com/thorchain/thornode/common/cosmos"
)

// all event types supported by THORChain
const (
	SwapEventType     = `swap`
	StakeEventType    = `stake`
	UnstakeEventType  = `unstake`
	AddEventType      = `add`
	PoolEventType     = `pool`
	RewardEventType   = `rewards`
	RefundEventType   = `refund`
	BondEventType     = `bond`
	GasEventType      = `gas`
	ReserveEventType  = `reserve`
	SlashEventType    = `slash`
	ErrataEventType   = `errata`
	FeeEventType      = `fee`
	OutboundEventType = `outbound`
)

// PoolMod describes one modification applied to a pool's balances.
type PoolMod struct {
	Asset    common.Asset `json:"asset"`
	RuneAmt  cosmos.Uint  `json:"rune_amt"`
	RuneAdd  bool         `json:"rune_add"`
	AssetAmt cosmos.Uint  `json:"asset_amt"`
	AssetAdd bool         `json:"asset_add"`
}

// PoolMods is a list of pool modifications.
type PoolMods []PoolMod

// NewPoolMod create a new instance of PoolMod.
func NewPoolMod(asset common.Asset, runeAmt cosmos.Uint, runeAdd bool, assetAmt cosmos.Uint, assetAdd bool) PoolMod {
	return PoolMod{
		Asset:    asset,
		RuneAmt:  runeAmt,
		RuneAdd:  runeAdd,
		AssetAmt: assetAmt,
		AssetAdd: assetAdd,
	}
}

// EventSwap event for swap action.
type EventSwap struct {
	Pool               common.Asset `json:"pool"`
	PriceTarget        cosmos.Uint  `json:"price_target"`
	TradeSlip          cosmos.Uint  `json:"trade_slip"`
	LiquidityFee       cosmos.Uint  `json:"liquidity_fee"`
	LiquidityFeeInRune cosmos.Uint  `json:"liquidity_fee_in_rune"`
	InTx               common.Tx    `json:"in_tx"` // this is the Tx that cause the swap to happen, if it is a double swap , then the txid will be blank
	OutTxs             common.Tx    `json:"out_txs"`
}

// NewEventSwap create a new swap event.
// Note: OutTxs is intentionally left zero here; it is filled in later.
func NewEventSwap(pool common.Asset, priceTarget, fee, tradeSlip, liquidityFeeInRune cosmos.Uint, inTx common.Tx) EventSwap {
	return EventSwap{
		Pool:               pool,
		PriceTarget:        priceTarget,
		TradeSlip:          tradeSlip,
		LiquidityFee:       fee,
		LiquidityFeeInRune: liquidityFeeInRune,
		InTx:               inTx,
	}
}

// Type return a string that represent the type, it should not duplicated with other event.
func (e EventSwap) Type() string {
	return SwapEventType
}

// Events convert EventSwap to key value pairs used in cosmos.
func (e EventSwap) Events() (cosmos.Events, error) {
	evt := cosmos.NewEvent(e.Type(),
		cosmos.NewAttribute("pool", e.Pool.String()),
		cosmos.NewAttribute("price_target", e.PriceTarget.String()),
		cosmos.NewAttribute("trade_slip", e.TradeSlip.String()),
		cosmos.NewAttribute("liquidity_fee", e.LiquidityFee.String()),
		cosmos.NewAttribute("liquidity_fee_in_rune", e.LiquidityFeeInRune.String()),
	)
	evt = evt.AppendAttributes(e.InTx.ToAttributes()...)
	return cosmos.Events{evt}, nil
}

// EventStake stake event.
type EventStake struct {
	Pool        common.Asset   `json:"pool"`
	StakeUnits  cosmos.Uint    `json:"stake_units"`
	RuneAddress common.Address `json:"rune_address"`
	RuneAmount  cosmos.Uint    `json:"rune_amount"`
	AssetAmount cosmos.Uint    `json:"asset_amount"`
	RuneTxID    common.TxID    `json:"rune_tx_id"`
	AssetTxID   common.TxID    `json:"asset_tx_id"`
}

// NewEventStake create a new stake event.
func NewEventStake(pool common.Asset, su cosmos.Uint, runeAddress common.Address, runeAmount, assetAmount cosmos.Uint, runeTxID, assetTxID common.TxID) EventStake {
	return EventStake{
		Pool:        pool,
		StakeUnits:  su,
		RuneAddress: runeAddress,
		RuneAmount:  runeAmount,
		AssetAmount: assetAmount,
		RuneTxID:    runeTxID,
		AssetTxID:   assetTxID,
	}
}

// Type return the event type.
func (e EventStake) Type() string {
	return StakeEventType
}

// Events return cosmos.Events which is cosmos.Attribute(key value pairs).
func (e EventStake) Events() (cosmos.Events, error) {
	evt := cosmos.NewEvent(e.Type(),
		cosmos.NewAttribute("pool", e.Pool.String()),
		cosmos.NewAttribute("stake_units", e.StakeUnits.String()),
		cosmos.NewAttribute("rune_address", e.RuneAddress.String()),
		cosmos.NewAttribute("rune_amount", e.RuneAmount.String()),
		cosmos.NewAttribute("asset_amount", e.AssetAmount.String()),
	)
	// Only emit a separate RUNE-chain txid when the stake arrived via two
	// different transactions; a symmetric single-tx stake would duplicate it.
	if !e.RuneTxID.Equals(e.AssetTxID) {
		evt = evt.AppendAttributes(cosmos.NewAttribute(fmt.Sprintf("%s_txid", common.RuneAsset().Chain), e.RuneTxID.String()))
	}
	evt = evt.AppendAttributes(cosmos.NewAttribute(fmt.Sprintf("%s_txid", e.Pool.Chain), e.AssetTxID.String()))
	return cosmos.Events{
		evt,
	}, nil
}

// EventUnstake represent unstake.
type EventUnstake struct {
	Pool        common.Asset `json:"pool"`
	StakeUnits  cosmos.Uint  `json:"stake_units"`
	BasisPoints int64        `json:"basis_points"` // 1 ==> 10,0000
	Asymmetry   cosmos.Dec   `json:"asymmetry"`    // -1.0 <==> 1.0
	InTx        common.Tx    `json:"in_tx"`
}

// NewEventUnstake create a new unstake event.
func NewEventUnstake(pool common.Asset, su cosmos.Uint, basisPts int64, asym cosmos.Dec, inTx common.Tx) EventUnstake {
	return EventUnstake{
		Pool:        pool,
		StakeUnits:  su,
		BasisPoints: basisPts,
		Asymmetry:   asym,
		InTx:        inTx,
	}
}

// Type return the unstake event type.
func (e EventUnstake) Type() string {
	return UnstakeEventType
}

// Events return the cosmos event.
func (e EventUnstake) Events() (cosmos.Events, error) {
	evt := cosmos.NewEvent(e.Type(),
		cosmos.NewAttribute("pool", e.Pool.String()),
		cosmos.NewAttribute("stake_units", e.StakeUnits.String()),
		cosmos.NewAttribute("basis_points", strconv.FormatInt(e.BasisPoints, 10)),
		cosmos.NewAttribute("asymmetry", e.Asymmetry.String()))
	evt = evt.AppendAttributes(e.InTx.ToAttributes()...)
	return cosmos.Events{evt}, nil
}

// EventAdd represent add operation.
type EventAdd struct {
	Pool common.Asset `json:"pool"`
	InTx common.Tx    `json:"in_tx"`
}

// NewEventAdd create a new add event.
func NewEventAdd(pool common.Asset, inTx common.Tx) EventAdd {
	return EventAdd{
		Pool: pool,
		InTx: inTx,
	}
}

// Type return add event type.
func (e EventAdd) Type() string {
	return AddEventType
}

// Events get all events.
func (e EventAdd) Events() (cosmos.Events, error) {
	evt := cosmos.NewEvent(e.Type(),
		cosmos.NewAttribute("pool", e.Pool.String()))
	evt = evt.AppendAttributes(e.InTx.ToAttributes()...)
	return cosmos.Events{evt}, nil
}

// EventPool represent pool change event.
type EventPool struct {
	Pool   common.Asset `json:"pool"`
	Status PoolStatus   `json:"status"`
}

// NewEventPool create a new pool change event.
func NewEventPool(pool common.Asset, status PoolStatus) EventPool {
	return EventPool{
		Pool:   pool,
		Status: status,
	}
}

// Type return pool event type.
func (e EventPool) Type() string {
	return PoolEventType
}

// Events provide an instance of cosmos.Events.
func (e EventPool) Events() (cosmos.Events, error) {
	return cosmos.Events{
		cosmos.NewEvent(e.Type(),
			cosmos.NewAttribute("pool", e.Pool.String()),
			cosmos.NewAttribute("pool_status", e.Status.String())),
	}, nil
}

// PoolAmt pool asset amount.
type PoolAmt struct {
	Asset  common.Asset `json:"asset"`
	Amount int64        `json:"amount"`
}

// EventRewards reward event.
type EventRewards struct {
	BondReward  cosmos.Uint `json:"bond_reward"`
	PoolRewards []PoolAmt   `json:"pool_rewards"`
}

// NewEventRewards create a new reward event.
func NewEventRewards(bondReward cosmos.Uint, poolRewards []PoolAmt) EventRewards {
	return EventRewards{
		BondReward:  bondReward,
		PoolRewards: poolRewards,
	}
}

// Type return reward event type.
func (e EventRewards) Type() string {
	return RewardEventType
}

// Events return the cosmos events, one attribute per rewarded pool.
func (e EventRewards) Events() (cosmos.Events, error) {
	evt := cosmos.NewEvent(e.Type(),
		cosmos.NewAttribute("bond_reward", e.BondReward.String()),
	)
	for _, item := range e.PoolRewards {
		evt = evt.AppendAttributes(cosmos.NewAttribute(item.Asset.String(), strconv.FormatInt(item.Amount, 10)))
	}
	return cosmos.Events{evt}, nil
}

// EventRefund represent a refund activity , and contains the reason why it get refund.
type EventRefund struct {
	Code   uint32     `json:"code"`
	Reason string     `json:"reason"`
	InTx   common.Tx  `json:"in_tx"`
	Fee    common.Fee `json:"fee"`
}

// NewEventRefund create a new EventRefund.
func NewEventRefund(code uint32, reason string, inTx common.Tx, fee common.Fee) EventRefund {
	return EventRefund{
		Code:   code,
		Reason: reason,
		InTx:   inTx,
		Fee:    fee,
	}
}

// Type return refund event type.
func (e EventRefund) Type() string {
	return RefundEventType
}

// Events return events.
// NOTE(review): Fee is stored on the struct but not emitted as an attribute
// here — confirm whether that is intentional (fees may be emitted via EventFee).
func (e EventRefund) Events() (cosmos.Events, error) {
	evt := cosmos.NewEvent(e.Type(),
		cosmos.NewAttribute("code", strconv.FormatUint(uint64(e.Code), 10)),
		cosmos.NewAttribute("reason", e.Reason),
	)
	evt = evt.AppendAttributes(e.InTx.ToAttributes()...)
	return cosmos.Events{evt}, nil
}

// BondType distinguishes whether bond was paid in or returned.
type BondType string

const (
	BondPaid     BondType = `bond_paid`
	BondReturned BondType = `bond_returned`
)

// EventBond bond paid or returned event.
type EventBond struct {
	Amount   cosmos.Uint `json:"amount"`
	BondType BondType    `json:"bond_type"`
	TxIn     common.Tx   `json:"tx_in"`
}

// NewEventBond create a new Bond Events.
func NewEventBond(amount cosmos.Uint, bondType BondType, txIn common.Tx) EventBond {
	return EventBond{
		Amount:   amount,
		BondType: bondType,
		TxIn:     txIn,
	}
}

// Type return bond event Type.
func (e EventBond) Type() string {
	return BondEventType
}

// Events return all the event attributes.
// NOTE(review): the attribute key "bound_type" looks like a typo for
// "bond_type", but it is kept byte-for-byte because downstream consumers
// (and consensus) may depend on the emitted key — confirm before renaming.
func (e EventBond) Events() (cosmos.Events, error) {
	evt := cosmos.NewEvent(e.Type(),
		cosmos.NewAttribute("amount", e.Amount.String()),
		cosmos.NewAttribute("bound_type", string(e.BondType)))
	evt = evt.AppendAttributes(e.TxIn.ToAttributes()...)
	return cosmos.Events{evt}, nil
}

// GasType is currently unused but kept for interface stability.
type GasType string

// GasPool aggregates gas spending per asset pool.
type GasPool struct {
	Asset    common.Asset `json:"asset"`
	AssetAmt cosmos.Uint  `json:"asset_amt"`
	RuneAmt  cosmos.Uint  `json:"rune_amt"`
	Count    int64        `json:"transaction_count"`
}

// EventGas represent the events happened in thorchain related to Gas.
type EventGas struct {
	Pools []GasPool `json:"pools"`
}

// NewEventGas create a new EventGas instance.
func NewEventGas() *EventGas {
	return &EventGas{
		Pools: make([]GasPool, 0),
	}
}

// UpsertGasPool update the Gas Pools hold by EventGas instance.
// If the given gasPool already exist, then it merge the gasPool with internal one, otherwise add it to the list.
// NOTE(review): only RuneAmt/AssetAmt are merged; Count is not accumulated on
// merge — confirm that is intended.
func (e *EventGas) UpsertGasPool(pool GasPool) {
	for i, p := range e.Pools {
		if p.Asset == pool.Asset {
			e.Pools[i].RuneAmt = p.RuneAmt.Add(pool.RuneAmt)
			e.Pools[i].AssetAmt = p.AssetAmt.Add(pool.AssetAmt)
			return
		}
	}
	e.Pools = append(e.Pools, pool)
}

// Type return event type.
func (e *EventGas) Type() string {
	return GasEventType
}

// Events emits one cosmos event per gas pool.
func (e *EventGas) Events() (cosmos.Events, error) {
	events := make(cosmos.Events, 0, len(e.Pools))
	for _, item := range e.Pools {
		evt := cosmos.NewEvent(e.Type(),
			cosmos.NewAttribute("asset", item.Asset.String()),
			cosmos.NewAttribute("asset_amt", item.AssetAmt.String()),
			cosmos.NewAttribute("rune_amt", item.RuneAmt.String()),
			cosmos.NewAttribute("transaction_count", strconv.FormatInt(item.Count, 10)))
		events = append(events, evt)
	}
	return events, nil
}

// EventReserve Reserve event type.
type EventReserve struct {
	ReserveContributor ReserveContributor `json:"reserve_contributor"`
	InTx               common.Tx          `json:"in_tx"`
}

// NewEventReserve create a new instance of EventReserve.
func NewEventReserve(contributor ReserveContributor, inTx common.Tx) EventReserve {
	return EventReserve{
		ReserveContributor: contributor,
		InTx:               inTx,
	}
}

// Type return reserve event type.
func (e EventReserve) Type() string {
	return ReserveEventType
}

// Events return the cosmos event for a reserve contribution.
func (e EventReserve) Events() (cosmos.Events, error) {
	evt := cosmos.NewEvent(e.Type(),
		cosmos.NewAttribute("contributor_address", e.ReserveContributor.Address.String()),
		cosmos.NewAttribute("amount", e.ReserveContributor.Amount.String()),
	)
	evt = evt.AppendAttributes(e.InTx.ToAttributes()...)
	return cosmos.Events{
		evt,
	}, nil
}

// EventSlash represent a change in pool balance which caused by slash a node account.
type EventSlash struct {
	Pool        common.Asset `json:"pool"`
	SlashAmount []PoolAmt    `json:"slash_amount"`
}

// NewEventSlash create a new slash event.
func NewEventSlash(pool common.Asset, slashAmount []PoolAmt) EventSlash {
	return EventSlash{
		Pool:        pool,
		SlashAmount: slashAmount,
	}
}

// Type return slash event type.
func (e EventSlash) Type() string {
	return SlashEventType
}

// Events return the cosmos event for a slash.
func (e EventSlash) Events() (cosmos.Events, error) {
	evt := cosmos.NewEvent(e.Type(),
		cosmos.NewAttribute("pool", e.Pool.String()))
	for _, item := range e.SlashAmount {
		// BUG FIX: AppendAttributes returns a new Event; the original code
		// discarded the result, so the per-asset slash amounts were never
		// included in the emitted event.
		evt = evt.AppendAttributes(cosmos.NewAttribute(item.Asset.String(), strconv.FormatInt(item.Amount, 10)))
	}
	return cosmos.Events{evt}, nil
}

// EventErrata represent a change in pool balance which caused by an errata transaction.
type EventErrata struct {
	TxID  common.TxID `json:"tx_id"`
	Pools PoolMods    `json:"pools"`
}

// NewEventErrata create a new errata event.
func NewEventErrata(txID common.TxID, pools PoolMods) EventErrata {
	return EventErrata{
		TxID:  txID,
		Pools: pools,
	}
}

// Type return errata event type.
func (e EventErrata) Type() string {
	return ErrataEventType
}

// Events return a cosmos.Events type, one event per modified pool.
func (e EventErrata) Events() (cosmos.Events, error) {
	events := make(cosmos.Events, 0, len(e.Pools))
	for _, item := range e.Pools {
		evt := cosmos.NewEvent(e.Type(),
			cosmos.NewAttribute("in_tx_id", e.TxID.String()),
			cosmos.NewAttribute("asset", item.Asset.String()),
			cosmos.NewAttribute("rune_amt", item.RuneAmt.String()),
			cosmos.NewAttribute("rune_add", strconv.FormatBool(item.RuneAdd)),
			cosmos.NewAttribute("asset_amt", item.AssetAmt.String()),
			cosmos.NewAttribute("asset_add", strconv.FormatBool(item.AssetAdd)))
		events = append(events, evt)
	}
	return events, nil
}

// EventFee represent fee.
type EventFee struct {
	TxID common.TxID
	Fee  common.Fee
}

// NewEventFee create a new EventFee.
func NewEventFee(txID common.TxID, fee common.Fee) EventFee {
	return EventFee{
		TxID: txID,
		Fee:  fee,
	}
}

// Type get a string represent the event type.
func (e EventFee) Type() string {
	return FeeEventType
}

// Events return events of cosmos.Event type.
func (e EventFee) Events() (cosmos.Events, error) {
	evt := cosmos.NewEvent(e.Type(),
		cosmos.NewAttribute("tx_id", e.TxID.String()),
		cosmos.NewAttribute("coins", e.Fee.Coins.String()),
		cosmos.NewAttribute("pool_deduct", e.Fee.PoolDeduct.String()))
	return cosmos.Events{evt}, nil
}

// EventOutbound represent an outbound message from thornode.
type EventOutbound struct {
	InTxID common.TxID // the inbound tx hash which triggered this outbound , it could be empty, because there are migration etc
	Tx     common.Tx
}

// NewEventOutbound create a new instance of EventOutbound.
func NewEventOutbound(inTxID common.TxID, tx common.Tx) EventOutbound {
	return EventOutbound{
		InTxID: inTxID,
		Tx:     tx,
	}
}

// Type return a string which represent the type of this event.
func (e EventOutbound) Type() string {
	return OutboundEventType
}

// Events return sdk events.
func (e EventOutbound) Events() (cosmos.Events, error) {
	evt := cosmos.NewEvent(e.Type(),
		cosmos.NewAttribute("in_tx_id", e.InTxID.String()))
	evt = evt.AppendAttributes(e.Tx.ToAttributes()...)
	return cosmos.Events{evt}, nil
}
fgrid/iso20022
RejectionReason1Choice.go
<gh_stars>10-100 package iso20022 // Allows the sender of the rejection message to indicate only one rejection reason that applies to the entire rejected message. type RejectionReason1Choice struct { // Rejection reason that applies to the whole report. GlobalRejectionReason *Reason2 `xml:"GblRjctnRsn"` // Specifies a rejection reason for each individual element of a report. RejectedElement []*RejectedElement1 `xml:"RjctdElmt"` } func (r *RejectionReason1Choice) AddGlobalRejectionReason() *Reason2 { r.GlobalRejectionReason = new(Reason2) return r.GlobalRejectionReason } func (r *RejectionReason1Choice) AddRejectedElement() *RejectedElement1 { newValue := new(RejectedElement1) r.RejectedElement = append(r.RejectedElement, newValue) return newValue }
boosterconf/booster
spec/services/ticket_ordering_service_spec.rb
<reponame>boosterconf/booster require 'rails_helper' describe TicketOrderingService do describe "#" end
TomTranter/pybamm_pnm
post_scripts/animate_saved_data.py
# -*- coding: utf-8 -*-
"""
Created on Thu Feb 6 08:46:59 2020

@author: Tom

Load saved lower/upper current-density and temperature data for one or
more simulation runs, stitch the halves together, and animate them on the
saved OpenPNM network geometry via ecm.animate_data4.
"""
import pybamm
import openpnm as op
import matplotlib.pyplot as plt
import ecm
import os
from scipy import io
import numpy as np

plt.style.use('default')
plt.close("all")

pybamm.set_logging_level("INFO")
wrk = op.Workspace()
wrk.clear()


def _load_pair(save_root, var_name):
    """Load the '<var>_lower' and '<var>_upper' .mat files under save_root
    and return both 'data' arrays. Extracted to remove the triplicated
    loadmat boilerplate in the original script."""
    lower = io.loadmat(os.path.join(save_root, var_name + '_lower'))['data']
    upper = io.loadmat(os.path.join(save_root, var_name + '_upper'))['data']
    return lower, upper


if __name__ == "__main__":
    # NOTE(review): hard-coded Windows path — adjust for your machine.
    save_parent = 'C:\\Code\\pybamm_pnm_couple'
    for prefix in ['']:  # , 'b']:
        for sub in ['4A_Q_cc']:  # , '2A', '3A', '4A', '5A']:
            # os.path.join replaces the manual '\\' concatenation; the
            # prefix is appended directly to the parent dir name as before.
            save_root = os.path.join(save_parent + prefix, sub)
            data_lower, data_upper = _load_pair(
                save_root, 'var_Current_collector_current_density')
            temp_lower, temp_upper = _load_pair(save_root, 'var_Temperature')
            time_lower, time_upper = _load_pair(save_root, 'var_Time')
            # cwd was fetched twice in the original; once is enough.
            cwd = os.getcwd()
            input_dir = os.path.join(cwd, 'input')
            wrk.load_project(os.path.join(input_dir, 'MJ141-mid-top_m_cc_new.pnm'))
            sim_name = list(wrk.keys())[-1]
            project = wrk[sim_name]
            net = project.network
            Nspm = net.num_throats('spm_resistor')
            # Stitch the two half-cell datasets side by side.
            data_amalg = np.hstack((data_lower, data_upper))
            temp_amalg = np.hstack((temp_lower, temp_upper))
            time_amalg = np.hstack((time_lower, time_upper))
            # Arc lengths weight each SPM resistor's contribution.
            weights = net['throat.arc_length'][net.throats('spm_resistor')]
            variables = {}
            plot_left = 'Current Collector Current Density [A.m-2]'
            plot_right = 'Temperature [K]'
            plot_time = 'Time [h]'
            variables[plot_left] = data_amalg
            variables[plot_right] = temp_amalg
            variables[plot_time] = time_amalg
            save_path = os.path.join(save_root, 'Current collector current density')
            ecm.animate_data4(project, variables, plot_left, plot_right,
                              weights, filename=save_path)
alam8064/main
src/test/java/seedu/jelphabot/logic/parser/ReminderCommandParserTest.java
//@@author yapdianhao package seedu.jelphabot.logic.parser; import static seedu.jelphabot.commons.core.Messages.MESSAGE_INVALID_COMMAND_FORMAT; import static seedu.jelphabot.logic.parser.CommandParserTestUtil.assertParseFailure; import static seedu.jelphabot.logic.parser.CommandParserTestUtil.assertParseSuccess; import static seedu.jelphabot.testutil.TypicalIndexes.INDEX_FIRST_TASK; import static seedu.jelphabot.testutil.TypicalReminders.ASSESSMENT_REMINDER; import org.junit.jupiter.api.Test; import seedu.jelphabot.logic.commands.ReminderCommand; public class ReminderCommandParserTest { private ReminderCommandParser parser = new ReminderCommandParser(); @Test public void parse_emptyArg_throwsParseException() { assertParseFailure( parser, " ", String.format(MESSAGE_INVALID_COMMAND_FORMAT, ReminderCommand.MESSAGE_USAGE)); } @Test public void parse_validArg_throwsParseException() { ReminderCommand reminderCommand = new ReminderCommand(INDEX_FIRST_TASK, ASSESSMENT_REMINDER); assertParseSuccess(parser, "1 days/1 hours/1", reminderCommand); assertParseSuccess(parser, " 1 days/ 1 \n hours/ 1 \t", reminderCommand); } }
raintherrien/hammer
include/hammer/well.h
<filename>include/hammer/well.h<gh_stars>0 #ifndef HAMMER_WELL_H_ #define HAMMER_WELL_H_ #include <stdint.h> /* * Implementation of WELL512 by <NAME>, published in his paper: * Random Number Generation (2008) and released into the public domain. * http://lomont.org/papers/2008/Lomont_PRNG_2008.pdf */ #define WELL512SZ 16 typedef uint32_t WELL512[WELL512SZ+1]; /* +1 is additional index */ void WELL512_seed(WELL512, uint64_t seed); uint32_t WELL512i(WELL512); float WELL512f(WELL512); #endif /* HAMMER_WELL_H_ */
diguits/domainmodeldesigner
DomainModelDefinition/src/main/java/com/diguits/domainmodeldefinition/definitions/DomainModelDefinitionVisitorBase.java
<filename>DomainModelDefinition/src/main/java/com/diguits/domainmodeldefinition/definitions/DomainModelDefinitionVisitorBase.java package com.diguits.domainmodeldefinition.definitions; public class DomainModelDefinitionVisitorBase implements IEntityDefinitionVisitor { protected void visitBaseDef(BaseDef baseDef, BaseDef owner) { } public void visitDomainModelDef(DomainModelDef domainModelDef) { visitBaseDef(domainModelDef, null); } public void visitBoundedContextDef(BoundedContextDef boundedContext, BaseDef owner) { visitBaseDef(boundedContext, owner); } public void visitEntityGroupDef(ModuleDef module, BaseDef owner) { visitBaseDef(module, owner); } public void visitEnumDef(EnumDef enumDef, BaseDef owner) { visitBaseDef(enumDef, owner); } public void visitEnumValueDef(EnumValueDef enumValue, BaseDef owner) { visitBaseDef(enumValue, owner); } public void visitEntityDef(EntityDef entity, BaseDef owner) { visitBaseDef(entity, owner); } public void visitFieldGroupDef(FieldGroupDef fieldGroup, BaseDef owner) { visitBaseDef(fieldGroup, owner); } public void visitFieldSubgroupDef(FieldSubgroupDef fieldSubgroup, BaseDef owner) { visitBaseDef(fieldSubgroup, owner); } public void visitFieldDef(FieldDef field, BaseDef owner) { visitBaseDef(field, owner); } public void visitIndexDef(IndexDef index, BaseDef owner) { visitBaseDef(index, owner); } public void visitRelationshipDef(RelationshipDef relationship, BaseDef owner) { visitBaseDef(relationship, owner); } public void visitRelationOverrideDef(RelationOverrideDef relationshipOverride, BaseDef owner) { visitBaseDef(relationshipOverride, owner); } public void visitFilterDef(FilterDef filterDef, BaseDef owner) { visitBaseDef(filterDef, owner); } public void visitColumnDef(ColumnDef columnDef, BaseDef owner) { visitBaseDef(columnDef, owner); } public void visitApplicationDef(ApplicationDef applicationDef, BaseDef owner) { visitBaseDef(applicationDef, owner); } public void visitRelationshipPartDef(RelationshipPartDef 
RelationshipPartDef, BaseDef owner) { visitBaseDef(RelationshipPartDef, owner); } @Override public void visitFieldRelationDataDef(FieldRelationshipDataDef relationshipData, BaseDef owner) { visitBaseDef(relationshipData, owner); } @Override public void visiCustomFieldDef(CustomFieldDef customFieldDef, BaseDef owner) { visitBaseDef(customFieldDef, owner); } @Override public void visitCustomFieldValueDef(CustomFieldValueDef customFieldValueDef, BaseDef owner) { visitBaseDef(customFieldValueDef, owner); } @Override public void visitLocalizedDataDef(LocalizedDataDef localizedDataDef, BaseDef owner) { visitBaseDef(localizedDataDef, owner); } @Override public void visitLocaleDef(LocaleDef localeDef, BaseDef owner) { visitBaseDef(localeDef, owner); } @Override public void visitFilterValueDef(FilterValueDef filterValueDef, BaseDef owner) { visitBaseDef(filterValueDef, owner); } @Override public void visitValueObjectDef(ValueObjectDef valueObjectDef, BaseDef owner) { visitBaseDef(valueObjectDef, owner); } }
duannd/java-tutorials
java-core/src/main/java/com/duanndz/core/multithreading/ThreadJoinExample.java
<filename>java-core/src/main/java/com/duanndz/core/multithreading/ThreadJoinExample.java package com.duanndz.core.multithreading; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * duan.nguyen * Datetime 3/30/20 11:21 */ public class ThreadJoinExample { private static final Logger log = LoggerFactory.getLogger(ThreadJoinExample.class); public static void main(String[] args) throws InterruptedException { Thread t1 = new MyThread("t1", 6); Thread t2 = new MyThread("t2", 8); Thread t3 = new MyThread("t3", 10); log.info("Start t1 and call join(2000)"); // second: 0 t1.start(); // t1 will sleep 7 seconds t1.join(2000); // second: 2 t2.start(); // start second thread after waiting for 2 seconds or t1 dead (start after 2000 s). t1.join(); // second: 7 t3.start(); //start third thread only when t1 is dead //let all threads finish execution before finishing main thread t1.join(); // t1 done t2.join(); // t2 done at second 11. t3.join(); // t3 done at second 18 log.info("All threads are dead, exiting main thread (total is 18 seconds)"); } }
SanjanaB/gocd
config/config-api/src/main/java/com/thoughtworks/go/domain/PipelineGroups.java
<reponame>SanjanaB/gocd /* * Copyright 2017 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.domain; import java.util.*; import com.thoughtworks.go.config.*; import com.thoughtworks.go.config.exceptions.PipelineGroupNotEmptyException; import com.thoughtworks.go.config.exceptions.PipelineGroupNotFoundException; import com.thoughtworks.go.config.materials.PackageMaterialConfig; import com.thoughtworks.go.config.materials.PluggableSCMMaterialConfig; import com.thoughtworks.go.config.materials.ScmMaterialConfig; import com.thoughtworks.go.domain.materials.MaterialConfig; import com.thoughtworks.go.domain.packagerepository.PackageDefinition; import com.thoughtworks.go.domain.packagerepository.PackageRepository; import com.thoughtworks.go.domain.scm.SCM; import com.thoughtworks.go.util.Pair; @ConfigCollection(value = BasicPipelineConfigs.class) public class PipelineGroups extends BaseCollection<PipelineConfigs> implements Validatable { private final ConfigErrors configErrors = new ConfigErrors(); private Map<String, List<Pair<PipelineConfig, PipelineConfigs>>> packageToPipelineMap; private Map<String, List<Pair<PipelineConfig, PipelineConfigs>>> pluggableSCMMaterialToPipelineMap; public PipelineGroups() { } public PipelineGroups(PipelineConfigs... 
configses) { for (PipelineConfigs configs : configses) { this.add(configs); } } public void update(String groupName, String pipelineName, PipelineConfig pipeline) { for (PipelineConfigs pipelines : this) { pipelines.update(groupName, pipeline, pipelineName); } } public void addPipeline(String groupName, PipelineConfig pipeline) { String sanitizedGroupName = BasicPipelineConfigs.sanitizedGroupName(groupName); if (!this.hasGroup(sanitizedGroupName)) { createNewGroup(sanitizedGroupName, pipeline); return; } for (PipelineConfigs pipelines : this) { if (pipelines.save(pipeline, sanitizedGroupName)) { return; } } } public void addPipelineWithoutValidation(String groupName, PipelineConfig pipelineConfig) { if (!this.hasGroup(groupName)) { createNewGroup(groupName, pipelineConfig); } else { PipelineConfigs group = findGroup(groupName); group.addWithoutValidation(pipelineConfig); } } public void deletePipeline(PipelineConfig pipelineConfig) { for (PipelineConfigs group : this) { if(group.hasPipeline(pipelineConfig.name())){ group.remove(pipelineConfig); return; } } } private void createNewGroup(String sanitizedGroupName, PipelineConfig pipeline) { PipelineConfigs configs = new BasicPipelineConfigs(pipeline); configs.setGroup(sanitizedGroupName); this.add(0, configs); } public PipelineConfigs findGroup(String groupName) { for (PipelineConfigs pipelines : this) { if (pipelines.isNamed(groupName)) { return pipelines; } } throw new PipelineGroupNotFoundException("Failed to find the group [" + groupName + "]"); } public boolean hasGroup(String groupName) { try { findGroup(groupName); return true; } catch (PipelineGroupNotFoundException e) { return false; } } public PipelineConfig findPipeline(String groupName, int pipelineIndex) { return findGroup(groupName).get(pipelineIndex); } public void accept(PipelineGroupVisitor visitor) { for (PipelineConfigs group : this) { visitor.visit(group); } } public String findGroupNameByPipeline(CaseInsensitiveString pipelineName) { 
PipelineConfigs group = findGroupByPipeline(pipelineName); return group == null ? null : group.getGroup(); } public PipelineConfigs findGroupByPipeline(CaseInsensitiveString pipelineName) { for (PipelineConfigs group : this) { if (group.hasPipeline(pipelineName)) { return group; } } return null; } public void validate(ValidationContext validationContext) { Map<String, PipelineConfigs> nameToConfig = new HashMap<>(); List<PipelineConfigs> visited = new ArrayList(); for (PipelineConfigs group : this) { group.validateNameUniqueness(nameToConfig); } validatePipelineNameUniqueness(); } public void validatePipelineNameUniqueness() { List<PipelineConfig> visited = new ArrayList<>(); HashMap<CaseInsensitiveString, Set<String>> duplicates = new HashMap<>(); for (PipelineConfigs group : this) { for (PipelineConfig pipeline : group) { for (PipelineConfig visitedPipeline : visited) { if (visitedPipeline.name().equals(pipeline.name())) { if(!duplicates.containsKey(pipeline.name())){ duplicates.put(pipeline.name(), new HashSet<>()); } duplicates.get(pipeline.name()).add(pipeline.getOriginDisplayName()); duplicates.get(pipeline.name()).add(visitedPipeline.getOriginDisplayName()); pipeline.errors().remove(PipelineConfig.NAME); pipeline.addError(PipelineConfig.NAME, String.format("You have defined multiple pipelines named '%s'. Pipeline names must be unique. Source(s): %s", pipeline.name(), duplicates.get(pipeline.name()))); visitedPipeline.errors().remove(PipelineConfig.NAME); visitedPipeline.addError(PipelineConfig.NAME, String.format("You have defined multiple pipelines named '%s'. Pipeline names must be unique. 
Source(s): %s", pipeline.name(), duplicates.get(pipeline.name()))); } } visited.add(pipeline); } } } public ConfigErrors errors() { return configErrors; } public void addError(String fieldName, String message) { configErrors.add(fieldName, message); } public Set<MaterialConfig> getAllUniquePostCommitSchedulableMaterials() { Set<MaterialConfig> materialConfigs = new HashSet<>(); Set<String> uniqueMaterials = new HashSet<>(); for (PipelineConfigs pipelineConfigs : this) { for (PipelineConfig pipelineConfig : pipelineConfigs) { for (MaterialConfig materialConfig : pipelineConfig.materialConfigs()) { if ((materialConfig instanceof ScmMaterialConfig || materialConfig instanceof PluggableSCMMaterialConfig) && !materialConfig.isAutoUpdate() && !uniqueMaterials.contains(materialConfig.getFingerprint())) { materialConfigs.add(materialConfig); uniqueMaterials.add(materialConfig.getFingerprint()); } } } } return materialConfigs; } public Map<String, List<Pair<PipelineConfig, PipelineConfigs>>> getPackageUsageInPipelines() { if (packageToPipelineMap == null) { synchronized (this) { if (packageToPipelineMap == null) { packageToPipelineMap = new HashMap<>(); for (PipelineConfigs pipelineConfigs : this) { for (PipelineConfig pipelineConfig : pipelineConfigs) { for (PackageMaterialConfig packageMaterialConfig : pipelineConfig.packageMaterialConfigs()) { String packageId = packageMaterialConfig.getPackageId(); if (!packageToPipelineMap.containsKey(packageId)) { packageToPipelineMap.put(packageId, new ArrayList<>()); } packageToPipelineMap.get(packageId).add(new Pair<>(pipelineConfig, pipelineConfigs)); } } } } } } return packageToPipelineMap; } public boolean canDeletePackageRepository(PackageRepository packageRepository) { Map<String, List<Pair<PipelineConfig, PipelineConfigs>>> packageUsageInPipelines = getPackageUsageInPipelines(); for (PackageDefinition packageDefinition : packageRepository.getPackages()) { if (packageUsageInPipelines.containsKey(packageDefinition.getId())) { 
return false; } } return true; } public Map<String, List<Pair<PipelineConfig, PipelineConfigs>>> getPluggableSCMMaterialUsageInPipelines() { if (pluggableSCMMaterialToPipelineMap == null) { synchronized (this) { if (pluggableSCMMaterialToPipelineMap == null) { pluggableSCMMaterialToPipelineMap = new HashMap<>(); for (PipelineConfigs pipelineConfigs : this) { for (PipelineConfig pipelineConfig : pipelineConfigs) { for (PluggableSCMMaterialConfig pluggableSCMMaterialConfig : pipelineConfig.pluggableSCMMaterialConfigs()) { String scmId = pluggableSCMMaterialConfig.getScmId(); if (!pluggableSCMMaterialToPipelineMap.containsKey(scmId)) { pluggableSCMMaterialToPipelineMap.put(scmId, new ArrayList<>()); } pluggableSCMMaterialToPipelineMap.get(scmId).add(new Pair<>(pipelineConfig, pipelineConfigs)); } } } } } } return pluggableSCMMaterialToPipelineMap; } public boolean canDeletePluggableSCMMaterial(SCM scmConfig) { Map<String, List<Pair<PipelineConfig, PipelineConfigs>>> packageUsageInPipelines = getPluggableSCMMaterialUsageInPipelines(); if (packageUsageInPipelines.containsKey(scmConfig.getId())) { return false; } return true; } public PipelineGroups getLocal() { PipelineGroups locals = new PipelineGroups(); for(PipelineConfigs pipelineConfigs : this) { PipelineConfigs local = pipelineConfigs.getLocal(); if(local != null) locals.add(local); } return locals; } public void deleteGroup(String groupName) { Iterator<PipelineConfigs> iterator = this.iterator(); while (iterator.hasNext()) { PipelineConfigs currentGroup = iterator.next(); if (currentGroup.isNamed(groupName)) { if (!currentGroup.isEmpty()) throw new PipelineGroupNotEmptyException("Failed to delete group [" + groupName + "] not empty"); iterator.remove(); break; } } } }
moktar/DesignPattern
designpattern/src/main/java/com/example/designpattern/behavioralpattern/statepattern/Management.java
<filename>designpattern/src/main/java/com/example/designpattern/behavioralpattern/statepattern/Management.java package com.example.designpattern.behavioralpattern.statepattern; import android.util.Log; public class Management implements Connection { @Override public void open() { Log.i("State: ","open database for accounting"); } @Override public void close() { Log.i("State: ","close the database"); } @Override public void log() { Log.i("State: ","log activities"); } @Override public void update() { Log.i("State: ","Management has been updated"); } }
Presto412/Inaam-Admin-Management-UI
src/Components/realms/RealmHome.js
import { List } from "antd"; import React, { Component } from "react"; import { Link } from "react-router-dom"; const data = [ { label: "Users", link: "/users", }, { label: "Groups", link: "/groups", }, { label: "Coins", link: "/coins", }, ]; class RealmHome extends Component { makeCompleteLink = (childPath) => { const currentPath = this.props.location.pathname; const parentPath = currentPath.substr(0, currentPath.lastIndexOf("/")); const newPath = parentPath + childPath; return newPath; }; render() { return ( <div> <List header={<div>Realm Home</div>} bordered dataSource={data} renderItem={(item) => ( <List.Item> <h3> <Link to={`${this.makeCompleteLink(item.link)}`}> {item.label} </Link> </h3> </List.Item> )} /> </div> ); } } export default RealmHome;
nachawati/zorba
src/runtime/update/update.h
/*
 * Copyright 2006-2016 zorba.io
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef ZORBA_RUNTIME_UPDATE
#define ZORBA_RUNTIME_UPDATE

#include <vector>

#include "runtime/base/binarybase.h"
#include "runtime/base/unarybase.h"
#include "store/api/update_consts.h"

namespace zorba {

/*******************************************************************************
  Plan iterator for the XQuery Update "insert" expression.

  theType  : which insert variant (into / as first / as last / before / after)
             is being performed; see store::UpdateConsts::InsertType.
  theDoCopy: whether the source nodes must be deep-copied before insertion
             (presumably governed by the copy-namespaces mode — confirm in
             the .cpp implementation).
********************************************************************************/
class InsertIterator : public BinaryBaseIterator<InsertIterator, PlanIteratorState>
{
private:
  store::UpdateConsts::InsertType theType;
  bool                            theDoCopy;

public:
  SERIALIZABLE_CLASS(InsertIterator);

  SERIALIZABLE_CLASS_CONSTRUCTOR2T(
  InsertIterator,
  BinaryBaseIterator<InsertIterator, PlanIteratorState>);

  void serialize(::zorba::serialization::Archiver& ar)
  {
    // Serialize the base class first, then this class's own members.
    serialize_baseclass(ar,
    (BinaryBaseIterator<InsertIterator, PlanIteratorState>*)this);
    SERIALIZE_ENUM(store::UpdateConsts::InsertType, theType);
    ar & theDoCopy;
  }

public:
  InsertIterator (
    static_context* sctx,
    const QueryLoc& loc,
    store::UpdateConsts::InsertType aType,
    PlanIter_t source,
    PlanIter_t target);

  ~InsertIterator() {}

  void accept(PlanIterVisitor& v) const;

  zstring getNameAsString() const;

  bool nextImpl(store::Item_t&, PlanState&) const;
};


/*******************************************************************************
  Plan iterator for the "delete" expression; generated via the UNARY_ITER
  macro since it needs no extra members beyond the single child iterator.
********************************************************************************/
UNARY_ITER(DeleteIterator);


/*******************************************************************************
  Plan iterator for the "replace" expression.

  theType  : replace-node vs. replace-value-of; see
             store::UpdateConsts::ReplaceType.
  theDoCopy: whether replacement nodes are copied first (see note on
             InsertIterator::theDoCopy).
********************************************************************************/
class ReplaceIterator : public BinaryBaseIterator<ReplaceIterator, PlanIteratorState>
{
private:
  store::UpdateConsts::ReplaceType theType;
  bool                             theDoCopy;

public:
  SERIALIZABLE_CLASS(ReplaceIterator);

  SERIALIZABLE_CLASS_CONSTRUCTOR2T(
  ReplaceIterator,
  BinaryBaseIterator<ReplaceIterator, PlanIteratorState>);

  void serialize(::zorba::serialization::Archiver& ar);

public:
  ReplaceIterator(
    static_context* sctx,
    const QueryLoc& aLoc,
    store::UpdateConsts::ReplaceType aType,
    PlanIter_t target,
    PlanIter_t source);

  ~ReplaceIterator() {}

  void accept(PlanIterVisitor& v) const;

  zstring getNameAsString() const;

  bool nextImpl(store::Item_t&, PlanState&) const;
};


/*******************************************************************************
  Plan iterator for the "rename" expression.

  theNsCtx: in-scope namespace bindings needed to resolve the new QName.
********************************************************************************/
class RenameIterator : public BinaryBaseIterator<RenameIterator, PlanIteratorState>
{
private:
  NamespaceContext_t theNsCtx;

public:
  SERIALIZABLE_CLASS(RenameIterator);

  SERIALIZABLE_CLASS_CONSTRUCTOR2T(
  RenameIterator,
  BinaryBaseIterator<RenameIterator, PlanIteratorState>);

  void serialize(::zorba::serialization::Archiver& ar);

public:
  RenameIterator(
    static_context* sctx,
    const QueryLoc& aLoc,
    PlanIter_t target,
    PlanIter_t source,
    const namespace_context* nsctx);

  ~RenameIterator() {}

  void accept(PlanIterVisitor& v) const;

  zstring getNameAsString() const;

  bool nextImpl(store::Item_t&, PlanState&) const;
};


/*******************************************************************************
  One "copy $var := expr" binding of a transform expression.

  theCopyVars: the iterators for every reference to the copy variable.
  theInput   : the iterator producing the value to be copied.
********************************************************************************/
class CopyClause : public ::zorba::serialization::SerializeBaseClass
{
  friend class TransformIterator;

  typedef std::vector<CopyClause>::const_iterator const_iter_t;
  typedef std::vector<CopyClause>::iterator iter_t;

  std::vector<ForVarIter_t> theCopyVars;
  PlanIter_t                theInput;

public:
  SERIALIZABLE_CLASS(CopyClause);
  SERIALIZABLE_CLASS_CONSTRUCTOR(CopyClause);
  void serialize(::zorba::serialization::Archiver& ar);

public:
  CopyClause() {}

  CopyClause(std::vector<ForVarIter_t>& copyVars, PlanIter_t input)
    :
    theCopyVars(copyVars),
    theInput(input)
  {
  }

  ~CopyClause() {}
};


/*******************************************************************************
  Plan iterator for the "copy ... modify ... return" (transform) expression.

  theCopyClauses : one CopyClause per copy variable.
  theModifyIter  : evaluates the modify clause (produces the PUL).
  thePulHolderIter / theApplyIter: plumbing that collects and applies the
  pending update list before the return clause runs (confirm exact roles in
  the .cpp implementation).
  theReturnIter  : evaluates the return clause.
********************************************************************************/
class TransformIterator : public PlanIterator
{
private:
  std::vector<CopyClause> theCopyClauses;
  PlanIter_t              theModifyIter;
  PlanIter_t              thePulHolderIter;
  PlanIter_t              theApplyIter;
  PlanIter_t              theReturnIter;

public:
  SERIALIZABLE_CLASS(TransformIterator)
  SERIALIZABLE_CLASS_CONSTRUCTOR2(TransformIterator, PlanIterator)
  void serialize(::zorba::serialization::Archiver& ar);

public:
  TransformIterator (
    static_context* sctx,
    const QueryLoc& aLoc,
    std::vector<CopyClause>& aCopyClauses,
    PlanIter_t aModifyIter,
    PlanIter_t aPulHolderIter,
    PlanIter_t aApplyIter,
    PlanIter_t aReturnIter);

  ~TransformIterator();

  zstring getNameAsString() const;

  uint32_t getStateSize() const { return sizeof(PlanIteratorState); }

  uint32_t getStateSizeOfSubtree() const;

  void accept(PlanIterVisitor&) const;

  void openImpl(PlanState& planState, uint32_t& offset);

  bool nextImpl(store::Item_t&, PlanState& planState) const;

  void resetImpl(PlanState& planState) const;

  void closeImpl(PlanState& planState);
};

} // namespace zorba
#endif
/*
 * Local variables:
 * mode: c++
 * End:
 */
/* vim:set et sw=2 ts=2: */
Nels885/csd_dashboard
dashboard/management/commands/sendemail.py
from django.core.management.base import BaseCommand
from django.core.management import call_command
from django.utils import timezone
from django.utils.html import strip_tags
from django.core.mail import send_mail
from django.template.loader import render_to_string

from constance import config

from squalaetp.models import Xelon, Indicator
from utils.conf import string_to_list
from utils.data.analysis import ProductAnalysis


class Command(BaseCommand):
    """Send periodic notification emails: late products, Xelon VIN errors,
    VINs with no CORVET data, and REMAN progress (delegated to the
    ``emailreman`` command)."""

    help = 'Send email for Late products'

    def add_arguments(self, parser):
        parser.add_argument(
            '--late_products',
            action='store_true',
            dest='late_products',
            help='Send email for late products',
        )
        parser.add_argument(
            '--vin_error',
            action='store_true',
            dest='vin_error',
            help='Send email for VIN error',
        )
        parser.add_argument(
            '--vin_corvet',
            action='store_true',
            dest='vin_corvet',
            help='Send email for VIN no CORVET',
        )
        parser.add_argument(
            '--reman',
            action='store_true',
            dest='reman',
            help='Send email for REMAN in progress',
        )

    def _send_html_mail(self, subject, template, context, recipients):
        """Render ``template`` with ``context`` and send it as an HTML email.

        The plain-text alternative is derived from the HTML body, and the
        recipient list is parsed from the comma-separated config string.
        """
        html_message = render_to_string(template, context)
        plain_message = strip_tags(html_message)
        send_mail(
            subject, plain_message, None, string_to_list(recipients),
            html_message=html_message
        )

    def handle(self, *args, **options):
        date_joined = timezone.datetime.strftime(timezone.localtime(), "%d/%m/%Y %H:%M:%S")
        last_7_days = timezone.datetime.today() - timezone.timedelta(7)
        if options['late_products']:
            subject = 'Stocks et Retards {}'.format(date_joined)
            prods = ProductAnalysis()
            data = prods.late_products()
            # Enrich the template context with today's indicator, if one
            # has already been computed.
            indicator = Indicator.objects.filter(date=timezone.now()).first()
            if indicator:
                data.update({
                    'products_to_repair': indicator.products_to_repair,
                    'late_products': indicator.late_products,
                    'express_products': indicator.express_products
                })
            self._send_html_mail(
                subject, 'dashboard/email_format/lp_email.html', data,
                config.LATE_PRODUCTS_TO_EMAIL_LIST
            )
            self.stdout.write(self.style.SUCCESS("Envoi de l'email des produits en retard terminée!"))
        if options['vin_error']:
            subject = "Erreur VIN Xelon {}".format(date_joined)
            # Only the ten most recent faulty VINs of the last week.
            xelons = Xelon.objects.filter(
                vin_error=True, date_retour__gte=last_7_days).order_by('-date_retour')[:10]
            if xelons:
                self._send_html_mail(
                    subject, 'dashboard/email_format/vin_error_email.html',
                    {'xelons': xelons, 'domain': config.WEBSITE_DOMAIN},
                    config.VIN_ERROR_TO_EMAIL_LIST
                )
                self.stdout.write(self.style.SUCCESS("Envoi de l'email des erreurs de VIN terminée !"))
            else:
                self.stdout.write(self.style.SUCCESS("Pas d'erreurs de VIN a envoyer !"))
        if options['vin_corvet']:
            subject = "Problème CORVET {}".format(date_joined)
            # Well-formed PSA VINs (VF3/VF7/VR1/VR3/VR7 prefixes) that still
            # have no CORVET record attached.
            xelons = Xelon.objects.filter(
                date_retour__gte=last_7_days, vin__regex=r'^V((F[37])|(R[137]))\w{14}$',
                vin_error=False, corvet__isnull=True).order_by('-date_retour')[:10]
            if xelons:
                self._send_html_mail(
                    subject, 'dashboard/email_format/vin_corvet_email.html',
                    {'xelons': xelons, 'domain': config.WEBSITE_DOMAIN},
                    config.VIN_ERROR_TO_EMAIL_LIST
                )
                self.stdout.write(self.style.SUCCESS("Envoi de l'email des VINs sans données CORVET terminée !"))
            else:
                self.stdout.write(self.style.SUCCESS("Pas de VIN sans données CORVET à envoyer !"))
        if options['reman']:
            call_command("emailreman", "--batch")
antopen/alipay-sdk-python-all
alipay/aop/api/response/AlipayBossBaseProcessTicketQueryResponse.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json

from alipay.aop.api.response.AlipayResponse import AlipayResponse
from alipay.aop.api.domain.BPOpenApiTicket import BPOpenApiTicket


class AlipayBossBaseProcessTicketQueryResponse(AlipayResponse):
    """Response wrapper for the boss base-process ticket query API."""

    def __init__(self):
        super(AlipayBossBaseProcessTicketQueryResponse, self).__init__()
        # Backing field for the ``ticket`` property, filled in when the
        # response payload is parsed.
        self._ticket = None

    @property
    def ticket(self):
        return self._ticket

    @ticket.setter
    def ticket(self, value):
        # Accept either an already-built domain object or a raw dict
        # straight from the JSON payload.
        if not isinstance(value, BPOpenApiTicket):
            value = BPOpenApiTicket.from_alipay_dict(value)
        self._ticket = value

    def parse_response_content(self, response_content):
        response = super(AlipayBossBaseProcessTicketQueryResponse, self).parse_response_content(response_content)
        if 'ticket' in response:
            self.ticket = response['ticket']
Isaac-Tait/Royal-Ridges-Camp
web/src/pages/PaintballPage/PaintballPage.js
<reponame>Isaac-Tait/Royal-Ridges-Camp import { MetaTags } from '@redwoodjs/web' const PaintballPage = () => { return ( <div> <MetaTags title="Paintball" description="Paintball at Royal Ridges Retreat is a blast" /> <div className="max-w-6xl mx-2 md:mx-auto text-xs md:text-base"> <p className="flex flex-wrap text-green-200 font-bold text-xl justify-center md:text-4xl"> It is Paintball, the way it should be!!! </p> <p className="mt-2"> Royal Ridges is committed to offering you the safest possible family-friendly environment for you to immerse yourself in the adventure of woods ball, scenario paintball, or tournament style speed-ball. There are 2 large speed-ball fields, a 20 acre woods-ball field, and a Tactical Village. Book your next extreme adventure with us! All charges cover the cost of a mask, a marker, paintballs, and an hour or two of game play. </p> <p className="mt-2"> Price $175 (group minimum charge covers up to 5 players) </p> <p className="mt-2">Additional players over 5: $35 per person</p> <p className="mt-2">Current available booking times:</p> <p className="ml-4 mt-2">Fridays - 5:00pm</p> <p className="ml-4 mt-2">Saturdays - 10:00 am or 3:00 pm</p> <p className="ml-4 mt-2">Sundays - 3:00pm</p> <p className="mt-2"> Please click on the minimum package link below to see our current availability </p> <p className="mt-2"> Interested in other dates, times or any questions{' '} <a href="email:<EMAIL>" className="text-green-200 underline" > email </a>{' '} us </p> <p className="mt-2"> Interested in Open Play Games and Dates:{' '} <a href="https://www.royalridges.upcsites.org/rrpb-openplay.html" className="text-green-200 underline" target="_blank" rel="noopener noreferrer" > Royal Ridges Open Play Days </a> . </p> </div> </div> ) } export default PaintballPage
signomix/signomix
src/main/java/com/signomix/out/utility/Counter.java
/**
 * Copyright (C) <NAME> 2018.
 * Distributed under the MIT License (license terms are at http://opensource.org/licenses/MIT).
 */
package com.signomix.out.utility;

import java.util.HashMap;
import java.util.concurrent.atomic.AtomicLong;
import org.cricketmsf.Adapter;
import org.cricketmsf.Event;
import org.cricketmsf.Kernel;
import org.cricketmsf.out.OutboundAdapter;

/**
 * Thread-safe in/out counter with configurable dividers and a correction
 * offset. The current level is (inputs - outputs + correction), where raw
 * input/output totals are scaled down by their respective dividers.
 *
 * @author <NAME> <<EMAIL>>
 */
public class Counter extends OutboundAdapter implements Adapter, CounterIface {

    AtomicLong totalInputs;
    AtomicLong totalOutputs;
    AtomicLong correction;
    private Double inputsDivider = 1.0;
    private Double outputsDivider = 1.0;

    /**
     * Resets all counters and reads the divider configuration.
     *
     * @param properties  adapter configuration; recognizes
     *                    "incoming-divider" and "exiting-divider"
     * @param adapterName the adapter's name in the service configuration
     */
    @Override
    public void loadProperties(HashMap<String, String> properties, String adapterName) {
        super.loadProperties(properties, adapterName);
        totalInputs = new AtomicLong(0);
        totalOutputs = new AtomicLong(0);
        correction = new AtomicLong(0);
        inputsDivider = readDivider(properties, "incoming-divider");
        outputsDivider = readDivider(properties, "exiting-divider");
    }

    /**
     * Parses a divider value from the configuration. Falls back to 1.0 on
     * malformed or non-positive values: a divider of 0 would make every
     * count read as infinity (double division by zero), silently corrupting
     * the counter.
     */
    private double readDivider(HashMap<String, String> properties, String key) {
        double divider = 1.0;
        try {
            divider = Double.parseDouble(properties.getOrDefault(key, "1.0"));
        } catch (NumberFormatException ex) {
            Kernel.handle(Event.logWarning(this, "malformed config"));
        }
        if (divider <= 0.0) {
            Kernel.handle(Event.logWarning(this, "malformed config"));
            divider = 1.0;
        }
        return divider;
    }

    @Override
    public void addInputs(long delta) {
        totalInputs.addAndGet(delta);
    }

    /** Current level: scaled inputs minus scaled outputs plus correction. */
    @Override
    public long get() {
        return getInputs() - getOutputs() + correction.get();
    }

    /** Raw input total scaled down by the incoming divider. */
    @Override
    public long getInputs() {
        return (long) (totalInputs.get() / inputsDivider);
    }

    /** Raw output total scaled down by the exiting divider. */
    @Override
    public long getOutputs() {
        return (long) (totalOutputs.get() / outputsDivider);
    }

    @Override
    public void addOutputs(long delta) {
        totalOutputs.addAndGet(delta);
    }

    /**
     * Adds a signed delta: positive amounts are counted as inputs,
     * negative amounts as (positive) outputs.
     */
    @Override
    public void add(long delta) {
        if (delta >= 0) {
            totalInputs.addAndGet(delta);
        } else {
            totalOutputs.addAndGet(-1 * delta);
        }
    }

    /** Zeroes both totals and sets the correction to the given value. */
    @Override
    public void resetAndSet(long newCorrection) {
        correction.set(newCorrection);
        totalInputs.set(0);
        totalOutputs.set(0);
    }

    @Override
    public void setCorrection(long newCorrection) {
        this.correction.set(newCorrection);
    }

    @Override
    public long getCorrection() {
        return correction.get();
    }
}
Sable/MiX10
languages/Natlab/src/natlab/backends/x10/codegen/ArrayGetSet.java
<reponame>Sable/MiX10
package natlab.backends.x10.codegen;

import java.util.ArrayList;

import natlab.backends.x10.IRx10.ast.ArrayAccess;
import natlab.backends.x10.IRx10.ast.ArraySetStmt;
import natlab.backends.x10.IRx10.ast.AssignStmt;
import natlab.backends.x10.IRx10.ast.DeclStmt;
import natlab.backends.x10.IRx10.ast.EmptyExp;
import natlab.backends.x10.IRx10.ast.Exp;
import natlab.backends.x10.IRx10.ast.IDInfo;
import natlab.backends.x10.IRx10.ast.IDUse;
import natlab.backends.x10.IRx10.ast.List;
import natlab.backends.x10.IRx10.ast.LiterallyExp;
import natlab.backends.x10.IRx10.ast.MultiAssignLHS;
import natlab.backends.x10.IRx10.ast.Stmt;
import natlab.backends.x10.IRx10.ast.StmtBlock;
import natlab.backends.x10.IRx10.ast.SubArrayGetExp;
import natlab.backends.x10.IRx10.ast.SubArraySetStmt;
import natlab.tame.tir.TIRAbstractAssignStmt;
import natlab.tame.tir.TIRAbstractAssignToListStmt;
import natlab.tame.tir.TIRArrayGetStmt;
import natlab.tame.tir.TIRArraySetStmt;
import natlab.tame.valueanalysis.components.shape.DimValue;

/**
 * Translates Tamer IR (TIR) array get/set statements into IRx10 AST
 * statements, choosing between plain element access and sub-array
 * (range-based) access depending on whether any index is a colon or a
 * vector-shaped variable.
 *
 * NOTE(review): this class relies on target.symbolMap as a shared symbol
 * table keyed by variable name; the exact shape representation is either a
 * list of DimValue or of Integer (see setRHSValue), which the code checks
 * by class-name string comparison.
 */
public class ArrayGetSet {
    // When true, dumps intermediate codegen state to stdout/stderr.
    private static boolean Debug = true;

    /**
     * Handles a TIR array-set statement (e.g. a(i,j) = rhs), emitting
     * either an ArraySetStmt (all-scalar indices) or a SubArraySetStmt
     * (colon or vector index) into the given block. Declares the array in
     * the symbol map first if it has not been seen before.
     */
    public static void handleTIRAbstractArraySetStmt(TIRArraySetStmt node, IRx10ASTGenerator target, StmtBlock block) {
        if (Debug) {
            System.out.println(node.getIndices().getChild(0));
            System.out.println(node.getLHS().getPrettyPrinted()
                    + "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~");
        }
        String LHS;
        target.symbolMapKey = node.getArrayName().getID();
        LHS = target.symbolMapKey;
        // First time we see this array: declare it with an empty RHS so that
        // later lookups in the symbol map succeed.
        if (true != target.symbolMap.containsKey(target.symbolMapKey)) {
            DeclStmt arrayDecl = new DeclStmt();
            new IDInfo();
            arrayDecl.setLHS(Helper.generateIDInfo(target.analysis, target.index, node, LHS));
            arrayDecl.getLHS().setName(LHS);
            arrayDecl.setRHS(new EmptyExp());
            target.symbolMap.put(target.symbolMapKey, arrayDecl.getLHS());
            block.addStmt(arrayDecl);
        }
        /*
         * The array has been declared before. This is just an assignment to its
         * index. If not declared before first declare the array and then set the
         * index
         */
        // Detect whether any index is the colon placeholder "__".
        boolean hasColon = false;
        for (int i = 0; i < node.getIndices().getNumChild(); i++) {
            // array_set.setIndices(Expressions.makeIRx10Exp(node.getIndices()
            // .getChild(i), false, target), i);
            if (((IDUse) (Expressions.makeIRx10Exp(node.getIndices().getChild(i), false, target))).getID()
                    .equals("__")) {
                hasColon = true;
            }
        }
        // tf ends up true iff every known dimension of the LHS shape is "1"
        // (i.e. the LHS looks scalar-shaped).
        boolean tf = true;
        IDInfo LhsInfo = new IDInfo();
        LhsInfo = Helper.generateIDInfo(target.analysis, target.index, node, LHS);
        if (null != LhsInfo.getShape())
            for (int i = 0; i < LhsInfo.getShape().size(); i++) {
                if (null != LhsInfo.getShape().get(i))
                    tf &= ("1").equals(LhsInfo.getShape().get(i).toString());
            }
        /*
         * If all indices are scalar
         */
        if (!hasColon && tf) {
            ArraySetStmt array_set = new ArraySetStmt();
            array_set.setLHS(Helper.generateIDInfo(target.analysis, target.index, node, LHS));
            array_set.getLHS().setName(LHS.toString());
            for (int i = 0; i < node.getIndices().getNumChild(); i++) {
                array_set.setIndices(Expressions.makeIRx10Exp(node.getIndices()
                        .getChild(i), false, target), i);
            }
            target.symbolMap.put(target.symbolMapKey, array_set.getLHS());
            array_set.setRHS(Expressions.makeIRx10Exp(node.getRHS(), tf, target));
            block.addStmt(array_set);
        }
        /*
         * if there is colon operator in LHS
         */
        else {
            // Range assignment: build lower/upper bound lists per dimension.
            SubArraySetStmt subArraySet = new SubArraySetStmt();
            subArraySet.setLHS(Helper.generateIDInfo(target.analysis, target.index, node, LHS));
            subArraySet.getLHS().setName(LHS.toString());
            subArraySet.setLowerList(new List<Exp>());
            subArraySet.setUpperList(new List<Exp>());
            if (null != subArraySet.getLHS().getShape())
                for (int i = 0; i < node.getIndices().getNumChild(); i++) {
                    if (((IDUse) (Expressions.makeIRx10Exp(
                            node.getIndices().getChild(i), false, target))).getID().equals(
                            "__")) {
                        /*
                         * Case when index is a ':' Note that this fails if number of
                         * indices is < number of dimensions.
                         */
                        LiterallyExp low = new LiterallyExp("1");
                        LiterallyExp high = new LiterallyExp();
                        if (1 < node.getIndices().getNumChild())
                            high.setVerbatim(LHS.toString() + ".numElems_"
                                    + Integer.toString(i + 1));
                        else
                            high.setVerbatim(LHS.toString() + ".size"
                                    + Integer.toString(i + 1));
                        subArraySet.getLowerList().add(low);
                        subArraySet.getUpperList().add(high);
                    } else {
                        /*
                         * case when index is an ID It can be a scalar or a vector If it is
                         * a sclar : low = high else low = 1st value of vector & upper =
                         * last value
                         */
                        String indexId = ((IDUse) (Expressions.makeIRx10Exp(node
                                .getIndices().getChild(i), false, target))).getID();
                        System.err.println(target.symbolMap.get(indexId));
                        if (Helper.isScalar(target.symbolMap.get(indexId).getShape())) {
                            IDUse low = new IDUse(indexId);
                            IDUse high = new IDUse(indexId);
                            subArraySet.getLowerList().add(low);
                            subArraySet.getUpperList().add(high);
                        } else {
                            LiterallyExp low = new LiterallyExp(indexId + "(0)");
                            LiterallyExp high = new LiterallyExp(indexId + "(" + indexId
                                    + ".size -1)");
                            subArraySet.getLowerList().add(low);
                            subArraySet.getUpperList().add(high);
                        }
                    }
                }
            target.symbolMap.put(target.symbolMapKey, subArraySet.getLHS());
            subArraySet.setRHS(Expressions.makeIRx10Exp(node.getRHS(), tf, target));
            block.addStmt(subArraySet);
        }
    }

    /**
     * Handles a TIR array-get statement (e.g. x = a(i,j)). For a single
     * target, emits either an assignment (already-declared variable) or a
     * declaration plus a pseudo-assignment (first use); for multiple
     * targets, builds a MultiAssignLHS. Delegates RHS construction to
     * setRHSValue.
     */
    public static void handleTIRAbstractArrayGetStmt(TIRArrayGetStmt node, IRx10ASTGenerator target, StmtBlock block) {
        if (1 == (node).getTargets().asNameList().size()) {
            String LHS;
            boolean isDecl;
            target.symbolMapKey = (node).getTargetName().getID();
            LHS = target.symbolMapKey;
            if (true == target.symbolMap.containsKey(target.symbolMapKey)) {
                // Target variable already known: plain assignment.
                isDecl = false;
                AssignStmt list_single_assign_stmt = new AssignStmt();
                list_single_assign_stmt.setLHS(Helper.generateIDInfo(target.analysis, target.index, node, LHS));
                list_single_assign_stmt.getLHS().setName(
                        ((TIRAbstractAssignToListStmt) node).getTargets().getChild(0)
                                .getVarName());
                setRHSValue(false, list_single_assign_stmt, node, false, target, block);
                target.symbolMap.put(target.symbolMapKey, list_single_assign_stmt.getLHS());
                System.out.println("#####!@@@" + target.symbolMapKey);
                block.addStmt(list_single_assign_stmt);
            } else {
                // First use of the target: declare it, then assign.
                isDecl = true;
                DeclStmt decl_stmt = new DeclStmt();
                new IDInfo();
                decl_stmt.setLHS(Helper.generateIDInfo(target.analysis, target.index, node, LHS));
                decl_stmt.getLHS().setName(
                        (node).getTargets().getChild(0).getVarName());
                /*
                 * if it has a colon operator, add a null to the shape - this is a hack
                 * to tell the compiler that it is an array
                 */
                for (Exp i : Expressions.getArgs(node.getRHS(), target)) {
                    if (i instanceof IDUse && ((IDUse) i).getID().equals("__")) {
                        System.out.println("its a colon...............................");
                        decl_stmt.getLHS().getShape().add(null);
                    }
                }
                System.out.println("#####!!!!!" + target.symbolMapKey);
                // block.addStmt(decl_stmt);
                DeclStmt pseudoDecl = new DeclStmt();
                pseudoDecl.setLHS(decl_stmt.getLHS());
                //
                AssignStmt pseudoAssign = new AssignStmt();
                pseudoAssign.setLHS(decl_stmt.getLHS());
                //
                // NOTE(review): the declaration is hoisted to the first block
                // only when more than one block exists — confirm the intended
                // behavior for the single-block case (nothing is emitted then).
                if (target.currentBlock.size() > 1) {
                    target.currentBlock.get(0).addStmt(pseudoDecl);
                    setRHSValue(isDecl, decl_stmt, node, false, target, block);
                    pseudoAssign.setRHS(decl_stmt.getRHS());
                    block.addStmt(pseudoAssign);
                    target.symbolMap.put(target.symbolMapKey, decl_stmt.getLHS());
                    System.out.println(block.getParent().toString()
                            + "^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^");
                }
            }
        } else {
            // Multiple targets: collect IDInfo for every target name.
            AssignStmt list_assign_stmt = new AssignStmt();
            MultiAssignLHS LHSinfo = new MultiAssignLHS();
            list_assign_stmt.setMultiAssignLHS(LHSinfo);
            for (ast.Name name : ((TIRAbstractAssignToListStmt) node).getTargets()
                    .asNameList()) {
                System.out.println("^^" + name.getID());
                list_assign_stmt.getMultiAssignLHS().addIDInfo(
                        Helper.generateIDInfo(target.analysis, target.index, node,
                                name.getID()));
                target.symbolMap.put(
                        name.getID(),
                        Helper.generateIDInfo(target.analysis, target.index, node,
                                name.getID()));
            }
            System.out
                    .println("^*^"
                            + list_assign_stmt.getMultiAssignLHS().getIDInfoList()
                                    .getNumChild());
            list_assign_stmt.setLHS(null);
            setRHSValue(false, list_assign_stmt, node, false, target, block);
            block.addStmt(list_assign_stmt);
        }
    }

    /**
     * Builds the RHS expression for an array get and attaches it to the
     * given declaration or assignment statement. Chooses ArrayAccess when
     * every index is scalar and no colon is present, otherwise a
     * SubArrayGetExp with per-dimension lower/upper bounds.
     *
     * @param decl_or_assgn either a DeclStmt (isDecl true) or an AssignStmt
     */
    public static void setRHSValue(boolean isDecl, Stmt decl_or_assgn, TIRArrayGetStmt node, boolean isScalar,
            IRx10ASTGenerator target, StmtBlock block) {
        String arrayName = node.getRHS().getVarName();
        List<Exp> indices = Expressions.getArgs(node.getRHS(), target);
        boolean hascolon = false;
        boolean allScalar = true;
        for (Exp e : indices) {
            if (e instanceof IDUse && ((IDUse) e).getID().equals("__")) {
                /*
                 * it has colon expression
                 */
                hascolon = true;
            }
            if (e instanceof IDUse && !hascolon
                    && null != target.symbolMap.get(((IDUse) (e)).getID()).getShape()) {
                // The shape list may hold DimValue or Integer elements; the
                // class-name string check distinguishes the two encodings.
                if (target.symbolMap
                        .get(((IDUse) (e)).getID())
                        .getShape()
                        .get(0)
                        .getClass()
                        .toString()
                        .equals(
                                "class natlab.tame.valueanalysis.components.shape.DimValue")) {
                    System.err.println(node.getNodeString()
                            + target.symbolMap.get(((IDUse) (e)).getID()).getShape()
                                    .toString());
                    for (DimValue s : (ArrayList<DimValue>) (target.symbolMap
                            .get(((IDUse) (e)).getID()).getShape())) {
                        if (null != s)
                            allScalar &= "1".equals(s.toString());
                    }
                } else
                    for (Integer s : (ArrayList<Integer>) (target.symbolMap
                            .get(((IDUse) (e)).getID()).getShape())) {
                        allScalar &= "1".equals(s.toString());
                    }
            }
        }
        if (!hascolon && allScalar) {
            /*
             * all indices are scalar values
             */
            ArrayAccess arrayAccess = new ArrayAccess();
            arrayAccess.setArrayID(new IDUse(arrayName));
            arrayAccess.setIsColVector(false);
            /*
             * assumes that shape is correct.
             */
            // Trim trailing null dimensions, then flag 2-D n-by-1 shapes as
            // column vectors.
            int sz = target.symbolMap.get(arrayName).getShape().size();
            while (target.symbolMap.get(arrayName).getShape().get(sz - 1) == null) {
                sz--;
            }
            if (sz == 2
                    && "1".equals(target.symbolMap.get(arrayName).getShape().get(1)
                            .toString())) {
                arrayAccess.setIsColVector(true);
            }
            arrayAccess.setIndicesList(Expressions.getArgs(node.getRHS(), target));
            if (isDecl) {
                ((DeclStmt) decl_or_assgn).setRHS(arrayAccess);
            } else {
                ((AssignStmt) decl_or_assgn).setRHS(arrayAccess);
            }
        } else {
            /*
             * one or more indices is colon or a vector
             */
            SubArrayGetExp subArray = new SubArrayGetExp();
            subArray.setArrayID(new IDUse(arrayName));
            subArray.setLowerList(new List<Exp>());
            subArray.setUpperList(new List<Exp>());
            int i = 0;
            for (Exp e : indices) {
                i++;
                if (e instanceof IDUse && ((IDUse) e).getID().equals("__")) {
                    /*
                     * colon operator
                     */
                    LiterallyExp low = new LiterallyExp("1");
                    LiterallyExp high = new LiterallyExp();
                    if (1 < indices.getNumChild())
                        high.setVerbatim(arrayName + ".numElems_" + Integer.toString(i));
                    else
                        high.setVerbatim(arrayName + ".size" + Integer.toString(i));
                    subArray.getLowerList().add(low);
                    subArray.getUpperList().add(high);
                } else if (!Helper.isScalar(target.symbolMap
                        .get(((IDUse) (e)).getID()).getShape())) {
                    /*
                     * vector shape
                     */
                    String indexId = ((IDUse) (e)).getID();
                    LiterallyExp low = new LiterallyExp(indexId + "(0)");
                    LiterallyExp high = new LiterallyExp(indexId + "(" + indexId
                            + ".size -1)");
                    subArray.getLowerList().add(low);
                    subArray.getUpperList().add(high);
                } else {
                    /*
                     * scalar index
                     */
                    String indexId = ((IDUse) (e)).getID();
                    IDUse low = new IDUse(indexId);
                    IDUse high = new IDUse(indexId);
                    subArray.getLowerList().add(low);
                    subArray.getUpperList().add(high);
                }
            }
            if (isDecl) {
                ((DeclStmt) decl_or_assgn).setRHS(subArray);
            } else {
                ((AssignStmt) decl_or_assgn).setRHS(subArray);
            }
        }
    }

    // This version handles assignment to multiple variables
    /**
     * Appends IDInfo for one target of a multi-assignment to the given
     * AssignStmt, provided the variable is already present in the symbol
     * map. Also refreshes the symbol map entry for the variable.
     */
    public static void handleTIRAbstractAssignToListVarStmt(
            TIRAbstractAssignStmt node, ast.Name name, IRx10ASTGenerator target,
            AssignStmt assign_stmt) {
        String LHS;
        target.symbolMapKey = name.getID();
        LHS = target.symbolMapKey;
        if (true == target.symbolMap.containsKey(LHS)) // variable already
        // // defined and analyzed
        {
            MultiAssignLHS LHSinfo = new MultiAssignLHS();
            assign_stmt.setMultiAssignLHS(LHSinfo);
            assign_stmt.getMultiAssignLHS().addIDInfo(
                    Helper.generateIDInfo(target.analysis, target.index, node, LHS));
            target.symbolMap.put(
                    LHS,
                    Helper.generateIDInfo(target.analysis, target.index, node,
                            name.getID()));
            assign_stmt
                    .getMultiAssignLHS()
                    .getIDInfo(assign_stmt.getMultiAssignLHS().getNumIDInfo() - 1)
                    .setName(
                            ((TIRAbstractAssignToListStmt) node).getTargetName().toString());
        }
    }
}
codacy/bitbucket-scala-client
src/main/scala/com/codacy/client/bitbucket/v1/Issue.scala
package com.codacy.client.bitbucket.v1

import java.time.LocalDateTime

import play.api.libs.functional.syntax._
import play.api.libs.json._

/**
 * A Bitbucket issue as returned by the v1 REST API.
 *
 * Fields are mapped from the v1 payload in the implicit `reader` below;
 * note that `id` comes from the JSON field `local_id`, `owner` from
 * `reported_by.username`, and `kind` from `metadata.kind`.
 */
case class Issue(
    id: Long,
    status: String,
    priority: String,
    title: String,
    content: String,
    owner: String,
    created_on: LocalDateTime,
    kind: String
)

object Issue {
  // Timestamp pattern used by the Bitbucket v1 API (ISO-like, millisecond
  // precision, no zone offset).
  val dateFormat = "yyyy-MM-dd'T'HH:mm:ss.SSS"
  implicit val dateTimeReads: Reads[LocalDateTime] = Reads.localDateTimeReads(dateFormat)

  // JSON reader combining the nested payload paths into the flat case class.
  // format: off
  implicit val reader: Reads[Issue] = (
    (__ \ "local_id").read[Long] and
      (__ \ "status").read[String] and
      (__ \ "priority").read[String] and
      (__ \ "title").read[String] and
      (__ \ "content").read[String] and
      (__ \ "reported_by" \ "username").read[String] and
      (__ \ "created_on").read[LocalDateTime] and
      (__ \ "metadata" \ "kind").read[String]
    )(Issue.apply _)
  // format: on
}
tvelagapudi/lenstronomy
test/test_LensModel/test_Profiles/test_shift.py
<reponame>tvelagapudi/lenstronomy
__author__ = 'sibirrer'

from lenstronomy.LensModel.Profiles.alpha_shift import Shift

import numpy as np
import numpy.testing as npt
import pytest


class TestShift(object):
    """
    tests the Shift lens profile (a constant deflection alpha_x, alpha_y):
    the potential is reported as zero, the derivatives return the fixed
    deflection, and the hessian vanishes everywhere.
    """
    def setup(self):
        self.shift = Shift()
        # Constant deflection used by every test case below.
        alpha_x, alpha_y = 10., 0.1
        self.kwargs_lens = {'alpha_x': alpha_x, 'alpha_y': alpha_y}

    def test_function(self):
        # The potential of a pure shift is zero at any position.
        x = np.array([1])
        y = np.array([2])
        values = self.shift.function(x, y, **self.kwargs_lens)
        npt.assert_almost_equal(values[0], 0, decimal=5)

        x = np.array([0])
        y = np.array([0])
        values = self.shift.function(x, y, **self.kwargs_lens)
        npt.assert_almost_equal(values[0], 0, decimal=5)

        x = np.array([2, 3, 4])
        y = np.array([1, 1, 1])
        values = self.shift.function(x, y, **self.kwargs_lens)
        npt.assert_almost_equal(values[0], 0, decimal=5)
        npt.assert_almost_equal(values[1], 0, decimal=5)

    def test_derivatives(self):
        # Derivatives equal the configured constant deflection everywhere.
        x = np.array([1])
        y = np.array([2])
        f_x, f_y = self.shift.derivatives(x, y, **self.kwargs_lens)
        npt.assert_almost_equal(f_x[0], 10, decimal=5)
        npt.assert_almost_equal(f_y[0], 0.1, decimal=5)

        x = np.array([1, 3, 4])
        y = np.array([2, 1, 1])
        values = self.shift.derivatives(x, y, **self.kwargs_lens)
        npt.assert_almost_equal(values[0][0], 10, decimal=5)
        npt.assert_almost_equal(values[1][0], 0.1, decimal=5)

    def test_hessian(self):
        # A constant deflection has a vanishing hessian.
        x = np.array([1])
        y = np.array([2])
        f_xx, f_yy, f_xy = self.shift.hessian(x, y, **self.kwargs_lens)
        npt.assert_almost_equal(f_xx, 0, decimal=5)
        npt.assert_almost_equal(f_yy, 0, decimal=5)
        npt.assert_almost_equal(f_xy, 0, decimal=5)


if __name__ == '__main__':
    pytest.main()
MIk201408/dawdler-series
dawdler/dawdler-client-plug/src/main/java/com/anywide/dawdler/clientplug/dynamicform/control/RadioControl.java
<reponame>MIk201408/dawdler-series /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.anywide.dawdler.clientplug.dynamicform.control; import com.anywide.dawdler.clientplug.velocity.ControlTag; /** * * @Title: RadioControl.java * @Description: 单选框实现 * @author: jackson.song * @date: 2006年08月10日 * @version V1.0 * @email: <EMAIL> */ public class RadioControl extends Control { protected RadioControl(ControlTag tag) { super(tag); } @Override protected String replaceContent() { String controlname =tag.getControlname(); String controltype =tag.getControltype(); String css =tag.getCss(); String viewname= tag.getViewname(); String validaterule =tag.getValidaterule(); String showitems = tag.getShowitems(); String value = tag.getValue(); String additional = tag.getAdditional(); String radiodefault = tag.getRadiodefault(); if(showitems==null){ throw new NullPointerException("show item can't null!"); } String[] showitem = showitems.split(","); StringBuffer sb = new StringBuffer(150); for(int i = 0;i<showitem.length;i++){ sb.append(ControlContent.INPUTSTART.replace(ControlContent.CONTROLNAMEREPLACE,controlname).replace(ControlContent.CONTROLTYPEREPLACE, controltype).replace(ControlContent.VIEWNAMEREPLACE,viewname)); 
if(css!=null&&!css.trim().equals(""))sb.append(ControlContent.TAGCSS.replace(ControlContent.CSSREPLACE,css)); if(validaterule!=null&&!validaterule.trim().equals(""))sb.append(ControlContent.TAGVALIDATE.replace(ControlContent.VALIDATERULEREPLACE,validaterule)); sb.append(ControlContent.TAGVALUE.replace(ControlContent.VALUEREPLACE,i+"")); sb.append(value!=null?value.equals(""+i)?ControlContent.CHECKED:"":(i==0&&radiodefault!=null&&radiodefault.equals("true"))?ControlContent.CHECKED:""); if(additional!=null)sb.append(" "+additional); sb.append(ControlContent.INPUTEND); sb.append(showitem[i]+" "); } return sb.toString(); } }
mykeylab/MYKEY-Client-SDK
Android/Sample/sdk/src/main/java/com/mykey/sdk/jni/MYKEYWalletJni.java
<gh_stars>1-10 package com.mykey.sdk.jni; import android.content.Context; import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.TypeReference; import com.mykey.sdk.common.constants.ErrorCons; import com.mykey.sdk.common.util.LogUtil; import com.mykey.sdk.jni.entity.response.BaseJniResponse; import com.mykey.sdk.jni.entity.response.BinaryResponse; import com.mykey.sdk.jni.entity.response.EmptyResponse; import com.mykey.sdk.jni.entity.response.EncodeParamResponse; import com.mykey.sdk.jni.entity.response.KeyResponse; import com.mykey.sdk.jni.entity.response.RequestPubKeyResponse; import com.mykey.sdk.jni.entity.response.SignResponse; import mykeycore.ApiRequestCallback; import mykeycore.InitEntity; import mykeycore.Mykeycore; /** * Created by zero on 2019/5/24. */ public class MYKEYWalletJni { private static final String TAG = "MYKEYWalletJni"; // private static final Gson gson = new Gson(); public static void init(InitEntity initEntity) { EmptyResponse encodeEntity = getResultData(Mykeycore.init(initEntity), new TypeReference<BaseJniResponse<EmptyResponse>>() { }); if (null == encodeEntity) { return; } } public static String encodeParam(String param) { EncodeParamResponse encodeEntity = getResultData(Mykeycore.encodeParam(param), new TypeReference<BaseJniResponse<EncodeParamResponse>>() { }); if (null == encodeEntity) { return ""; } return encodeEntity.getEncodeData(); } public static String getRequestPubKey() { RequestPubKeyResponse pubKeyResponse = getResultData(Mykeycore.requestPubKey(), new TypeReference<BaseJniResponse<RequestPubKeyResponse>>() { }); if (null == pubKeyResponse) { return ""; } return pubKeyResponse.getRequestPubKey(); } public static void getBalance(String chain, String code, String symbol, ApiRequestCallback apiRequest) { getResultData(Mykeycore.getBalance(chain, code, symbol, apiRequest), new TypeReference<BaseJniResponse<EmptyResponse>>() { }); } public static void getBindInfo(ApiRequestCallback apiRequest) { 
getResultData(Mykeycore.getBindInfo(apiRequest), new TypeReference<BaseJniResponse<EmptyResponse>>() { }); } public static void getUnlockList(String chain, String code, String symbol, ApiRequestCallback apiRequest) { getResultData(Mykeycore.getUnlockList(chain, code, symbol, apiRequest), new TypeReference<BaseJniResponse<EmptyResponse>>() { }); } public static KeyResponse createPrivateKey(Context context) { KeyResponse keyResponse = getResultData(Mykeycore.createPrivate(context.getCacheDir().getAbsolutePath()), new TypeReference<BaseJniResponse<KeyResponse>>() { }); return keyResponse; } public static String sign(String encrptyPrivate, String unsignedData) { SignResponse signResponse = getResultData(Mykeycore.sign(encrptyPrivate, unsignedData), new TypeReference<BaseJniResponse<SignResponse>>() { }); if (null == signResponse) { return ""; } return signResponse.getSignedData(); } public static String ethJsonToBinary(String abiJson, String method, String paramJson) { BinaryResponse binaryResponse = getResultData(Mykeycore.ethJsonToBinary(abiJson, method, paramJson), new TypeReference<BaseJniResponse<BinaryResponse>>() { }); if (null == binaryResponse) { return ""; } return binaryResponse.getBinary(); } // private static <T> T getResultData(String responseJson, TypeToken typeToken) { // BaseJniResponse<T> responseEntity = gson.fromJson(responseJson, typeToken.getType()); // // handle exception // handleErrorCode(responseEntity); // return responseEntity.getResultData(); // } private static <T> T getResultData(String responseJson, TypeReference<BaseJniResponse<T>> typeReference) { BaseJniResponse<T> responseEntity = JSONObject.parseObject(responseJson, typeReference); // handle exception handleErrorCode(responseEntity); return responseEntity.getResultData(); } private static void handleErrorCode(BaseJniResponse responseEntity) { switch (responseEntity.getResultCode()) { case ErrorCons.ERROR_CODE_OK: // do nothing break; default: // 
ToastTool.toast(responseEntity.getResultMessage()); // ToastTool.toast(MyKeyApplication.getMainApplication().getResources().getString(R.string.error_pelease_repeat_after)); LogUtil.e(TAG, responseEntity.getResultMessage()); break; } } }
veriKami/ePF_API_doc
ePF_API_doxy-0.2.x-dev/html/classep___s_p___osoba.js
<filename>ePF_API_doxy-0.2.x-dev/html/classep___s_p___osoba.js var classep___s_p___osoba = [ [ "__toString", "classep___s_p___osoba.html#a7516ca30af0db3cdbf9a7739b48ce91d", null ], [ "getDataStruct", "classep___s_p___osoba.html#a79dabf680e30ee6e62508a8df24ed243", null ], [ "stanowiska", "classep___s_p___osoba.html#a9dd4480bf7002df382ab888d880131c2", null ], [ "$_aliases", "classep___s_p___osoba.html#ab4e31d75f0bc5d512456911e5d01366b", null ], [ "$_field_init_lookup", "classep___s_p___osoba.html#a4a4d54ae35428077a7c61ec8a5139af3", null ], [ "$_stanowiska", "classep___s_p___osoba.html#a1192326d568efecaa005d7af560a56aa", null ] ];
ApacheSourceCode/kylin
core-cube/src/main/java/org/apache/kylin/cube/cuboid/algorithm/CuboidBenefitModel.java
<reponame>ApacheSourceCode/kylin /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kylin.cube.cuboid.algorithm; public class CuboidBenefitModel { private CuboidModel cuboidModel; private BenefitModel benefitModel; public CuboidBenefitModel(CuboidModel cuboidModel, BenefitModel benefitModel) { this.cuboidModel = cuboidModel; this.benefitModel = benefitModel; } public void reset(CuboidModel cuboidModel, BenefitModel benefitModel) { this.cuboidModel = cuboidModel; this.benefitModel = benefitModel; } public Long getCuboidId() { return cuboidModel == null ? null : cuboidModel.cuboidId; } public Double getBenefit() { return benefitModel == null ? 
null : benefitModel.benefit; } @Override public String toString() { return "CuboidBenefitModel [cuboidModel=" + cuboidModel + ", benefitModel=" + benefitModel + "]"; } public static class CuboidModel { public final long cuboidId; public final long recordCount; public final double spaceSize; public final double hitProbability; public final long scanCount; public CuboidModel(long cuboId, long recordCount, double spaceSize, double hitProbability, long scanCount) { this.cuboidId = cuboId; this.recordCount = recordCount; this.spaceSize = spaceSize; this.hitProbability = hitProbability; this.scanCount = scanCount; } @Override public String toString() { return "CuboidModel [cuboidId=" + cuboidId + ", recordCount=" + recordCount + ", spaceSize=" + spaceSize + ", hitProbability=" + hitProbability + ", scanCount=" + scanCount + "]"; } } public static class BenefitModel { public final double benefit; public final int benefitCount; public BenefitModel(double benefit, int benefitCount) { this.benefit = benefit; this.benefitCount = benefitCount; } @Override public String toString() { return "BenefitModel [benefit=" + benefit + ", benefitCount=" + benefitCount + "]"; } } }
ismo-karkkainen/datalackey
src/StringValueMapper.cpp
<gh_stars>0 // // StringValueMapper.cpp // datalackey // // Created by <NAME> on 23.10.17. // Copyright © 2017 <NAME>. All rights reserved. // // Licensed under Universal Permissive License. See License.txt. #include "StringValueMapper.hpp" StringValueMapper::~StringValueMapper() { }
AlexanderWert/kibana
x-pack/test/api_integration/apis/management/index_management/templates.helpers.js
<filename>x-pack/test/api_integration/apis/management/index_management/templates.helpers.js<gh_stars>1-10 /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ import { API_BASE_PATH, INDEX_PATTERNS } from './constants'; export const registerHelpers = ({ supertest }) => { let templatesCreated = []; const getAllTemplates = () => supertest.get(`${API_BASE_PATH}/index_templates`); const getOneTemplate = (name, isLegacy = false) => supertest.get(`${API_BASE_PATH}/index_templates/${name}?legacy=${isLegacy}`); const getTemplatePayload = ( name, indexPatterns = INDEX_PATTERNS, isLegacy = false, type = 'default' ) => { const baseTemplate = { name, indexPatterns, version: 1, template: { settings: { number_of_shards: 1, index: { lifecycle: { name: 'my_policy', }, }, }, mappings: { _source: { enabled: false, }, properties: { host_name: { type: 'keyword', }, created_at: { type: 'date', format: 'EEE MMM dd HH:mm:ss Z yyyy', }, }, }, aliases: { alias1: {}, }, }, _kbnMeta: { isLegacy, type, }, }; if (isLegacy) { baseTemplate.order = 1; } else { baseTemplate.priority = 1; } return baseTemplate; }; const createTemplate = (template) => { templatesCreated.push({ name: template.name, isLegacy: template._kbnMeta.isLegacy }); return supertest.post(`${API_BASE_PATH}/index_templates`).set('kbn-xsrf', 'xxx').send(template); }; const deleteTemplates = (templates) => supertest .post(`${API_BASE_PATH}/delete_index_templates`) .set('kbn-xsrf', 'xxx') .send({ templates }); const updateTemplate = (payload, templateName) => supertest .put(`${API_BASE_PATH}/index_templates/${templateName}`) .set('kbn-xsrf', 'xxx') .send(payload); // Delete all templates created during tests const cleanUpTemplates = async () => { try { await deleteTemplates(templatesCreated); templatesCreated = []; } catch (e) { // Silently swallow 
errors } }; return { getAllTemplates, getOneTemplate, getTemplatePayload, createTemplate, updateTemplate, deleteTemplates, cleanUpTemplates, }; };
iootclab/openjdk
openjdk11/test/jdk/sun/net/www/http/KeepAliveStream/KeepAliveStreamCloseWithWrongContentLength.java
/* * Copyright (c) 2002, 2010, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* * @test * @bug 4533243 * @summary Closing a keep alive stream gives NullPointerException * @run main/othervm/timeout=30 KeepAliveStreamCloseWithWrongContentLength */ import java.net.*; import java.io.*; public class KeepAliveStreamCloseWithWrongContentLength { static class XServer extends Thread { ServerSocket srv; Socket s; InputStream is; OutputStream os; XServer (ServerSocket s) { srv = s; } public void run() { try { s = srv.accept (); // read HTTP request from client InputStream is = s.getInputStream(); // read the first ten bytes for (int i=0; i<10; i++) { is.read(); } OutputStreamWriter ow = new OutputStreamWriter((os = s.getOutputStream())); ow.write("HTTP/1.0 200 OK\n"); // Note: The client expects 10 bytes. ow.write("Content-Length: 10\n"); ow.write("Content-Type: text/html\n"); // Note: If this line is missing, everything works fine. ow.write("Connection: Keep-Alive\n"); ow.write("\n"); // Note: The (buggy) server only sends 9 bytes. 
ow.write("123456789"); ow.flush(); } catch (Exception e) { } finally { try {if (os != null) { os.close(); }} catch (IOException e) {} } } } public static void main (String[] args) throws Exception { ServerSocket serversocket = new ServerSocket (0); try { int port = serversocket.getLocalPort (); XServer server = new XServer (serversocket); server.start (); URL url = new URL ("http://localhost:"+port); HttpURLConnection urlc = (HttpURLConnection)url.openConnection (); InputStream is = urlc.getInputStream (); int c = 0; while (c != -1) { try { c=is.read(); } catch (IOException ioe) { is.read (); break; } } is.close(); } catch (IOException e) { return; } catch (NullPointerException e) { throw new RuntimeException (e); } finally { if (serversocket != null) serversocket.close(); } } }
nelsonalayon/onvacationturismo
src/components/formulario.js
<reponame>nelsonalayon/onvacationturismo import React from 'react' // todos los campos de texto de este formulario son controlados, lo que significa que los datos que se guardan en state // no se repiten en los campos de texto sino que ustan la misma información (this.state.nombredelcampo). Si el state no // se inicializa el navegador va a dar error porque el navegador no va a poder encontrar la información class Formulario extends React.Component { state = {} // handleChange = (e) =>{ // this.setState({[e.target.name]: e.target.value}) // console.log({ // name:e.target.name, // value:e.target.value})} handleClick = () => { console.log("boton apretado") } // handleSubmit = (e) => { // e.preventDefault() // console.log("form was submited") // console.log(this.state) // } render() { return( <div className="container"> <form onSubmit= {this.props.onSubmit}> <label > Nombre completo</label> <input className= "entrada" onChange = {this.props.onChange} name = "nombre" type= "text" value={this.props.formValues.nombre}> </input> <label >Ciudad de residencia</label> <input className= "entrada" onChange = {this.props.onChange} name = "Ciudad" type= "text" value={this.props.formValues.Ciudad} > </input> <label >Dirección</label> <input className= "entrada" onChange = {this.props.onChange} name = "Direccion" type= "text" value={this.props.formValues.Direccion} > </input> <label >Teléfono</label> <input className= "entrada" onChange = {this.props.onChange} name = "Telefono" type= "text" value={this.props.formValues.Telefono} > </input> <button onClick = {this.handleClick} >sdfasd</button> <p>Cuando edites y deselecciones este control, el color de fondo de su contenedor cambiará: <input type="text"/></p> </form> </div> ) } } export default Formulario
betfair/opentsp
cmd/collect-statse/aggregator/snapshot.go
// Copyright 2014 The Sporting Exchange Limited. All rights reserved. // Use of this source code is governed by a free license that can be // found in the LICENSE file. // Package aggregator creates data points based on statse messages. package aggregator import ( "fmt" "log" "sort" "strings" "time" "opentsp.org/cmd/collect-statse/statse" "opentsp.org/internal/tsdb" ) type snapshotJob struct { Time time.Time Store *store Output []*tsdb.Point } func (job *snapshotJob) do() { byCluster := make(map[key]*entry) // Emit byhost stats. job.Store.Do(func(key key, host *entry) { // Update the synthetic cluster entry using the host entry. clusterKey := key clusterKey.Host = "" cluster := byCluster[clusterKey] if cluster == nil { cluster = newEntry() byCluster[clusterKey] = cluster } cluster.CountError += host.CountError cluster.CountOkay += host.CountOkay for key, values := range host.Buffer { for _, v := range values { cluster.Buffer[key].Append(v) } } // Process the host entry. for _, stat := range calc(host) { job.emitf(stat.Value, "%s.byhost.%s %s host=%s", key.Metric, stat.Name, key.Tags, key.Host) for i := range host.Buffer { host.Buffer[i].Reset() } } }) // Emit bycluster stats. for key, cluster := range byCluster { for _, stat := range calc(cluster) { job.emitf(stat.Value, "%s.%s %s host=NA", key.Metric, stat.Name, key.Tags) } } } // emitf emits a data point. func (job *snapshotJob) emitf(value interface{}, format string, arg ...interface{}) { series := fmt.Sprintf(format, arg...) id := strings.Fields(strings.Replace(series, "=", " ", -1)) point, err := tsdb.NewPoint(job.Time, value, id[0], id[1:]...) if err != nil { log.Printf("aggregator: %v", err) return } job.Output = append(job.Output, point) } // calc calculates stats based on the provided entry. 
func calc(entry *entry) []stat { s := make([]stat, 0, 2+statse.MaxKeys*5) s = append(s, stat{"count error=false", entry.CountOkay}) s = append(s, stat{"count error=true", entry.CountError}) for i, values := range entry.Buffer { if values == nil { // Never updated. continue } if len(values) == 0 { // Updated but not in this cycle. // Put in dummy zero value. values = []float32{0} } sort.Sort(ascending(values)) statseKey := statse.Key(i).String() s = append(s, stat{statseKey + ".min", min(values...)}) s = append(s, stat{statseKey + ".avg", avg(values...)}) s = append(s, stat{statseKey + ".p95", p95(values...)}) s = append(s, stat{statseKey + ".p99", p99(values...)}) s = append(s, stat{statseKey + ".max", max(values...)}) } return s } func min(a ...float32) float32 { return a[0] } func avg(a ...float32) float32 { return sum(a...) / float32(len(a)) } func p95(a ...float32) float32 { return a[95*len(a)/100] } func p99(a ...float32) float32 { return a[99*len(a)/100] } func max(a ...float32) float32 { return a[len(a)-1] } func sum(a ...float32) float32 { var sum float32 for i := range a { sum += a[i] } return sum } // stat represents a statistic based on a buffer. type stat struct { Name string Value interface{} } type ascending []float32 func (a ascending) Len() int { return len(a) } func (a ascending) Swap(i, j int) { a[i], a[j] = a[j], a[i] } func (a ascending) Less(i, j int) bool { return a[i] < a[j] }
tadhglewis/manager
packages/manager/apps/dedicated/client/app/dedicated/server/servers/servers.controller.js
import get from 'lodash/get'; import reduce from 'lodash/reduce'; import set from 'lodash/set'; import snakeCase from 'lodash/snakeCase'; export default class ServersCtrl { /* @ngInject */ constructor( $q, $translate, ouiDatagridService, ) { this.$q = $q; this.$translate = $translate; this.ouiDatagridService = ouiDatagridService; } $onInit() { this.criteria = JSON.parse(this.filter).map((criteria) => ({ property: get(criteria, 'field') || 'name', operator: get(criteria, 'comparator'), value: criteria.reference[0], })); this.stateEnumFilter = this.getEnumFilter(this.serverStateEnum, 'server_configuration_state_'); this.datacenterEnumFilter = this.getEnumFilter(this.datacenterEnum, 'server_datacenter_'); this.columnsConfig = [ { name: 'name', sortable: this.getSorting('name') }, { name: 'reverse', sortable: this.getSorting('reverse') }, { name: 'commercialRange', sortable: this.getSorting('commercialRange') }, { name: 'datacenter', sortable: this.getSorting('datacenter') }, { name: 'state', sortable: this.getSorting('state') }, ]; } static toUpperSnakeCase(str) { return snakeCase(str).toUpperCase(); } getEnumFilter(list, translationPrefix) { return { values: reduce( list, (result, item) => ({ ...result, [item]: this.$translate.instant(`${translationPrefix}${this.constructor.toUpperSnakeCase(item)}`), }), {}, ), }; } getSorting(property) { return this.sort === property ? this.sortOrder.toLowerCase() : ''; } loadServers() { const currentOffset = this.paginationNumber * this.paginationSize; set(this.ouiDatagridService, 'datagrids.dg-servers.paging.offset', currentOffset < this.paginationTotalCount ? 
currentOffset : this.paginationTotalCount); return this.$q.resolve({ data: get(this.dedicatedServers, 'data'), meta: { totalCount: this.paginationTotalCount, }, }); } onPageChange({ pageSize, offset }) { this.onListParamsChange({ page: parseInt(offset / pageSize, 10) + 1, pageSize, }); } onCriteriaChange($criteria) { const filter = $criteria.map((criteria) => ({ field: get(criteria, 'property') || 'name', comparator: criteria.operator, reference: [criteria.value], })); this.onListParamsChange({ filter: JSON.stringify(filter), }); } onSortChange({ name, order }) { this.onListParamsChange({ sort: name, sortOrder: order, }); } }
zandegran/java-api-sdk
smartling-jobs-api/src/main/java/com/smartling/api/jobs/v3/pto/TranslationJobCustomFieldPTO.java
<gh_stars>1-10 package com.smartling.api.jobs.v3.pto; import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; import lombok.NoArgsConstructor; @Data @NoArgsConstructor @AllArgsConstructor @Builder public class TranslationJobCustomFieldPTO { private String fieldUid; private String fieldName; private String fieldValue; }
jooyesle/jooyesle.github.io
poseMatch/js/monitor.js
<filename>poseMatch/js/monitor.js 'use strict'; class MonitoringData { constructor() {} toString() { return null; } } class SystemMonitoringData extends MonitoringData { constructor() { super(); this.cpuUsage = 0; this.memoryUsage = 0; } toString() { return 'mem:' + this.memoryUsage.toFixed(2) + ' MB'; } } class StreamMonitoringData extends MonitoringData { constructor() { super(); this.videoWidth = 0; this.videoHeight = 0; this.fps = 0; this.videoJitter = 0; this.audioJitter = 0; } toString() { if (this.audioJitter == 0) { return ( ' res:' + this.videoWidth + 'x' + this.videoHeight + ' fps:' + this.fps ); } return ( ' res:' + this.videoWidth + 'x' + this.videoHeight + ' fps:' + this.fps + ' jitter:' + this.audioJitter ); } } class BaseMonitor { constructor(canvasId, videoId, isRemote) { this.canvas = document.querySelector('#' + canvasId); this.video = document.querySelector('#' + videoId); this.canvasId = canvasId; this.isRemote = isRemote; this.textAlign = 'left'; this.data = null; this.x = 0; this.y = 15; } drawData() { if (this.canvas == null) { console.log('draw failed'); return; } var ctx = this.canvas.getContext('2d'); ctx.clearRect(0, 0, this.canvas.width, this.canvas.height); ctx.font = 'bold 12px Courier'; ctx.fillStyle = 'red'; ctx.textAlign = this.textAlign; ctx.fillText(this.data.toString(), this.x, this.y); } clearData() { var ctx = this.canvas.getContext('2d'); ctx.clearRect(0, 0, this.canvas.width, this.canvas.height); } monitoring() { return; } } class SystemMonitor extends BaseMonitor { constructor(canvasId, videoId) { super(canvasId, videoId, false); this.textAlign = 'right'; this.x = 315; this.y = 15; this.data = new SystemMonitoringData(); } monitoring() { this.monitoringMemoryUsage(); } async monitoringMemoryUsage() { var func = null; if (performance.measureMemory) { func = function () { return performance.measureMemory(); }; } else if (performance.measureUserAgentSpecificMemory) { func = function () { return 
performance.measureUserAgentSpecificMemory(); }; } else { console.log('not supported measureMemory'); return; } let result; try { result = await func(); this.data.memoryUsage = result.bytes / (1024 * 1024); console.log('mem:', this.data.memoryUsage); } catch (error) { if ( error instanceof DOMException && error.name === 'SecurityError' ) { console.log('The context is not secure.'); } else { throw error; } } } } class StreamMonitor extends BaseMonitor { constructor(canvasId, videoId, peerConnection, isRemote) { super(canvasId, videoId, isRemote); console.log('stream monitor id:', canvasId); this.senders = peerConnection.getSenders(); this.receivers = peerConnection.getReceivers(); this.data = new StreamMonitoringData(); } monitoring() { if (this.isRemote) { this.monitoringReceiverStats(); } else { this.monitoringSenderStats(); } } updateData(stats) { for (let report of stats.values()) { if (report.type != 'inbound-rtp' && report.type != 'outbound-rtp') continue; //console.log(report); if ( report.id.indexOf('RTCInboundRTPVideoStream') >= 0 || report.id.indexOf('RTCOutboundRTPVideoStream') >= 0 ) { if (report.frameWidth != null) { this.data.videoWidth = report.frameWidth; this.data.videoHeight = report.frameHeight; this.data.fps = report.framesPerSecond; } } else if ( report.id.indexOf('RTCInboundRTPAudioStream') >= 0 || report.id.indexOf('RTCOutboundRTPAudioStream') >= 0 ) { if (report.jitter != null) this.data.audioJitter = report.jitter; } } } async monitoringReceiverStats() { this.receivers.forEach(async (receiver) => { let stats = await receiver.getStats(); this.updateData(stats); }); } async monitoringSenderStats() { this.senders.forEach(async (sender) => { let stats = await sender.getStats(); this.updateData(stats); }); } } async function monitoringStart(monitor) { if (monitor.enableMonitor) { monitor.monitoring(); setTimeout(monitoringStart, 1000, monitor); } } var gMonitor = null; class Monitor { constructor() { console.log('new Monitor!!'); this.monitors = 
[]; this.enableMonitor = false; } async start() { console.log('start monitor'); this.enableMonitor = true; monitoringStart(this); } async stop() { console.log('stop monitor'); this.enableMonitor = false; this.monitors.forEach((monitor) => { monitor.clearData(); }); } addSystemMonitor(canvasId, videoId) { console.log('add system monitor:', canvasId, videoId); this.monitors.push(new SystemMonitor(canvasId, videoId)); } addStreamMonitor(canvasId, videoId, peerConnection, isRemote) { console.log('add stream monitor:', canvasId); this.monitors.push( new StreamMonitor(canvasId, videoId, peerConnection, isRemote) ); } removeStreamMonitor(canvasId) { let index = this.monitors.findIndex( (monitor) => monitor.canvasId == canvasId ); if (index >= 0) { console.log( 'remove monitor:', this.monitors[index].canvasId, index ); this.monitors.splice(index, 1); } } async monitoring() { console.log('video monitors:', this.monitors.length); this.monitors.forEach(function (monitor) { monitor.monitoring(); monitor.drawData(); }); } static getMonitor() { if (gMonitor == null) gMonitor = new Monitor(); return gMonitor; } static onStateChanged(...args) { let type = args[0]; console.log(args); if (type === 'connected') { let canvasId = args[1]; let videoId = args[2]; let peerConnection = args[3]; if (canvasId == 'remotemonitor1') { Monitor.getMonitor().addStreamMonitor( 'localmonitor', 'localvideo', peerConnection, false ); } Monitor.getMonitor().addStreamMonitor( canvasId, videoId, peerConnection, true ); } else if (type === 'disconnected') { let canvasId = args[1]; Monitor.getMonitor().removeStreamMonitor(canvasId); } } }
wongy91/cryptoexchange
lib/cryptoexchange/exchanges/therocktrading/market.rb
<gh_stars>0 module Cryptoexchange::Exchanges module Therocktrading class Market NAME = 'therocktrading' API_URL = 'https://www.therocktrading.com/api' end end end
DastanIqbal/DastanLib
dastanlib/src/main/java/com/dastanapps/dastanlib/views/captcha/TextCaptcha.java
package com.dastanapps.dastanlib.views.captcha;

import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.Canvas;
import android.graphics.LinearGradient;
import android.graphics.Paint;
import android.graphics.Shader;

import java.util.ArrayList;
import java.util.Random;

/**
 * Captcha that renders a random character string onto a gradient bitmap.
 * The generated string is stored in {@code answer} for later verification.
 */
public class TextCaptcha extends Captcha {
    protected TextOptions options;
    private int wordLength;

    /** Character classes the captcha text may be drawn from. */
    public enum TextOptions {
        UPPERCASE_ONLY, LOWERCASE_ONLY, NUMBERS_ONLY, LETTERS_ONLY, NUMBERS_AND_LETTERS
    }

    public TextCaptcha(int wordLength, TextOptions opt) {
        init(0, 0, wordLength, opt);
    }

    public TextCaptcha(int width, int height, int wordLength, TextOptions opt) {
        init(width, height, wordLength, opt);
    }

    private void init(int width, int height, int wordLength, TextOptions opt) {
        setHeight(height);
        setWidth(width);
        this.options = opt;
        usedColors = new ArrayList<Integer>();
        this.wordLength = wordLength;
        this.image = image();
    }

    /**
     * Renders the captcha bitmap and records the answer string.
     *
     * Fix: the original implementation ignored {@link TextOptions} entirely
     * and always mixed upper case, lower case and digits; characters are now
     * drawn according to the configured option (see {@link #randomChar}).
     */
    @Override
    protected Bitmap image() {
        LinearGradient gradient = new LinearGradient(0, 0, getWidth() / this.wordLength, getHeight() / 2,
                color(), color(), Shader.TileMode.MIRROR);
        Paint p = new Paint();
        p.setDither(true);
        p.setShader(gradient);
        Bitmap bitmap = Bitmap.createBitmap(getWidth(), getHeight(), Config.ARGB_8888);
        Canvas c = new Canvas(bitmap);
        c.drawRect(0, 0, getWidth(), getHeight(), p);

        Paint tp = new Paint();
        tp.setDither(true);
        tp.setTextSize(getWidth() / getHeight() * 20);

        Random r = new Random(System.currentTimeMillis());
        StringBuilder word = new StringBuilder(this.wordLength);
        for (int i = 0; i < this.wordLength; i++) {
            word.append(randomChar(r));
        }
        this.answer = word.toString();

        // Draw each character at a jittered position with a random skew and color.
        char[] data = this.answer.toCharArray();
        for (int i = 0; i < data.length; i++) {
            this.x += (30 - (3 * this.wordLength)) + (Math.abs(r.nextInt()) % (65 - (1.2 * this.wordLength)));
            this.y = 50 + Math.abs(r.nextInt()) % 50;
            Canvas cc = new Canvas(bitmap);
            tp.setTextSkewX(r.nextFloat() - r.nextFloat());
            tp.setColor(color());
            cc.drawText(data, i, 1, this.x, this.y, tp);
            tp.setTextSkewX(0);
        }
        return bitmap;
    }

    /**
     * Picks one random character honoring the configured {@link TextOptions}.
     * Digits come from '1'-'9' (no '0'), matching the original character range.
     * A null option falls back to the original mixed behavior.
     */
    private char randomChar(Random r) {
        TextOptions opt = (options == null) ? TextOptions.NUMBERS_AND_LETTERS : options;
        switch (opt) {
            case UPPERCASE_ONLY:
                return upperCase(r);
            case LOWERCASE_ONLY:
                return lowerCase(r);
            case NUMBERS_ONLY:
                return digit(r);
            case LETTERS_ONLY:
                return r.nextBoolean() ? upperCase(r) : lowerCase(r);
            case NUMBERS_AND_LETTERS:
            default:
                switch (r.nextInt(3)) {
                    case 0:
                        return upperCase(r);
                    case 1:
                        return lowerCase(r);
                    default:
                        return digit(r);
                }
        }
    }

    // 'A'-'Z'; same range as the original r.nextInt(91 - 65) + 65.
    private static char upperCase(Random r) {
        return (char) (r.nextInt(26) + 'A');
    }

    // 'a'-'z'; same range as the original r.nextInt(123 - 97) + 97.
    private static char lowerCase(Random r) {
        return (char) (r.nextInt(26) + 'a');
    }

    // '1'-'9'; same range as the original r.nextInt(58 - 49) + 49.
    private static char digit(Random r) {
        return (char) (r.nextInt(9) + '1');
    }
}
kevinpetersavage/biodata
biodata-models/src/main/java/org/opencb/biodata/models/pedigree/Family.java
/*
 * <!--
 *   ~ Copyright 2015-2017 OpenCB
 *   ~
 *   ~ Licensed under the Apache License, Version 2.0 (the "License");
 *   ~ you may not use this file except in compliance with the License.
 *   ~ You may obtain a copy of the License at
 *   ~
 *   ~     http://www.apache.org/licenses/LICENSE-2.0
 *   ~
 *   ~ Unless required by applicable law or agreed to in writing, software
 *   ~ distributed under the License is distributed on an "AS IS" BASIS,
 *   ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *   ~ See the License for the specific language governing permissions and
 *   ~ limitations under the License.
 * -->
 *
 */
package org.opencb.biodata.models.pedigree;

import java.util.Objects;
import java.util.Set;
import java.util.TreeSet;

/**
 * A nuclear family: a father, a mother, and their children.
 * Identity (equals/hashCode) is defined by the parents' ids only; the
 * mutable children set deliberately does not participate.
 *
 * @author <NAME> &lt;<EMAIL>&gt;
 */
public class Family {
    private Individual father;
    private Individual mother;
    private Set<Individual> children;

    public Family(Individual father, Individual mother) {
        this.father = father;
        this.mother = mother;
        this.children = new TreeSet<>();
    }

    /**
     * Two families are equal when both parents have the same ids.
     * Null-safe: a missing parent compares equal only to a missing parent.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof Family)) return false;

        Family family = (Family) o;
        return Objects.equals(idOf(father), idOf(family.father))
                && Objects.equals(idOf(mother), idOf(family.mother));
    }

    /**
     * Fix: the original hashCode mixed in the mutable {@code children} set
     * while equals() compared only parent ids, violating the equals/hashCode
     * contract (equal families could hash differently, and adding a child
     * changed the hash). Only the parents' ids participate now.
     */
    @Override
    public int hashCode() {
        return Objects.hash(idOf(father), idOf(mother));
    }

    // Null-safe id extraction shared by equals() and hashCode().
    private static Object idOf(Individual individual) {
        return individual == null ? null : individual.getId();
    }

    public Individual getFather() {
        return father;
    }

    public void setFather(Individual father) {
        this.father = father;
    }

    public Individual getMother() {
        return mother;
    }

    public void setMother(Individual mother) {
        this.mother = mother;
    }

    /** Adds a child to this family (duplicates are ignored by the set). */
    public void addChild(Individual ind) {
        this.children.add(ind);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        sb.append("father=");
        sb.append(father.getId());
        sb.append(", mother=");
        sb.append(mother.getId());
        if (children.size() > 0) {
            sb.append(", children=[");
            for (Individual ind : children) {
                sb.append(ind.getId() + " ");
            }
            sb.append("]");
        }
        sb.append("}\n");
        return sb.toString();
    }
}
zheng19851/pipeline-engine
pipeline-engine-api/src/main/java/com/runssnail/pipeline/api/BaseFactory.java
package com.runssnail.pipeline.api; import com.runssnail.pipeline.api.terminate.DefaultTerminateStrategyFactory; import com.runssnail.pipeline.api.terminate.TerminateStrategyFactory; /** * @author zhengwei * Created on 2020-09-12 */ public abstract class BaseFactory implements Lifecycle { /** * 中断策略工厂 */ protected TerminateStrategyFactory terminateStrategyFactory; @Override public void init() { this.initTerminateStrategyFactory(); } @Override public void close() { } protected void initTerminateStrategyFactory() { if (this.terminateStrategyFactory == null) { this.terminateStrategyFactory = new DefaultTerminateStrategyFactory(); } } public TerminateStrategyFactory getTerminateStrategyFactory() { return terminateStrategyFactory; } public void setTerminateStrategyFactory(TerminateStrategyFactory terminateStrategyFactory) { this.terminateStrategyFactory = terminateStrategyFactory; } }
UjalaJha/DataStructures
S9_heap_sort.c
#include<stdio.h> void max_heapify(int a[], int i, int heapsize) { int tmp, largest; int l = (2 * i) + 1; int r = (2 * i) + 2; if ((l <= heapsize) && (a[l] > a[i])) largest = l; else largest = i; if ((r <= heapsize) && (a[r] > a[largest])) largest = r ; if (largest != i) { tmp = a[i]; a[i] = a[largest]; a[largest] = tmp; max_heapify(a, largest, heapsize); } } void build_max_heap(int a[], int heapsize) { int i; for (i = heapsize/2; i >= 0; i--) { max_heapify(a, i, heapsize); } } void heapsort(int a[], int heapsize) { int i, tmp; build_max_heap(a, heapsize); for (i = heapsize; i > 0; i--) { tmp = a[i]; a[i] = a[0]; a[0] = tmp; heapsize--; max_heapify(a, 0, heapsize); } } int main() { int i,n,arr[100]; printf("enter how many number : "); scanf("%d",&n); printf("enter number : "); for(i=0; i<n; i++) { scanf("%d",&arr[i]); } int heapsize=n-1; heapsort(arr,heapsize); printf("Sorted array is : "); for(i=0; i<n; i++) { printf("\t %d",arr[i]); } }
suomenriistakeskus/oma-riista-web
src/main/java/fi/riista/feature/gamediary/GameSpeciesRepository.java
<reponame>suomenriistakeskus/oma-riista-web<gh_stars>10-100 package fi.riista.feature.gamediary; import fi.riista.feature.common.repository.BaseRepository; import java.util.List; import java.util.Optional; public interface GameSpeciesRepository extends BaseRepository<GameSpecies, Long> { Optional<GameSpecies> findByOfficialCode(int officialCode); List<GameSpecies> findByOfficialCodeIn(List<Integer> officialCodes); List<GameSpecies> findBySrvaOrdinalNotNullOrderBySrvaOrdinal(); List<GameSpecies> findAllByOfficialCodeIn(List<Integer> codes); }
bek0s/gbkf
src/gbkfit/utils/miscutils.py
import importlib.abc
import importlib.util
import os
import pathlib


def get_attr_from_file(file, attr):
    """Load ``file`` as a standalone module and return its attribute ``attr``.

    Raises AttributeError if the loaded module does not define ``attr``.
    """
    module_name = pathlib.Path(file).stem
    module_spec = importlib.util.spec_from_file_location(module_name, file)
    module = importlib.util.module_from_spec(module_spec)
    assert isinstance(module_spec.loader, importlib.abc.Loader)
    module_spec.loader.exec_module(module)
    return getattr(module, attr)


def _name_prefix(prefix, i, zero_prefix, zero_index):
    """Build the rename prefix for collection ``i``.

    Collections after the first always get ``prefix`` + index; the first
    collection gets them only when ``zero_prefix`` / ``zero_index`` ask
    for it.  A trailing ``'_'`` is appended whenever the prefix is
    non-empty.
    """
    full_prefix = ''
    if bool(i) or zero_prefix:
        full_prefix += prefix
    if bool(i) or zero_index:
        full_prefix += str(i)
    if full_prefix:
        full_prefix += '_'
    return full_prefix


def merge_lists_and_make_mappings(
        list_list, prefix, zero_prefix=False, zero_index=False):
    """Merge lists of names into one list, prefixing to avoid collisions.

    Returns ``(merged_list, mappings)`` where ``mappings[i]`` maps each
    original name of ``list_list[i]`` to its (possibly prefixed) new name.
    """
    list_merged = list()
    list_mappings = list()
    for i, item in enumerate(list_list):
        list_mappings.append(dict())
        # The prefix is loop-invariant per collection; compute it once.
        full_prefix = _name_prefix(prefix, i, zero_prefix, zero_index)
        for old_name in item:
            new_name = f'{full_prefix}{old_name}'
            list_mappings[i][old_name] = new_name
            list_merged.append(new_name)
    return list_merged, list_mappings


def merge_dicts_and_make_mappings(
        dict_list, prefix, zero_prefix=False, zero_index=False):
    """Merge dicts into one dict, prefixing keys to avoid collisions.

    Returns ``(merged_dict, mappings)`` where ``mappings[i]`` maps each
    original key of ``dict_list[i]`` to its (possibly prefixed) new key.
    """
    dict_merged = dict()
    dict_mappings = list()
    for i, item in enumerate(dict_list):
        dict_mappings.append(dict())
        full_prefix = _name_prefix(prefix, i, zero_prefix, zero_index)
        for old_name, value in item.items():
            new_name = f'{full_prefix}{old_name}'
            dict_mappings[i][old_name] = new_name
            dict_merged[new_name] = value
    return dict_merged, dict_mappings


def to_native_byteorder(arr):
    """Return ``arr`` in native byte order (the same array if already native)."""
    if arr.dtype.isnative:
        return arr
    # BUGFIX: np.ndarray.newbyteorder() was removed in NumPy 2.0; byteswap
    # the data and reinterpret it with the opposite-endian dtype instead.
    return arr.byteswap().view(arr.dtype.newbyteorder())


def make_unique_path(path):
    """Return ``path`` or, if it exists, the first free ``{path}_{i}`` variant."""
    i = 0
    base = path
    while os.path.exists(path):
        i += 1
        path = f'{base}_{i}'
    return path
saurabhpro/Life-coding
src/data_structure/graph/bfs/MinimumHeightTree.java
package data_structure.graph.bfs;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.lang.reflect.Array;
import java.util.*;

/**
 * LeetCode 310 "Minimum Height Trees": find the roots that minimize tree
 * height. This implementation runs a full BFS from every node — O(n*(n+e))
 * overall — then groups nodes by height and returns the minimum-height group.
 *
 * NOTE(review): the import of java.lang.reflect.Array is unused.
 * NOTE(review): the @Test methods assert only assertEquals(2, 2), i.e. they
 * never verify the result; they also print the test instance's own `g`
 * field, which is never assigned (findMinHeightTrees is called on `mht`,
 * a different instance), so `null` is printed — confirm intent.
 */
public class MinimumHeightTree {

    MinimumHeightTree mht;
    // Adjacency list: g[v] holds the neighbors of vertex v.
    List<Integer>[] g;

    @BeforeEach
    public void init() {
        mht = new MinimumHeightTree();
    }

    @Test
    public void firstTest() {
        int[][] edges = new int[][]{
                {1, 0},
                {1, 2},
                {1, 3}
        };
        int n = 4;
        mht.findMinHeightTrees(n,edges);
        System.out.println(g);
        Assertions.assertEquals(2, 2);
    }

    @Test
    public void secondTest() {
        int[][] edges = new int[][]{
                {3,0},{3,1},{3,2},{3,4},{5,4}
        };
        int n = 6;
        mht.findMinHeightTrees(n,edges);
        System.out.println(g);
        Assertions.assertEquals(2, 2);
    }

    @Test
    public void thirdTest() {
        // Single node, no edges.
        int[][] edges = new int[][]{
        };
        int n = 1;
        mht.findMinHeightTrees(n,edges);
        System.out.println(g);
        Assertions.assertEquals(2, 2);
    }

    @Test
    public void fourthTest() {
        int[][] edges = new int[][]{
                {0,1}
        };
        int n = 2;
        mht.findMinHeightTrees(n,edges);
        System.out.println(g);
        Assertions.assertEquals(2, 2);
    }

    /**
     * Returns every node whose BFS tree has the minimum height.
     * Runs one full-graph BFS per candidate root (brute force).
     */
    public List<Integer> findMinHeightTrees(int n, int[][] edges) {
        g = new ArrayList[n];
        g = buildGraph(g, edges, n);
        int[] height = new int[n];
        Queue<Integer> q;
        for (int i = 0; i < n; i++) {
            // Fresh queue and visited set for each BFS root.
            q = new LinkedList<>();
            q.add(i);
            int h = findMinHeight(i, q,0,new boolean[n],height);
            System.out.println("node: "+i);
            System.out.println("height: "+h);
            height[i]=h;
        }
        // TreeMap keeps heights sorted; the first entry is the minimum.
        TreeMap<Integer,ArrayList<Integer>> map=new TreeMap();
        for(int i=0;i<height.length;i++){
            map.compute(height[i],(k,v)->v==null?new ArrayList<>():v).add(i);
        }
        List<Integer> res=map.entrySet().iterator().next().getValue();
        return res;
    }

    /**
     * Level-order BFS; `len` counts the levels drained, so len-1 is the
     * height of the BFS tree rooted at `root`.
     * NOTE(review): the `root` and `height` parameters are unused here.
     */
    private int findMinHeight(int root, Queue<Integer> q, int len, boolean[] visited, int[] height) {
        while (!q.isEmpty()){
            int size=q.size();
            for(int i=0;i<size;i++){
                int r=q.poll();
                visited[r]=true;
                for(int node:g[r]){
                    if(!visited[node]) q.add(node);
                }
            }
            len+=1;
        }
        return len-1;
    }

    /**
     * Builds an undirected adjacency list from the edge pairs.
     * NOTE(review): the incoming `g` argument is discarded and reallocated.
     */
    private List<Integer>[] buildGraph(List<Integer>[] g, int[][] edges, int n) {
        g = new ArrayList[n];
        for (int i = 0; i < n; i++) {
            g[i] = new ArrayList<>();
        }
        for (int i = 0; i < edges.length; i++) {
            int[] e = edges[i];
            int x = e[0];
            int y = e[1];
            g[x].add(y);
            g[y].add(x);
        }
        return g;
    }
}
fopnet/greedyAlgorithms
src/main/java/greedyAlgorithms/Java8/TesteJava8.java
package greedyAlgorithms.Java8;

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;

/**
 * Java 8 streams exercise: filter a list of "Country, City" strings for
 * entries from Portugal, demonstrated two ways (collect vs. forEach).
 * The large commented blocks below are reference snippets kept for study.
 */
class TesteJava8 {

    static List<String> places = new ArrayList<>();

    // preparing our data
    // NOTE(review): each call appends another 5 entries to the shared static
    // list, so repeated calls duplicate data — acceptable for a one-shot demo.
    public static List<String> getPlaces(){
        places.add("Portugal, Lisboa");
        places.add("Portugal, Porto");
        places.add("India, Delhi");
        places.add("USA, New York");
        places.add("Africa, Nigeria");
        return places;
    }

    public static void main( String[] args ) {
        List<String> myPlaces = getPlaces();
        System.out.println("Places from Portugal:");

        // ADD YOUR CODE HERE
        // Variant 1: case-insensitive substring match, collected to a list.
        System.out.println(myPlaces.stream().filter(s -> s.toLowerCase().indexOf("portugal") >=0).collect(Collectors.toList()));

        // Variant 2: same filter via contains(), upper-cased and printed one per line.
        myPlaces.stream().filter(s -> s.toUpperCase().contains("PORTUGAL")).map(String::toUpperCase).forEach(System.out::println);

        // Piloto mesmoPiloto = this.pilotos.stream().filter(p -> p.equals(v.getPiloto())).findFirst().get();

        // this.voltas.stream().max(Comparator.comparing(Volta::getHora)).get();

        /**
         * IntStream.range(0, pilotoList.size()).mapToObj(idx -> { Piloto p = pilotoList.get(idx); p.setPosicaoChegada(idx + 1); p.calcularTempoDecorridoDoVencedor(vencedor); return p; }).collect(Collectors.toList());
         */

        /**
        static public Function<String, Volta> fromString = (line) -> {
            String[] data = line.split("(\\s{2,})");// a CSV has comma separated lines
            Volta v = new Volta();
            v.setHora(LocalTime.parse(data[0]));
            v.setPiloto(new Piloto(data[1]));
            v.setId(Integer.parseInt(data[2]));
            v.setTempo(LocalTime.parse("0:".concat(data[3]), DateTimeFormatter.ofPattern("H[H]:m:ss.SSS")));
            v.setVelocidadeMedia(Double.parseDouble(data[4].replace(",", ".")));
            return v;
        };

        private static List<Volta> streamToList(Stream<String> stream) {
            return stream.skip(1).map(Volta.fromString).collect(Collectors.toList());
        }
        */

        /**
        /**
         * Retorna o piloto vencedor da corrida que completou 4 voltas
        public Piloto getVencedor() {
            return this.voltas.stream()
                    .filter(v -> v.getPiloto().getNumeroVoltasCompletas() == this.numeroVoltasCorrida)
                    .min(Comparator.comparing(Volta::getHora))
                    .get()
                    .getPiloto();
        }
        */
    }
}
giancastro/spatialpandas
spatialpandas/geometry/multipoint.py
import numpy as np
from dask.dataframe.extensions import make_array_nonempty
from pandas.core.dtypes.dtypes import register_extension_dtype

from ..geometry._algorithms.intersection import multipoints_intersect_bounds
from ..geometry.base import GeometryDtype
from ..geometry.baselist import GeometryList, GeometryListArray


@register_extension_dtype
class MultiPointDtype(GeometryDtype):
    """Pandas extension dtype for columns of MultiPoint geometries."""
    _geometry_name = 'multipoint'

    @classmethod
    def construct_array_type(cls, *args):
        # Pandas may pass args here; this dtype does not accept any.
        if len(args) > 0:
            raise NotImplementedError("construct_array_type does not support arguments")
        return MultiPointArray


class MultiPoint(GeometryList):
    """Scalar multipoint geometry: a flat buffer of interleaved x/y values."""
    _nesting_levels = 0

    @classmethod
    def construct_array_type(cls):
        return MultiPointArray

    @classmethod
    def _shapely_to_coordinates(cls, shape):
        import shapely.geometry as sg
        if isinstance(shape, (sg.Point, sg.MultiPoint)):
            # Single line
            # NOTE(review): relies on the shapely 1.x `ctypes` accessor,
            # which was removed in shapely 2.0 — confirm pinned version.
            return np.asarray(shape.ctypes)
        else:
            raise ValueError("""
Received invalid value of type {typ}. Must be an instance of Point,
or MultiPoint""".format(typ=type(shape).__name__))

    def to_shapely(self):
        """
        Convert to shapely shape

        Returns:
            shapely MultiPoint shape
        """
        import shapely.geometry as sg
        # Buffer stores x0, y0, x1, y1, ...; reshape to (n_points, 2).
        point_coords = np.array(self.data.as_py(), dtype=self.numpy_dtype)
        return sg.MultiPoint(point_coords.reshape(len(point_coords) // 2, 2))

    @classmethod
    def from_shapely(cls, shape):
        """
        Build a spatialpandas MultiPoint object from a shapely shape

        Args:
            shape: A shapely MultiPoint or Point shape

        Returns:
            spatialpandas MultiPoint
        """
        return super().from_shapely(shape)

    @property
    def length(self):
        # Points have no extent, so length is identically zero.
        return 0.0

    @property
    def area(self):
        # Points have no extent, so area is identically zero.
        return 0.0

    def intersects_bounds(self, bounds):
        # True if any point lies within the (x0, y0, x1, y1) rectangle.
        x0, y0, x1, y1 = bounds
        result = np.zeros(1, dtype=np.bool_)
        offsets = self.buffer_outer_offsets
        multipoints_intersect_bounds(
            float(x0), float(y0), float(x1), float(y1),
            self.buffer_values, offsets[:-1], offsets[1:], result
        )
        return result[0]


class MultiPointArray(GeometryListArray):
    """Extension array of MultiPoint geometries."""
    _element_type = MultiPoint
    _nesting_levels = 1

    @property
    def _dtype_class(self):
        return MultiPointDtype

    @classmethod
    def from_geopandas(cls, ga):
        """
        Build a spatialpandas MultiPointArray from a geopandas GeometryArray or
        GeoSeries.

        Args:
            ga: A geopandas GeometryArray or GeoSeries of MultiPoint or
            Point shapes.

        Returns:
            MultiPointArray
        """
        return super().from_geopandas(ga)

    @property
    def length(self):
        # Vectorized analogue of MultiPoint.length: all zeros.
        return np.zeros(len(self), dtype=np.float64)

    @property
    def area(self):
        # Vectorized analogue of MultiPoint.area: all zeros.
        return np.zeros(len(self), dtype=np.float64)

    def intersects_bounds(self, bounds, inds=None):
        """Element-wise bounds test; `inds` optionally restricts which
        elements are tested (result aligns with `inds` when given)."""
        x0, y0, x1, y1 = bounds
        offsets0 = self.buffer_outer_offsets
        start_offsets0 = offsets0[:-1]
        stop_offsets0 = offsets0[1:]
        if inds is not None:
            start_offsets0 = start_offsets0[inds]
            stop_offsets0 = stop_offsets0[inds]
        result = np.zeros(len(start_offsets0), dtype=np.bool_)
        multipoints_intersect_bounds(
            float(x0), float(y0), float(x1), float(y1),
            self.buffer_values, start_offsets0, stop_offsets0, result
        )
        return result


def _multi_points_array_non_empty(dtype):
    """
    Create an example length 2 array to register with Dask.
    See https://docs.dask.org/en/latest/dataframe-extend.html#extension-arrays
    """
    return MultiPointArray([
        [1, 0, 1, 1],
        [1, 2, 0, 0]
    ], dtype=dtype)


if make_array_nonempty:
    make_array_nonempty.register(MultiPointDtype)(_multi_points_array_non_empty)
S4NDER/room_quality_monitor
app/views/chart_by_device_types/_chart_by_device_type.json.jbuilder
# Jbuilder partial: serializes one ChartByDeviceType record (id, timestamps)
# plus its JSON resource URL.
# BUGFIX: removed a stray "<gh_stars>0" dataset artifact that preceded the
# first statement and made the template invalid Ruby.
json.extract! chart_by_device_type, :id, :created_at, :updated_at
json.url chart_by_device_type_url(chart_by_device_type, format: :json)
Phygon/aaf
examples2/axLib/AxStorageErrors.h
<reponame>Phygon/aaf<filename>examples2/axLib/AxStorageErrors.h #ifndef __AxStorageErrors_h__ #define __AxStorageErrors_h__ //=---------------------------------------------------------------------= // // $Id$ $Name$ // // The contents of this file are subject to the AAF SDK Public Source // License Agreement Version 2.0 (the "License"); You may not use this // file except in compliance with the License. The License is available // in AAFSDKPSL.TXT, or you may obtain a copy of the License from the // Advanced Media Workflow Association, Inc., or its successor. // // Software distributed under the License is distributed on an "AS IS" // basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See // the License for the specific language governing rights and limitations // under the License. Refer to Section 3.3 of the License for proper use // of this Exhibit. // // WARNING: Please contact the Advanced Media Workflow Association, // Inc., for more information about any additional licenses to // intellectual property covering the AAF Standard that may be required // to create and distribute AAF compliant products. // (http://www.amwa.tv/policies). // // Copyright Notices: // The Original Code of this file is Copyright 1998-2009, licensor of the // Advanced Media Workflow Association. All rights reserved. // //=---------------------------------------------------------------------= /* +--------------------------------------------------------------- * * Microsoft Windows * Copyright (C) Microsoft Corporation, 1992 - 1996. * * File: storage.h * * Contents: This is the main file to include to use the reference * implementation. * * Note: Some of the defintions have been changed to TCHAR * instead of WCHAR, to add support for ANSI APIs. * TCHAR becomes WCHAR with _UNICODE defined, and char * otherwise. (See tchar.h for details). 
* *--------------------------------------------------------------- */ // The following error code definitions were copied from // AAF/ss-impl/ref/h/storage.h The are used to build the AxHrMap. The // storage.h file is problematic to include directly. It has macro // definitions that conflict with those in AAFCOMPlatform.h, uses // undefined types, pulls in numerous other header files, etc - all of // which are unnecessary insofar as these error codes are concerned. #if !defined(OS_WINDOWS) // Pick up the MAKE_SCODE macro from AAFResult.h #include <AAFResult.h> // The following from ss-impl/ref/h/ref.hxx #define FACILITY_STORAGE 0x0003 // storage errors (STG_E_*) /****** Storage Error Codes *************************************************/ /* DOS-based error codes */ #define STG_E_INVALIDFUNCTION \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x01) #define STG_E_FILENOTFOUND \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x02) #define STG_E_PATHNOTFOUND \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x03) #define STG_E_TOOMANYOPENFILES \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x04) #define STG_E_ACCESSDENIED \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x05) #define STG_E_INVALIDHANDLE \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x06) #define STG_E_INSUFFICIENTMEMORY \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x08) #define STG_E_INVALIDPOINTER \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x09) #define STG_E_NOMOREFILES \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x12) #define STG_E_DISKISWRITEPROTECTED \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x13) #define STG_E_SEEKERROR \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x19) #define STG_E_SHAREVIOLATION \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x20) #define STG_E_WRITEFAULT \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x1d) #define STG_E_READFAULT \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x1e) #define STG_E_LOCKVIOLATION \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x21) 
#define STG_E_FILEALREADYEXISTS \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x50) #define STG_E_INVALIDPARAMETER \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x57) #define STG_E_MEDIUMFULL \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x70) #define STG_E_ABNORMALAPIEXIT \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0xfa) #define STG_E_INVALIDHEADER \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0xfb) #define STG_E_INVALIDNAME \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0xfc) #define STG_E_UNKNOWN \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0xfd) #define STG_E_UNIMPLEMENTEDFUNCTION\ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0xfe) #define STG_E_INVALIDFLAG \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0xff) /* Standard storage error codes */ #define STG_E_INUSE \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x100) #define STG_E_NOTCURRENT \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x101) #define STG_E_REVERTED \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x102) #define STG_E_CANTSAVE \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x103) #define STG_E_OLDFORMAT \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x104) #define STG_E_OLDDLL \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x105) #define STG_E_SHAREREQUIRED \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x106) #define STG_E_NOTFILEBASEDSTORAGE \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x107) #define STG_E_DOCFILECORRUPT \ MAKE_SCODE(SEVERITY_ERROR, FACILITY_STORAGE, 0x109) /* Information returns */ #define STG_S_CONVERTED \ MAKE_SCODE(SEVERITY_SUCCESS, FACILITY_STORAGE, 0x200) // endif !defined(OS_WINDOWS) #endif // These two don't seem to be defined in the Windoze header files. #define STG_S_BUFFEROVERFLOW \ MAKE_SCODE(SEVERITY_SUCCESS, FACILITY_STORAGE, 0x201) #define STG_S_TRYOVERWRITE \ MAKE_SCODE(SEVERITY_SUCCESS, FACILITY_STORAGE, 0x202) #endif
chusopr/cloudbreak
integration-test/src/main/java/com/sequenceiq/it/cloudbreak/newway/action/ProxyConfigCreateIfNotExistsAction.java
<reponame>chusopr/cloudbreak package com.sequenceiq.it.cloudbreak.newway.action; import static com.sequenceiq.it.cloudbreak.newway.log.Log.logJSON; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.sequenceiq.it.cloudbreak.newway.CloudbreakClient; import com.sequenceiq.it.cloudbreak.newway.ProxyConfigEntity; import com.sequenceiq.it.cloudbreak.newway.context.TestContext; public class ProxyConfigCreateIfNotExistsAction implements ActionV2<ProxyConfigEntity> { private static final Logger LOGGER = LoggerFactory.getLogger(ProxyConfigCreateIfNotExistsAction.class); @Override public ProxyConfigEntity action(TestContext testContext, ProxyConfigEntity entity, CloudbreakClient client) throws Exception { LOGGER.info("Create ProxyConfig with name: {}", entity.getRequest().getName()); try { entity.setResponse( client.getCloudbreakClient().proxyConfigV3Endpoint().createInWorkspace(client.getWorkspaceId(), entity.getRequest()) ); logJSON(LOGGER, "ProxyConfig created successfully: ", entity.getRequest()); } catch (Exception e) { LOGGER.info("Cannot create ProxyConfig, fetch existed one: {}", entity.getRequest().getName()); entity.setResponse( client.getCloudbreakClient().proxyConfigV3Endpoint() .getByNameInWorkspace(client.getWorkspaceId(), entity.getRequest().getName())); } if (entity.getResponse() == null) { throw new IllegalStateException("ProxyConfig could not be created."); } return entity; } }
CristiRO/Monalisa
src/lia/Monitor/ciena/osrp/tl1/TL1Util.java
/*
 * $Id: TL1Util.java 7419 2013-10-16 12:56:15Z ramiro $
 *
 * Created on Oct 26, 2007
 */
package lia.Monitor.ciena.osrp.tl1;

import java.io.BufferedReader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

import lia.util.telnet.CienaTelnet;

/**
 * Various helper functions for retrieving and parsing OSRP data over the
 * TL1 telnet interface.
 * @author ramiro
 */
public class TL1Util {

    private static final Logger logger = Logger.getLogger(TL1Util.class.getName());

    /** Correlation tag used for all OSRP TL1 commands. */
    public static final String OSRP_CTAG = "osrp";

    public static final String RTRV_OSRP_NODES_CMD = "rtrv-osrp-node::ALL:osrp;\n";
    public static final String RTRV_OSRP_CTPS_CMD = "rtrv-osrp-ctp::ALL:osrp;\n";
    public static final String RTRV_OSRP_LTPS_CMD = "rtrv-osrp-ltp::ALL:osrp;\n";
    public static final String RTRV_OSRP_ROUTEMETRIC_CMD = "rtrv-osrp-routemetric::ALL:osrp;\n";
    public static final String RTRV_ALL_STSPC_CMD = "rtrv-stspc:::osrp;\n";

    /** Marker lines delimiting the payload section of a TL1 response. */
    public static final String TL1_START_CODE = "M ";
    public static final String TL1_END_CODE = ";";

    /** Retrieves all OSRP nodes. */
    public static final OsrpTL1Response[] getAllOsrpNodes() throws Exception {
        return execAndGet(RTRV_OSRP_NODES_CMD);
    }

    /** Retrieves all OSRP line termination points. */
    public static final OsrpTL1Response[] getAllOsrpLtps() throws Exception {
        return execAndGet(RTRV_OSRP_LTPS_CMD);
    }

    /** Retrieves all OSRP connection termination points. */
    public static final OsrpTL1Response[] getAllOsrpCtps() throws Exception {
        return execAndGet(RTRV_OSRP_CTPS_CMD);
    }

    /** Retrieves all STS path connections. */
    public static final OsrpTL1Response[] getAllSTSPCs() throws Exception {
        return execAndGet(RTRV_ALL_STSPC_CMD);
    }

    /** Retrieves all OSRP route metrics. */
    public static final OsrpTL1Response[] getAllRouteMetric() throws Exception {
        return execAndGet(RTRV_OSRP_ROUTEMETRIC_CMD);
    }

    /**
     * Executes a TL1 command and parses the payload lines (those between the
     * "M " start marker and the ";" terminator) into OsrpTL1Response objects.
     *
     * @throws Exception on telnet failure, or NullPointerException when the
     *         telnet layer returns a null buffer
     */
    private static final OsrpTL1Response[] execAndGet(final String tl1CMD) throws Exception {

        final long sTime = System.currentTimeMillis();
        long dtTl1 = 0;
        final StringBuilder logMsg = new StringBuilder();

        try {
            final CienaTelnet tl1Telnet = CienaTelnet.getMonitorInstance();
            final StringBuilder sb = tl1Telnet.doCmd(tl1CMD, OSRP_CTAG);
            dtTl1 = System.currentTimeMillis() - sTime;

            if (sb == null) {
                throw new NullPointerException("Null response from TL1 Telnet interface");
            }

            if (logger.isLoggable(Level.FINEST)) {
                logMsg.append("\n[ TL1Util ] [ execAndGet ] doCmd [ ").append(tl1CMD).append(" ] received:\n");
                logMsg.append(sb.toString());
            }

            // Reading from an in-memory string; no close() needed.
            BufferedReader reader = new BufferedReader(new StringReader(sb.toString()));
            String line = null;
            boolean started = false;

            // IMPROVEMENT: parameterized collection replaces the raw
            // ArrayList + unchecked toArray cast of the original.
            final ArrayList<OsrpTL1Response> osrpTL1Response = new ArrayList<OsrpTL1Response>();

            while ((line = reader.readLine()) != null) {
                line = line.trim();
                if (!started) {
                    // Skip everything up to (and including) the "M " header.
                    if (line.startsWith(TL1_START_CODE)) {
                        started = true;
                    }
                    continue;
                }

                if (line.startsWith(TL1_END_CODE)) {
                    break;
                }

                if (!line.startsWith(OsrpTL1Response.START_END_DELIMITER)) {
                    if (logger.isLoggable(Level.FINER)) {
                        logMsg.append("\n Ignoring line: [").append(line)
                                .append("] because it did not start with START_END_DELIMITER");
                    }
                    continue;
                }

                final OsrpTL1Response tl1Response = OsrpTL1Response.parseLine(line);
                if (logger.isLoggable(Level.FINER)) {
                    logMsg.append("\n").append("OsrpTL1Response for line: ").append(line).append(" -> ")
                            .append(tl1Response);
                }
                osrpTL1Response.add(tl1Response);
            }

            if (logger.isLoggable(Level.FINE)) {
                logMsg.append("\n[ TL1Util ] [ execAndGet ] doCmd [ ").append(tl1CMD).append(" ] returning: ")
                        .append(osrpTL1Response.size()).append(" values");
            }

            return osrpTL1Response.toArray(new OsrpTL1Response[osrpTL1Response.size()]);
        } finally {
            if (logger.isLoggable(Level.FINE)) {
                logMsg.append("\n[ TL1Util ] [ execAndGet ] doCmd [ ").append(tl1CMD).append(" ] DT wait CD/CI: ")
                        .append(dtTl1).append(" ms Total DT: ").append(System.currentTimeMillis() - sTime)
                        .append(" ms\n");
                logger.log(Level.FINE, logMsg.toString());
            }
        }
    }

    /** Returns the string value for {@code key}, or null when absent. */
    public static final String getStringVal(final String key, final OsrpTL1Response osrpTL1Response) {
        if ((osrpTL1Response == null) || (key == null)) {
            return null;
        }
        return (String) osrpTL1Response.paramsMap.get(key);
    }

    /** Returns the int value for {@code key}; -1 is the sentinel for missing/unparsable. */
    public static final int getIntVal(final String key, final OsrpTL1Response osrpTL1Response) {
        if ((key == null) || (osrpTL1Response == null) || (osrpTL1Response.paramsMap == null)) {
            return -1;
        }

        final Map map = osrpTL1Response.paramsMap;
        int retV = -1;
        try {
            retV = Integer.parseInt((String) map.get(key));
        } catch (Throwable t) {
            if (logger.isLoggable(Level.FINE)) {
                logger.log(Level.FINE, "[ Tl1Util ] [ getIntVal ] Unable to determine the integer value for Key: "
                        + key + " from OsrpTL1Response: " + osrpTL1Response, t);
            }
            retV = -1;
        }
        return retV;
    }

    /** Returns the long value for {@code key}; -1 is the sentinel for missing/unparsable. */
    public static final long getLongVal(final String key, final OsrpTL1Response osrpTL1Response) {
        if ((key == null) || (osrpTL1Response == null) || (osrpTL1Response.paramsMap == null)) {
            return -1;
        }

        final Map map = osrpTL1Response.paramsMap;
        long retV = -1;
        try {
            retV = Long.parseLong((String) map.get(key));
        } catch (Throwable t) {
            if (logger.isLoggable(Level.FINE)) {
                logger.log(Level.FINE, "[ Tl1Util ] [ getLongVal ] Unable to determine the long value for Key: "
                        + key + " from OsrpTL1Response: " + osrpTL1Response, t);
            }
            retV = -1;
        }
        return retV;
    }
}
johndpope/echo
thirdparty/physx/PhysXSDK/Source/PvdRuntime/src/PxMetaDataPvdBinding.cpp
/* * Copyright (c) 2008-2015, NVIDIA CORPORATION. All rights reserved. * * NVIDIA CORPORATION and its licensors retain all intellectual property * and proprietary rights in and to this software, related documentation * and any modifications thereto. Any use, reproduction, disclosure or * distribution of this software and related documentation without an express * license agreement from NVIDIA CORPORATION is strictly prohibited. */ // Copyright (c) 2004-2008 AGEIA Technologies, Inc. All rights reserved. // Copyright (c) 2001-2004 NovodeX AG. All rights reserved. // suppress LNK4221 #include "PxPreprocessor.h" PX_DUMMY_SYMBOL #include "PxVisualDebugger.h" #if PX_SUPPORT_VISUAL_DEBUGGER #include "PxSimpleTypes.h" #include "PsArray.h" //using namespace physx::shdfnd; #include "PxMetaDataPvdBinding.h" #include "Px.h" #include "PxMetaDataObjects.h" #include "PvdConnection.h" #include "PvdVisualDebugger.h" #include "PvdDataStream.h" #include "PxScene.h" #include "ScIterators.h" #include "ScBodyCore.h" #include "PvdMetaDataExtensions.h" #include "PvdMetaDataPropertyVisitor.h" #include "PvdMetaDataDefineProperties.h" #include "PvdMetaDataBindingData.h" #include "PxRigidDynamic.h" #include "PxArticulation.h" #include "PxArticulationLink.h" #include "PvdTypeNames.h" #include "NpScene.h" #include "NpPhysics.h" #include "gpu/PxParticleGpu.h" using namespace physx::debugger; using namespace physx; using namespace Sc; namespace physx { namespace Pvd { struct NameValuePair { const char* mName; PxU32 mValue; }; using namespace physx::Sq; static const NameValuePair g_physx_Sq_SceneQueryID__EnumConversion[] = { { "QUERY_RAYCAST_ANY_OBJECT", static_cast<PxU32>(QueryID::QUERY_RAYCAST_ANY_OBJECT) }, { "QUERY_RAYCAST_CLOSEST_OBJECT", static_cast<PxU32>(QueryID::QUERY_RAYCAST_CLOSEST_OBJECT) }, { "QUERY_RAYCAST_ALL_OBJECTS", static_cast<PxU32>(QueryID::QUERY_RAYCAST_ALL_OBJECTS) }, { "QUERY_OVERLAP_SPHERE_ALL_OBJECTS", static_cast<PxU32>(QueryID::QUERY_OVERLAP_SPHERE_ALL_OBJECTS) }, { 
"QUERY_OVERLAP_AABB_ALL_OBJECTS", static_cast<PxU32>(QueryID::QUERY_OVERLAP_AABB_ALL_OBJECTS) }, { "QUERY_OVERLAP_OBB_ALL_OBJECTS", static_cast<PxU32>(QueryID::QUERY_OVERLAP_OBB_ALL_OBJECTS) }, { "QUERY_OVERLAP_CAPSULE_ALL_OBJECTS", static_cast<PxU32>(QueryID::QUERY_OVERLAP_CAPSULE_ALL_OBJECTS) }, { "QUERY_OVERLAP_CONVEX_ALL_OBJECTS", static_cast<PxU32>(QueryID::QUERY_OVERLAP_CONVEX_ALL_OBJECTS) }, { "QUERY_LINEAR_OBB_SWEEP_CLOSEST_OBJECT", static_cast<PxU32>(QueryID::QUERY_LINEAR_OBB_SWEEP_CLOSEST_OBJECT) }, { "QUERY_LINEAR_CAPSULE_SWEEP_CLOSEST_OBJECT", static_cast<PxU32>(QueryID::QUERY_LINEAR_CAPSULE_SWEEP_CLOSEST_OBJECT) }, { "QUERY_LINEAR_CONVEX_SWEEP_CLOSEST_OBJECT", static_cast<PxU32>(QueryID::QUERY_LINEAR_CONVEX_SWEEP_CLOSEST_OBJECT) }, { "QUERY_LINEAR_COMPOUND_GEOMETRY_SWEEP_CLOSEST_OBJECT", static_cast<PxU32>(QueryID::QUERY_LINEAR_COMPOUND_GEOMETRY_SWEEP_CLOSEST_OBJECT) }, { "QUERY_LINEAR_OBB_SWEEP_ALL_OBJECTS", static_cast<PxU32>(QueryID::QUERY_LINEAR_OBB_SWEEP_ALL_OBJECTS) }, { "QUERY_LINEAR_CAPSULE_SWEEP_ALL_OBJECTS", static_cast<PxU32>(QueryID::QUERY_LINEAR_CAPSULE_SWEEP_ALL_OBJECTS) }, { "QUERY_LINEAR_CONVEX_SWEEP_ALL_OBJECTS", static_cast<PxU32>(QueryID::QUERY_LINEAR_CONVEX_SWEEP_ALL_OBJECTS) }, { "QUERY_LINEAR_COMPOUND_GEOMETRY_SWEEP_ALL_OBJECTS", static_cast<PxU32>(QueryID::QUERY_LINEAR_COMPOUND_GEOMETRY_SWEEP_ALL_OBJECTS) }, { NULL, 0 } }; struct SceneQueryIDConvertor { const NameValuePair* NameConversion; SceneQueryIDConvertor():NameConversion(g_physx_Sq_SceneQueryID__EnumConversion){} }; PvdMetaDataBinding::PvdMetaDataBinding() : mBindingData( PX_NEW( PvdMetaDataBindingData )() ) { } PvdMetaDataBinding::~PvdMetaDataBinding() { for( OwnerActorsMap::Iterator iter = mBindingData->mOwnerActorsMap.getIterator(); !iter.done(); iter++) { iter->second->~OwnerActorsValueType(); PX_FREE( iter->second ); } PX_DELETE( mBindingData ); mBindingData = NULL; } template<typename TDataType, typename TValueType, typename TClassType> inline void 
definePropertyStruct( PvdDataStream& inStream, const char* pushName = NULL ) { PvdPropertyDefinitionHelper& helper( inStream.getPropertyDefinitionHelper() ); PvdClassInfoValueStructDefine definitionObj( helper ); bool doPush = pushName && *pushName; if ( doPush ) definitionObj.pushName( pushName ); visitAllPvdProperties<TDataType>( definitionObj ); if ( doPush ) definitionObj.popName(); helper.addPropertyMessage(getPvdNamespacedNameForType<TClassType>(), getPvdNamespacedNameForType<TValueType>(), sizeof(TValueType)); } template<typename TDataType, typename TValueType> inline void definePropertyStruct( PvdDataStream& inStream ) { definePropertyStruct<TDataType,TValueType,TDataType>( inStream ); } template<typename TDataType> inline void createClassAndDefineProperties( PvdDataStream& inStream ) { inStream.createClass<TDataType>(); PvdPropertyDefinitionHelper& helper( inStream.getPropertyDefinitionHelper() ); PvdClassInfoDefine definitionObj( helper, getPvdNamespacedNameForType<TDataType>() ); visitAllPvdProperties<TDataType>( definitionObj ); } template<typename TDataType, typename TParentType> inline void createClassDeriveAndDefineProperties( PvdDataStream& inStream ) { inStream.createClass<TDataType>(); inStream.deriveClass<TParentType,TDataType>(); PvdPropertyDefinitionHelper& helper( inStream.getPropertyDefinitionHelper() ); PvdClassInfoDefine definitionObj( helper, getPvdNamespacedNameForType<TDataType>() ); visitInstancePvdProperties<TDataType>( definitionObj ); } template<typename TDataType, typename TConvertSrc, typename TConvertData> inline void definePropertyFlags( PvdDataStream& inStream, const char* inPropertyName ) { PvdPropertyDefinitionHelper& helper( inStream.getPropertyDefinitionHelper() ); //PxEnumTraits< TValueType > filterFlagsEnum; TConvertSrc filterFlagsEnum; const TConvertData* convertor = filterFlagsEnum.NameConversion; for ( ; convertor->mName != NULL; ++convertor ) { helper.addNamedValue( convertor->mName, convertor->mValue); } 
inStream.createProperty<TDataType,PxU32>( inPropertyName,"Bitflag", PropertyType::Scalar, helper.getNamedValues()); helper.clearNamedValues(); } template<typename TDataType, typename TConvertSrc, typename TConvertData> inline void definePropertyEnums( PvdDataStream& inStream, const char* inPropertyName ) { PvdPropertyDefinitionHelper& helper( inStream.getPropertyDefinitionHelper() ); //PxEnumTraits< TValueType > filterFlagsEnum; TConvertSrc filterFlagsEnum; const TConvertData* convertor = filterFlagsEnum.NameConversion; for ( ; convertor->mName != NULL; ++convertor ) { helper.addNamedValue( convertor->mName, convertor->mValue); } inStream.createProperty<TDataType,PxU32>( inPropertyName,"Enumeration Value", PropertyType::Scalar, helper.getNamedValues()); helper.clearNamedValues(); } namespace { PX_FORCE_INLINE void registerPvdRaycast( PvdDataStream& inStream ) { inStream.createClass<PvdRaycast>(); definePropertyEnums<PvdRaycast, Pvd::SceneQueryIDConvertor, Pvd::NameValuePair>(inStream, "type"); inStream.createProperty<PvdRaycast,PxFilterData>( "filterData" ); definePropertyFlags<PvdRaycast, PxEnumTraits< physx::PxQueryFlag::Enum>, PxU32ToName >(inStream, "filterFlags"); inStream.createProperty<PvdRaycast,PxVec3>( "origin" ); inStream.createProperty<PvdRaycast,PxVec3>( "unitDir" ); inStream.createProperty<PvdRaycast,PxF32>( "distance" ); inStream.createProperty<PvdRaycast,String>( "hits_arrayName" ); inStream.createProperty<PvdRaycast,PxU32>( "hits_baseIndex" ); inStream.createProperty<PvdRaycast,PxU32>( "hits_count" ); } PX_FORCE_INLINE void registerPvdSweep( PvdDataStream& inStream ) { inStream.createClass<PvdSweep>(); definePropertyEnums<PvdSweep, Pvd::SceneQueryIDConvertor, Pvd::NameValuePair>(inStream, "type"); definePropertyFlags<PvdSweep, PxEnumTraits< physx::PxQueryFlag::Enum>, PxU32ToName>(inStream, "filterFlags"); inStream.createProperty<PvdSweep,PxVec3>( "unitDir" ); inStream.createProperty<PvdSweep,PxF32>( "distance" ); 
inStream.createProperty<PvdSweep,String>( "geom_arrayName" ); inStream.createProperty<PvdSweep,PxU32>( "geom_baseIndex" ); inStream.createProperty<PvdSweep,PxU32>( "geom_count" ); inStream.createProperty<PvdSweep,String>( "pose_arrayName" ); inStream.createProperty<PvdSweep,PxU32>( "pose_baseIndex" ); inStream.createProperty<PvdSweep,PxU32>( "pose_count" ); inStream.createProperty<PvdSweep,String>( "filterData_arrayName" ); inStream.createProperty<PvdSweep,PxU32>( "filterData_baseIndex" ); inStream.createProperty<PvdSweep,PxU32>( "filterData_count" ); inStream.createProperty<PvdSweep,String>( "hits_arrayName" ); inStream.createProperty<PvdSweep,PxU32>( "hits_baseIndex" ); inStream.createProperty<PvdSweep,PxU32>( "hits_count" ); } PX_FORCE_INLINE void registerPvdOverlap( PvdDataStream& inStream ) { inStream.createClass<PvdOverlap>(); definePropertyEnums<PvdOverlap, Pvd::SceneQueryIDConvertor, Pvd::NameValuePair>(inStream, "type"); inStream.createProperty<PvdOverlap,PxFilterData>( "filterData" ); definePropertyFlags<PvdOverlap, PxEnumTraits< physx::PxQueryFlag::Enum>, PxU32ToName>(inStream, "filterFlags"); inStream.createProperty<PvdOverlap,PxTransform>( "pose" ); inStream.createProperty<PvdOverlap,String>( "geom_arrayName" ); inStream.createProperty<PvdOverlap,PxU32>( "geom_baseIndex" ); inStream.createProperty<PvdOverlap,PxU32>( "geom_count" ); inStream.createProperty<PvdOverlap,String>( "hits_arrayName" ); inStream.createProperty<PvdOverlap,PxU32>( "hits_baseIndex" ); inStream.createProperty<PvdOverlap,PxU32>( "hits_count" ); } PX_FORCE_INLINE void registerPvdSqHit( PvdDataStream& inStream ) { inStream.createClass<PvdSqHit>(); inStream.createProperty<PvdSqHit, ObjectRef>( "Shape" ); inStream.createProperty<PvdSqHit, ObjectRef>( "Actor" ); inStream.createProperty<PvdSqHit, PxU32>( "FaceIndex" ); definePropertyFlags<PvdSqHit, PxEnumTraits< physx::PxHitFlag::Enum>, PxU32ToName>(inStream, "Flags"); inStream.createProperty<PvdSqHit, PxVec3>( "Impact" ); 
inStream.createProperty<PvdSqHit, PxVec3>( "Normal" );
		inStream.createProperty<PvdSqHit, PxF32>( "Distance" );
		// U/V are barycentric-style hit coordinates reported per hit.
		inStream.createProperty<PvdSqHit, PxF32>( "U" );
		inStream.createProperty<PvdSqHit, PxF32>( "V" );
		inStream.createProperty<PvdSqHit, PxU32>( "SweepGeometryIndex" );
	}

} // end namespace

// Registers the full SDK class/property schema with PVD: every PhysX object
// type this binding mirrors (physics, scenes, geometry, meshes, actors,
// articulations, particles, cloth, aggregates) plus the helper debug classes
// declared above.  Must run before any instances are streamed.
void PvdMetaDataBinding::registerSDKProperties( PvdDataStream& inStream )
{
	//PxPhysics
	{
		inStream.createClass<PxPhysics>();
		PvdPropertyDefinitionHelper& helper( inStream.getPropertyDefinitionHelper() );
		PvdClassInfoDefine definitionObj( helper, getPvdNamespacedNameForType<PxPhysics>() );
		// Tolerances scale is exposed as a nested "TolerancesScale.*" group.
		helper.pushName( "TolerancesScale" );
		visitAllPvdProperties<PxTolerancesScale>( definitionObj );
		helper.popName();
		// "children" relationships: the object groups owned by the SDK object.
		inStream.createProperty<PxPhysics,ObjectRef>( "Scenes", "children", PropertyType::Array );
		inStream.createProperty<PxPhysics,ObjectRef>( "SharedShapes", "children", PropertyType::Array );
		inStream.createProperty<PxPhysics,ObjectRef>( "Materials", "children", PropertyType::Array );
		inStream.createProperty<PxPhysics,ObjectRef>( "HeightFields", "children", PropertyType::Array );
		inStream.createProperty<PxPhysics,ObjectRef>( "ConvexMeshes", "children", PropertyType::Array );
		inStream.createProperty<PxPhysics,ObjectRef>( "TriangleMeshes", "children", PropertyType::Array );
		inStream.createProperty<PxPhysics,ObjectRef>( "ClothFabrics", "children", PropertyType::Array );
		inStream.createProperty<PxPhysics,PxU32>( "Version.Major" );
		inStream.createProperty<PxPhysics,PxU32>( "Version.Minor" );
		inStream.createProperty<PxPhysics,PxU32>( "Version.Bugfix" );
		inStream.createProperty<PxPhysics,String>( "Version.Build" );
		definePropertyStruct<PxTolerancesScale,PxTolerancesScaleGeneratedValues,PxPhysics>( inStream, "TolerancesScale" );
	}
	{ //PxGeometry
		inStream.createClass<PxGeometry>();
		inStream.createProperty<PxGeometry,ObjectRef>( "Shape", "parents", PropertyType::Scalar );
	}
	{ //PxBoxGeometry
		createClassDeriveAndDefineProperties<PxBoxGeometry,PxGeometry>( inStream );
definePropertyStruct<PxBoxGeometry,PxBoxGeometryGeneratedValues,PxBoxGeometry>( inStream );
	}
	{//PxSphereGeometry
		createClassDeriveAndDefineProperties<PxSphereGeometry,PxGeometry>( inStream );
		definePropertyStruct<PxSphereGeometry,PxSphereGeometryGeneratedValues,PxSphereGeometry>( inStream );
	}
	{ //PxCapsuleGeometry
		createClassDeriveAndDefineProperties<PxCapsuleGeometry,PxGeometry>( inStream );
		definePropertyStruct<PxCapsuleGeometry,PxCapsuleGeometryGeneratedValues,PxCapsuleGeometry>( inStream );
	}
	{ //PxPlaneGeometry
		// Plane geometry carries no extra properties, hence no generated-values struct.
		createClassDeriveAndDefineProperties<PxPlaneGeometry,PxGeometry>( inStream );
	}
	{ //PxConvexMeshGeometry
		createClassDeriveAndDefineProperties<PxConvexMeshGeometry,PxGeometry>( inStream );
		definePropertyStruct<PxConvexMeshGeometry,PxConvexMeshGeometryGeneratedValues,PxConvexMeshGeometry>( inStream );
	}
	{ //PxTriangleMeshGeometry
		createClassDeriveAndDefineProperties<PxTriangleMeshGeometry,PxGeometry>( inStream );
		definePropertyStruct<PxTriangleMeshGeometry,PxTriangleMeshGeometryGeneratedValues,PxTriangleMeshGeometry>( inStream );
	}
	{ //PxHeightFieldGeometry
		createClassDeriveAndDefineProperties<PxHeightFieldGeometry,PxGeometry>( inStream );
		definePropertyStruct<PxHeightFieldGeometry,PxHeightFieldGeometryGeneratedValues,PxHeightFieldGeometry>( inStream );
	}
	//PxScene
	{
		/* struct PvdContact
		{
			PxVec3 point;
			PxVec3 axis;
			const void* shape0;
			const void* shape1;
			PxReal separation;
			PxReal normalForce;
			PxU32 internalFaceIndex0;
			PxU32 internalFaceIndex1;
			bool normalForceAvailable;
		};*/
		{ //contact information
			// PVD mirror of the PvdContact struct shown in the comment above.
			inStream.createClass<PvdContact>();
			inStream.createProperty<PvdContact,PxVec3>( "Point" );
			inStream.createProperty<PvdContact,PxVec3>( "Axis" );
			inStream.createProperty<PvdContact,ObjectRef>( "Shapes[0]" );
			inStream.createProperty<PvdContact,ObjectRef>( "Shapes[1]" );
			inStream.createProperty<PvdContact,PxF32>( "Separation" );
			inStream.createProperty<PvdContact,PxF32>( "NormalForce" );
			inStream.createProperty<PvdContact,PxU32>( "InternalFaceIndex[0]" );
inStream.createProperty<PvdContact,PxU32>( "InternalFaceIndex[1]" );
			inStream.createProperty<PvdContact,bool>( "NormalForceValid" );
		}
		// Register the scene-query helper classes declared in the anonymous
		// namespace before the PxScene class that references them.
		registerPvdSqHit( inStream );
		registerPvdRaycast( inStream );
		registerPvdSweep( inStream );
		registerPvdOverlap( inStream );
		inStream.createClass<PxScene>();
		PvdPropertyDefinitionHelper& helper( inStream.getPropertyDefinitionHelper() );
		PvdClassInfoDefine definitionObj( helper, getPvdNamespacedNameForType<PxScene>() );
		visitAllPvdProperties<PxSceneDesc>( definitionObj );
		// Statistics are exposed as a nested "SimulationStatistics.*" group.
		helper.pushName( "SimulationStatistics" );
		visitAllPvdProperties<PxSimulationStatistics>( definitionObj );
		helper.popName();
		inStream.createProperty<PxScene,ObjectRef>( "Physics", "parents", PropertyType::Scalar );
		inStream.createProperty<PxScene,PxU32>( "Timestamp" );
		inStream.createProperty<PxScene,PxReal>( "SimulateElapsedTime" );
		definePropertyStruct<PxSceneDesc,PxSceneDescGeneratedValues,PxScene>( inStream );
		definePropertyStruct<PxSimulationStatistics,PxSimulationStatisticsGeneratedValues,PxScene>( inStream, "SimulationStatistics" );
		// Per-frame data arrays: contacts plus the immediate and batched
		// scene-query result streams.
		inStream.createProperty<PxScene,PvdContact>( "Contacts", "", PropertyType::Array );
		inStream.createProperty<PxScene,PvdOverlap>( "SceneQueries.Overlaps", "", PropertyType::Array );
		inStream.createProperty<PxScene,PvdSweep>( "SceneQueries.Sweeps", "", PropertyType::Array );
		inStream.createProperty<PxScene,PvdSqHit>( "SceneQueries.Hits", "", PropertyType::Array );
		inStream.createProperty<PxScene,PvdRaycast>( "SceneQueries.Raycasts", "", PropertyType::Array );
		inStream.createProperty<PxScene,PxTransform>( "SceneQueries.PoseList", "", PropertyType::Array );
		inStream.createProperty<PxScene,PxFilterData>( "SceneQueries.FilterDataList", "", PropertyType::Array );
		inStream.createProperty<PxScene,ObjectRef>( "SceneQueries.GeometryList", "", PropertyType::Array );
		inStream.createProperty<PxScene,PvdOverlap>( "BatchedQueries.Overlaps", "", PropertyType::Array );
		inStream.createProperty<PxScene,PvdSweep>( "BatchedQueries.Sweeps", "", PropertyType::Array );
		inStream.createProperty<PxScene,PvdSqHit>( "BatchedQueries.Hits", "", PropertyType::Array );
		inStream.createProperty<PxScene,PvdRaycast>( "BatchedQueries.Raycasts", "", PropertyType::Array );
		inStream.createProperty<PxScene,PxTransform>( "BatchedQueries.PoseList", "", PropertyType::Array );
		inStream.createProperty<PxScene,PxFilterData>( "BatchedQueries.FilterDataList", "", PropertyType::Array );
		inStream.createProperty<PxScene,ObjectRef>( "BatchedQueries.GeometryList", "", PropertyType::Array );
		// Child object groups owned by the scene.
		inStream.createProperty<PxScene,ObjectRef>( "RigidStatics", "children", PropertyType::Array );
		inStream.createProperty<PxScene,ObjectRef>( "RigidDynamics", "children", PropertyType::Array );
		inStream.createProperty<PxScene,ObjectRef>( "Articulations", "children", PropertyType::Array );
		inStream.createProperty<PxScene,ObjectRef>( "ParticleSystems", "children", PropertyType::Array );
		inStream.createProperty<PxScene,ObjectRef>( "ParticleFluids", "children", PropertyType::Array );
		inStream.createProperty<PxScene,ObjectRef>( "Cloths", "children", PropertyType::Array );
		inStream.createProperty<PxScene,ObjectRef>( "Joints", "children", PropertyType::Array );
		inStream.createProperty<PxScene,ObjectRef>( "Aggregates", "children", PropertyType::Array );
	}
	//PxMaterial
	{
		createClassAndDefineProperties<PxMaterial>( inStream );
		definePropertyStruct<PxMaterial,PxMaterialGeneratedValues,PxMaterial>( inStream );
		inStream.createProperty<PxMaterial,ObjectRef>( "Physics", "parents", PropertyType::Scalar );
	}
	//PxHeightField
	{
		{
			inStream.createClass<PxHeightFieldSample>();
			inStream.createProperty<PxHeightFieldSample,PxU16>( "Height" );
			inStream.createProperty<PxHeightFieldSample,PxU8>( "MaterialIndex[0]" );
			inStream.createProperty<PxHeightFieldSample,PxU8>( "MaterialIndex[1]" );
		}
		inStream.createClass<PxHeightField>();
		//It is important the PVD fields match the RepX fields, so this has
		//to be hand coded.
PvdPropertyDefinitionHelper& helper( inStream.getPropertyDefinitionHelper() );
		PvdClassInfoDefine definitionObj( helper, getPvdNamespacedNameForType<PxHeightField>() );
		visitAllPvdProperties<PxHeightFieldDesc>( definitionObj );
		inStream.createProperty<PxHeightField,PxHeightFieldSample>("Samples", "", PropertyType::Array );
		inStream.createProperty<PxHeightField,ObjectRef>( "Physics", "parents", PropertyType::Scalar );
		definePropertyStruct<PxHeightFieldDesc,PxHeightFieldDescGeneratedValues,PxHeightField>( inStream );
	}
	//PxConvexMesh
	{
		{ //hull polygon data.
			inStream.createClass<PvdHullPolygonData>();
			inStream.createProperty<PvdHullPolygonData,PxU16>( "NumVertices" );
			inStream.createProperty<PvdHullPolygonData,PxU16>( "IndexBase" );
		}
		inStream.createClass<PxConvexMesh>();
		inStream.createProperty<PxConvexMesh,PxF32>( "Mass" );
		inStream.createProperty<PxConvexMesh,PxMat33>( "LocalInertia" );
		inStream.createProperty<PxConvexMesh,PxVec3>( "LocalCenterOfMass" );
		inStream.createProperty<PxConvexMesh,PxVec3>( "Points", "", PropertyType::Array );
		inStream.createProperty<PxConvexMesh,PvdHullPolygonData>( "HullPolygons", "", PropertyType::Array );
		inStream.createProperty<PxConvexMesh,PxU8>( "PolygonIndexes", "", PropertyType::Array );
		inStream.createProperty<PxConvexMesh,ObjectRef>( "Physics", "parents", PropertyType::Scalar );
	}
	//PxTriangleMesh
	{
		inStream.createClass<PxTriangleMesh>();
		inStream.createProperty<PxTriangleMesh,PxVec3>( "Points", "", PropertyType::Array );
		inStream.createProperty<PxTriangleMesh,PxU32>( "NbTriangles", "", PropertyType::Scalar );
		inStream.createProperty<PxTriangleMesh,PxU32>( "Triangles", "", PropertyType::Array );
		inStream.createProperty<PxTriangleMesh,PxU16>( "MaterialIndices", "", PropertyType::Array );
		inStream.createProperty<PxTriangleMesh,ObjectRef>( "Physics", "parents", PropertyType::Scalar );
	}
	{ //PxShape
		createClassAndDefineProperties<PxShape>( inStream );
		definePropertyStruct<PxShape,PxShapeGeneratedValues,PxShape>( inStream );
		inStream.createProperty<PxShape,ObjectRef>( "Geometry", "children" );
		inStream.createProperty<PxShape,ObjectRef>( "Materials", "children", PropertyType::Array );
		inStream.createProperty<PxShape,ObjectRef>( "Actor", "parents" );
	}
	//PxActor
	{
		createClassAndDefineProperties<PxActor>( inStream );
		inStream.createProperty<PxActor,ObjectRef>( "Scene", "parents" );
	}
	//PxRigidActor
	{
		createClassDeriveAndDefineProperties<PxRigidActor,PxActor>( inStream );
		inStream.createProperty<PxRigidActor,ObjectRef>( "Shapes", "children", PropertyType::Array );
		inStream.createProperty<PxRigidActor,ObjectRef>( "Joints", "children", PropertyType::Array );
	}
	//PxRigidStatic
	{
		createClassDeriveAndDefineProperties<PxRigidStatic,PxRigidActor>( inStream );
		definePropertyStruct<PxRigidStatic,PxRigidStaticGeneratedValues,PxRigidStatic>( inStream );
	}
	{//PxRigidBody
		createClassDeriveAndDefineProperties<PxRigidBody,PxRigidActor>( inStream );
	}
	//PxRigidDynamic
	{
		createClassDeriveAndDefineProperties<PxRigidDynamic,PxRigidBody>( inStream );
		//If anyone adds a 'getKinematicTarget' to PxRigidDynamic you can remove the line
		//below (after the code generator has run).
		inStream.createProperty<PxRigidDynamic,PxTransform>( "KinematicTarget" );
		definePropertyStruct<PxRigidDynamic,PxRigidDynamicGeneratedValues,PxRigidDynamic>( inStream );
		//Manually define the update struct.
PvdPropertyDefinitionHelper& helper( inStream.getPropertyDefinitionHelper() );
		/*struct PxRigidDynamicUpdateBlock
		{
			Transform	GlobalPose;
			Float3		LinearVelocity;
			Float3		AngularVelocity;
			PxU8		IsSleeping;
			PxU8		padding[3];
		};
		*/
		// Build the batched per-frame update message; each member is mapped by
		// its byte offset inside PxRigidDynamicUpdateBlock.
		helper.pushName( "GlobalPose" );
		helper.addPropertyMessageArg<PxTransform>( offsetof( PxRigidDynamicUpdateBlock, GlobalPose ) );
		helper.popName();
		helper.pushName( "LinearVelocity" );
		helper.addPropertyMessageArg<PxVec3>( offsetof( PxRigidDynamicUpdateBlock, LinearVelocity ) );
		helper.popName();
		helper.pushName( "AngularVelocity" );
		helper.addPropertyMessageArg<PxVec3>( offsetof( PxRigidDynamicUpdateBlock, AngularVelocity ) );
		helper.popName();
		helper.pushName( "IsSleeping" );
		helper.addPropertyMessageArg<bool>( offsetof( PxRigidDynamicUpdateBlock, IsSleeping ) );
		helper.popName();
		helper.addPropertyMessage<PxRigidDynamic,PxRigidDynamicUpdateBlock>();
	}
	{ //PxArticulation
		createClassAndDefineProperties<PxArticulation>( inStream );
		inStream.createProperty<PxArticulation,ObjectRef>( "Scene", "parents" );
		inStream.createProperty<PxArticulation,ObjectRef>( "Links", "children", PropertyType::Array );
		definePropertyStruct<PxArticulation,PxArticulationGeneratedValues,PxArticulation>( inStream );
	}
	{ //PxArticulationLink
		createClassDeriveAndDefineProperties<PxArticulationLink,PxRigidBody>( inStream );
		inStream.createProperty<PxArticulationLink,ObjectRef>( "Parent", "parents" );
		inStream.createProperty<PxArticulationLink,ObjectRef>( "Links", "children", PropertyType::Array );
		inStream.createProperty<PxArticulationLink,ObjectRef>( "InboundJoint", "children" );
		definePropertyStruct<PxArticulationLink,PxArticulationLinkGeneratedValues,PxArticulationLink>( inStream );
		PvdPropertyDefinitionHelper& helper( inStream.getPropertyDefinitionHelper() );
		/*struct PxArticulationLinkUpdateBlock
		{
			Transform	GlobalPose;
			Float3		LinearVelocity;
			Float3		AngularVelocity;
		};
		*/
		// Same batched-update pattern as PxRigidDynamic, minus the sleep flag.
		helper.pushName( "GlobalPose" );
		helper.addPropertyMessageArg<PxTransform>( offsetof( PxArticulationLinkUpdateBlock, GlobalPose ) );
		helper.popName();
		helper.pushName( "LinearVelocity" );
		helper.addPropertyMessageArg<PxVec3>( offsetof( PxArticulationLinkUpdateBlock, LinearVelocity ) );
		helper.popName();
		helper.pushName( "AngularVelocity" );
		helper.addPropertyMessageArg<PxVec3>( offsetof( PxArticulationLinkUpdateBlock, AngularVelocity ) );
		helper.popName();
		helper.addPropertyMessage<PxArticulationLink,PxArticulationLinkUpdateBlock>();
	}
	{ //PxArticulationJoint
		createClassAndDefineProperties<PxArticulationJoint>( inStream );
		inStream.createProperty<PxArticulationJoint,ObjectRef>( "Link", "parents" );
		definePropertyStruct<PxArticulationJoint,PxArticulationJointGeneratedValues,PxArticulationJoint>( inStream );
	}
	{ //PxConstraint
		createClassAndDefineProperties<PxConstraint>( inStream );
		definePropertyStruct<PxConstraint,PxConstraintGeneratedValues,PxConstraint>( inStream );
	}
#if PX_USE_PARTICLE_SYSTEM_API
	{ //PxParticleBase
		createClassDeriveAndDefineProperties<PxParticleBase,PxActor>( inStream );
		PvdPropertyDefinitionHelper& helper( inStream.getPropertyDefinitionHelper() );
		PvdClassInfoDefine definitionObj( helper, getPvdNamespacedNameForType<PxParticleBase>() );
		visitParticleSystemBufferProperties( makePvdPropertyFilter( definitionObj ) );
	}
	{ //PxParticleSystem
		createClassDeriveAndDefineProperties<PxParticleSystem,PxParticleBase>( inStream );
		inStream.createProperty<PxParticleSystem,PxU32>( "NbParticles");
		inStream.createProperty<PxParticleSystem,PxU32>( "ValidParticleRange");
		inStream.createProperty<PxParticleSystem,PxU32>( "ValidParticleBitmap", "", PropertyType::Array );
		definePropertyStruct<PxParticleSystem,PxParticleSystemGeneratedValues,PxParticleSystem>( inStream );
	}
	{ //PxParticleFluid
		createClassDeriveAndDefineProperties<PxParticleFluid,PxParticleBase>( inStream );
		inStream.createProperty<PxParticleFluid,PxU32>( "NbParticles");
		inStream.createProperty<PxParticleFluid,PxU32>( "ValidParticleRange");
inStream.createProperty<PxParticleFluid,PxU32>( "ValidParticleBitmap", "", PropertyType::Array );
		PvdPropertyDefinitionHelper& helper( inStream.getPropertyDefinitionHelper() );
		PvdClassInfoDefine definitionObj( helper, getPvdNamespacedNameForType<PxParticleFluid>() );
		visitParticleFluidBufferProperties( makePvdPropertyFilter( definitionObj ) );
		definePropertyStruct<PxParticleFluid,PxParticleFluidGeneratedValues,PxParticleFluid>( inStream );
	}
#endif
#if PX_USE_CLOTH_API
	{// PxClothFabricPhase
		createClassAndDefineProperties<PxClothFabricPhase>( inStream );
	}
	{ //PxClothFabric
		createClassAndDefineProperties<PxClothFabric>( inStream );
		definePropertyStruct<PxClothFabric,PxClothFabricGeneratedValues,PxClothFabric>( inStream );
		inStream.createProperty<PxClothFabric,ObjectRef>( "Physics", "parents" );
		inStream.createProperty<PxClothFabric,ObjectRef>( "Cloths", "children", PropertyType::Array );
		inStream.createProperty<PxClothFabric,PxClothFabricPhase>( "Phases", "", PropertyType::Array );
	}
	{ //PxCloth
		// Helper value classes used by the cloth buffers below.
		{//PxClothParticle
			createClassAndDefineProperties<PxClothParticle>( inStream );
		}
		{//PxClothStretchConfig
			createClassAndDefineProperties<PxClothStretchConfig>( inStream );
		}
		{//PxClothTetherConstraintConfig
			createClassAndDefineProperties<PxClothTetherConfig>( inStream );
		}
		{//PvdPositionAndRadius
			inStream.createClass<PvdPositionAndRadius>();
			inStream.createProperty<PvdPositionAndRadius,PxVec3>( "Position" );
			inStream.createProperty<PvdPositionAndRadius,PxF32>( "Radius" );
		}
		createClassDeriveAndDefineProperties<PxCloth,PxActor>( inStream );
		definePropertyStruct<PxCloth,PxClothGeneratedValues,PxCloth>( inStream );
		inStream.createProperty<PxCloth,ObjectRef>( "Fabric" );
		// Per-cloth simulation buffers streamed as arrays.
		inStream.createProperty<PxCloth,PxClothParticle>( "ParticleBuffer", "", PropertyType::Array );
		inStream.createProperty<PxCloth,PxClothParticle>( "ParticleAccelerations", "", PropertyType::Array );
		inStream.createProperty<PxCloth,PvdPositionAndRadius>( "MotionConstraints", "", PropertyType::Array );
		inStream.createProperty<PxCloth,PvdPositionAndRadius>( "CollisionSpheres", "", PropertyType::Array );
		inStream.createProperty<PxCloth,PvdPositionAndRadius>( "SeparationConstraints", "", PropertyType::Array );
		inStream.createProperty<PxCloth,PxU32>( "CollisionSpherePairs", "", PropertyType::Array );
		inStream.createProperty<PxCloth,PvdPositionAndRadius>( "CollisionPlanes", "", PropertyType::Array );
		inStream.createProperty<PxCloth,PxU32>( "CollisionConvexMasks", "", PropertyType::Array );
		inStream.createProperty<PxCloth,PxVec3>( "CollisionTriangles", "", PropertyType::Array );
		inStream.createProperty<PxCloth,PxU32>( "VirtualParticles", "", PropertyType::Array );
		inStream.createProperty<PxCloth,PxVec3>( "VirtualParticleWeights", "", PropertyType::Array );
		inStream.createProperty<PxCloth,PxU32>( "SelfCollisionIndices", "", PropertyType::Array );
		inStream.createProperty<PxCloth,PxVec4>( "RestPositions", "", PropertyType::Array );
	}
#endif
	{ //PxAggregate
		createClassAndDefineProperties<PxAggregate>( inStream );
		inStream.createProperty<PxAggregate,ObjectRef>( "Scene", "parents" );
		definePropertyStruct<PxAggregate,PxAggregateGeneratedValues,PxAggregate>( inStream );
		inStream.createProperty<PxAggregate, ObjectRef>( "Actors", "children", PropertyType::Array );
		inStream.createProperty<PxAggregate, ObjectRef>( "Articulations", "children", PropertyType::Array );
	}
}

// Builds the generated-values struct TValueType from inDatatype and streams it
// as a single property message for instanceId.
template<typename TClassType, typename TValueType, typename TDataType>
static void doSendAllProperties( PvdDataStream& inStream, const TDataType* inDatatype, const void* instanceId )
{
	TValueType theValues( inDatatype );
	inStream.setPropertyMessage( instanceId, theValues );
}

// Streams the full property state of the PxPhysics object: tolerances scale
// plus the SDK version numbers and build-type string (continued below).
void PvdMetaDataBinding::sendAllProperties( PvdDataStream& inStream, const PxPhysics& inPhysics )
{
	PxTolerancesScale theScale( inPhysics.getTolerancesScale() );
	doSendAllProperties<PxPhysics,PxTolerancesScaleGeneratedValues>( inStream, &theScale, &inPhysics );
	inStream.setPropertyValue( &inPhysics, "Version.Major",
(PxU32)PX_PHYSICS_VERSION_MAJOR );
	inStream.setPropertyValue( &inPhysics, "Version.Minor", (PxU32)PX_PHYSICS_VERSION_MINOR );
	inStream.setPropertyValue( &inPhysics, "Version.Bugfix", (PxU32)PX_PHYSICS_VERSION_BUGFIX );
	// Pick a human-readable build-type label from the build configuration.
	// NOTE(review): if PX_CHECKED is defined but neither NDEBUG nor _DEBUG is,
	// no buildType variable is declared and the final setPropertyValue will not
	// compile -- confirm all supported build configs define one of the two.
#if defined(PX_CHECKED)
#if defined(NDEBUG)
	//This is a checked build
	String buildType = "Checked";
#elif defined(_DEBUG)
	//This is a debug build
	String buildType = "Debug";
#endif
#elif defined(PX_PROFILE)
	String buildType = "Profile";
#elif defined(NDEBUG)
	//This is a release build
	String buildType = "Release";
#endif
	inStream.setPropertyValue( &inPhysics, "Version.Build", buildType );
}

// Streams the full property state of a scene.  The live scene is copied back
// into a PxSceneDesc (the reverse of scene creation) so the generated
// PxSceneDescGeneratedValues message can be reused, then the scene is linked
// to its owning PxPhysics instance.
void PvdMetaDataBinding::sendAllProperties( PvdDataStream& inStream, const PxScene& inScene )
{
	PxPhysics& physics( const_cast<PxScene&>( inScene ).getPhysics() );
	PxTolerancesScale theScale;
	PxSceneDesc theDesc( theScale );
	{
		// Mirror every queryable scene setting into the descriptor.
		theDesc.gravity = inScene.getGravity();
		theDesc.simulationEventCallback = inScene.getSimulationEventCallback(PX_DEFAULT_CLIENT);
		theDesc.contactModifyCallback = inScene.getContactModifyCallback();
		theDesc.ccdContactModifyCallback = inScene.getCCDContactModifyCallback();
		theDesc.filterShaderData = inScene.getFilterShaderData();
		theDesc.filterShaderDataSize = inScene.getFilterShaderDataSize();
		theDesc.filterShader = inScene.getFilterShader();
		theDesc.filterCallback = inScene.getFilterCallback();
		theDesc.broadPhaseType = inScene.getBroadPhaseType();
		theDesc.broadPhaseCallback = inScene.getBroadPhaseCallback();
		theDesc.limits = inScene.getLimits();
		theDesc.meshContactMargin = inScene.getMeshContactMargin();
		theDesc.frictionType = inScene.getFrictionType();
		theDesc.contactCorrelationDistance = inScene.getContactCorrelationDistance();
		theDesc.bounceThresholdVelocity = inScene.getBounceThresholdVelocity();
		theDesc.frictionOffsetThreshold = inScene.getFrictionOffsetThreshold();
		theDesc.flags = inScene.getFlags();
		theDesc.cpuDispatcher = inScene.getCpuDispatcher();
		theDesc.gpuDispatcher = inScene.getGpuDispatcher();
		theDesc.spuDispatcher = inScene.getSpuDispatcher();
		theDesc.staticStructure = inScene.getStaticStructure();
		theDesc.dynamicStructure = inScene.getDynamicStructure();
		theDesc.dynamicTreeRebuildRateHint = inScene.getDynamicTreeRebuildRateHint();
		theDesc.userData = inScene.userData;
		theDesc.solverBatchSize = inScene.getSolverBatchSize();
		//theDesc.nbContactDataBlocks = inScene.getNbContactDataBlocksUsed();
		//theDesc.maxNbContactDataBlocks = inScene.getMaxNbContactDataBlocksUsed();
		theDesc.contactReportStreamBufferSize = inScene.getContactReportStreamBufferSize();
		theDesc.ccdMaxPasses = inScene.getCCDMaxPasses();
		//theDesc.simulationOrder = inScene.getSimulationOrder();
		theDesc.wakeCounterResetValue = inScene.getWakeCounterResetValue();
	}
	PxSceneDescGeneratedValues theValues( &theDesc );
	inStream.setPropertyMessage( &inScene, theValues );
	//Create parent/child relationship.
	inStream.setPropertyValue( &inScene, "Physics", (const void*)&physics );
	inStream.pushBackObjectRef( &physics, "Scenes", &inScene );
}

// Opens the per-frame section for a scene and records the timestamp plus the
// elapsed simulation time passed in by the caller.
void PvdMetaDataBinding::sendBeginFrame( PvdDataStream& inStream, const PxScene* inScene, PxReal simulateElapsedTime )
{
	inStream.beginSection( inScene, "frame" );
	inStream.setPropertyValue( inScene, "Timestamp", inScene->getTimestamp() );
	inStream.setPropertyValue( inScene, "SimulateElapsedTime", simulateElapsedTime );
}

// Default converter: plain assignment from source to target element.
template <typename TDataType>
struct NullConverter
{
	void operator()( TDataType& data, const TDataType& src ) { data = src; }
};

// Buffers up to T_NUM_ITEMS converted elements on the stack and streams them
// to a single array property; the buffer is flushed whenever it fills and
// once more on destruction (remainder + endSetPropertyValue).
template <typename TTargetType, PxU32 T_NUM_ITEMS, typename TSourceType = TTargetType, typename Converter = NullConverter<TTargetType> >
class ScopedPropertyValueSender
{
	TTargetType			stack[T_NUM_ITEMS];
	TTargetType*		mCur;
	const TTargetType*	mEnd;
	PvdDataStream&		mStream;

public:
	ScopedPropertyValueSender( PvdDataStream& inStream, const void* inObj, String name )
		: mCur( stack )
		, mEnd( &stack[T_NUM_ITEMS] )
		, mStream( inStream )
	{
		mStream.beginSetPropertyValue( inObj, name, getPvdNamespacedNameForType<TTargetType>() );
	}
~ScopedPropertyValueSender()
	{
		// Flush whatever remains in the stack buffer, then close the property.
		if ( stack != mCur )
		{
			PxU32 size = sizeof (TTargetType) * PxU32( mCur - stack );
			mStream.appendPropertyValueData( DataRef<const PxU8>( (PxU8*)stack, size ) );
		}
		mStream.endSetPropertyValue();
	}

	// Convert one element into the stack buffer; stream the whole buffer when full.
	void append( const TSourceType& data )
	{
		Converter()( *mCur, data );
		if ( mCur < mEnd - 1)
			++mCur;
		else
		{
			mStream.appendPropertyValueData( DataRef<const PxU8>( (PxU8*)stack, sizeof stack ) );
			mCur = stack;
		}
	}

private:
	ScopedPropertyValueSender& operator=(const ScopedPropertyValueSender&);
};

// Clears the scene's "Contacts" array property (no contacts this frame).
void PvdMetaDataBinding::sendContacts( PvdDataStream& inStream, const PxScene& inScene )
{
	inStream.setPropertyValue( &inScene, "Contacts", DataRef<const PxU8>(), getPvdNamespacedNameForType<PvdContact>() );
}

// Converts one internal Sc contact into the PVD wire struct, field by field.
struct PvdContactConverter
{
	void operator()( PvdContact& data, const Sc::ContactIterator::Contact& src )
	{
		data.point = src.point;
		data.axis = src.normal;
		data.shape0 = src.shape0;
		data.shape1 = src.shape1;
		data.separation = src.separation;
		data.normalForce = src.normalForce;
		data.internalFaceIndex0 = src.faceIndex0;
		data.internalFaceIndex1 = src.faceIndex1;
		data.normalForceAvailable = src.normalForceAvailable;
	}
};

// Streams every contact of the frame into the scene's "Contacts" array,
// batching through a 32-element stack buffer.
void PvdMetaDataBinding::sendContacts( PvdDataStream& inStream, const PxScene& inScene, Sc::ContactIterator& inContacts )
{
	ScopedPropertyValueSender<PvdContact, 32, Sc::ContactIterator::Contact, PvdContactConverter> sender( inStream, &inScene, "Contacts" );
	Sc::ContactIterator::Pair* pair;
	Sc::ContactIterator::Contact* contact;
	while( (pair = inContacts.getNextPair()) != NULL )
	{
		while( (contact = pair->getNextContact()) != NULL )
			sender.append( *contact );
	}
}

// Queries the scene's simulation statistics (augmented with GPU mesh-cache
// numbers when GPU PhysX is active) and streams them as one message.
void PvdMetaDataBinding::sendStats( PvdDataStream& inStream, const PxScene* inScene )
{
	PxSimulationStatistics theStats;
	inScene->getSimulationStatistics( theStats );
#if PX_SUPPORT_GPU_PHYSX
	if (((NpScene*)inScene)->getScene().getScScene().getSceneGpu())
	{
		// gpu triangle mesh cache stats
		PxTriangleMeshCacheStatistics triMeshCacheStats = NpPhysics::getInstance().getNpPhysicsGpu().getTriangleMeshCacheStatistics(*inScene);
		theStats.particlesGpuMeshCacheSize = triMeshCacheStats.bytesTotal;
		theStats.particlesGpuMeshCacheUsed = triMeshCacheStats.bytesUsed;
		theStats.particlesGpuMeshCacheHitrate = triMeshCacheStats.percentageHitrate;
	}
#endif
	PxSimulationStatisticsGeneratedValues values( &theStats );
	inStream.setPropertyMessage( inScene, values );
}

// Closes the per-frame section opened by sendBeginFrame.
void PvdMetaDataBinding::sendEndFrame( PvdDataStream& inStream, const PxScene* inScene )
{
	inStream.endSection( inScene, "frame" );
}

// Links inData to ownerPhysics: sets the "Physics" parent ref and appends the
// object to the named child group on the physics instance.
template<typename TDataType>
void addPhysicsGroupProperty( PvdDataStream& inStream, const char* groupName, const TDataType& inData, const PxPhysics& ownerPhysics )
{
	inStream.setPropertyValue( &inData, "Physics", (const void*)&ownerPhysics );
	inStream.pushBackObjectRef( &ownerPhysics, groupName, &inData );
	//Buffer type objects *have* to be flushed directly out once created else scene creation doesn't
	//work.
	inStream.flush();
}

// Reverse of addPhysicsGroupProperty: unlink from the group, then destroy the
// PVD instance.
template<typename TDataType>
void removePhysicsGroupProperty( PvdDataStream& inStream, const char* groupName, const TDataType& inData, const PxPhysics& ownerPhysics )
{
	inStream.removeObjectRef( &ownerPhysics, groupName, &inData );
	inStream.destroyInstance( &inData );
}

// Creates the PVD instance for a material, streams its properties, and files
// it under the physics object's "Materials" group.
void PvdMetaDataBinding::createInstance( PvdDataStream& inStream, const PxMaterial& inMaterial, const PxPhysics& ownerPhysics )
{
	inStream.createInstance( &inMaterial );
	sendAllProperties( inStream, inMaterial );
	addPhysicsGroupProperty( inStream, "Materials", inMaterial, ownerPhysics );
}

// Streams the full generated-values message for a material.
void PvdMetaDataBinding::sendAllProperties( PvdDataStream& inStream, const PxMaterial& inMaterial )
{
	PxMaterialGeneratedValues values( &inMaterial );
	inStream.setPropertyMessage( &inMaterial, values );
}

// Removes a material from the "Materials" group and destroys its PVD instance.
void PvdMetaDataBinding::destroyInstance( PvdDataStream& inStream, const PxMaterial& inMaterial, const PxPhysics& ownerPhysics )
{
	removePhysicsGroupProperty( inStream, "Materials", inMaterial, ownerPhysics );
}

void
PvdMetaDataBinding::sendAllProperties( PvdDataStream& inStream, const PxHeightField& inData )
{
	// Streams a height field's descriptor plus its full sample grid.  The live
	// object is copied back into a PxHeightFieldDesc so the generated message
	// type can be reused.
	PxHeightFieldDesc theDesc;
	//Save the height field to desc.
	theDesc.nbRows = inData.getNbRows();
	theDesc.nbColumns = inData.getNbColumns();
	theDesc.format = inData.getFormat();
	theDesc.samples.stride = inData.getSampleStride();
	theDesc.samples.data = NULL;
	theDesc.thickness = inData.getThickness();
	theDesc.convexEdgeThreshold = inData.getConvexEdgeThreshold();
	theDesc.flags = inData.getFlags();

	// Copy the sample grid (rows * columns cells) into scratch storage.
	PxU32 theCellCount = inData.getNbRows() * inData.getNbColumns();
	PxU32 theSampleStride = sizeof( PxHeightFieldSample );
	PxU32 theSampleBufSize = theCellCount * theSampleStride;
	mBindingData->mTempU8Array.resize( theSampleBufSize );
	PxHeightFieldSample* theSamples = reinterpret_cast< PxHeightFieldSample*> ( mBindingData->mTempU8Array.begin() );
	inData.saveCells( theSamples, theSampleBufSize );
	theDesc.samples.data = theSamples;
	PxHeightFieldDescGeneratedValues values( &theDesc );
	inStream.setPropertyMessage( &inData, values );
	PxHeightFieldSample* theSampleData = reinterpret_cast<PxHeightFieldSample*>(mBindingData->mTempU8Array.begin());
	inStream.setPropertyValue( &inData, "Samples", theSampleData, theCellCount );
}

// Creates the PVD instance for a height field, streams its data, and files it
// under the physics object's "HeightFields" group.
void PvdMetaDataBinding::createInstance( PvdDataStream& inStream, const PxHeightField& inData, const PxPhysics& ownerPhysics )
{
	inStream.createInstance( &inData );
	sendAllProperties(inStream, inData);
	addPhysicsGroupProperty( inStream, "HeightFields", inData, ownerPhysics );
}

// Removes a height field from the "HeightFields" group and destroys it in PVD.
void PvdMetaDataBinding::destroyInstance( PvdDataStream& inStream, const PxHeightField& inData, const PxPhysics& ownerPhysics )
{
	removePhysicsGroupProperty( inStream, "HeightFields", inData, ownerPhysics );
}

// Creates the PVD instance for a convex mesh and streams its mass data,
// vertex buffer, hull polygons and polygon index buffer, then files it under
// the physics object's "ConvexMeshes" group.
void PvdMetaDataBinding::createInstance( PvdDataStream& inStream, const PxConvexMesh& inData, const PxPhysics& ownerPhysics )
{
	inStream.createInstance( &inData );

	PxReal mass;
	PxMat33 localInertia;
	PxVec3 localCom;
	inData.getMassInformation(mass, reinterpret_cast<PxMat33 &>(localInertia), localCom);
	inStream.setPropertyValue( &inData, "Mass", mass );
	inStream.setPropertyValue( &inData, "LocalInertia", localInertia );
	inStream.setPropertyValue( &inData, "LocalCenterOfMass", localCom);

	// update arrays:
	// vertex Array:
	{
		const PxVec3* vertexPtr = inData.getVertices();
		const PxU32 numVertices = inData.getNbVertices();
		inStream.setPropertyValue( &inData, "Points", vertexPtr, numVertices );
	}

	// HullPolyArray:
	PxU16 maxIndices = 0;
	{
		PxU32 numPolygons = inData.getNbPolygons();
		PvdHullPolygonData* tempData = mBindingData->allocateTemp<PvdHullPolygonData>( numPolygons );
		//Get the polygon data stripping the plane equations
		for(PxU32 index = 0; index < numPolygons; index++)
		{
			PxHullPolygon curOut;
			inData.getPolygonData(index, curOut);
			// Track the highest index used so the index buffer below can be
			// streamed with the exact length actually referenced.
			maxIndices = PxMax(maxIndices, PxU16(curOut.mIndexBase + curOut.mNbVerts));
			tempData[index].mIndexBase = curOut.mIndexBase;
			tempData[index].mNumVertices = curOut.mNbVerts;
		}
		inStream.setPropertyValue( &inData, "HullPolygons", tempData, numPolygons );
	}

	// poly index Array:
	{
		const PxU8* indices = inData.getIndexBuffer();
		inStream.setPropertyValue( &inData, "PolygonIndexes", indices, maxIndices );
	}
	addPhysicsGroupProperty( inStream, "ConvexMeshes", inData, ownerPhysics );
}

// Removes a convex mesh from the "ConvexMeshes" group and destroys it in PVD.
void PvdMetaDataBinding::destroyInstance( PvdDataStream& inStream, const PxConvexMesh& inData, const PxPhysics& ownerPhysics )
{
	removePhysicsGroupProperty( inStream, "ConvexMeshes", inData, ownerPhysics );
}

// Creates the PVD instance for a triangle mesh and streams its vertex,
// triangle-index and (optional) per-triangle material buffers, then files it
// under the physics object's "TriangleMeshes" group.
void PvdMetaDataBinding::createInstance( PvdDataStream& inStream, const PxTriangleMesh& inData, const PxPhysics& ownerPhysics )
{
	inStream.createInstance( &inData );
	// 0xffff on the first triangle marks "no per-triangle materials".
	bool hasMatIndex = inData.getTriangleMaterialIndex(0) != 0xffff;
	// update arrays:
	// vertex Array:
	{
		const PxVec3* vertexPtr = inData.getVertices();
		const PxU32 numVertices = inData.getNbVertices();
		inStream.setPropertyValue( &inData, "Points", vertexPtr, numVertices );
	}

	// index Array:
	{
		const bool has16BitIndices = inData.getTriangleMeshFlags() &
PxTriangleMeshFlag::eHAS_16BIT_TRIANGLE_INDICES ? true : false;
		const PxU32 numTriangles = inData.getNbTriangles();
		inStream.setPropertyValue( &inData, "NbTriangles", numTriangles );

		const PxU32 numIndexes = numTriangles * 3;
		const PxU8* trianglePtr = reinterpret_cast<const PxU8*>(inData.getTriangles());
		//We declared this type as a 32 bit integer above.
		//PVD will automatically unsigned-extend data that is smaller than the target type.
		if ( has16BitIndices )
			inStream.setPropertyValue( &inData, "Triangles", reinterpret_cast<const PxU16*>( trianglePtr ), numIndexes );
		else
			inStream.setPropertyValue( &inData, "Triangles", reinterpret_cast<const PxU32*>( trianglePtr ), numIndexes );
	}

	// material Array:
	if(hasMatIndex)
	{
		// One material index per triangle, gathered into scratch storage.
		PxU32 numMaterials = inData.getNbTriangles();
		PxU16* matIndexData = mBindingData->allocateTemp<PxU16>( numMaterials );
		for(PxU32 m = 0; m < numMaterials; m++)
			matIndexData[m] = inData.getTriangleMaterialIndex(m);
		inStream.setPropertyValue( &inData, "MaterialIndices", matIndexData, numMaterials );
	}
	addPhysicsGroupProperty( inStream, "TriangleMeshes", inData, ownerPhysics );
}

// Removes a triangle mesh from the "TriangleMeshes" group and destroys it in PVD.
void PvdMetaDataBinding::destroyInstance( PvdDataStream& inStream, const PxTriangleMesh& inData, const PxPhysics& ownerPhysics )
{
	removePhysicsGroupProperty( inStream, "TriangleMeshes", inData, ownerPhysics );
}

// Registers any mesh/height-field buffers a geometry references with the
// registrar.  The primary template is a no-op; specializations below add a
// reference for each geometry type that owns shared data.
template<typename TDataType>
struct GeometryBufferRegisterOp
{
	void registerBuffers( const TDataType&, BufferRegistrar&) {}
};

template<> struct GeometryBufferRegisterOp<PxConvexMeshGeometry>
{
	void registerBuffers( const PxConvexMeshGeometry geom, BufferRegistrar& registrar ) { registrar.addRef( geom.convexMesh ); }
};

template<> struct GeometryBufferRegisterOp<PxTriangleMeshGeometry>
{
	void registerBuffers( const PxTriangleMeshGeometry geom, BufferRegistrar& registrar ) { registrar.addRef( geom.triangleMesh ); }
};

template<> struct GeometryBufferRegisterOp<PxHeightFieldGeometry>
{
	void registerBuffers( const PxHeightFieldGeometry geom, BufferRegistrar& registrar ) {
		registrar.addRef( geom.heightField );
	}
};

// Creates the PVD instance for a shape's geometry, registers any referenced
// mesh buffers, sends the geometry's generated property values, and
// cross-links the geometry instance with its owning shape.
template<typename TGeneratedValuesType, typename TGeomType>
void sendGeometry( PvdDataStream& inStream, const PxShape& inShape, const TGeomType& geom, BufferRegistrar& registrar )
{
	// The geometry's PVD instance id is derived from the shape's address (+4 so it
	// is distinct from the shape's own id but still stable per shape).
	const void* geomInst = (reinterpret_cast<const PxU8*>( &inShape ) ) + 4;
	inStream.createInstance( getPvdNamespacedNameForType<TGeomType>(), geomInst );
	GeometryBufferRegisterOp<TGeomType>().registerBuffers( geom, registrar );
	TGeneratedValuesType values( &geom );
	inStream.setPropertyMessage( geomInst, values );
	inStream.setPropertyValue( &inShape, "Geometry", geomInst );
	inStream.setPropertyValue( geomInst, "Shape", (const void*)&inShape );
}

// Dispatches on the shape's geometry type and sends the matching geometry data to PVD.
void setGeometry( PvdDataStream& inStream, const PxShape& inObj, BufferRegistrar& registrar )
{
	switch( inObj.getGeometryType() )
	{
#define SEND_PVD_GEOM_TYPE( enumType, geomType, valueType ) \
	case PxGeometryType::enumType: \
	{ \
		Px##geomType geom; \
		inObj.get##geomType( geom ); \
		sendGeometry<valueType>( inStream, inObj, geom, registrar ); \
	} \
	break;
	SEND_PVD_GEOM_TYPE( eSPHERE, SphereGeometry, PxSphereGeometryGeneratedValues );
	//Plane geometries don't have any properties, so this avoids using a property
	//struct for them.
	case PxGeometryType::ePLANE:
	{
		PxPlaneGeometry geom;
		inObj.getPlaneGeometry( geom );
		// Same shape-address-derived instance id as sendGeometry uses.
		const void* geomInst = (reinterpret_cast<const PxU8*>( &inObj ) ) + 4;
		inStream.createInstance( getPvdNamespacedNameForType<PxPlaneGeometry>(), geomInst );
		inStream.setPropertyValue( &inObj, "Geometry", geomInst );
		inStream.setPropertyValue( geomInst, "Shape", (const void*)&inObj );
	}
	break;
	SEND_PVD_GEOM_TYPE( eCAPSULE, CapsuleGeometry, PxCapsuleGeometryGeneratedValues );
	SEND_PVD_GEOM_TYPE( eBOX, BoxGeometry, PxBoxGeometryGeneratedValues );
	SEND_PVD_GEOM_TYPE( eCONVEXMESH, ConvexMeshGeometry, PxConvexMeshGeometryGeneratedValues );
	SEND_PVD_GEOM_TYPE( eTRIANGLEMESH, TriangleMeshGeometry, PxTriangleMeshGeometryGeneratedValues );
	SEND_PVD_GEOM_TYPE( eHEIGHTFIELD, HeightFieldGeometry, PxHeightFieldGeometryGeneratedValues );
#undef SEND_PVD_GEOM_TYPE
	case PxGeometryType::eGEOMETRY_COUNT:
	case PxGeometryType::eINVALID:
	default:
		PX_ASSERT( false );
		break;
	}
}

// Registers each of the shape's materials with the registrar and appends them
// to the shape's "Materials" object-reference array in PVD.
void setMaterials( PvdDataStream& inStream, const PxShape& inObj, BufferRegistrar& registrar, PvdMetaDataBindingData* mBindingData )
{
	PxU32 numMaterials = inObj.getNbMaterials();
	PxMaterial** materialPtr = mBindingData->allocateTemp<PxMaterial*>( numMaterials );
	inObj.getMaterials( materialPtr, numMaterials );
	for ( PxU32 idx = 0; idx < numMaterials; ++idx )
	{
		registrar.addRef( materialPtr[idx] );
		inStream.pushBackObjectRef( &inObj, "Materials", materialPtr[idx] );
	}
}

// Creates (or re-links) a shape instance in PVD under its owning rigid actor.
// Shapes can be shared between several actors, so the owner set is tracked in
// mOwnerActorsMap and the PVD instance itself is only created once.
void PvdMetaDataBinding::createInstance( PvdDataStream& inStream, const PxShape& inObj, const PxRigidActor& owner, BufferRegistrar& registrar )
{
	// The owning actor must already be known to PVD.
	if ( !inStream.isInstanceValid( &owner ) )
		return;

	const OwnerActorsMap::Entry* entry = mBindingData->mOwnerActorsMap.find(&inObj);
	if( entry != NULL )
	{
		// Shape already tracked: just record this actor as an additional owner.
		if( !mBindingData->mOwnerActorsMap[&inObj]->contains( &owner ) )
			mBindingData->mOwnerActorsMap[&inObj]->insert( &owner );
	}
	else
	{
		// First owner of this shape: raw-allocate the owner set storage.
		OwnerActorsValueType* data = reinterpret_cast<OwnerActorsValueType*>(PX_ALLOC(sizeof(OwnerActorsValueType), "mOwnerActorsMapValue"));//( 1 );
		// Placement-new the owner set into the raw allocation and register it.
		OwnerActorsValueType* actors = PX_PLACEMENT_NEW(data, OwnerActorsValueType);
		actors->insert( &owner );
		mBindingData->mOwnerActorsMap.insert( &inObj, actors );
	}

	// Shape instance already exists in PVD (shared shape): only link it to this actor.
	if ( inStream.isInstanceValid( &inObj ) )
	{
		inStream.pushBackObjectRef( &owner, "Shapes", &inObj );
		return;
	}

	inStream.createInstance( &inObj );
	inStream.pushBackObjectRef( &owner, "Shapes", &inObj );
	inStream.setPropertyValue( &inObj, "Actor", (const void*)&owner );
	sendAllProperties( inStream, inObj );
	setGeometry( inStream, inObj, registrar );
	setMaterials( inStream, inObj, registrar, mBindingData );
	// Non-exclusive (shared) shapes are additionally listed in the physics'
	// "SharedShapes" group.
	if ( !inObj.isExclusive() )
		inStream.pushBackObjectRef( &owner.getScene()->getPhysics(), "SharedShapes", &inObj );
}

// Sends the full generated property block for a shape.
void PvdMetaDataBinding::sendAllProperties( PvdDataStream& inStream, const PxShape& inObj )
{
	PxShapeGeneratedValues values( &inObj );
	inStream.setPropertyMessage( &inObj, values );
}

// Destroys and re-creates the PVD geometry instance after a shape's geometry
// was modified, then refreshes the owning actor's properties.
void PvdMetaDataBinding::releaseAndRecreateGeometry( PvdDataStream& inStream, const PxShape& inObj, PxPhysics& /*ownerPhysics*/, BufferRegistrar& registrar )
{
	const void* geomInst = (reinterpret_cast<const PxU8*>( &inObj ) ) + 4;
	inStream.destroyInstance( geomInst );
	//Quick fix for HF modify, PxConvexMesh and PxTriangleMesh need recook, they should always be new if modified
	if( inObj.getGeometryType() == PxGeometryType::eHEIGHTFIELD )
	{
		PxHeightFieldGeometry hfGeom;
		inObj.getHeightFieldGeometry(hfGeom);
		if(inStream.isInstanceValid(hfGeom.heightField))
			sendAllProperties(inStream, *hfGeom.heightField);
	}

	setGeometry( inStream, inObj, registrar );

	//Need update actor cause PVD takes actor-shape as a pair.
	{
		PxRigidActor* actor = inObj.getActor();
		if(actor != NULL)
		{
			if(const PxRigidStatic* rgS = actor->isRigidStatic())
				sendAllProperties( inStream, *rgS );
			else if(const PxRigidDynamic* rgD = actor->isRigidDynamic())
				sendAllProperties( inStream, *rgD );
		}
	}
}

// Rebuilds the shape's "Materials" array in PVD from scratch.
void PvdMetaDataBinding::updateMaterials( PvdDataStream& inStream, const PxShape& inObj, BufferRegistrar& registrar )
{
	//Clear the shape's materials array.
	inStream.setPropertyValue( &inObj, "Materials", DataRef<const PxU8>(), getPvdNamespacedNameForType<ObjectRef>() );
	setMaterials( inStream, inObj, registrar, mBindingData );
}

// Unlinks a shape from one owning actor; the PVD instance (and its geometry
// instance) is only destroyed once no actor references the shape any more.
void PvdMetaDataBinding::destroyInstance( PvdDataStream& inStream, const PxShape& inObj, const PxRigidActor& owner )
{
	if ( inStream.isInstanceValid( &inObj ) )
	{
		inStream.removeObjectRef( &owner, "Shapes", &inObj );

		bool bDestroy = true;
		const OwnerActorsMap::Entry* entry0 = mBindingData->mOwnerActorsMap.find(&inObj);
		if( entry0 != NULL )
		{
			entry0->second->erase( &owner );
			if( entry0->second->size() > 0 )
				bDestroy = false;
			else
			{
				// Last owner gone: tear down the placement-new'd owner set.
				mBindingData->mOwnerActorsMap[&inObj]->~OwnerActorsValueType();
				PX_FREE( mBindingData->mOwnerActorsMap[&inObj]);
				mBindingData->mOwnerActorsMap.erase( &inObj );
			}
		}

		if (bDestroy)
		{
			const void* geomInst = (reinterpret_cast<const PxU8*>( &inObj ) ) + 4;
			inStream.destroyInstance( geomInst );
			inStream.destroyInstance( &inObj );

			// NOTE(review): defensive re-lookup; normally erased above already.
			const OwnerActorsMap::Entry* entry = mBindingData->mOwnerActorsMap.find(&inObj);
			if( entry != NULL )
			{
				entry->second->~OwnerActorsValueType();
				PX_FREE( entry->second );;
				mBindingData->mOwnerActorsMap.erase(&inObj);
			}
		}
	}
}

// Creates an object instance, adds it to a named group on the scene and back-links the scene.
template<typename TDataType>
void addSceneGroupProperty( PvdDataStream& inStream, const char* groupName, const TDataType& inObj, const PxScene& inScene )
{
	inStream.createInstance( &inObj );
	inStream.pushBackObjectRef( &inScene, groupName, &inObj );
	inStream.setPropertyValue( &inObj, "Scene", (const void*)(&inScene) );
}

// Removes an object from a named scene group and destroys its PVD instance.
template<typename TDataType>
void removeSceneGroupProperty( PvdDataStream& inStream, const char* groupName, const TDataType& inObj, const PxScene& inScene )
{
	inStream.removeObjectRef( &inScene, groupName, &inObj );
	inStream.destroyInstance( &inObj );
}

// Creates PVD instances for every shape attached to the given actor.
void sendShapes( PvdMetaDataBinding& binding, PvdDataStream& inStream, const PxRigidActor& inObj, BufferRegistrar& registrar )
{
	InlineArray<PxShape*, 5> shapeData;
	PxU32 nbShapes = inObj.getNbShapes();
	shapeData.resize( nbShapes );
	inObj.getShapes(
		shapeData.begin(), nbShapes );
	for ( PxU32 idx = 0; idx < nbShapes; ++idx )
		binding.createInstance( inStream, *shapeData[idx], inObj, registrar );
}

// Destroys the PVD instances of every shape attached to the given actor.
void releaseShapes( PvdMetaDataBinding& binding, PvdDataStream& inStream, const PxRigidActor& inObj )
{
	InlineArray<PxShape*, 5> shapeData;
	PxU32 nbShapes = inObj.getNbShapes();
	shapeData.resize( nbShapes );
	inObj.getShapes( shapeData.begin(), nbShapes );
	for ( PxU32 idx = 0; idx < nbShapes; ++idx )
		binding.destroyInstance( inStream, *shapeData[idx], inObj );
}

// Registers a rigid static actor (and its shapes) under the scene's "RigidStatics" group.
void PvdMetaDataBinding::createInstance( PvdDataStream& inStream, const PxRigidStatic& inObj, const PxScene& ownerScene, BufferRegistrar& registrar )
{
	addSceneGroupProperty( inStream, "RigidStatics", inObj, ownerScene );
	sendAllProperties( inStream, inObj );
	sendShapes( *this, inStream, inObj, registrar );
}

void PvdMetaDataBinding::sendAllProperties( PvdDataStream& inStream, const PxRigidStatic& inObj )
{
	PxRigidStaticGeneratedValues values( &inObj );
	inStream.setPropertyMessage( &inObj, values );
}

void PvdMetaDataBinding::destroyInstance( PvdDataStream& inStream, const PxRigidStatic& inObj, const PxScene& ownerScene )
{
	releaseShapes( *this, inStream, inObj );
	removeSceneGroupProperty( inStream, "RigidStatics", inObj, ownerScene );
}

// Registers a rigid dynamic actor (and its shapes) under the scene's "RigidDynamics" group.
void PvdMetaDataBinding::createInstance( PvdDataStream& inStream, const PxRigidDynamic& inObj, const PxScene& ownerScene, BufferRegistrar& registrar )
{
	addSceneGroupProperty( inStream, "RigidDynamics", inObj, ownerScene );
	sendAllProperties( inStream, inObj );
	sendShapes( *this, inStream, inObj,registrar );
}

void PvdMetaDataBinding::sendAllProperties( PvdDataStream& inStream, const PxRigidDynamic& inObj )
{
	PxRigidDynamicGeneratedValues values( &inObj );
	inStream.setPropertyMessage( &inObj, values );
}

void PvdMetaDataBinding::destroyInstance( PvdDataStream& inStream, const PxRigidDynamic& inObj, const PxScene& ownerScene )
{
	releaseShapes( *this, inStream, inObj );
	removeSceneGroupProperty( inStream, "RigidDynamics", inObj, ownerScene );
}

// Appends a link to a parent's "Links" array and back-links the parent on the child.
void addChild( PvdDataStream& inStream, const void* inParent, const PxArticulationLink& inChild )
{
	inStream.pushBackObjectRef( inParent, "Links", &inChild );
	inStream.setPropertyValue( &inChild, "Parent", inParent );
}

// Registers an articulation, creates all of its links, then wires up the
// parent/child link graph.
void PvdMetaDataBinding::createInstance( PvdDataStream& inStream, const PxArticulation& inObj, const PxScene& ownerScene, BufferRegistrar& registrar )
{
	addSceneGroupProperty( inStream, "Articulations", inObj, ownerScene );
	sendAllProperties( inStream, inObj );
	PxU32 numLinks = inObj.getNbLinks();
	mBindingData->mArticulationLinks.resize( numLinks );
	inObj.getLinks( mBindingData->mArticulationLinks.begin(), numLinks );
	//From <NAME>:
	/*
	No, there can only be one root, and in all the code I wrote (which is not 100% of the HL code for articulations),
	the index of a child is always > the index of the parent.
	*/
	//Create all the links
	for ( PxU32 idx = 0; idx < numLinks; ++idx )
	{
		if(!inStream.isInstanceValid(mBindingData->mArticulationLinks[idx]))
			createInstance( inStream, *mBindingData->mArticulationLinks[idx], registrar );
	}
	//Setup the link graph
	for ( PxU32 idx = 0; idx < numLinks; ++idx )
	{
		PxArticulationLink* link = mBindingData->mArticulationLinks[idx];
		// Link 0 is the root and is parented to the articulation itself.
		if ( idx == 0 )
			addChild( inStream, &inObj, *link );
		PxU32 numChildren = link->getNbChildren();
		PxArticulationLink** children = mBindingData->allocateTemp<PxArticulationLink*>( numChildren );
		link->getChildren( children, numChildren );
		for ( PxU32 i = 0; i < numChildren; ++i )
			addChild( inStream, link, *children[i] );
	}
}

void PvdMetaDataBinding::sendAllProperties( PvdDataStream& inStream, const PxArticulation& inObj )
{
	PxArticulationGeneratedValues values( &inObj );
	inStream.setPropertyMessage( &inObj, values );
}

void PvdMetaDataBinding::destroyInstance( PvdDataStream& inStream, const PxArticulation& inObj, const PxScene& ownerScene )
{
	removeSceneGroupProperty( inStream, "Articulations", inObj, ownerScene );
}

void PvdMetaDataBinding::createInstance( PvdDataStream& inStream,
	// (Continuation of the signature begun on the previous chunk.)
	// Creates a PVD instance for an articulation link together with its inbound
	// joint (if any), its generated properties and its shapes.
	const PxArticulationLink& inObj, BufferRegistrar& registrar )
{
	inStream.createInstance( &inObj );
	PxArticulationJoint* joint( inObj.getInboundJoint() );
	// The root link has no inbound joint.
	if ( joint )
	{
		inStream.createInstance( joint );
		inStream.setPropertyValue( &inObj, "InboundJoint", (const void*)joint );
		inStream.setPropertyValue( joint, "Link", (const void*)&inObj );
		sendAllProperties( inStream, *joint );
	}
	sendAllProperties( inStream, inObj );
	sendShapes( *this, inStream, inObj, registrar );
}

void PvdMetaDataBinding::sendAllProperties( PvdDataStream& inStream, const PxArticulationLink& inObj )
{
	PxArticulationLinkGeneratedValues values( &inObj );
	inStream.setPropertyMessage( &inObj, values );
}

// Destroys a link's PVD instance together with its inbound joint and shapes.
void PvdMetaDataBinding::destroyInstance( PvdDataStream& inStream, const PxArticulationLink& inObj )
{
	PxArticulationJoint* joint( inObj.getInboundJoint() );
	if ( joint )
		inStream.destroyInstance( joint );
	releaseShapes( *this, inStream, inObj );
	inStream.destroyInstance( &inObj );
}

//These are created as part of the articulation link's creation process, so outside entities don't need to
//create them.
void PvdMetaDataBinding::sendAllProperties( PvdDataStream& inStream, const PxArticulationJoint& inObj )
{
	PxArticulationJointGeneratedValues values( &inObj );
	inStream.setPropertyMessage( &inObj, values );
}

// Forwards a scene origin shift to PVD so debugger positions stay in sync.
void PvdMetaDataBinding::originShift( PvdDataStream& inStream, const PxScene* inScene, PxVec3 shift )
{
	inStream.originShift(inScene, shift);
}

// Property visitor that copies per-particle buffers out of a locked particle
// read-data block and sends them to PVD as flat arrays, compacting away
// invalid particles via the valid-particle bitmap.
template<typename TReadDataType>
struct ParticleFluidUpdater
{
	TReadDataType& mData;      // locked particle read data (source buffers)
	Array<PxU8>& mTempU8Array; // scratch buffer the compacted copy is built in
	PvdDataStream& mStream;
	const void* mInstanceId;   // PVD instance the arrays are attached to
	PxU32 mRdFlags;            // read-data flags gating which buffers are enabled

	ParticleFluidUpdater( TReadDataType& d, PvdDataStream& s, const void* id, PxU32 flags, Array<PxU8>& tempArray )
		: mData( d )
		, mTempU8Array( tempArray )
		, mStream( s )
		, mInstanceId( id )
		, mRdFlags( flags )
	{
	}

	template<PxU32 TKey, typename TObjectType, typename TPropertyType, PxU32 TEnableFlag>
	void handleBuffer( const PxBufferPropertyInfo< TKey, TObjectType, PxStrideIterator<const TPropertyType>, TEnableFlag >& inProp, NamespacedName datatype )
	{
		PxU32 nbValidParticles = mData.nbValidParticles;
		PxU32 validParticleRange = mData.validParticleRange;
		PxStrideIterator<const TPropertyType> iterator( inProp.get( &mData ) );
		const PxU32* validParticleBitmap = mData.validParticleBitmap;

		// Nothing to send if there are no particles, the buffer is absent, or
		// this property is disabled by the read-data flags.
		if( nbValidParticles == 0 || iterator.ptr() == NULL || inProp.isEnabled(mRdFlags) == false )
			return;

		// setup the pvd array
		DataRef<const PxU8> propData;
		mTempU8Array.resize(nbValidParticles * sizeof(TPropertyType));
		TPropertyType* tmpArray = reinterpret_cast<TPropertyType*>(mTempU8Array.begin());
		propData = DataRef<const PxU8>( mTempU8Array.begin(), mTempU8Array.size() );

		if(nbValidParticles == validParticleRange)
		{
			// Dense case: every slot in the range is valid, straight copy.
			for ( PxU32 idx = 0; idx < nbValidParticles; ++idx )
				tmpArray[idx] = iterator[idx];
		}
		else
		{
			PxU32 tIdx = 0;
			// iterate over bitmap and send all valid particles
			for (PxU32 w = 0; w <= (validParticleRange-1) >> 5; w++)
			{
				// b &= b-1 clears the lowest set bit each iteration.
				for (PxU32 b = validParticleBitmap[w]; b; b &= b-1)
				{
					tmpArray[tIdx++] = iterator[w<<5|Ps::lowestSetBit(b)];
				}
			}
			PX_ASSERT(tIdx == nbValidParticles);
		}
		mStream.setPropertyValue( mInstanceId, inProp.mName, propData, datatype );
	}

	template<PxU32 TKey, typename TObjectType, typename TPropertyType, PxU32 TEnableFlag>
	void handleBuffer( const PxBufferPropertyInfo< TKey, TObjectType, PxStrideIterator<const TPropertyType>, TEnableFlag >& inProp )
	{
		handleBuffer( inProp, getPvdNamespacedNameForType<TPropertyType>() );
	}

	template<PxU32 TKey, typename TObjectType, typename TEnumType, typename TStorageType, PxU32 TEnableFlag>
	void handleFlagsBuffer( const PxBufferPropertyInfo< TKey, TObjectType, PxStrideIterator<const PxFlags<TEnumType, TStorageType> >, TEnableFlag >& inProp, const PxU32ToName* )
	{
		// Flags buffers are transmitted using their underlying storage type.
		handleBuffer( inProp, getPvdNamespacedNameForType<TStorageType>() );
	}

private:
	ParticleFluidUpdater& operator=(const ParticleFluidUpdater&);
};

#if PX_USE_PARTICLE_SYSTEM_API
// Registers a particle system under the scene and sends an initial snapshot of
// its particle buffers (when the read data can be locked).
void PvdMetaDataBinding::createInstance( PvdDataStream& inStream, const PxParticleSystem& inObj, const PxScene& ownerScene )
{
	addSceneGroupProperty( inStream, "ParticleSystems", inObj, ownerScene );
	sendAllProperties( inStream, inObj );
	PxParticleReadData* readData( const_cast<PxParticleSystem&>( inObj ).lockParticleReadData() );
	if ( readData )
	{
		PxU32 readFlags = inObj.getParticleReadDataFlags();
		sendArrays( inStream, inObj, *readData, readFlags );
		readData->unlock();
	}
}

void PvdMetaDataBinding::sendAllProperties( PvdDataStream& inStream, const PxParticleSystem& inObj )
{
	PxParticleSystemGeneratedValues values( &inObj );
	inStream.setPropertyMessage( &inObj, values );
}

// Sends particle counts, the valid-particle bitmap and all enabled
// per-particle buffers for a particle system.
void PvdMetaDataBinding::sendArrays( PvdDataStream& inStream, const PxParticleSystem& inObj, PxParticleReadData& inData, PxU32 inFlags )
{
	inStream.setPropertyValue( &inObj, "NbParticles", inData.nbValidParticles);
	inStream.setPropertyValue( &inObj, "ValidParticleRange", inData.validParticleRange);

	// Bitmap length is one PxU32 word per 32 particles in the valid range.
	if(inData.validParticleRange > 0)
		inStream.setPropertyValue( &inObj, "ValidParticleBitmap", inData.validParticleBitmap, (inData.validParticleRange >> 5)+1 );
	// Compact and transmit each enabled per-particle buffer.
	ParticleFluidUpdater<PxParticleReadData> theUpdater( inData, inStream, (const PxActor*)&inObj, inFlags, mBindingData->mTempU8Array );
	visitParticleSystemBufferProperties( makePvdPropertyFilter( theUpdater ) );
}

void PvdMetaDataBinding::destroyInstance( PvdDataStream& inStream, const PxParticleSystem& inObj, const PxScene& ownerScene )
{
	removeSceneGroupProperty( inStream, "ParticleSystems", inObj, ownerScene );
}

// Registers a particle fluid under the scene and sends an initial snapshot of
// its particle and fluid buffers (when the read data can be locked).
void PvdMetaDataBinding::createInstance( PvdDataStream& inStream, const PxParticleFluid& inObj, const PxScene& ownerScene )
{
	addSceneGroupProperty( inStream, "ParticleFluids", inObj, ownerScene );
	sendAllProperties( inStream, inObj );
	PxParticleFluidReadData* readData( const_cast<PxParticleFluid&>( inObj ).lockParticleFluidReadData() );
	if ( readData )
	{
		PxU32 readFlags = inObj.getParticleReadDataFlags();
		sendArrays( inStream, inObj, *readData, readFlags );
		readData->unlock();
	}
}

void PvdMetaDataBinding::sendAllProperties( PvdDataStream& inStream, const PxParticleFluid& inObj )
{
	PxParticleFluidGeneratedValues values( &inObj );
	inStream.setPropertyMessage( &inObj, values );
}

// Same as the particle-system variant, but additionally visits the
// fluid-specific buffer properties.
void PvdMetaDataBinding::sendArrays( PvdDataStream& inStream, const PxParticleFluid& inObj, PxParticleFluidReadData& inData, PxU32 inFlags )
{
	inStream.setPropertyValue( &inObj, "NbParticles", inData.nbValidParticles);
	inStream.setPropertyValue( &inObj, "ValidParticleRange", inData.validParticleRange);

	if(inData.validParticleRange > 0)
		inStream.setPropertyValue( &inObj, "ValidParticleBitmap", inData.validParticleBitmap, (inData.validParticleRange >> 5)+1 );

	ParticleFluidUpdater<PxParticleFluidReadData> theUpdater( inData, inStream, (const PxActor*)&inObj, inFlags, mBindingData->mTempU8Array );
	visitParticleSystemBufferProperties( makePvdPropertyFilter( theUpdater ) );
	visitParticleFluidBufferProperties( makePvdPropertyFilter( theUpdater ) );
}

void PvdMetaDataBinding::destroyInstance( PvdDataStream& inStream, const PxParticleFluid& inObj, const PxScene& ownerScene )
{
	removeSceneGroupProperty( inStream, "ParticleFluids", inObj, ownerScene );
}
#endif // PX_USE_PARTICLE_SYSTEM_API

// Sends a pose/velocity update block for every actor that is awake (or whose
// sleep state just changed), and maintains the known-sleeping set so sleeping
// bodies are not re-sent every frame.
template<typename TBlockType, typename TActorType, typename TOperator>
void updateActor( PvdDataStream& inStream, TActorType** actorGroup, PxU32 numActors, TOperator sleepingOp, PvdMetaDataBindingData& bindingData )
{
	TBlockType theBlock;
	if ( numActors == 0 )
		return;
	for ( PxU32 idx = 0; idx < numActors; ++idx )
	{
		TActorType* theActor( actorGroup[idx] );
		bool sleeping = sleepingOp( theActor, theBlock );
		bool wasSleeping = bindingData.mSleepingActors.contains( theActor );

		// Send when awake, or exactly once on a sleep-state transition.
		if ( sleeping == false || sleeping != wasSleeping )
		{
			theBlock.GlobalPose = theActor->getGlobalPose();
			theBlock.AngularVelocity = theActor->getAngularVelocity();
			theBlock.LinearVelocity = theActor->getLinearVelocity();
			inStream.sendPropertyMessageFromGroup( theActor, theBlock );
			if ( sleeping != wasSleeping )
			{
				if ( sleeping )
					bindingData.mSleepingActors.insert( theActor );
				else
					bindingData.mSleepingActors.erase( theActor );
			}
		}
	}
}

// Queries a rigid dynamic's sleep state and records it in the update block.
struct RigidDynamicUpdateOp
{
	bool operator()( PxRigidDynamic* actor, PxRigidDynamicUpdateBlock& block )
	{
		bool sleeping = actor->isSleeping();
		block.IsSleeping = sleeping;
		return sleeping;
	}
};

// Links share their owning articulation's sleep state, captured at construction.
struct ArticulationLinkUpdateOp
{
	bool sleeping;
	ArticulationLinkUpdateOp( bool s ) : sleeping( s ){}
	bool operator()( PxArticulationLink*, PxArticulationLinkUpdateBlock& )
	{
		return sleeping;
	}
};

// Per-frame update: streams pose/velocity blocks for all rigid dynamics and
// articulation links in the scene.
void PvdMetaDataBinding::updateDynamicActorsAndArticulations( PvdDataStream& inStream, const PxScene* inScene, PvdVisualizer* linkJointViz )
{
	// Guards the wire format of the grouped property message.
	PX_COMPILE_TIME_ASSERT( sizeof( PxRigidDynamicUpdateBlock ) == 14 * 4 );
	{
		PxU32 actorCount = inScene->getNbActors( PxActorTypeFlag::eRIGID_DYNAMIC );
		if ( actorCount )
		{
			inStream.beginPropertyMessageGroup<PxRigidDynamicUpdateBlock>();
			mBindingData->mActors.resize( actorCount );
			PxActor** theActors = mBindingData->mActors.begin();
			inScene->getActors( PxActorTypeFlag::eRIGID_DYNAMIC, theActors, actorCount );
			updateActor<PxRigidDynamicUpdateBlock>( inStream, reinterpret_cast<PxRigidDynamic**>( theActors ), actorCount, RigidDynamicUpdateOp(), *mBindingData );
			inStream.endPropertyMessageGroup();
		}
	}
	{
		PxU32 articulationCount = inScene->getNbArticulations();
		if ( articulationCount )
		{
			mBindingData->mArticulations.resize( articulationCount );
			PxArticulation** firstArticulation = mBindingData->mArticulations.begin();
			PxArticulation** lastArticulation = firstArticulation + articulationCount;
			inScene->getArticulations( firstArticulation, articulationCount );
			inStream.beginPropertyMessageGroup<PxArticulationLinkUpdateBlock>();
			for ( ; firstArticulation < lastArticulation; ++firstArticulation )
			{
				PxU32 linkCount = (*firstArticulation)->getNbLinks();
				bool sleeping = (*firstArticulation)->isSleeping();
				if ( linkCount )
				{
					mBindingData->mArticulationLinks.resize( linkCount );
					PxArticulationLink** theLink = mBindingData->mArticulationLinks.begin();
					(*firstArticulation)->getLinks( theLink, linkCount );
					updateActor<PxArticulationLinkUpdateBlock>( inStream, theLink, linkCount, ArticulationLinkUpdateOp( sleeping ), *mBindingData );
					// Optionally let the caller draw per-link joint debug visualization.
					if ( linkJointViz )
					{
						for ( PxU32 idx = 0; idx < linkCount; ++idx )
							linkJointViz->visualize( *theLink[idx] );
					}
				}
			}
			inStream.endPropertyMessageGroup();
			// Second pass: update each articulation's own IsSleeping property.
			firstArticulation = mBindingData->mArticulations.begin();
			for ( ; firstArticulation < lastArticulation; ++firstArticulation )
				inStream.setPropertyValue( *firstArticulation, "IsSleeping", (*firstArticulation)->isSleeping() );
		}
	}
}

// Property visitor that forwards an object's collection-valued properties to
// PVD as flat byte arrays; scalar and flags properties are ignored.
template<typename TObjType>
struct CollectionOperator
{
	Array<PxU8>& mTempArray; // scratch buffer the collection is copied into
	const TObjType& mObject;
	PvdDataStream& mStream;

	CollectionOperator( Array<PxU8>& ary, const TObjType& obj, PvdDataStream& stream )
		: mTempArray( ary ), mObject( obj ), mStream( stream ) {}
	void pushName( const char* ) {}
	void popName() {}
	template< typename TAccessor > void simpleProperty(PxU32 /*key*/, const TAccessor& ) {}
	template< typename TAccessor > void flagsProperty(PxU32 /*key*/, const TAccessor&, const PxU32ToName* ) {}

	// Copies the collection into the scratch buffer and sends it with the given
	// wire datatype. countMultiplier scales the element count passed to get().
	template<typename TColType, typename TDataType, typename TCollectionProp >
	void handleCollection( const TCollectionProp& prop, NamespacedName dtype, PxU32 countMultiplier = 1 )
	{
		PxU32 count = prop.size( &mObject );
		mTempArray.resize( count * sizeof( TDataType ) );
		TColType* start = reinterpret_cast<TColType*>( mTempArray.begin() );
		prop.get( &mObject, start, count * countMultiplier );
		mStream.setPropertyValue( &mObject, prop.mName, DataRef<const PxU8>( mTempArray.begin(), mTempArray.size() ), dtype );
	}

	template< PxU32 TKey, typename TObject, typename TColType >
	void handleCollection( const PxReadOnlyCollectionPropertyInfo<TKey,TObject,TColType>& prop )
	{
		handleCollection<TColType,TColType>( prop, getPvdNamespacedNameForType<TColType>() );
	}
	//Enumerations or bitflags.
	template< PxU32 TKey, typename TObject, typename TColType >
	void handleCollection( const PxReadOnlyCollectionPropertyInfo<TKey,TObject,TColType>& prop, const PxU32ToName* )
	{
		// Enum/flag collections are transmitted as PxU32.
		PX_COMPILE_TIME_ASSERT( sizeof( TColType ) == sizeof( PxU32 ) );
		handleCollection<TColType,PxU32>( prop, getPvdNamespacedNameForType<PxU32>() );
	}
private:
	CollectionOperator& operator=(const CollectionOperator&);
};

#if PX_USE_CLOTH_API
// Cloth-fabric variant of CollectionOperator: the cloth API expects buffer
// sizes in elements rather than bytes, so handleCollection is re-implemented.
struct PxClothFabricCollectionOperator : CollectionOperator<PxClothFabric>
{
	PxClothFabricCollectionOperator( Array<PxU8>& ary, const PxClothFabric& obj, PvdDataStream& stream )
		: CollectionOperator<PxClothFabric>( ary, obj, stream ) {}

	template< PxU32 TKey, typename TObject, typename TColType >
	void handleCollection( const PxReadOnlyCollectionPropertyInfo<TKey,TObject,TColType>& prop )
	{
		//CollectionOperator<PxClothFabric>::handleCollection<TColType,TColType>( prop, getPvdNamespacedNameForType<TColType>(), sizeof( TColType ) );
		// have to duplicate here because the cloth api expects buffer sizes
		// in the number of elements, not the number of bytes
		PxU32 count = prop.size( &mObject );
		mTempArray.resize( count * sizeof( TColType ) );
		TColType* start =
			reinterpret_cast<TColType*>( mTempArray.begin() );
		prop.get( &mObject, start, count );
		mStream.setPropertyValue( &mObject, prop.mName, DataRef<const PxU8>( mTempArray.begin(), mTempArray.size() ), getPvdNamespacedNameForType<TColType>() );
	}

	//Enumerations or bitflags.
	template< PxU32 TKey, typename TObject, typename TColType >
	void handleCollection( const PxReadOnlyCollectionPropertyInfo<TKey,TObject,TColType>& prop, const PxU32ToName* )
	{
		PX_COMPILE_TIME_ASSERT( sizeof( TColType ) == sizeof( PxU32 ) );
		CollectionOperator<PxClothFabric>::handleCollection<TColType,PxU32>( prop, getPvdNamespacedNameForType<PxU32>() );
	}
private:
	PxClothFabricCollectionOperator& operator=(const PxClothFabricCollectionOperator&);
};

// Registers a cloth fabric under the physics' "ClothFabrics" group and sends
// all of its properties.
void PvdMetaDataBinding::createInstance( PvdDataStream& inStream, const PxClothFabric& fabric, const PxPhysics& ownerPhysics )
{
	inStream.createInstance( &fabric );
	addPhysicsGroupProperty( inStream, "ClothFabrics", fabric, ownerPhysics );
	sendAllProperties( inStream, fabric );
}

// Sends the fabric's collection properties, generated values and phase array.
void PvdMetaDataBinding::sendAllProperties( PvdDataStream& inStream, const PxClothFabric& fabric )
{
	PxClothFabricCollectionOperator op( mBindingData->mTempU8Array, fabric, inStream );
	visitInstancePvdProperties<PxClothFabric>( op );

	PxClothFabricGeneratedValues values( &fabric );
	inStream.setPropertyMessage( &fabric, values );

	PxU32 count = fabric.getNbPhases();
	PxClothFabricPhase* phases = mBindingData->allocateTemp<PxClothFabricPhase>( count );
	if ( count )
		fabric.getPhases( phases, count );
	inStream.setPropertyValue( &fabric, "Phases", DataRef<const PxU8>( (PxU8*)phases, sizeof(PxClothFabricPhase)*count), getPvdNamespacedNameForType<PxClothFabricPhase>() );
}

void PvdMetaDataBinding::destroyInstance( PvdDataStream& inStream, const PxClothFabric& fabric, const PxPhysics& ownerPhysics )
{
	removePhysicsGroupProperty( inStream, "ClothFabrics", fabric, ownerPhysics );
}

// Registers a cloth under the scene's "Cloths" group, cross-links it with its
// fabric (if any) and sends all cloth data.
void PvdMetaDataBinding::createInstance( PvdDataStream& inStream, const PxCloth& cloth, const PxScene& ownerScene, BufferRegistrar& registrar )
{
	addSceneGroupProperty( inStream, "Cloths", cloth, ownerScene );
	PxClothFabric* fabric = cloth.getFabric();
	if ( fabric != NULL )
	{
		registrar.addRef( cloth.getFabric() );
		inStream.setPropertyValue( &cloth, "Fabric", (const void*)fabric );
		inStream.pushBackObjectRef( fabric, "Cloths", &cloth );
	}
	sendAllProperties( inStream, cloth );
}

// Sends every category of cloth data: simple properties plus each of the
// per-particle/constraint/collision arrays.
void PvdMetaDataBinding::sendAllProperties( PvdDataStream& inStream, const PxCloth& cloth )
{
	sendSimpleProperties( inStream, cloth );
	sendParticleAccelerations( inStream, cloth );
	sendMotionConstraints( inStream, cloth );
	sendCollisionSpheres( inStream, cloth );
	// Second call (third argument true) also transmits the capsule index pairs.
	sendCollisionSpheres( inStream, cloth, true );
	sendCollisionTriangles( inStream, cloth );
	sendVirtualParticles( inStream, cloth );
	sendSeparationConstraints( inStream, cloth );
	sendSelfCollisionIndices( inStream, cloth );
	sendRestPositions( inStream, cloth );
}

void PvdMetaDataBinding::sendSimpleProperties( PvdDataStream& inStream, const PxCloth& cloth )
{
	PxClothGeneratedValues values( &cloth );
	inStream.setPropertyMessage( &cloth, values );
}

// Sends the cloth's motion constraints as position+radius records.
void PvdMetaDataBinding::sendMotionConstraints( PvdDataStream& inStream, const PxCloth& cloth )
{
	PxU32 count = cloth.getNbMotionConstraints();
	PxClothParticleMotionConstraint* constraints = mBindingData->allocateTemp<PxClothParticleMotionConstraint>( count );
	if ( count )
		cloth.getMotionConstraints( constraints );
	inStream.setPropertyValue( &cloth, "MotionConstraints", mBindingData->tempToRef(), getPvdNamespacedNameForType<PvdPositionAndRadius>() );
}

// Sends the cloth's rest positions (one PxVec4 per particle).
void PvdMetaDataBinding::sendRestPositions( PvdDataStream& inStream, const PxCloth& cloth )
{
	PxU32 count = cloth.getNbRestPositions();
	PxVec4* positions = mBindingData->allocateTemp<PxVec4>( count );
	if ( count )
		cloth.getRestPositions( positions );
	inStream.setPropertyValue( &cloth, "RestPositions", mBindingData->tempToRef(), getPvdNamespacedNameForType<PxVec4>() );
}

void PvdMetaDataBinding::sendParticleAccelerations( PvdDataStream& inStream,
	const PxCloth& cloth )
{
	// Sends per-particle accelerations (one PxVec4 per entry).
	PxU32 count = cloth.getNbParticleAccelerations();
	PxVec4* accelerations = mBindingData->allocateTemp<PxVec4>( count );
	if ( count )
		cloth.getParticleAccelerations( accelerations );
	inStream.setPropertyValue( &cloth, "ParticleAccelerations", mBindingData->tempToRef(), getPvdNamespacedNameForType<PxVec4>() );
}

// Sends the cloth's self-collision particle indices.
void PvdMetaDataBinding::sendSelfCollisionIndices( PvdDataStream& inStream, const PxCloth& cloth )
{
	PxU32 count = cloth.getNbSelfCollisionIndices();
	PxU32* selfCollisionIndices = mBindingData->allocateTemp<PxU32>( count );
	if ( count )
		cloth.getSelfCollisionIndices( selfCollisionIndices );
	inStream.setPropertyValue( &cloth, "SelfCollisionIndices", mBindingData->tempToRef(), getPvdNamespacedNameForType<PxU32>() );
}

// Fetches the cloth's full collision data into one scratch buffer laid out as
// [spheres | capsule index pairs | planes | convex masks | triangles], then
// sends the sphere section (and, when sendPairs is set, the capsule pairs).
void PvdMetaDataBinding::sendCollisionSpheres( PvdDataStream& inStream, const PxCloth& cloth, bool sendPairs )
{
	PxU32 numSpheres = cloth.getNbCollisionSpheres();
	// Two sphere indices per capsule.
	PxU32 numIndices = 2*cloth.getNbCollisionCapsules();
	PxU32 numPlanes = cloth.getNbCollisionPlanes();
	PxU32 numConvexes = cloth.getNbCollisionConvexes();
	PxU32 numTriangles = cloth.getNbCollisionTriangles();

	PxU32 sphereBytes = numSpheres * sizeof( PxClothCollisionSphere );
	PxU32 pairBytes = numIndices * sizeof( PxU32 );
	PxU32 planesBytes = numPlanes * sizeof(PxClothCollisionPlane);
	PxU32 convexBytes = numConvexes * sizeof(PxU32);
	PxU32 triangleBytes = numTriangles * sizeof(PxClothCollisionTriangle);

	mBindingData->mTempU8Array.resize( sphereBytes + pairBytes + planesBytes + convexBytes + triangleBytes);
	PxU8* bufferStart = mBindingData->mTempU8Array.begin();
	// Carve the single scratch buffer into the five typed sections.
	PxClothCollisionSphere* spheresBuffer = reinterpret_cast<PxClothCollisionSphere*>( mBindingData->mTempU8Array.begin() );
	PxU32* indexBuffer = reinterpret_cast<PxU32*>(spheresBuffer + numSpheres);
	PxClothCollisionPlane* planeBuffer = reinterpret_cast<PxClothCollisionPlane*>(indexBuffer + numIndices);
	PxU32* convexBuffer = reinterpret_cast<PxU32*>(planeBuffer + numPlanes);
	PxClothCollisionTriangle* trianglesBuffer = reinterpret_cast<PxClothCollisionTriangle*>(convexBuffer + numConvexes);
	cloth.getCollisionData( spheresBuffer, indexBuffer, planeBuffer, convexBuffer, trianglesBuffer );

	inStream.setPropertyValue( &cloth, "CollisionSpheres", DataRef<const PxU8>( bufferStart, sphereBytes ), getPvdNamespacedNameForType<PvdPositionAndRadius>() );
	if ( sendPairs )
		inStream.setPropertyValue( &cloth, "CollisionSpherePairs", DataRef<const PxU8>( bufferStart + sphereBytes, pairBytes ), getPvdNamespacedNameForType<PxU32>() );
}

// begin code to generate triangle mesh from cloth convex planes
namespace
{
	// Determinant of the 4x4 matrix whose rows are the four homogeneous
	// vectors (xyz|w); used by HalfedgeMesh::visible as an orientation test.
	PxReal det(PxVec4 v0, PxVec4 v1, PxVec4 v2, PxVec4 v3)
	{
		const PxVec3& d0 = reinterpret_cast<const PxVec3&>(v0);
		const PxVec3& d1 = reinterpret_cast<const PxVec3&>(v1);
		const PxVec3& d2 = reinterpret_cast<const PxVec3&>(v2);
		const PxVec3& d3 = reinterpret_cast<const PxVec3&>(v3);

		return v0.w * d1.cross(d2).dot(d3) - v1.w * d0.cross(d2).dot(d3) + v2.w * d0.cross(d1).dot(d3) - v3.w * d0.cross(d1).dot(d2);
	}

	// Intersection point of three planes given as plane equations (xyz = normal, w = d).
	PxVec3 intersect(PxVec4 p0, PxVec4 p1, PxVec4 p2)
	{
		const PxVec3& d0 = reinterpret_cast<const PxVec3&>(p0);
		const PxVec3& d1 = reinterpret_cast<const PxVec3&>(p1);
		const PxVec3& d2 = reinterpret_cast<const PxVec3&>(p2);
		return (p0.w * d1.cross(d2) + p1.w * d2.cross(d0) + p2.w * d0.cross(d1)) / d0.dot(d2.cross(d1));
	}

	// Sentinel meaning "no vertex/face/halfedge".
	const PxU16 sInvalid = PxU16(-1);

	// restriction: only supports a single patch per vertex.
struct HalfedgeMesh { struct Halfedge { Halfedge(PxU16 vertex = sInvalid, PxU16 face = sInvalid, PxU16 next = sInvalid, PxU16 prev = sInvalid) : mVertex(vertex), mFace(face), mNext(next), mPrev(prev) {} PxU16 mVertex; // to PxU16 mFace; // left PxU16 mNext; // ccw PxU16 mPrev; // cw }; PxU16 findHalfedge(PxU16 v0, PxU16 v1) { PxU16 h = mVertices[v0], start = h; while(h != sInvalid && mHalfedges[h].mVertex != v1) { h = mHalfedges[PxU32(h ^ 1)].mNext; if(h == start) return sInvalid; } return h; } void connect(PxU16 h0, PxU16 h1) { mHalfedges[h0].mNext = h1; mHalfedges[h1].mPrev = h0; } void addTriangle(PxU16 v0, PxU16 v1, PxU16 v2) { // add new vertices PxU16 n = PxU16(PxMax(v0, PxMax(v1, v2))+1); if(mVertices.size() < n) mVertices.resize(n, sInvalid); // collect halfedges, prev and next of triangle PxU16 verts[] = { v0, v1, v2 }; PxU16 handles[3], prev[3], next[3]; for(PxU16 i=0; i<3; ++i) { PxU16 j = PxU16((i+1)%3); PxU16 h = findHalfedge(verts[i], verts[j]); if(h == sInvalid) { // add new edge h = Ps::to16(mHalfedges.size()); mHalfedges.pushBack(Halfedge(verts[j])); mHalfedges.pushBack(Halfedge(verts[i])); } handles[i] = h; prev[i] = mHalfedges[h].mPrev; next[i] = mHalfedges[h].mNext; } // patch connectivity for(PxU16 i=0; i<3; ++i) { PxU16 j = PxU16((i+1)%3); mHalfedges[handles[i]].mFace = Ps::to16(mFaces.size()); // connect prev and next connect(handles[i], handles[j]); if(next[j] == sInvalid) // new next edge, connect opposite connect(PxU16(handles[j]^1), next[i]!=sInvalid ? next[i] : PxU16(handles[i]^1)); if(prev[i] == sInvalid) // new prev edge, connect opposite connect(prev[j]!=sInvalid ? 
prev[j] : PxU16(handles[j]^1), PxU16(handles[i]^1)); // prev is boundary, update middle vertex if(mHalfedges[PxU32(handles[i]^1)].mFace == sInvalid) mVertices[verts[j]] = PxU16(handles[i]^1); } mFaces.pushBack(handles[2]); } PxU16 removeTriangle(PxU16 f) { PxU16 result = sInvalid; for(PxU16 i=0, h = mFaces[f]; i<3; ++i) { PxU16 v0 = mHalfedges[PxU32(h^1)].mVertex; PxU16 v1 = mHalfedges[PxU32(h)].mVertex; mHalfedges[h].mFace = sInvalid; if(mHalfedges[PxU32(h^1)].mFace == sInvalid) // was boundary edge, remove { // update halfedge connectivity connect(mHalfedges[h].mPrev, mHalfedges[PxU32(h^1)].mNext); connect(mHalfedges[PxU32(h^1)].mPrev, mHalfedges[h].mNext); // update vertex boundary or delete mVertices[v0] = mVertices[v0] == h ? sInvalid : mHalfedges[PxU32(h^1)].mNext; mVertices[v1] = mVertices[v1] == (h^1) ? sInvalid : mHalfedges[h].mNext; } else { mVertices[v0] = h; // update vertex boundary result = v1; } h = mHalfedges[h].mNext; } mFaces[f] = sInvalid; return result; } // true if vertex v is in front of face f bool visible(PxU16 v, PxU16 f) { PxU16 h = mFaces[f]; if(h == sInvalid) return false; PxU16 v0 = mHalfedges[h].mVertex; h = mHalfedges[h].mNext; PxU16 v1 = mHalfedges[h].mVertex; h = mHalfedges[h].mNext; PxU16 v2 = mHalfedges[h].mVertex; h = mHalfedges[h].mNext; return det(mPoints[v], mPoints[v0], mPoints[v1], mPoints[v2]) < 0.0f; } shdfnd::Array<Halfedge> mHalfedges; shdfnd::Array<PxU16> mVertices; // vertex -> (boundary) halfedge shdfnd::Array<PxU16> mFaces; // face -> halfedge shdfnd::Array<PxVec4> mPoints; }; } struct ConvexMeshBuilder { ConvexMeshBuilder(const PxVec4* planes) : mPlanes(planes) {} void operator()(PxU32 mask, float scale=1.0f); const PxVec4* mPlanes; shdfnd::Array<PxVec3> mVertices; shdfnd::Array<PxU16> mIndices; }; void ConvexMeshBuilder::operator()(PxU32 planeMask, float scale) { PxU16 numPlanes = Ps::to16(shdfnd::bitCount(planeMask)); if (numPlanes == 1) { PxTransform t = PxTransformFromPlaneEquation(reinterpret_cast<const 
PxPlane&>(mPlanes[lowestSetBit(planeMask)])); if (!t.isValid()) return; const PxU16 indices[] = { 0, 1, 2, 0, 2, 3 }; const PxVec3 vertices[] = { PxVec3(0.0f, scale, scale), PxVec3(0.0f, -scale, scale), PxVec3(0.0f, -scale, -scale), PxVec3(0.0f, scale, -scale) }; PxU16 baseIndex = Ps::to16(mVertices.size()); for (PxU32 i=0; i < 4; ++i) mVertices.pushBack(t.transform(vertices[i])); for (PxU32 i=0; i < 6; ++i) mIndices.pushBack(PxU16(indices[i] + baseIndex)); return; } if(numPlanes < 4) return; // todo: handle degenerate cases HalfedgeMesh mesh; // gather points (planes, that is) mesh.mPoints.reserve(numPlanes); for(; planeMask; planeMask &= planeMask-1) mesh.mPoints.pushBack(mPlanes[shdfnd::lowestSetBit(planeMask)]); // initialize to tetrahedron mesh.addTriangle(0, 1, 2); mesh.addTriangle(0, 3, 1); mesh.addTriangle(1, 3, 2); mesh.addTriangle(2, 3, 0); // flip if inside-out if(mesh.visible(3, 0)) shdfnd::swap(mesh.mPoints[0], mesh.mPoints[1]); // iterate through remaining points for(PxU16 i=4; i<mesh.mPoints.size(); ++i) { // remove any visible triangle PxU16 v0 = sInvalid; for(PxU16 j=0; j<mesh.mFaces.size(); ++j) { if(mesh.visible(i, j)) v0 = PxMin(v0, mesh.removeTriangle(j)); } if(v0 == sInvalid) continue; // tesselate hole PxU16 start = v0; do { PxU16 h = mesh.mVertices[v0]; PxU16 v1 = mesh.mHalfedges[h].mVertex; mesh.addTriangle(v0, v1, i); v0 = v1; } while(v0 != start); } // convert triangles to vertices (intersection of 3 planes) shdfnd::Array<PxU32> face2Vertex(mesh.mFaces.size()); for(PxU32 i=0; i<mesh.mFaces.size(); ++i) { face2Vertex[i] = mVertices.size(); PxU16 h = mesh.mFaces[i]; if(h == sInvalid) continue; PxU16 v0 = mesh.mHalfedges[h].mVertex; h = mesh.mHalfedges[h].mNext; PxU16 v1 = mesh.mHalfedges[h].mVertex; h = mesh.mHalfedges[h].mNext; PxU16 v2 = mesh.mHalfedges[h].mVertex; mVertices.pushBack(intersect(mesh.mPoints[v0], mesh.mPoints[v1], mesh.mPoints[v2])); } // convert vertices to polygons (face one-ring) for(PxU32 i=0; i<mesh.mVertices.size(); 
++i) { PxU16 h = mesh.mVertices[i]; if(h == sInvalid) continue; PxU16 v0 = Ps::to16(face2Vertex[mesh.mHalfedges[h].mFace]); h = PxU16(mesh.mHalfedges[h].mPrev^1); PxU16 v1 = Ps::to16(face2Vertex[mesh.mHalfedges[h].mFace]); while(true) { h = PxU16(mesh.mHalfedges[h].mPrev^1); PxU16 v2 = Ps::to16(face2Vertex[mesh.mHalfedges[h].mFace]); if(v0 == v2) break; mIndices.pushBack(v0); mIndices.pushBack(v2); mIndices.pushBack(v1); v1 = v2; } } } void PvdMetaDataBinding::sendCollisionTriangles( PvdDataStream& inStream, const PxCloth& cloth ) { PxU32 numSpheres = cloth.getNbCollisionSpheres(); PxU32 numIndices = 2*cloth.getNbCollisionCapsules(); PxU32 numPlanes = cloth.getNbCollisionPlanes(); PxU32 numConvexes = cloth.getNbCollisionConvexes(); PxU32 numTriangles = cloth.getNbCollisionTriangles(); PxU32 sphereBytes = numSpheres * sizeof( PxClothCollisionSphere ); PxU32 pairBytes = numIndices * sizeof( PxU32 ); PxU32 planesBytes = numPlanes * sizeof(PxClothCollisionPlane); PxU32 convexBytes = numConvexes * sizeof(PxU32); PxU32 triangleBytes = numTriangles * sizeof(PxClothCollisionTriangle); mBindingData->mTempU8Array.resize( sphereBytes + pairBytes + planesBytes + convexBytes + triangleBytes); PxU8* bufferStart = mBindingData->mTempU8Array.begin(); PxClothCollisionSphere* spheresBuffer = reinterpret_cast<PxClothCollisionSphere*>( mBindingData->mTempU8Array.begin() ); PxU32* indexBuffer = reinterpret_cast<PxU32*>(spheresBuffer + numSpheres); PxClothCollisionPlane* planeBuffer = reinterpret_cast<PxClothCollisionPlane*>(indexBuffer + numIndices); PxU32* convexBuffer = reinterpret_cast<PxU32*>(planeBuffer + numPlanes); PxClothCollisionTriangle* trianglesBuffer = reinterpret_cast<PxClothCollisionTriangle*>(convexBuffer + numConvexes); cloth.getCollisionData( spheresBuffer, indexBuffer, planeBuffer, convexBuffer, trianglesBuffer ); inStream.setPropertyValue( &cloth, "CollisionPlanes", DataRef<const PxU8>(bufferStart + sphereBytes + pairBytes, planesBytes), 
getPvdNamespacedNameForType<PvdPositionAndRadius>() ); inStream.setPropertyValue( &cloth, "CollisionConvexMasks", DataRef<const PxU8>(bufferStart + sphereBytes + pairBytes + planesBytes, convexBytes), getPvdNamespacedNameForType<PxU32>() ); inStream.setPropertyValue( &cloth, "CollisionTriangles", DataRef<const PxU8>(bufferStart + sphereBytes + pairBytes + planesBytes + convexBytes, triangleBytes), getPvdNamespacedNameForType<PxVec3>() ); } void PvdMetaDataBinding::sendVirtualParticles( PvdDataStream& inStream, const PxCloth& cloth ) { PxU32 numParticles = cloth.getNbVirtualParticles(); PxU32 numWeights = cloth.getNbVirtualParticleWeights(); PxU32 numIndexes = numParticles * 4; PxU32 numIndexBytes = numIndexes * sizeof( PxU32 ); PxU32 numWeightBytes = numWeights * sizeof( PxVec3 ); mBindingData->mTempU8Array.resize( PxMax( numIndexBytes, numWeightBytes ) ); PxU8* dataStart = mBindingData->mTempU8Array.begin(); PxU32* indexStart = reinterpret_cast<PxU32*>( dataStart ); if (numIndexes) cloth.getVirtualParticles( indexStart ); inStream.setPropertyValue( &cloth, "VirtualParticles", DataRef<const PxU8>( dataStart, numIndexBytes ), getPvdNamespacedNameForType<PxU32>() ); PxVec3* weightStart = reinterpret_cast<PxVec3*>( dataStart ); if (numWeights) cloth.getVirtualParticleWeights( weightStart ); inStream.setPropertyValue( &cloth, "VirtualParticleWeights", DataRef<const PxU8>( dataStart, numWeightBytes ), getPvdNamespacedNameForType<PxVec3>() ); } void PvdMetaDataBinding::sendSeparationConstraints( PvdDataStream& inStream, const PxCloth& cloth ) { PxU32 count = cloth.getNbSeparationConstraints(); PxU32 byteSize = count * sizeof(PxClothParticleSeparationConstraint); mBindingData->mTempU8Array.resize( byteSize ); if ( count ) cloth.getSeparationConstraints( reinterpret_cast<PxClothParticleSeparationConstraint*>( mBindingData->mTempU8Array.begin() ) ); inStream.setPropertyValue( &cloth, "SeparationConstraints", mBindingData->tempToRef(), 
getPvdNamespacedNameForType<PvdPositionAndRadius>() ); } #endif // PX_USE_CLOTH_API //per frame update #if PX_USE_CLOTH_API void PvdMetaDataBinding::updateCloths( PvdDataStream& inStream, const PxScene& inScene ) { PxU32 actorCount = inScene.getNbActors( PxActorTypeFlag::eCLOTH ); if ( actorCount == 0 ) return; mBindingData->mActors.resize( actorCount ); PxActor** theActors = mBindingData->mActors.begin(); inScene.getActors( PxActorTypeFlag::eCLOTH, theActors, actorCount ); PX_COMPILE_TIME_ASSERT( sizeof( PxClothParticle ) == sizeof( PxVec3 ) + sizeof( PxF32 ) ); for ( PxU32 idx =0; idx < actorCount; ++idx ) { PxCloth* theCloth = static_cast<PxCloth*>( theActors[idx] ); bool isSleeping = theCloth->isSleeping(); bool wasSleeping = mBindingData->mSleepingActors.contains( theCloth ); if ( isSleeping == false || isSleeping != wasSleeping ) { PxClothParticleData* theData = theCloth->lockParticleData(); if ( theData != NULL ) { PxU32 numBytes = sizeof( PxClothParticle ) * theCloth->getNbParticles(); inStream.setPropertyValue( theCloth, "ParticleBuffer", DataRef<const PxU8>( reinterpret_cast<const PxU8*>( theData->particles ), numBytes ), getPvdNamespacedNameForType<PxClothParticle>() ); theData->unlock(); } } if ( isSleeping != wasSleeping ) { inStream.setPropertyValue( theCloth, "IsSleeping", isSleeping ); if ( isSleeping ) mBindingData->mSleepingActors.insert( theActors[idx] ); else mBindingData->mSleepingActors.erase( theActors[idx] ); } } } void PvdMetaDataBinding::destroyInstance( PvdDataStream& inStream, const PxCloth& cloth, const PxScene& ownerScene ) { PxClothFabric* fabric = cloth.getFabric(); if ( fabric ) inStream.removeObjectRef( fabric, "Cloths", &cloth ); removeSceneGroupProperty( inStream, "Cloths", cloth, ownerScene ); } #endif // PX_USE_CLOTH_API #define ENABLE_AGGREGATE_PVD_SUPPORT 1 #ifdef ENABLE_AGGREGATE_PVD_SUPPORT void PvdMetaDataBinding::createInstance( PvdDataStream& inStream, const PxAggregate& inObj, const PxScene& ownerScene, BufferRegistrar& 
) { addSceneGroupProperty( inStream, "Aggregates", inObj, ownerScene ); sendAllProperties( inStream, inObj ); } void PvdMetaDataBinding::sendAllProperties( PvdDataStream& inStream, const PxAggregate& inObj ) { PxAggregateGeneratedValues values( &inObj ); inStream.setPropertyMessage( &inObj, values ); } void PvdMetaDataBinding::destroyInstance( PvdDataStream& inStream, const PxAggregate& inObj, const PxScene& ownerScene ) { removeSceneGroupProperty( inStream, "Aggregates", inObj, ownerScene ); } template<bool bPushBack> class ChangeOjectRefCmd : public PvdDataStream::PvdCommand { ChangeOjectRefCmd &operator=(const ChangeOjectRefCmd&) { PX_ASSERT(0); return *this; } //PX_NOCOPY doesn't work for local classes public: const void* instance; String propName; const void* propObj; ChangeOjectRefCmd(const void* inInst, String inName, const void* inObj):instance(inInst), propName(inName), propObj(inObj) { } //Assigned is needed for copying ChangeOjectRefCmd(const ChangeOjectRefCmd& other) :instance(other.instance), propName(other.propName), propObj(other.propObj) { } virtual bool canRun(PvdInstanceDataStream &inStream ) { PX_ASSERT(inStream.isInstanceValid(instance)); return inStream.isInstanceValid(propObj); } virtual void run( PvdInstanceDataStream &inStream ) { if(!inStream.isInstanceValid(instance)) return; if(bPushBack) { if(inStream.isInstanceValid(propObj)) inStream.pushBackObjectRef( instance, propName, propObj ); } else { //the called function will assert if propobj is already removed inStream.removeObjectRef( instance, propName, propObj ); } } }; template<class Command> void changeAggregateSubActors( PvdDataStream& inStream, const PxAggregate& inObj, const PxActor& inActor ) { const PxArticulationLink* link = inActor.isArticulationLink(); String propName = NULL; const void* object = NULL; if( link == NULL ) { propName = "Actors"; object = &inActor; } else if( link->getInboundJoint() == NULL) { propName = "Articulations"; object = &link->getArticulation(); } else 
return; Command* cmd = PX_PLACEMENT_NEW(inStream.allocateMemForCmd(sizeof(Command)), Command)( &inObj, propName, object); if(cmd->canRun( inStream )) cmd->run( inStream ); else inStream.pushPvdCommand( *cmd ); } void PvdMetaDataBinding::detachAggregateActor( PvdDataStream& inStream, const PxAggregate& inObj, const PxActor& inActor ) { typedef ChangeOjectRefCmd<false> RemoveOjectRefCmd; changeAggregateSubActors< RemoveOjectRefCmd >(inStream, inObj, inActor); } void PvdMetaDataBinding::attachAggregateActor( PvdDataStream& inStream, const PxAggregate& inObj, const PxActor& inActor ) { typedef ChangeOjectRefCmd<true> PushbackOjectRefCmd; changeAggregateSubActors< PushbackOjectRefCmd >(inStream, inObj, inActor); } #else void PvdMetaDataBinding::createInstance( PvdDataStream&, const PxAggregate&, const PxScene&, BufferRegistrar& ) {} void PvdMetaDataBinding::sendAllProperties( PvdDataStream&, const PxAggregate& ) {} void PvdMetaDataBinding::destroyInstance( PvdDataStream&, const PxAggregate&, const PxScene& ) {} void PvdMetaDataBinding::detachAggregateActor( PvdDataStream&, const PxAggregate&, const PxActor& ) {} void PvdMetaDataBinding::attachAggregateActor( PvdDataStream&, const PxAggregate&, const PxActor& ) {} #endif template <typename TDataType> void sendSceneArray( PvdDataStream& inStream, const PxScene& inScene, const Ps::Array<TDataType>& inArray, const char* propName ) { if ( 0 == inArray.size() ) inStream.setPropertyValue( &inScene, propName, DataRef<const PxU8>(), getPvdNamespacedNameForType<TDataType>() ); else { ScopedPropertyValueSender<TDataType, 32> sender( inStream, &inScene, propName ); for ( PxU32 i = 0; i < inArray.size(); ++i ) sender.append( inArray[i] ); } } void sendSceneArray( PvdDataStream& inStream, const PxScene& inScene, const Ps::Array<PvdSqHit>& inArray, const char* propName ) { if ( 0 == inArray.size() ) inStream.setPropertyValue( &inScene, propName, DataRef<const PxU8>(), getPvdNamespacedNameForType<PvdSqHit>() ); else { 
ScopedPropertyValueSender<PvdSqHit, 32> sender( inStream, &inScene, propName ); for ( PxU32 i = 0; i < inArray.size(); ++i ) { if(!inStream.isInstanceValid(inArray[i].shape) || !inStream.isInstanceValid(inArray[i].actor)) { PvdSqHit hit = inArray[i]; hit.shape = NULL; hit.actor = NULL; sender.append( hit ); } else sender.append( inArray[i] ); } } } void PvdMetaDataBinding::sendSceneQueries( PvdDataStream& inStream, const PxScene& inScene, bool hasValue ) { if(!hasValue) return; const physx::NpScene& scene = static_cast<const NpScene&>(inScene); VisualDebugger& sdkPvd = *static_cast<VisualDebugger*>(NpPhysics::getInstance().getVisualDebugger()); for( PxU32 i = 0; i < 2; i++ ) { PvdSceneQueryCollector& collector = ((i==0)? scene.getSingleSqCollector():scene.getBatchedSqCollector()); Ps::Mutex::ScopedLock lock(collector.getLock()); String propName = collector.getArrayName(collector.mPvdSqHits); sendSceneArray( inStream, inScene, collector.mPvdSqHits, propName ); propName = collector.getArrayName(collector.mPoses); sendSceneArray( inStream, inScene, collector.mPoses, propName ); propName = collector.getArrayName(collector.mFilterData); sendSceneArray( inStream, inScene, collector.mFilterData, propName ); const Ps::Array<PxGeometryHolder>& geometriesToDestroy = collector.getPrevFrameGeometries(); propName = collector.getArrayName(geometriesToDestroy); for (PxU32 k = 0; k < geometriesToDestroy.size(); ++k) { const PxGeometryHolder& inObj = geometriesToDestroy[k]; inStream.removeObjectRef( &inScene, propName, &inObj ); inStream.destroyInstance( &inObj ); } const Ps::Array<PxGeometryHolder>& geometriesToCreate = collector.getCurrentFrameGeometries(); for (PxU32 k = 0; k < geometriesToCreate.size(); ++k) { const PxGeometry& geometry = geometriesToCreate[k].any(); switch ( geometry.getType() ) { #define SEND_PVD_GEOM_TYPE( enumType, TGeomType, TValueType ) \ case enumType: \ { \ const TGeomType& inObj = static_cast<const TGeomType&>( geometry ); \ inStream.createInstance( 
getPvdNamespacedNameForType<TGeomType>(), &inObj ); \ GeometryBufferRegisterOp<TGeomType>().registerBuffers( inObj, sdkPvd ); \ TValueType values( &inObj ); \ inStream.setPropertyMessage( &inObj, values ); \ inStream.pushBackObjectRef( &inScene, propName, &inObj ); \ } \ break; SEND_PVD_GEOM_TYPE( PxGeometryType::eBOX, PxBoxGeometry, PxBoxGeometryGeneratedValues ) SEND_PVD_GEOM_TYPE( PxGeometryType::eSPHERE, PxSphereGeometry, PxSphereGeometryGeneratedValues ) SEND_PVD_GEOM_TYPE( PxGeometryType::eCAPSULE, PxCapsuleGeometry, PxCapsuleGeometryGeneratedValues ) SEND_PVD_GEOM_TYPE( PxGeometryType::eCONVEXMESH, PxConvexMeshGeometry, PxConvexMeshGeometryGeneratedValues ) #undef SEND_PVD_GEOM_TYPE case PxGeometryType::ePLANE: case PxGeometryType::eTRIANGLEMESH: case PxGeometryType::eHEIGHTFIELD: case PxGeometryType::eGEOMETRY_COUNT: case PxGeometryType::eINVALID: default: PX_ALWAYS_ASSERT_MESSAGE( "unsupported scene query geometry type" ); break; } } collector.prepareNextFrameGeometries(); propName = collector.getArrayName(collector.mAccumulatedRaycastQueries); sendSceneArray( inStream, inScene, collector.mAccumulatedRaycastQueries, propName ); propName = collector.getArrayName(collector.mAccumulatedOverlapQueries); sendSceneArray( inStream, inScene, collector.mAccumulatedOverlapQueries, propName ); propName = collector.getArrayName(collector.mAccumulatedSweepQueries); sendSceneArray( inStream, inScene, collector.mAccumulatedSweepQueries, propName ); } } }} #endif
nettooe/aweip
aweip-base/src/com/aweip/entity/ComentArquivoIdeiaEntity.java
<reponame>nettooe/aweip package com.aweip.entity; import javax.persistence.Access; import javax.persistence.AccessType; import javax.persistence.Entity; import javax.persistence.NamedQueries; import javax.persistence.NamedQuery; import javax.persistence.Table; /** * The Class ComentArquivoIdeiaEntity. */ @Entity @Table(name = "comentarquivoideia") @Access(AccessType.FIELD) @NamedQueries(@NamedQuery(name = ComentArquivoIdeiaEntity.findByArquivoIdeia, query = "Select obj" + " from ComentArquivoIdeiaEntity obj" + " JOIN obj.arquivoIdeia" + " JOIN FETCH obj.comentario obj2" + " JOIN FETCH obj2.usuario obj3" + " where obj.arquivoIdeia = :arquivoIdeia" + " AND obj.dataExclusao IS NULL")) public class ComentArquivoIdeiaEntity extends ComentArquivoIdeia { /** The Constant serialVersionUID. */ private static final long serialVersionUID = 1L; /** The Constant findByArquivoIdeia. */ public static final String findByArquivoIdeia = "ComentArquivoIdeia.findByArquivoIdeia"; }
lehaSVV2009/territories-frontend
src/PlayersControls/index.js
<reponame>lehaSVV2009/territories-frontend import React from "react"; import intl from "react-intl-universal"; import * as GameUtils from "territories-core"; import Button from "../libs/territories-ui/Button"; import { Container, Item } from "../libs/territories-ui/Grid"; import DicesWindow from "../DicesWindow"; import IconButton from "../libs/territories-ui/IconButton"; import Player from "../Player"; import Rectangle from "../libs/territories-ui/Rectangle"; import RotateIcon from "../libs/territories-icons/Rotate"; import Tooltip from "../libs/territories-ui/Tooltip"; import { FixedSizePaper, FullHeightContainer } from "./elements"; const PlayersControls = ({ cellRadius, currentPlayer, dices, rollingDices, allCellsCount, occupiedCounters, readOnly, onStartRollDices, onFinishRollDices, onRotateRectangle, onSkipTurn }) => ( <Container> <Item> <Player player={GameUtils.PLAYER_1} isCurrent={GameUtils.isPlayer1(currentPlayer)} allCellsCount={allCellsCount} playerCellsCount={occupiedCounters[GameUtils.PLAYER_1]} readOnly={readOnly} onSkipTurn={onSkipTurn} /> </Item> <Item center> <div> <FixedSizePaper height={`${cellRadius * 25}px`} width={`${cellRadius * 25}px`} > {dices && dices[0] !== 0 ? ( <FullHeightContainer column center alignItems="center"> <Item> <Tooltip title="Rotate rectangle"> <IconButton disabled={readOnly} size="small" onClick={onRotateRectangle} > <RotateIcon /> </IconButton> </Tooltip> </Item> <Item center> <Rectangle rows={Array(dices[0]).fill(Array(dices[1]).fill())} cellRadius={cellRadius} /> </Item> </FullHeightContainer> ) : ( <FullHeightContainer column center alignItems="center"> <Button disabled={readOnly} color={ GameUtils.isPlayer1(currentPlayer) ? 
"primary" : "secondary" } size="large" variant="contained" onClick={onStartRollDices} > {intl.get("player_controls.roll_dices")} </Button> </FullHeightContainer> )} </FixedSizePaper> <DicesWindow rollingDices={rollingDices} onFinishRoll={onFinishRollDices} /> </div> </Item> <Item> <Player player={GameUtils.PLAYER_2} isCurrent={GameUtils.isPlayer2(currentPlayer)} allCellsCount={allCellsCount} playerCellsCount={occupiedCounters[GameUtils.PLAYER_2]} readOnly={readOnly} onSkipTurn={onSkipTurn} /> </Item> </Container> ); export default PlayersControls;
Shashi-rk/azure-sdk-for-java
sdk/security/azure-resourcemanager-security/src/main/java/com/azure/resourcemanager/security/implementation/IotAlertsClientImpl.java
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

// NOTE(review): this file is AutoRest-generated; the class continues beyond
// this chunk of the file. Comments below only clarify non-obvious details.

package com.azure.resourcemanager.security.implementation;

import com.azure.core.annotation.ExpectedResponses;
import com.azure.core.annotation.Get;
import com.azure.core.annotation.HeaderParam;
import com.azure.core.annotation.Headers;
import com.azure.core.annotation.Host;
import com.azure.core.annotation.HostParam;
import com.azure.core.annotation.PathParam;
import com.azure.core.annotation.QueryParam;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceInterface;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.annotation.UnexpectedResponseExceptionType;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.PagedResponse;
import com.azure.core.http.rest.PagedResponseBase;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.RestProxy;
import com.azure.core.management.exception.ManagementException;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.security.fluent.IotAlertsClient;
import com.azure.resourcemanager.security.fluent.models.IotAlertModelInner;
import com.azure.resourcemanager.security.models.IotAlertListModel;
import com.azure.resourcemanager.security.models.ManagementState;
import reactor.core.publisher.Mono;

/** An instance of this class provides access to all the operations defined in IotAlertsClient. */
public final class IotAlertsClientImpl implements IotAlertsClient {
    private final ClientLogger logger = new ClientLogger(IotAlertsClientImpl.class);

    /** The proxy service used to perform REST calls. */
    private final IotAlertsService service;

    /** The service client containing this operation class. */
    private final SecurityCenterImpl client;

    /**
     * Initializes an instance of IotAlertsClientImpl.
     *
     * @param client the instance of the service client containing this operation class.
     */
    IotAlertsClientImpl(SecurityCenterImpl client) {
        this.service =
            RestProxy.create(IotAlertsService.class, client.getHttpPipeline(), client.getSerializerAdapter());
        this.client = client;
    }

    /**
     * The interface defining all the services for SecurityCenterIotAlerts to be used by the proxy service to perform
     * REST calls.
     */
    @Host("{$host}")
    @ServiceInterface(name = "SecurityCenterIotAle")
    private interface IotAlertsService {
        @Headers({"Content-Type: application/json"})
        @Get("/{scope}/providers/Microsoft.Security/iotAlerts")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<IotAlertListModel>> list(
            @HostParam("$host") String endpoint,
            @QueryParam("api-version") String apiVersion,
            @PathParam(value = "scope", encoded = true) String scope,
            // The '>' / '<' suffixes are part of the wire-level parameter names:
            // they encode "greater/less than" range filters on startTimeUtc.
            @QueryParam("startTimeUtc>") String minStartTimeUtc,
            @QueryParam("startTimeUtc<") String maxStartTimeUtc,
            @QueryParam("alertType") String alertType,
            @QueryParam("deviceManagementType") ManagementState deviceManagementType,
            @QueryParam("compromisedEntity") String compromisedEntity,
            @QueryParam("$limit") Integer limit,
            @QueryParam("$skipToken") String skipToken,
            @HeaderParam("Accept") String accept,
            Context context);

        @Headers({"Content-Type: application/json"})
        @Get("/{scope}/providers/Microsoft.Security/iotAlerts/{iotAlertId}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<IotAlertModelInner>> get(
            @HostParam("$host") String endpoint,
            @QueryParam("api-version") String apiVersion,
            @PathParam(value = "scope", encoded = true) String scope,
            @PathParam("iotAlertId") String iotAlertId,
            @HeaderParam("Accept") String accept,
            Context context);

        @Headers({"Content-Type: application/json"})
        @Get("{nextLink}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<IotAlertListModel>> listNext(
            @PathParam(value = "nextLink", encoded = true) String nextLink,
            @HostParam("$host") String endpoint,
            @HeaderParam("Accept") String accept,
            Context context);
    }

    /**
     * List IoT alerts.
     *
     * @param scope Scope of the query: Subscription (i.e. /subscriptions/{subscriptionId}) or IoT Hub (i.e.
     *     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroup}/providers/Microsoft.Devices/iotHubs/{iotHubName}).
     * @param minStartTimeUtc Filter by minimum startTimeUtc (ISO 8601 format).
     * @param maxStartTimeUtc Filter by maximum startTimeUtc (ISO 8601 format).
     * @param alertType Filter by alert type.
     * @param deviceManagementType Get devices only from specific type, Managed or Unmanaged.
     * @param compromisedEntity Filter by compromised device.
     * @param limit Limit the number of items returned in a single page.
     * @param skipToken Skip token used for pagination.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return list of IoT alerts.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<IotAlertModelInner>> listSinglePageAsync(
        String scope,
        String minStartTimeUtc,
        String maxStartTimeUtc,
        String alertType,
        ManagementState deviceManagementType,
        String compromisedEntity,
        Integer limit,
        String skipToken) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (scope == null) {
            return Mono.error(new IllegalArgumentException("Parameter scope is required and cannot be null."));
        }
        final String apiVersion = "2020-08-06-preview";
        final String accept = "application/json";
        return FluxUtil
            .withContext(
                context ->
                    service
                        .list(
                            this.client.getEndpoint(),
                            apiVersion,
                            scope,
                            minStartTimeUtc,
                            maxStartTimeUtc,
                            alertType,
                            deviceManagementType,
                            compromisedEntity,
                            limit,
                            skipToken,
                            accept,
                            context))
            // Adapt the raw list response into a PagedResponse (value + nextLink).
            .<PagedResponse<IotAlertModelInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * List IoT alerts.
     *
     * @param scope Scope of the query: Subscription (i.e. /subscriptions/{subscriptionId}) or IoT Hub (i.e.
     *     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroup}/providers/Microsoft.Devices/iotHubs/{iotHubName}).
     * @param minStartTimeUtc Filter by minimum startTimeUtc (ISO 8601 format).
     * @param maxStartTimeUtc Filter by maximum startTimeUtc (ISO 8601 format).
     * @param alertType Filter by alert type.
     * @param deviceManagementType Get devices only from specific type, Managed or Unmanaged.
     * @param compromisedEntity Filter by compromised device.
     * @param limit Limit the number of items returned in a single page.
     * @param skipToken Skip token used for pagination.
     * @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return list of IoT alerts. */ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<PagedResponse<IotAlertModelInner>> listSinglePageAsync( String scope, String minStartTimeUtc, String maxStartTimeUtc, String alertType, ManagementState deviceManagementType, String compromisedEntity, Integer limit, String skipToken, Context context) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } if (scope == null) { return Mono.error(new IllegalArgumentException("Parameter scope is required and cannot be null.")); } final String apiVersion = "2020-08-06-preview"; final String accept = "application/json"; context = this.client.mergeContext(context); return service .list( this.client.getEndpoint(), apiVersion, scope, minStartTimeUtc, maxStartTimeUtc, alertType, deviceManagementType, compromisedEntity, limit, skipToken, accept, context) .map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)); } /** * List IoT alerts. * * @param scope Scope of the query: Subscription (i.e. /subscriptions/{subscriptionId}) or IoT Hub (i.e. * /subscriptions/{subscriptionId}/resourceGroups/{resourceGroup}/providers/Microsoft.Devices/iotHubs/{iotHubName}). * @param minStartTimeUtc Filter by minimum startTimeUtc (ISO 8601 format). * @param maxStartTimeUtc Filter by maximum startTimeUtc (ISO 8601 format). * @param alertType Filter by alert type. * @param deviceManagementType Get devices only from specific type, Managed or Unmanaged. * @param compromisedEntity Filter by compromised device. 
* @param limit Limit the number of items returned in a single page. * @param skipToken Skip token used for pagination. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return list of IoT alerts. */ @ServiceMethod(returns = ReturnType.COLLECTION) private PagedFlux<IotAlertModelInner> listAsync( String scope, String minStartTimeUtc, String maxStartTimeUtc, String alertType, ManagementState deviceManagementType, String compromisedEntity, Integer limit, String skipToken) { return new PagedFlux<>( () -> listSinglePageAsync( scope, minStartTimeUtc, maxStartTimeUtc, alertType, deviceManagementType, compromisedEntity, limit, skipToken), nextLink -> listNextSinglePageAsync(nextLink)); } /** * List IoT alerts. * * @param scope Scope of the query: Subscription (i.e. /subscriptions/{subscriptionId}) or IoT Hub (i.e. * /subscriptions/{subscriptionId}/resourceGroups/{resourceGroup}/providers/Microsoft.Devices/iotHubs/{iotHubName}). * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return list of IoT alerts. 
*/ @ServiceMethod(returns = ReturnType.COLLECTION) private PagedFlux<IotAlertModelInner> listAsync(String scope) { final String minStartTimeUtc = null; final String maxStartTimeUtc = null; final String alertType = null; final ManagementState deviceManagementType = null; final String compromisedEntity = null; final Integer limit = null; final String skipToken = null; return new PagedFlux<>( () -> listSinglePageAsync( scope, minStartTimeUtc, maxStartTimeUtc, alertType, deviceManagementType, compromisedEntity, limit, skipToken), nextLink -> listNextSinglePageAsync(nextLink)); } /** * List IoT alerts. * * @param scope Scope of the query: Subscription (i.e. /subscriptions/{subscriptionId}) or IoT Hub (i.e. * /subscriptions/{subscriptionId}/resourceGroups/{resourceGroup}/providers/Microsoft.Devices/iotHubs/{iotHubName}). * @param minStartTimeUtc Filter by minimum startTimeUtc (ISO 8601 format). * @param maxStartTimeUtc Filter by maximum startTimeUtc (ISO 8601 format). * @param alertType Filter by alert type. * @param deviceManagementType Get devices only from specific type, Managed or Unmanaged. * @param compromisedEntity Filter by compromised device. * @param limit Limit the number of items returned in a single page. * @param skipToken Skip token used for pagination. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return list of IoT alerts. 
*/ @ServiceMethod(returns = ReturnType.COLLECTION) private PagedFlux<IotAlertModelInner> listAsync( String scope, String minStartTimeUtc, String maxStartTimeUtc, String alertType, ManagementState deviceManagementType, String compromisedEntity, Integer limit, String skipToken, Context context) { return new PagedFlux<>( () -> listSinglePageAsync( scope, minStartTimeUtc, maxStartTimeUtc, alertType, deviceManagementType, compromisedEntity, limit, skipToken, context), nextLink -> listNextSinglePageAsync(nextLink, context)); } /** * List IoT alerts. * * @param scope Scope of the query: Subscription (i.e. /subscriptions/{subscriptionId}) or IoT Hub (i.e. * /subscriptions/{subscriptionId}/resourceGroups/{resourceGroup}/providers/Microsoft.Devices/iotHubs/{iotHubName}). * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return list of IoT alerts. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<IotAlertModelInner> list(String scope) { final String minStartTimeUtc = null; final String maxStartTimeUtc = null; final String alertType = null; final ManagementState deviceManagementType = null; final String compromisedEntity = null; final Integer limit = null; final String skipToken = null; return new PagedIterable<>( listAsync( scope, minStartTimeUtc, maxStartTimeUtc, alertType, deviceManagementType, compromisedEntity, limit, skipToken)); } /** * List IoT alerts. * * @param scope Scope of the query: Subscription (i.e. /subscriptions/{subscriptionId}) or IoT Hub (i.e. * /subscriptions/{subscriptionId}/resourceGroups/{resourceGroup}/providers/Microsoft.Devices/iotHubs/{iotHubName}). * @param minStartTimeUtc Filter by minimum startTimeUtc (ISO 8601 format). * @param maxStartTimeUtc Filter by maximum startTimeUtc (ISO 8601 format). 
* @param alertType Filter by alert type. * @param deviceManagementType Get devices only from specific type, Managed or Unmanaged. * @param compromisedEntity Filter by compromised device. * @param limit Limit the number of items returned in a single page. * @param skipToken Skip token used for pagination. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return list of IoT alerts. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<IotAlertModelInner> list( String scope, String minStartTimeUtc, String maxStartTimeUtc, String alertType, ManagementState deviceManagementType, String compromisedEntity, Integer limit, String skipToken, Context context) { return new PagedIterable<>( listAsync( scope, minStartTimeUtc, maxStartTimeUtc, alertType, deviceManagementType, compromisedEntity, limit, skipToken, context)); } /** * Get IoT alert. * * @param scope Scope of the query: Subscription (i.e. /subscriptions/{subscriptionId}) or IoT Hub (i.e. * /subscriptions/{subscriptionId}/resourceGroups/{resourceGroup}/providers/Microsoft.Devices/iotHubs/{iotHubName}). * @param iotAlertId Id of the alert. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return ioT alert. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<Response<IotAlertModelInner>> getWithResponseAsync(String scope, String iotAlertId) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } if (scope == null) { return Mono.error(new IllegalArgumentException("Parameter scope is required and cannot be null.")); } if (iotAlertId == null) { return Mono.error(new IllegalArgumentException("Parameter iotAlertId is required and cannot be null.")); } final String apiVersion = "2020-08-06-preview"; final String accept = "application/json"; return FluxUtil .withContext( context -> service.get(this.client.getEndpoint(), apiVersion, scope, iotAlertId, accept, context)) .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); } /** * Get IoT alert. * * @param scope Scope of the query: Subscription (i.e. /subscriptions/{subscriptionId}) or IoT Hub (i.e. * /subscriptions/{subscriptionId}/resourceGroups/{resourceGroup}/providers/Microsoft.Devices/iotHubs/{iotHubName}). * @param iotAlertId Id of the alert. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return ioT alert. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<Response<IotAlertModelInner>> getWithResponseAsync(String scope, String iotAlertId, Context context) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } if (scope == null) { return Mono.error(new IllegalArgumentException("Parameter scope is required and cannot be null.")); } if (iotAlertId == null) { return Mono.error(new IllegalArgumentException("Parameter iotAlertId is required and cannot be null.")); } final String apiVersion = "2020-08-06-preview"; final String accept = "application/json"; context = this.client.mergeContext(context); return service.get(this.client.getEndpoint(), apiVersion, scope, iotAlertId, accept, context); } /** * Get IoT alert. * * @param scope Scope of the query: Subscription (i.e. /subscriptions/{subscriptionId}) or IoT Hub (i.e. * /subscriptions/{subscriptionId}/resourceGroups/{resourceGroup}/providers/Microsoft.Devices/iotHubs/{iotHubName}). * @param iotAlertId Id of the alert. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return ioT alert. */ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<IotAlertModelInner> getAsync(String scope, String iotAlertId) { return getWithResponseAsync(scope, iotAlertId) .flatMap( (Response<IotAlertModelInner> res) -> { if (res.getValue() != null) { return Mono.just(res.getValue()); } else { return Mono.empty(); } }); } /** * Get IoT alert. * * @param scope Scope of the query: Subscription (i.e. /subscriptions/{subscriptionId}) or IoT Hub (i.e. * /subscriptions/{subscriptionId}/resourceGroups/{resourceGroup}/providers/Microsoft.Devices/iotHubs/{iotHubName}). * @param iotAlertId Id of the alert. 
* @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return ioT alert. */ @ServiceMethod(returns = ReturnType.SINGLE) public IotAlertModelInner get(String scope, String iotAlertId) { return getAsync(scope, iotAlertId).block(); } /** * Get IoT alert. * * @param scope Scope of the query: Subscription (i.e. /subscriptions/{subscriptionId}) or IoT Hub (i.e. * /subscriptions/{subscriptionId}/resourceGroups/{resourceGroup}/providers/Microsoft.Devices/iotHubs/{iotHubName}). * @param iotAlertId Id of the alert. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return ioT alert. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<IotAlertModelInner> getWithResponse(String scope, String iotAlertId, Context context) { return getWithResponseAsync(scope, iotAlertId, context).block(); } /** * Get the next page of items. * * @param nextLink The nextLink parameter. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return list of IoT alerts. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<PagedResponse<IotAlertModelInner>> listNextSinglePageAsync(String nextLink) { if (nextLink == null) { return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null.")); } if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } final String accept = "application/json"; return FluxUtil .withContext(context -> service.listNext(nextLink, this.client.getEndpoint(), accept, context)) .<PagedResponse<IotAlertModelInner>>map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)) .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); } /** * Get the next page of items. * * @param nextLink The nextLink parameter. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return list of IoT alerts. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<PagedResponse<IotAlertModelInner>> listNextSinglePageAsync(String nextLink, Context context) { if (nextLink == null) { return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null.")); } if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } final String accept = "application/json"; context = this.client.mergeContext(context); return service .listNext(nextLink, this.client.getEndpoint(), accept, context) .map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)); } }
selva-ror/trado
spec/models/address_spec.rb
# == Schema Information # # Table name: addresses # # id :integer not null, primary key # first_name :string # last_name :string # company :string # address :string # city :string # county :string # postcode :string # country :string # telephone :string # active :boolean default(TRUE) # default :boolean default(FALSE) # created_at :datetime not null # updated_at :datetime not null # addressable_id :integer # addressable_type :string # order_id :integer # require 'rails_helper' describe Address do # ActiveRecord relations it { expect(subject).to belong_to(:order) } it { expect(subject).to belong_to(:addressable) } # Validations it { expect(create(:address)).to validate_presence_of(:first_name) } it { expect(create(:address)).to validate_presence_of(:last_name) } it { expect(create(:address)).to validate_presence_of(:address) } it { expect(create(:address)).to validate_presence_of(:city) } it { expect(create(:address)).to validate_presence_of(:postcode) } describe "When displaying an address" do let!(:address) { create(:address, first_name: 'John', last_name: 'Doe') } it "should return a contact's full name as a string" do expect(address.full_name).to eq '<NAME>' end end describe "When display a full address" do let!(:address) { create(:address, first_name: 'John', last_name: 'Doe', address: '12 New St', telephone: '0123712963872') } let!(:full_address_hash) { { name: '<NAME>', address1: '12 New St', city: address.city, zip: address.postcode, state: address.county, country: address.country.alpha_two_code, telephone: '0123712963872' } } it "should return a hash of the full address" do expect(address.full_address).to eq full_address_hash end end end
phatblat/macOSPrivateFrameworks
PrivateFrameworks/PhotosGraph/PGGraphSocialGroupNode.h
// // Generated by class-dump 3.5 (64 bit). // // class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by <NAME>. // #import <PhotosGraph/PGGraphNode.h> @class NSSet; @interface PGGraphSocialGroupNode : PGGraphNode { } + (id)sortedSocialGroupNodeForMomentNodes:(id)arg1; + (id)weightSortDescriptors; - (void)enumeratePersonNodesUsingBlock:(CDUnknownBlockType)arg1; - (void)enumerateMomentNodesUsingBlock:(CDUnknownBlockType)arg1; @property(readonly, nonatomic) NSSet *personNodes; @property(readonly, nonatomic) NSSet *momentNodes; @property(readonly, nonatomic) unsigned long long numberOfMomentNodes; @property(readonly, nonatomic) unsigned long long rank; @end
rajat2502/design-system
packages/core/test/Step.js
<filename>packages/core/test/Step.js<gh_stars>1-10 import React from 'react' import Step from '../src/Step' const hasIcon = component => { const { container } = renderWithTheme(component) return container.querySelector('svg') } describe('Step', () => { test('renders without crashing', () => { const json = rendererCreateWithTheme(<Step>Step</Step>).toJSON() expect(json).toMatchSnapshot() }) test('renders active step', () => { const component = <Step active>Step</Step> const json = rendererCreateWithTheme(component).toJSON() expect(json).toMatchSnapshot() expect(json).toHaveStyleRule('cursor', 'default') expect(hasIcon(component)).toBeFalsy() }) test('renders completed step', () => { const component = <Step completed>Step</Step> const json = rendererCreateWithTheme(component).toJSON() expect(json).toMatchSnapshot() expect(json).toHaveStyleRule('cursor', 'default') expect(hasIcon(component)).toBeTruthy() }) test('renders as active and completed', () => { const component = ( <Step active completed> Step </Step> ) const json = rendererCreateWithTheme(component).toJSON() expect(json).toMatchSnapshot() expect(json).toHaveStyleRule('cursor', 'default') expect(hasIcon(component)).toBeTruthy() }) test('renders with on click', () => { const component = <Step onClick={() => {}}>Step</Step> const json = rendererCreateWithTheme(component).toJSON() expect(json).toMatchSnapshot() expect(json).toHaveStyleRule('cursor', 'pointer') expect(hasIcon(component)).toBeFalsy() }) })
willcassella/SinGE
Systems/BulletPhysics/private/DebugDrawer.h
<filename>Systems/BulletPhysics/private/DebugDrawer.h<gh_stars>1-10 // DebugDrawer.h #pragma once #include <Core/Math/Vec3.h> #include <Engine/Util/DebugDraw.h> #include <btBulletCollisionCommon.h> namespace sge { namespace bullet_physics { class DebugDrawer final : public btIDebugDraw { /////////////////// /// Methods /// public: void drawLine( const btVector3& from, const btVector3& to, const btVector3& color) override; void drawContactPoint( const btVector3& pointOnB, const btVector3& normalOnB, btScalar distance, int lifeTime, const btVector3& color) override; void reportErrorWarning( const char* warningString) override; void draw3dText( const btVector3& location, const char* textString) override; void setDebugMode( int debugMode) override; int getDebugMode() const override; ////////////////// /// Fields /// public: std::vector<DebugLine> lines; }; } }
0003088/libelektra-qt-gui-test
src/plugins/fstab/fstab.c
/*************************************************************************** fstab.c - Access the /etc/fstab file ------------------- begin : Mon Dec 26 2004 copyright : (C) 2004 by <NAME> email : <EMAIL> ***************************************************************************/ /*************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the BSD License (revised). * * * ***************************************************************************/ /*************************************************************************** * * * This is a backend that takes /etc/fstab file as its backend storage. * * The kdbGet() method will parse /etc/fstab and generate a * * valid key tree. The kdbSet() method will take a KeySet with valid * * filesystem keys and print an equivalent regular fstab in stdout. * * * ***************************************************************************/ #include "fstab.h" #ifndef HAVE_KDBCONFIG # include "kdbconfig.h" #endif #define MAX_NUMBER_SIZE 10 /** @param name is a buffer with KDB_MAX_PATH_LENGTH space. * @param fstabEntry will be used to get the name: * @param swapIndex will count up for every swap * * - mnt_type will be checked if it is swap * * TODO Improvements: * - no counting up of swap? * - handle mountpoints none? * * Some logic to define the filesystem name when it is not * so obvious. 
*/ void elektraFstabFsName(char * fsname, struct mntent *fstabEntry, unsigned int *swapIndex) { if (!strcmp(fstabEntry->mnt_type,"swap")) { sprintf(fsname,"swap%02d",*swapIndex); ++(*swapIndex); } else if (!strcmp(fstabEntry->mnt_dir,"none")) { strcpy(fsname,fstabEntry->mnt_type); } else { // Otherwise take dir as-is strcpy(fsname,fstabEntry->mnt_dir); } } int elektraFstabGet(Plugin *handle ELEKTRA_UNUSED, KeySet *returned, Key *parentKey) { int errnosave = errno; ssize_t nr_keys = 0; Key *key; Key *dir; FILE *fstab=0; #if DEBUG && VERBOSE printf ("get fstab %s from %s\n", keyName(parentKey), keyString(parentKey)); #endif if (!strcmp (keyName(parentKey), "system/elektra/modules/fstab")) { KeySet *moduleConfig = ksNew (50, keyNew ("system/elektra/modules/fstab", KEY_VALUE, "fstab plugin waits for your orders", KEY_END), keyNew ("system/elektra/modules/fstab/exports", KEY_END), keyNew ("system/elektra/modules/fstab/exports/get", KEY_FUNC, elektraFstabGet, KEY_END), keyNew ("system/elektra/modules/fstab/exports/set", KEY_FUNC, elektraFstabSet, KEY_END), #include "readme_fstab.c" keyNew ("system/elektra/modules/fstab/infos/version", KEY_VALUE, PLUGINVERSION, KEY_END), keyNew ("system/elektra/modules/fstab/config/needs", KEY_VALUE, "The configuration which is needed", KEY_END), keyNew ("system/elektra/modules/fstab/config/needs/struct", KEY_VALUE, "list FStab", KEY_END), keyNew ("system/elektra/modules/fstab/config/needs/struct/FStab", KEY_META, "check/type", "null empty", KEY_END), keyNew ("system/elektra/modules/fstab/config/needs/struct/FStab/device", KEY_META, "check/type", "string", KEY_META, "check/path", "device", KEY_END), keyNew ("system/elektra/modules/fstab/config/needs/struct/FStab/mpoint", KEY_META, "check/type", "string", KEY_META, "check/path", "directory", KEY_END), keyNew ("system/elektra/modules/fstab/config/needs/struct/FStab/type", KEY_META, "check/type", "FSType", KEY_END), keyNew ("system/elektra/modules/fstab/config/needs/struct/FStab/options", 
KEY_META, "check/type", "string", KEY_END), keyNew ("system/elektra/modules/fstab/config/needs/struct/FStab/dumpfreq", KEY_META, "check/type", "unsigned_short", KEY_END), keyNew ("system/elektra/modules/fstab/config/needs/struct/FStab/passno", KEY_META, "check/type", "unsigned_short", KEY_END), KS_END); ksAppend (returned, moduleConfig); ksDel (moduleConfig); return 1; } key = keyDup (parentKey); ksAppendKey(returned, key); nr_keys ++; fstab=setmntent(keyString(parentKey), "r"); if (fstab == 0) { ELEKTRA_SET_ERROR_GET(parentKey); errno = errnosave; return -1; } struct mntent *fstabEntry; char fsname[KDB_MAX_PATH_LENGTH]; char buffer[MAX_NUMBER_SIZE]; unsigned int swapIndex=0; while ((fstabEntry=getmntent(fstab))) { nr_keys += 7; elektraFstabFsName(fsname, fstabEntry, &swapIndex); /* Include only the filesystem pseudo-names */ dir = keyDup (parentKey); keyAddBaseName(dir, fsname); keySetString(dir,""); keySetComment(dir,""); keySetComment (dir, "Filesystem pseudo-name"); ksAppendKey(returned,dir); key = keyDup (dir); keyAddBaseName(key, "device"); keySetString (key, fstabEntry->mnt_fsname); keySetComment (key, "Device or Label"); ksAppendKey(returned, key); key = keyDup (dir); keyAddBaseName(key, "mpoint"); keySetString (key, fstabEntry->mnt_dir); keySetComment (key, "Mount point"); ksAppendKey(returned, key); key = keyDup (dir); keyAddBaseName(key, "type"); keySetString (key, fstabEntry->mnt_type); keySetComment (key, "Filesystem type."); ksAppendKey(returned, key); key = keyDup (dir); keyAddBaseName(key, "options"); keySetString (key, fstabEntry->mnt_opts); keySetComment (key, "Filesystem specific options"); ksAppendKey(returned, key); key = keyDup (dir); keyAddBaseName(key, "dumpfreq"); snprintf(buffer, MAX_NUMBER_SIZE, "%d",fstabEntry->mnt_freq); keySetString (key, buffer); keySetComment (key, "Dump frequency in days"); ksAppendKey(returned, key); key = keyDup (dir); keyAddBaseName(key, "passno"); snprintf(buffer, MAX_NUMBER_SIZE, "%d",fstabEntry->mnt_passno); 
keySetString (key, buffer); keySetComment (key, "Pass number on parallel fsck"); ksAppendKey(returned, key); } endmntent(fstab); errno = errnosave; return nr_keys; } int elektraFstabSet(Plugin *handle ELEKTRA_UNUSED, KeySet *ks, Key *parentKey) { int errnosave = errno; FILE *fstab=0; Key *key=0; const void *rootname = 0; struct mntent fstabEntry; #if DEBUG && VERBOSE printf ("set fstab %s to file %s\n", keyName(parentKey), keyString(parentKey)); #endif ksRewind (ks); if ((key = ksNext (ks)) != 0) { /*skip parent key*/ } fstab=setmntent(keyString(parentKey), "w"); if(fstab == 0) { ELEKTRA_SET_ERROR_SET(parentKey); errno = errnosave; return -1; } memset(&fstabEntry,0,sizeof(struct mntent)); while ((key = ksNext (ks)) != 0) { const char *basename=keyBaseName(key); #if DEBUG && VERBOSE printf ("key: %s %s\n", keyName(key), basename); #endif if (!strcmp (basename, "device")) { fstabEntry.mnt_fsname=(char *)keyValue(key); } else if (!strcmp (basename, "mpoint")) { fstabEntry.mnt_dir=(char *)keyValue(key); } else if (!strcmp (basename, "type")) { fstabEntry.mnt_type=(char *)keyValue(key); } else if (!strcmp (basename, "options")) { fstabEntry.mnt_opts=(char *)keyValue(key); } else if (!strcmp (basename, "dumpfreq")) { fstabEntry.mnt_freq=atoi((char *)keyValue(key)); } else if (!strcmp (basename, "passno")) { fstabEntry.mnt_passno=atoi((char *)keyValue(key)); } else { // new rootname if (!rootname) { rootname = keyValue(key); } else { rootname = keyValue(key); #if DEBUG && VERBOSE fprintf(stdout, "first: %s %s %s %s %d %d\n", fstabEntry.mnt_fsname, fstabEntry.mnt_dir, fstabEntry.mnt_type, fstabEntry.mnt_opts, fstabEntry.mnt_freq, fstabEntry.mnt_passno); #endif addmntent(fstab, &fstabEntry); memset(&fstabEntry,0,sizeof(struct mntent)); } } } if (rootname) { #if DEBUG && VERBOSE fprintf(stdout, "last: %s %s %s %s %d %d\n", fstabEntry.mnt_fsname, fstabEntry.mnt_dir, fstabEntry.mnt_type, fstabEntry.mnt_opts, fstabEntry.mnt_freq, fstabEntry.mnt_passno); #endif addmntent(fstab, 
&fstabEntry); } endmntent(fstab); errno = errnosave; return 1; } Plugin *ELEKTRA_PLUGIN_EXPORT(fstab) { return elektraPluginExport("fstab", ELEKTRA_PLUGIN_GET, &elektraFstabGet, ELEKTRA_PLUGIN_SET, &elektraFstabSet, ELEKTRA_PLUGIN_END); }
WebicityBrowser/Webicity
src/main/java/everyos.browser.webicity.webribbon/everyos/browser/webicity/webribbon/ui/webui/rendering/box/ProxyBox.java
<reponame>WebicityBrowser/Webicity<filename>src/main/java/everyos.browser.webicity.webribbon/everyos/browser/webicity/webribbon/ui/webui/rendering/box/ProxyBox.java package everyos.browser.webicity.webribbon.ui.webui.rendering.box; import everyos.browser.webicity.webribbon.gui.WebPaintContext; import everyos.browser.webicity.webribbon.gui.box.InlineLevelBox; import everyos.engine.ribbon.core.rendering.RendererData; import everyos.engine.ribbon.core.shape.Rectangle; public class ProxyBox extends CachedRenderBox { public ProxyBox(InlineLevelBox box) { super(box); } @Override public void paint(RendererData rd, Rectangle viewport, WebPaintContext context) { getBox().paint(rd, viewport, context);; } }
#ifndef SLIDERPUZZLEWIDGET_H
#define SLIDERPUZZLEWIDGET_H

#include <QWidget>
#include "ui_sliderpuzzlewidget.h"

// Widget hosting a slider-driven puzzle UI. The layout is produced by
// Qt Designer (see ui_sliderpuzzlewidget.h) and embedded as the `ui` member.
class SliderPuzzleWidget : public QWidget
{
	Q_OBJECT

public:
	SliderPuzzleWidget(QWidget *parent = 0);
	~SliderPuzzleWidget();

private:
	// Sets up the form's widgets and signal/slot wiring
	// (implementation lives in the .cpp file).
	void initForm();

private slots:
	// Refreshes the widget's displayed state — presumably driven by a timer
	// or external trigger; confirm against the .cpp implementation.
	void onUpdateWidget();
	// Invoked while the slider is dragged; `value` is the new slider position.
	void onSliderValueChanged(int value);
	// Invoked when the user releases the slider handle.
	void onSliderReleased();

private:
	Ui::SliderPuzzleWidget ui; // Designer-generated UI members.
};

#endif // SLIDERPUZZLEWIDGET_H
PixelCatalyst/OurCraft
src/test/java/com/pixcat/voxel/SolidBlockTest.java
<gh_stars>1-10 package com.pixcat.voxel; import com.pixcat.core.FileManager; import com.pixcat.graphics.Texture; import com.pixcat.graphics.Window; import nl.jqno.equalsverifier.EqualsVerifier; import org.junit.After; import org.junit.Before; import org.junit.Test; import static org.junit.Assert.*; import static org.lwjgl.glfw.GLFW.glfwInit; public class SolidBlockTest { private Window placeholderWindow; private Texture testTexture; @Before public void setUp() { if (glfwInit() == false) throw new RuntimeException("Unable to initialize GLFW"); placeholderWindow = new Window(100, 100, "placeholder"); placeholderWindow.bindAsCurrent(); testTexture = FileManager.getInstance().loadTexture("dirt.png"); } @Test(expected = IllegalArgumentException.class) public void testCreationWithZeroID() { new SolidBlock((byte) 0, "name", testTexture); } @Test public void testCreationWithNullName() { new SolidBlock((byte) 1, null, testTexture); } @Test(expected = IllegalArgumentException.class) public void testCreationWithNullTexture() { new SolidBlock((byte) 1, "name", null); } @Test public void testValidCreation() { Block testBlock = new SolidBlock((byte) 1, "name", testTexture); assertEquals((byte) 1, testBlock.getID()); assertEquals("name", testBlock.getName()); assertEquals(testTexture, testBlock.getTexture()); } @Test public void testEqualsHashCode() { EqualsVerifier.forClass(SolidBlock.class).verify(); } @After public void tearDown() { placeholderWindow.destroy(); } }
ppiecuch/godot
modules/gdextensions/sfxr/gdsfxr.cpp
/*************************************************************************/ /* gdsfxr.cpp */ /*************************************************************************/ /* This file is part of: */ /* GODOT ENGINE */ /* https://godotengine.org */ /*************************************************************************/ /* Copyright (c) 2007-2021 <NAME>, <NAME>. */ /* Copyright (c) 2014-2021 Godot Engine contributors (cf. AUTHORS.md). */ /* */ /* Permission is hereby granted, free of charge, to any person obtaining */ /* a copy of this software and associated documentation files (the */ /* "Software"), to deal in the Software without restriction, including */ /* without limitation the rights to use, copy, modify, merge, publish, */ /* distribute, sublicense, and/or sell copies of the Software, and to */ /* permit persons to whom the Software is furnished to do so, subject to */ /* the following conditions: */ /* */ /* The above copyright notice and this permission notice shall be */ /* included in all copies or substantial portions of the Software. */ /* */ /* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */ /* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */ /* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/ /* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */ /* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */ /* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */ /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ /*************************************************************************/ #define GAUDIO_NO_ERROR 0 #define GAUDIO_CANNOT_OPEN_FILE -1 #define GAUDIO_UNRECOGNIZED_FORMAT -2 #define GAUDIO_ERROR_WHILE_READING -3 #define GAUDIO_UNSUPPORTED_FORMAT -4 #define g_id unsigned int #define gaudio_Error int #define LOG1(a) std::cout << a << "\n"; #define LOG2(a, b) std::cout << a << b << "\n"; #define LOG3(a, b, c) std::cout << a << b << c << "\n"; #include <stdio.h> #include <stdlib.h> #include <string.h> #include <algorithm> #include <iostream> #include <map> #include "gdsfxr.h" #include "retrosfxvoice.h" #include "core/os/file_access.h" /// AudioStreamSfxr void AudioStreamSfxr::_update_voice() { struct SampleBuffer : public BufferCallback { PoolRealArray data; virtual void append_sample(float sample) { data.push_back(sample); } size_t samples() const { return data.size(); } }; SampleBuffer buffer; sfx_voice.Play(); while (sfx_voice.IsActive()) { sfx_voice.Render(256, &buffer); } _cache = buffer.data; _dirty = false; } void AudioStreamSfxr::from_file(const String p_file) { if (!sfx_voice.LoadSettings(p_file.utf8().get_data())) { ERR_PRINT("Failed to load sfx settings from " + p_file); } } void AudioStreamSfxr::from_dict(const Dictionary &p_dict) { } Error AudioStreamSfxr::save_to_wav(const String &p_path, int quality, int sample_size) { ERR_FAIL_COND_V_MSG(sample_size != 8 && sample_size != 16, ERR_PARAMETER_RANGE_ERROR, "Invalid wave bits size (8/16)"); ERR_FAIL_COND_V_MSG(quality != 11025 && quality != 22050 && quality != 44100, ERR_PARAMETER_RANGE_ERROR, "Invalid wave freq. 
size (11025/22050/44100)"); if (!sfx_voice.ExportWav(p_path.utf8().get_data(), sample_size, quality)) { WARN_PRINT("Export to wav file failed: " + p_path); return ERR_FILE_CANT_WRITE; } return OK; } int AudioStreamSfxr::fill(AudioFrame *p_buffer, int p_frames, int p_from) { if (_dirty) { const_cast<AudioStreamSfxr *>(this)->_update_voice(); } ERR_FAIL_COND_V(p_from >= _cache.size(), 0); for (int p = p_from; p < p_from + p_frames; p++) { if (p == _cache.size()) { return p - p_from; } const float sample = _cache[p]; *p_buffer++ = AudioFrame(sample, sample); } return p_frames; } float AudioStreamSfxr::get_length() const { if (_dirty) { const_cast<AudioStreamSfxr *>(this)->_update_voice(); } return sfx_voice.GetVoiceLengthInSamples() / 44100.0; } Ref<AudioStreamPlayback> AudioStreamSfxr::instance_playback() { Ref<AudioStreamPlaybackSfxr> sfx; sfx.instance(); sfx->sfx_stream = Ref<AudioStreamSfxr>(this); return sfx; } String AudioStreamSfxr::get_stream_name() const { return "SFXR Stream"; } AudioStreamSfxr::AudioStreamSfxr() { _dirty = true; loop = false; loop_offset = 0; sfx_voice.ResetParams(); } /// AudioStreamPlaybackSfxr void AudioStreamPlaybackSfxr::mix(AudioFrame *p_buffer, float p_rate_scale, int p_frames) { ERR_FAIL_COND(!active); int filled = sfx_stream->fill(p_buffer, p_frames, sample_position); int todo = p_frames - filled; sample_position += filled; if (todo) { //end of file! 
if (sfx_stream->loop) { do { seek(sfx_stream->loop_offset); filled = sfx_stream->fill(p_buffer, p_frames, sample_position); todo = p_frames - filled; sample_position += filled; } while (todo > 0); loops++; } else { for (int i = filled; i < p_frames; i++) { p_buffer[i] = AudioFrame(0, 0); } active = false; } } } void AudioStreamPlaybackSfxr::start(float p_from_pos) { active = true; loops = 0; seek(p_from_pos); } void AudioStreamPlaybackSfxr::stop() { active = false; } bool AudioStreamPlaybackSfxr::is_playing() const { return active; } int AudioStreamPlaybackSfxr::get_loop_count() const { return loops; } float AudioStreamPlaybackSfxr::get_playback_position() const { return sample_position / sfx_stream->get_sample_rate(); } void AudioStreamPlaybackSfxr::seek(float p_time) { if (!active) { return; } if (p_time >= sfx_stream->get_length()) { p_time = 0; } sample_position = sfx_stream->get_sample_rate() * p_time; } float AudioStreamPlaybackSfxr::get_length() const { return sfx_stream->get_length(); } AudioStreamPlaybackSfxr::AudioStreamPlaybackSfxr() { active = false; loops = 0; sample_position = 0; } /// Sfx resource importer String ResourceImporterSfxr::get_preset_name(int p_idx) const { return String(); } void ResourceImporterSfxr::get_import_options(List<ImportOption> *r_options, int p_preset) const { r_options->push_back(ImportOption(PropertyInfo(Variant::BOOL, "edit/loop"), false)); r_options->push_back(ImportOption(PropertyInfo(Variant::REAL, "edit/loop_offset"), 0)); } bool ResourceImporterSfxr::get_option_visibility(const String &p_option, const Map<StringName, Variant> &p_options) const { return true; } String ResourceImporterSfxr::get_importer_name() const { return "SFXR"; } String ResourceImporterSfxr::get_visible_name() const { return "RetroSoundFX"; } void ResourceImporterSfxr::get_recognized_extensions(List<String> *p_extensions) const { p_extensions->push_back("sfx"); } String ResourceImporterSfxr::get_save_extension() const { return "res"; } String 
ResourceImporterSfxr::get_resource_type() const { return "AudioStreamSfxr"; } int ResourceImporterSfxr::get_preset_count() const { return 0; } Error ResourceImporterSfxr::import(const String &p_source_file, const String &p_save_path, const Map<StringName, Variant> &p_options, List<String> *r_platform_variants, List<String> *r_gen_files, Variant *r_metadata) { const bool loop = p_options["edit/loop"]; const float loop_offset = p_options["edit/loop_offset"]; Ref<AudioStreamSfxr> sfx_stream; sfx_stream.instance(); sfx_stream->from_file(p_source_file); sfx_stream->set_loop(loop); sfx_stream->set_loop_offset(loop_offset); return ResourceSaver::save(p_save_path + ".res", sfx_stream); }
dharshanaw/carbon-device-mgt
components/device-mgt/org.wso2.carbon.device.mgt.v09.api/src/main/java/org/wso2/carbon/device/mgt/jaxrs/service/impl/ConfigurationServiceImpl.java
/* * Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.wso2.carbon.device.mgt.jaxrs.service.impl; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.device.mgt.common.configuration.mgt.ConfigurationEntry; import org.wso2.carbon.device.mgt.common.configuration.mgt.ConfigurationManagementException; import org.wso2.carbon.device.mgt.common.configuration.mgt.PlatformConfiguration; import org.wso2.carbon.device.mgt.jaxrs.beans.ErrorResponse; import org.wso2.carbon.device.mgt.jaxrs.service.api.ConfigurationManagementService; import org.wso2.carbon.device.mgt.jaxrs.service.impl.util.RequestValidationUtil; import org.wso2.carbon.device.mgt.jaxrs.util.DeviceMgtAPIUtils; import org.wso2.carbon.device.mgt.jaxrs.util.MDMAppConstants; import org.wso2.carbon.policy.mgt.common.PolicyManagementException; import org.wso2.carbon.policy.mgt.core.util.PolicyManagerUtil; import javax.ws.rs.GET; import javax.ws.rs.HeaderParam; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.core.Response; import java.util.ArrayList; import java.util.List; @Path("/configuration") public class ConfigurationServiceImpl implements ConfigurationManagementService { private static final Log log = LogFactory.getLog(ConfigurationServiceImpl.class); @GET @Override public Response getConfiguration(@HeaderParam("If-Modified-Since") String 
ifModifiedSince) { String msg; try { PlatformConfiguration config = DeviceMgtAPIUtils.getPlatformConfigurationManagementService(). getConfiguration(MDMAppConstants.RegistryConstants.GENERAL_CONFIG_RESOURCE_PATH); ConfigurationEntry configurationEntry = new ConfigurationEntry(); configurationEntry.setContentType("text"); configurationEntry.setName("notifierFrequency"); configurationEntry.setValue(PolicyManagerUtil.getMonitoringFrequency()); List<ConfigurationEntry> configList = config.getConfiguration(); if (configList == null) { configList = new ArrayList<>(); configList.add(configurationEntry); } config.setConfiguration(configList); return Response.ok().entity(config).build(); } catch (ConfigurationManagementException | PolicyManagementException e) { msg = "Error occurred while retrieving the general platform configuration"; log.error(msg, e); return Response.serverError().entity( new ErrorResponse.ErrorResponseBuilder().setMessage(msg).build()).build(); } } @PUT @Override public Response updateConfiguration(PlatformConfiguration config) { try { RequestValidationUtil.validateUpdateConfiguration(config); DeviceMgtAPIUtils.getPlatformConfigurationManagementService().saveConfiguration(config, MDMAppConstants.RegistryConstants.GENERAL_CONFIG_RESOURCE_PATH); //Schedule the task service DeviceMgtAPIUtils.scheduleTaskService(DeviceMgtAPIUtils.getNotifierFrequency(config)); PlatformConfiguration updatedConfig = DeviceMgtAPIUtils.getPlatformConfigurationManagementService(). getConfiguration(MDMAppConstants.RegistryConstants.GENERAL_CONFIG_RESOURCE_PATH); return Response.ok().entity(updatedConfig).build(); } catch (ConfigurationManagementException e) { String msg = "Error occurred while updating the general platform configuration"; log.error(msg, e); return Response.serverError().entity( new ErrorResponse.ErrorResponseBuilder().setMessage(msg).build()).build(); } } }
/*
 * Copyright 2016 Yahoo Inc.
 * Licensed under the terms of the Apache License, Version 2. Please see LICENSE.txt in the project root for terms.
 */
package com.yahoo.elide.testing.framework.helpers.user;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yahoo.elide.testing.framework.core.graph.GraphNode;
import com.yahoo.elide.testing.framework.core.graph.Entity;
import com.yahoo.elide.testing.framework.core.graph.EntityCollection;
import com.google.common.collect.Sets;
import com.yahoo.elide.core.EntityDictionary;
import com.yahoo.elide.jsonapi.JsonApiMapper;
import com.yahoo.elide.jsonapi.models.Data;
import com.yahoo.elide.jsonapi.models.Resource;

import java.io.IOException;
import java.io.InputStream;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;

/**
 * Logic for creating objects with sample data.
 */
public class TestEntityFactory {

    // Jackson mapper used to read the raw JSON fixture files.
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
    // Elide JSON-API mapper that converts raw JSON into Resource documents.
    private static final JsonApiMapper JSON_MAPPER = new JsonApiMapper(new EntityDictionary());

    /**
     * Loads the JSON-API fixture for the given person and returns its single
     * Resource, or null when the file is missing or does not contain exactly
     * one resource.
     */
    public static Resource loadPersonFile(Names person) {
        Data<Resource> document = loadJsonApiDocument(person.name());
        if (document == null) {
            return null;
        }
        Collection<Resource> resources = document.get();
        if (resources.size() == 1) {
            return resources.iterator().next();
        }
        return null;
    }

    /**
     * Reads /people/&lt;fileName&gt;.json from the classpath and parses it as a
     * JSON-API document; returns null on any I/O or parse failure.
     */
    private static Data<Resource> loadJsonApiDocument(String fileName) {
        // NOTE(review): the resource is resolved relative to TestUserFactory,
        // not this class — presumably both live on the same classpath root;
        // confirm this is intentional.
        InputStream fileContents = TestUserFactory.class.getResourceAsStream("/people/" + fileName + ".json");
        try {
            JsonNode json = OBJECT_MAPPER.readTree(fileContents);
            return JSON_MAPPER.readJsonApiDocument(json)
                    .getData();
        } catch (IOException e) {
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Builds the "parent" entity collection containing only the requested
     * parents (of the three known parents), keyed by entity id.
     */
    public static EntityCollection getParentsCollectionWith(Names... names) {
        Set<Names> includedParents = Sets.newHashSet(names);
        EntityCollection parentCollection = new EntityCollection(null, "parent");
        // Each parent entity's lineage is the collection that contains it.
        List<GraphNode> lineage = Collections.singletonList(parentCollection);
        SortedMap<String, Entity> parents = new TreeMap<>();
        if (includedParents.contains(Names.BONHAM_MO)) {
            Entity mo = generatePersonWithLineage(Names.BONHAM_MO, lineage);
            parents.put(mo.getId(), mo);
        }
        if (includedParents.contains(Names.BONHAM_MARGERY)) {
            Entity margery = generatePersonWithLineage(Names.BONHAM_MARGERY, lineage);
            parents.put(margery.getId(), margery);
        }
        if (includedParents.contains(Names.AMALBERTI_EMMANUEL)) {
            Entity emmanuel = generatePersonWithLineage(Names.AMALBERTI_EMMANUEL, lineage);
            parents.put(emmanuel.getId(), emmanuel);
        }
        parentCollection.setAccessibleEntities(parents);
        return parentCollection;
    }

    /**
     * Returns the "children" collection rooted at the given parent.
     *
     * @throws IllegalStateException if the name is not one of the known parents
     */
    public static EntityCollection getChildrenCollectionForParent(Names name) {
        Entity parent;
        switch (name) {
            case BONHAM_MO:
            case BONHAM_MARGERY:
            case AMALBERTI_EMMANUEL:
                parent = getPerson(name);
                break;
            default:
                throw new IllegalStateException(name.toString() + " is not a parent");
        }
        List<GraphNode> lineage = Collections.singletonList(parent);
        return new EntityCollection(lineage, "children");
    }

    /**
     * Builds an Entity for the named person with a lineage appropriate to
     * their place in the family graph (parents get the parents collection,
     * children get their parent's children collection).
     */
    public static Entity getPerson(Names name) {
        List<GraphNode> lineage = null;
        switch (name) {
            case BONHAM_MO:
            case BONHAM_MARGERY:
                lineage = Collections.singletonList(getParentsCollectionWith());
                break;
            case BONHAM_GAVINO:
            case BONHAM_PAYTON:
                lineage = Collections.singletonList(getChildrenCollectionForParent(Names.BONHAM_MO));
                break;
            case AMALBERTI_EMMANUEL:
                lineage = Collections.singletonList(getParentsCollectionWith());
                break;
            case AMALBERTI_REBEKA:
            case AMALBERTI_DOVE:
                lineage = Collections.singletonList(getChildrenCollectionForParent(Names.AMALBERTI_EMMANUEL));
                break;
            case TANG_GORAN:
            case TANG_HINA:
                lineage = Collections.singletonList(getParentsCollectionWith());
                break;
            case TANG_LIM:
                // NOTE(review): getChildrenCollectionForParent does not handle
                // TANG_GORAN and will throw IllegalStateException, so this path
                // appears unreachable without error — confirm whether TANG_GORAN
                // should be added to that method's switch.
                lineage = Collections.singletonList(getChildrenCollectionForParent(Names.TANG_GORAN));
                break;
        }
        return generatePersonWithLineage(name, lineage);
    }

    /**
     * Wraps the named person's fixture Resource in an Entity carrying the
     * supplied lineage.
     *
     * @throws IllegalStateException if the name is not a known person
     */
    public static Entity generatePersonWithLineage(Names name, List<GraphNode> lineage) {
        Entity person;
        Resource resource = loadPersonFile(name);
        switch (name) {
            case BONHAM_MO:
            case BONHAM_MARGERY:
            case BONHAM_GAVINO:
            case BONHAM_PAYTON:
            case AMALBERTI_EMMANUEL:
            case AMALBERTI_REBEKA:
            case AMALBERTI_DOVE:
            case TANG_GORAN:
            case TANG_HINA:
            case TANG_LIM:
                person = new Entity(lineage, resource);
                break;
            default:
                throw new IllegalStateException("Unknown person");
        }
        return person;
    }
}
microsoftgraph/msgraph-beta-sdk-go
models/data_classification_service.go
package models import ( i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization" ) // DataClassificationService provides operations to manage the dataClassificationService singleton. type DataClassificationService struct { Entity // The classifyFileJobs property classifyFileJobs []JobResponseBaseable // The classifyTextJobs property classifyTextJobs []JobResponseBaseable // The evaluateDlpPoliciesJobs property evaluateDlpPoliciesJobs []JobResponseBaseable // The evaluateLabelJobs property evaluateLabelJobs []JobResponseBaseable // The exactMatchDataStores property exactMatchDataStores []ExactMatchDataStoreable // The exactMatchUploadAgents property exactMatchUploadAgents []ExactMatchUploadAgentable // The jobs property jobs []JobResponseBaseable // The sensitiveTypes property sensitiveTypes []SensitiveTypeable // The sensitivityLabels property sensitivityLabels []SensitivityLabelable } // NewDataClassificationService instantiates a new dataClassificationService and sets the default values. func NewDataClassificationService()(*DataClassificationService) { m := &DataClassificationService{ Entity: *NewEntity(), } return m } // CreateDataClassificationServiceFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value func CreateDataClassificationServiceFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) { return NewDataClassificationService(), nil } // GetClassifyFileJobs gets the classifyFileJobs property value. The classifyFileJobs property func (m *DataClassificationService) GetClassifyFileJobs()([]JobResponseBaseable) { if m == nil { return nil } else { return m.classifyFileJobs } } // GetClassifyTextJobs gets the classifyTextJobs property value. 
The classifyTextJobs property func (m *DataClassificationService) GetClassifyTextJobs()([]JobResponseBaseable) { if m == nil { return nil } else { return m.classifyTextJobs } } // GetEvaluateDlpPoliciesJobs gets the evaluateDlpPoliciesJobs property value. The evaluateDlpPoliciesJobs property func (m *DataClassificationService) GetEvaluateDlpPoliciesJobs()([]JobResponseBaseable) { if m == nil { return nil } else { return m.evaluateDlpPoliciesJobs } } // GetEvaluateLabelJobs gets the evaluateLabelJobs property value. The evaluateLabelJobs property func (m *DataClassificationService) GetEvaluateLabelJobs()([]JobResponseBaseable) { if m == nil { return nil } else { return m.evaluateLabelJobs } } // GetExactMatchDataStores gets the exactMatchDataStores property value. The exactMatchDataStores property func (m *DataClassificationService) GetExactMatchDataStores()([]ExactMatchDataStoreable) { if m == nil { return nil } else { return m.exactMatchDataStores } } // GetExactMatchUploadAgents gets the exactMatchUploadAgents property value. 
The exactMatchUploadAgents property func (m *DataClassificationService) GetExactMatchUploadAgents()([]ExactMatchUploadAgentable) { if m == nil { return nil } else { return m.exactMatchUploadAgents } } // GetFieldDeserializers the deserialization information for the current model func (m *DataClassificationService) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) { res := m.Entity.GetFieldDeserializers() res["classifyFileJobs"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { val, err := n.GetCollectionOfObjectValues(CreateJobResponseBaseFromDiscriminatorValue) if err != nil { return err } if val != nil { res := make([]JobResponseBaseable, len(val)) for i, v := range val { res[i] = v.(JobResponseBaseable) } m.SetClassifyFileJobs(res) } return nil } res["classifyTextJobs"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { val, err := n.GetCollectionOfObjectValues(CreateJobResponseBaseFromDiscriminatorValue) if err != nil { return err } if val != nil { res := make([]JobResponseBaseable, len(val)) for i, v := range val { res[i] = v.(JobResponseBaseable) } m.SetClassifyTextJobs(res) } return nil } res["evaluateDlpPoliciesJobs"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { val, err := n.GetCollectionOfObjectValues(CreateJobResponseBaseFromDiscriminatorValue) if err != nil { return err } if val != nil { res := make([]JobResponseBaseable, len(val)) for i, v := range val { res[i] = v.(JobResponseBaseable) } m.SetEvaluateDlpPoliciesJobs(res) } return nil } res["evaluateLabelJobs"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { val, err := n.GetCollectionOfObjectValues(CreateJobResponseBaseFromDiscriminatorValue) if err != nil { return err } if val != nil { res := make([]JobResponseBaseable, len(val)) for i, v := 
range val { res[i] = v.(JobResponseBaseable) } m.SetEvaluateLabelJobs(res) } return nil } res["exactMatchDataStores"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { val, err := n.GetCollectionOfObjectValues(CreateExactMatchDataStoreFromDiscriminatorValue) if err != nil { return err } if val != nil { res := make([]ExactMatchDataStoreable, len(val)) for i, v := range val { res[i] = v.(ExactMatchDataStoreable) } m.SetExactMatchDataStores(res) } return nil } res["exactMatchUploadAgents"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { val, err := n.GetCollectionOfObjectValues(CreateExactMatchUploadAgentFromDiscriminatorValue) if err != nil { return err } if val != nil { res := make([]ExactMatchUploadAgentable, len(val)) for i, v := range val { res[i] = v.(ExactMatchUploadAgentable) } m.SetExactMatchUploadAgents(res) } return nil } res["jobs"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { val, err := n.GetCollectionOfObjectValues(CreateJobResponseBaseFromDiscriminatorValue) if err != nil { return err } if val != nil { res := make([]JobResponseBaseable, len(val)) for i, v := range val { res[i] = v.(JobResponseBaseable) } m.SetJobs(res) } return nil } res["sensitiveTypes"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { val, err := n.GetCollectionOfObjectValues(CreateSensitiveTypeFromDiscriminatorValue) if err != nil { return err } if val != nil { res := make([]SensitiveTypeable, len(val)) for i, v := range val { res[i] = v.(SensitiveTypeable) } m.SetSensitiveTypes(res) } return nil } res["sensitivityLabels"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { val, err := n.GetCollectionOfObjectValues(CreateSensitivityLabelFromDiscriminatorValue) if err != nil { return err } if val != nil { res := make([]SensitivityLabelable, len(val)) for i, 
v := range val { res[i] = v.(SensitivityLabelable) } m.SetSensitivityLabels(res) } return nil } return res } // GetJobs gets the jobs property value. The jobs property func (m *DataClassificationService) GetJobs()([]JobResponseBaseable) { if m == nil { return nil } else { return m.jobs } } // GetSensitiveTypes gets the sensitiveTypes property value. The sensitiveTypes property func (m *DataClassificationService) GetSensitiveTypes()([]SensitiveTypeable) { if m == nil { return nil } else { return m.sensitiveTypes } } // GetSensitivityLabels gets the sensitivityLabels property value. The sensitivityLabels property func (m *DataClassificationService) GetSensitivityLabels()([]SensitivityLabelable) { if m == nil { return nil } else { return m.sensitivityLabels } } // Serialize serializes information the current object func (m *DataClassificationService) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) { err := m.Entity.Serialize(writer) if err != nil { return err } if m.GetClassifyFileJobs() != nil { cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetClassifyFileJobs())) for i, v := range m.GetClassifyFileJobs() { cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable) } err = writer.WriteCollectionOfObjectValues("classifyFileJobs", cast) if err != nil { return err } } if m.GetClassifyTextJobs() != nil { cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetClassifyTextJobs())) for i, v := range m.GetClassifyTextJobs() { cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable) } err = writer.WriteCollectionOfObjectValues("classifyTextJobs", cast) if err != nil { return err } } if m.GetEvaluateDlpPoliciesJobs() != nil { cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetEvaluateDlpPoliciesJobs())) for i, v 
:= range m.GetEvaluateDlpPoliciesJobs() { cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable) } err = writer.WriteCollectionOfObjectValues("evaluateDlpPoliciesJobs", cast) if err != nil { return err } } if m.GetEvaluateLabelJobs() != nil { cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetEvaluateLabelJobs())) for i, v := range m.GetEvaluateLabelJobs() { cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable) } err = writer.WriteCollectionOfObjectValues("evaluateLabelJobs", cast) if err != nil { return err } } if m.GetExactMatchDataStores() != nil { cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetExactMatchDataStores())) for i, v := range m.GetExactMatchDataStores() { cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable) } err = writer.WriteCollectionOfObjectValues("exactMatchDataStores", cast) if err != nil { return err } } if m.GetExactMatchUploadAgents() != nil { cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetExactMatchUploadAgents())) for i, v := range m.GetExactMatchUploadAgents() { cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable) } err = writer.WriteCollectionOfObjectValues("exactMatchUploadAgents", cast) if err != nil { return err } } if m.GetJobs() != nil { cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetJobs())) for i, v := range m.GetJobs() { cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable) } err = writer.WriteCollectionOfObjectValues("jobs", cast) if err != nil { return err } } if m.GetSensitiveTypes() != nil { cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetSensitiveTypes())) for i, v := range m.GetSensitiveTypes() { 
cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable) } err = writer.WriteCollectionOfObjectValues("sensitiveTypes", cast) if err != nil { return err } } if m.GetSensitivityLabels() != nil { cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetSensitivityLabels())) for i, v := range m.GetSensitivityLabels() { cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable) } err = writer.WriteCollectionOfObjectValues("sensitivityLabels", cast) if err != nil { return err } } return nil } // SetClassifyFileJobs sets the classifyFileJobs property value. The classifyFileJobs property func (m *DataClassificationService) SetClassifyFileJobs(value []JobResponseBaseable)() { if m != nil { m.classifyFileJobs = value } } // SetClassifyTextJobs sets the classifyTextJobs property value. The classifyTextJobs property func (m *DataClassificationService) SetClassifyTextJobs(value []JobResponseBaseable)() { if m != nil { m.classifyTextJobs = value } } // SetEvaluateDlpPoliciesJobs sets the evaluateDlpPoliciesJobs property value. The evaluateDlpPoliciesJobs property func (m *DataClassificationService) SetEvaluateDlpPoliciesJobs(value []JobResponseBaseable)() { if m != nil { m.evaluateDlpPoliciesJobs = value } } // SetEvaluateLabelJobs sets the evaluateLabelJobs property value. The evaluateLabelJobs property func (m *DataClassificationService) SetEvaluateLabelJobs(value []JobResponseBaseable)() { if m != nil { m.evaluateLabelJobs = value } } // SetExactMatchDataStores sets the exactMatchDataStores property value. The exactMatchDataStores property func (m *DataClassificationService) SetExactMatchDataStores(value []ExactMatchDataStoreable)() { if m != nil { m.exactMatchDataStores = value } } // SetExactMatchUploadAgents sets the exactMatchUploadAgents property value. 
The exactMatchUploadAgents property func (m *DataClassificationService) SetExactMatchUploadAgents(value []ExactMatchUploadAgentable)() { if m != nil { m.exactMatchUploadAgents = value } } // SetJobs sets the jobs property value. The jobs property func (m *DataClassificationService) SetJobs(value []JobResponseBaseable)() { if m != nil { m.jobs = value } } // SetSensitiveTypes sets the sensitiveTypes property value. The sensitiveTypes property func (m *DataClassificationService) SetSensitiveTypes(value []SensitiveTypeable)() { if m != nil { m.sensitiveTypes = value } } // SetSensitivityLabels sets the sensitivityLabels property value. The sensitivityLabels property func (m *DataClassificationService) SetSensitivityLabels(value []SensitivityLabelable)() { if m != nil { m.sensitivityLabels = value } }
dmgerman/hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmnode/RMNodeDecommissioningEvent.java
<filename>hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmnode/RMNodeDecommissioningEvent.java begin_unit|revision:0.9.5;language:Java;cregit-version:0.0.1 begin_comment comment|/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ end_comment begin_package DECL|package|org.apache.hadoop.yarn.server.resourcemanager.rmnode package|package name|org operator|. name|apache operator|. name|hadoop operator|. name|yarn operator|. name|server operator|. name|resourcemanager operator|. name|rmnode package|; end_package begin_import import|import name|org operator|. name|apache operator|. name|hadoop operator|. name|yarn operator|. name|api operator|. name|records operator|. name|NodeId import|; end_import begin_comment comment|/** * RMNode Decommissioning Event. * */ end_comment begin_class DECL|class|RMNodeDecommissioningEvent specifier|public class|class name|RMNodeDecommissioningEvent extends|extends name|RMNodeEvent block|{ comment|// Optional decommissioning timeout in second. 
DECL|field|decommissioningTimeout specifier|private specifier|final name|Integer name|decommissioningTimeout decl_stmt|; comment|// Create instance with optional timeout comment|// (timeout could be null which means use default). DECL|method|RMNodeDecommissioningEvent (NodeId nodeId, Integer timeout) specifier|public name|RMNodeDecommissioningEvent parameter_list|( name|NodeId name|nodeId parameter_list|, name|Integer name|timeout parameter_list|) block|{ name|super argument_list|( name|nodeId argument_list|, name|RMNodeEventType operator|. name|GRACEFUL_DECOMMISSION argument_list|) expr_stmt|; name|this operator|. name|decommissioningTimeout operator|= name|timeout expr_stmt|; block|} DECL|method|getDecommissioningTimeout () specifier|public name|Integer name|getDecommissioningTimeout parameter_list|() block|{ return|return name|this operator|. name|decommissioningTimeout return|; block|} block|} end_class end_unit
muelli/scapi
src/java/edu/biu/scapi/interactiveMidProtocols/zeroKnowledge/ZKFromSigmaProver.java
/** * %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% * * Copyright (c) 2012 - SCAPI (http://crypto.biu.ac.il/scapi) * This file is part of the SCAPI project. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * * We request that any publication and/or code referring to and/or based on SCAPI contain an appropriate citation to SCAPI, including a reference to * http://crypto.biu.ac.il/SCAPI. * * SCAPI uses Crypto++, Miracl, NTL and Bouncy Castle. Please see these projects for any further licensing issues. 
 * %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 * 
 */
package edu.biu.scapi.interactiveMidProtocols.zeroKnowledge;

import java.io.IOException;

import edu.biu.scapi.comm.Channel;
import edu.biu.scapi.exceptions.CheatAttemptException;
import edu.biu.scapi.exceptions.CommitValueException;
import edu.biu.scapi.exceptions.SecurityLevelException;
import edu.biu.scapi.interactiveMidProtocols.sigmaProtocol.SigmaProverComputation;
import edu.biu.scapi.interactiveMidProtocols.sigmaProtocol.utility.SigmaProtocolMsg;
import edu.biu.scapi.interactiveMidProtocols.sigmaProtocol.utility.SigmaProverInput;
import edu.biu.scapi.interactiveMidProtocols.commitmentScheme.CmtReceiver;
import edu.biu.scapi.interactiveMidProtocols.commitmentScheme.CmtCommitValue;
import edu.biu.scapi.interactiveMidProtocols.commitmentScheme.CmtOnBigInteger;
import edu.biu.scapi.interactiveMidProtocols.commitmentScheme.CmtOnByteArray;
import edu.biu.scapi.interactiveMidProtocols.commitmentScheme.CmtRCommitPhaseOutput;
import edu.biu.scapi.interactiveMidProtocols.commitmentScheme.pedersen.CmtPedersenReceiver;
import edu.biu.scapi.securityLevel.PerfectlyHidingCmt;

/**
 * Concrete implementation of Zero Knowledge prover.<p>
 *
 * This is a transformation that takes any Sigma protocol and any perfectly hiding commitment scheme and
 * yields a zero-knowledge proof.<P>
 *
 * For more information see Protocol 6.5.1, page 161 of Hazay-Lindell.<p>
 * The pseudo code of this protocol can be found in Protocol 2.1 of pseudo codes document at {@link http://crypto.biu.ac.il/scapi/SDK_Pseudocode_SCAPI_V2.0.0.pdf}.<p>
 *
 * @author Cryptography and Computer Security Research Group Department of Computer Science Bar-Ilan University (<NAME>)
 *
 */
public class ZKFromSigmaProver implements ZKProver{

	private Channel channel;
	private SigmaProverComputation sProver; //Underlying prover that computes the proof of the sigma protocol.
	private CmtReceiver receiver;           //Underlying commitment receiver to use (verifier plays the committer).

	/**
	 * Constructor that accepts the underlying channel, sigma protocol's prover and commitment's receiver to use.
	 * @param channel used to communicate between prover and verifier.
	 * @param sProver underlying sigma prover to use.
	 * @param receiver must be an instance of PerfectlyHidingCmt and a commitment scheme on ByteArray or BigInteger.
	 * @throws SecurityLevelException if the given CmtReceiver is not an instance of PerfectlyHidingCmt.
	 * @throws IllegalArgumentException if the given receiver is not a commitment scheme on ByteArray or on BigInteger.
	 */
	public ZKFromSigmaProver(Channel channel, SigmaProverComputation sProver, CmtReceiver receiver) throws SecurityLevelException{
		//receiver must be an instance of PerfectlyHidingCmt
		if (!(receiver instanceof PerfectlyHidingCmt)){
			throw new SecurityLevelException("the given CTReceiver must be an instance of PerfectlyHidingCmt");
		}
		//receiver must be a commitment scheme on ByteArray or on BigInteger
		if (!(receiver instanceof CmtOnBigInteger) && !(receiver instanceof CmtOnByteArray)){
			throw new IllegalArgumentException("the given receiver must be a commitment scheme on ByteArray or on BigInteger");
		}

		this.sProver = sProver;
		this.receiver = receiver;
		this.channel = channel;
	}

	/**
	 * Constructor that accepts the underlying channel, sigma protocol's prover and sets a default
	 * (Pedersen, perfectly hiding) commitment receiver.
	 * @param channel used to communicate between prover and verifier.
	 * @param sProver underlying sigma prover to use.
	 * @throws IOException can be thrown in the pre-process stage of CmtPedersenReceiver.
	 */
	public ZKFromSigmaProver(Channel channel, SigmaProverComputation sProver) throws IOException{
		this.sProver = sProver;
		this.receiver = new CmtPedersenReceiver(channel);
		this.channel = channel;
	}

	/**
	 * Runs the prover side of the Zero Knowledge proof.<p>
	 * Let (a,e,z) denote the prover1, verifier challenge and prover2 messages of the sigma protocol.<p>
	 * This function computes the following calculations:<p>
	 *
	 *		 RUN the receiver in COMMIT.commit <p>
	 *		 COMPUTE the first message a in sigma, using (x,w) as input<p>
	 *		 SEND a to V<p>
	 *		 RUN the receiver in COMMIT.decommit <p>
	 *		 IF COMMIT.decommit returns some e<p>
     *			COMPUTE the response z to (a,e) according to sigma<p>
     *      	SEND z to V<p>
     *      	OUTPUT nothing<p>
	 *		 ELSE (IF COMMIT.decommit returns INVALID)<p>
     *			OUTPUT ERROR (CHEAT_ATTEMPT_BY_V)<p>
     * @param input must be an instance of SigmaProverInput.
	 * @throws IllegalArgumentException if the given input is not an instance of SigmaProverInput.
	 * @throws IOException if failed to send the message.
	 * @throws CheatAttemptException if the verifier's decommitment is invalid.
	 * @throws ClassNotFoundException if there was a problem in the deserialization phase.
	 * @throws CommitValueException can occur in case the commitment scheme is ElGamal.
	 */
	public void prove(ZKProverInput input) throws IOException, CheatAttemptException, ClassNotFoundException, CommitValueException {
		//The given input must be an instance of SigmaProtocolInput.
		if (!(input instanceof SigmaProverInput)){
			throw new IllegalArgumentException("the given input must be an instance of SigmaProverInput");
		}

		//Run the receiver in COMMIT.commit
		CmtRCommitPhaseOutput output = receiveCommit();
		//Compute the first message a in sigma, using (x,w) as input and
		//Send a to V
		processFirstMsg((SigmaProverInput) input);
		//Run the receiver in COMMIT.decommit
		//If decommit returns INVALID output ERROR (CHEAT_ATTEMPT_BY_V)
		byte[] e = receiveDecommit(output.getCommitmentId());
		//IF decommit returns some e, compute the response z to (a,e) according to sigma,
		//Send z to V and output nothing
		processSecondMsg(e);
	}

	/**
	 * Runs the receiver in COMMIT.commit with P as the receiver.
	 * @return the output of the commit phase (holds the commitment id used later in decommit).
	 * @throws IOException
	 * @throws ClassNotFoundException
	 */
	private CmtRCommitPhaseOutput receiveCommit() throws IOException, ClassNotFoundException{
		return receiver.receiveCommitment();
	}

	/**
	 * Processes the first message of the Zero Knowledge protocol:
	 *  "COMPUTE the first message a in sigma, using (x,w) as input
	 *	 SEND a to V".
	 * @param input the sigma prover input (x,w).
	 * @throws IOException if failed to send the message.
	 */
	private void processFirstMsg(SigmaProverInput input) throws IOException{
		//Compute the first message by the underlying proverComputation.
		SigmaProtocolMsg a = sProver.computeFirstMsg(input);
		//Send the first message.
		sendMsgToVerifier(a);
	}

	/**
	 * Runs the receiver in COMMIT.decommit.
	 * If decommit returns INVALID output ERROR (CHEAT_ATTEMPT_BY_V).
	 * @param id the commitment id returned by the commit phase.
	 * @return the decommitted challenge e as a byte array.
	 * @throws IOException
	 * @throws CheatAttemptException if decommit phase returned invalid.
	 * @throws ClassNotFoundException
	 * @throws CommitValueException
	 */
	private byte[] receiveDecommit(long id) throws IOException, CheatAttemptException, ClassNotFoundException, CommitValueException{
		CmtCommitValue val = receiver.receiveDecommitment(id);
		if (val == null){
			throw new CheatAttemptException("Decommit phase returned invalid");
		}
		return receiver.generateBytesFromCommitValue(val);
	}

	/**
	 * Processes the second message of the Zero Knowledge protocol:
	 *  "COMPUTE the response z to (a,e) according to sigma
     *   SEND z to V
     *   OUTPUT nothing".
	 * This is a blocking function!
	 * @param e the challenge decommitted by the verifier.
	 * @throws CheatAttemptException if the challenge's length is not as expected.
	 * @throws IOException if failed to send the message.
	 */
	public void processSecondMsg(byte[] e) throws CheatAttemptException, IOException {
		//Compute the second message by the underlying proverComputation.
		SigmaProtocolMsg z = sProver.computeSecondMsg(e);
		//Send the second message.
		sendMsgToVerifier(z);
	}

	/**
	 * Sends the given message to the verifier.
	 * @param message to send to the verifier.
	 * @throws IOException if failed to send the message.
	 */
	private void sendMsgToVerifier(SigmaProtocolMsg message) throws IOException{
		try {
			//Send the message by the channel.
			channel.send(message);
		} catch (IOException e) {
			throw new IOException("failed to send the message. The thrown exception is: " + e.getMessage());
		}
	}
}
ics-unisg/nassy
dcap/src/main/java/com/dcap/service/UserService.java
<reponame>ics-unisg/nassy package com.dcap.service; import com.dcap.domain.*; import com.dcap.repository.NotificationsInterface; import com.dcap.repository.StudyInterface; import com.dcap.repository.UsersInterface; import com.dcap.service.Exceptions.RepoExeption; import com.dcap.service.serviceInterfaces.SubjectServiceInterface; import com.dcap.service.serviceInterfaces.UserServiceInterface; import com.dcap.domain.*; import com.dcap.repository.NotificationsInterface; import com.dcap.repository.StudyInterface; import com.dcap.repository.UsersInterface; import com.dcap.service.Exceptions.RepoExeption; import com.dcap.service.serviceInterfaces.SubjectServiceInterface; import com.dcap.service.serviceInterfaces.UserDataServiceInterface; import com.dcap.service.serviceInterfaces. UserServiceInterface; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.security.crypto.password.PasswordEncoder; import org.springframework.stereotype.Service; import javax.transaction.Transactional; import java.util.List; /** * Service to manage the Users in the database * * @author uli */ @Service public class UserService implements UserServiceInterface { private final UsersInterface userRepo; private final NotificationsInterface notificationService; private final SubjectServiceInterface subjectService; private final StudyInterface studyRepo; private final UserDataServiceInterface userDataService; private final PasswordEncoder pwe; @Autowired public UserService(UsersInterface userRepo, NotificationsInterface notificationRepo, SubjectServiceInterface subjectService, StudyInterface studyRepo, UserDataServiceInterface userDataService, PasswordEncoder pwe) { this.userRepo = userRepo; this.notificationService = notificationRepo; this.subjectService = subjectService; this.studyRepo = studyRepo; this.userDataService = userDataService; this.pwe = pwe; } /** * finds User for given id * * @param id id that belongs to User * @return User for given id */ @Override 
public User getUserById(Long id) throws RepoExeption { User one = userRepo.findOne(id); if (one == null) { throw new RepoExeption("user not found"); } one.setPassword(null); return one; } @Override public User save(User u) throws RepoExeption { User userByEmail = userRepo.findUserByEmail(u.getEmail()); if (userByEmail != null) { throw new RepoExeption("User already in database"); } String encode = pwe.encode(u.getPassword()); u.setPassword(encode); User savedUser = userRepo.save(u); return savedUser; } @Override public User updatePassword(User user, String password) throws RepoExeption { User userByEmail = userRepo.findUserByEmail(user.getEmail()); if (userByEmail == null) { throw new RepoExeption("User not in database"); } String encode = pwe.encode(password); userByEmail.setPassword(encode); User savedUser = userRepo.save(userByEmail); savedUser.setPassword(<PASSWORD>); return savedUser; } @Override public User findUserById(Long userId) throws RepoExeption { User user = userRepo.findOne(userId); if (user == null) { throw new RepoExeption("No user for given id."); } return user; } /** * deletes given User in the database, removes User from Study and deletes all notifications for User * * @param user User to be deleted */ @Override @Transactional(Transactional.TxType.REQUIRED) public void delete(User user) { List<Notifications> notificationByUser = notificationService.findByUser(user); for (Notifications not : notificationByUser) { notificationService.delete(not); } List<Subject> allSubjectsForUser = subjectService.getAllSubjectsForUser(user); for(Subject s : allSubjectsForUser){ subjectService.delete(s); } List<Study> studies = studyRepo.findByUser(user); for (Study st : studies) { studyRepo.delete(st.getId()); } List<UserData> userDataByUser = userDataService.getUserDataByUser(user); for(UserData ud:userDataByUser){ userDataService.delete(ud); } userDataService.detacheUserData(user); userRepo.delete(user); } /** * deletes given User in the database, removes User 
from Study and deletes all notifications for User * * @param id id of User to be deleted */ @Override public Boolean delete(Long id) throws RepoExeption { User user = userRepo.findById(id); if(user==null){ throw new RepoExeption("No such user..."); } delete(user); return true; } @Override public User findUserByEmail(String username) { return userRepo.findUserByEmail(username); } public List<User> getAllUser() { List<User> all = userRepo.findAll(); return all; } }
meetzy/netty-websocket
src/main/java/com/xfmeet/websocket/db/domain/GroupUserInfoRepository.java
<gh_stars>1-10 package com.xfmeet.websocket.db.domain; import com.xfmeet.websocket.db.entity.GroupUserInfo; import com.xfmeet.websocket.db.support.BaseRepository; import org.springframework.data.jpa.repository.Query; import org.springframework.stereotype.Repository; import java.util.List; import java.util.Optional; /** * @author meetzy */ @Repository public interface GroupUserInfoRepository extends BaseRepository<GroupUserInfo> { /** * 通过群号查询在线用户 * * @param groupNum groupNum * @return list */ @Query(value = "select g.* from group_user_info g left user_info u on g.user_id= u.user_id where g.group_num=?1 and u.is_online=1", nativeQuery = true) List<GroupUserInfo> findOnlineUsers(Integer groupNum); /** * 通过群号跟用户ID查询是否加入某群 * * @param groupNum groupNum * @param userId userId * @return optional */ Optional<GroupUserInfo> findByGroupNumAndUserId(Integer groupNum, String userId); }
jupiter-tools/spring-test-kafka
src/main/java/com/jupitertools/springtestkafka/KafkaTestContainers.java
package com.jupitertools.springtestkafka;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Container for the repeatable annotation {@link KafkaTestContainer}.
 *
 * Used to support KafkaTestContainer as a repeatable annotation.
 *
 * You don't need to use this annotation directly: to run more than one
 * Kafka instance you can write the KafkaTestContainer annotation multiple
 * times on a single test class.
 *
 * @author <NAME>
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface KafkaTestContainers {

	/**
	 * @return the collected {@link KafkaTestContainer} annotations
	 */
	KafkaTestContainer[] value();
}
84KaliPleXon3/necrobrowser
core/zombie.go
package core import ( "fmt" "github.com/muraenateam/necrobrowser/zombie" "github.com/muraenateam/necrobrowser/zombie/dropbox" "github.com/muraenateam/necrobrowser/zombie/github" "github.com/muraenateam/necrobrowser/zombie/gsuite" "github.com/muraenateam/necrobrowser/zombie/owa" "github.com/muraenateam/necrobrowser/zombie/slack" "github.com/muraenateam/necrobrowser/zombie/vultr" ) var zombies = []string{"gsuite", "github", "owa2016", "dropbox", "atlassian", "vultr", "slack"} func GetZombie(name string, target zombie.Target, options Options) (z zombie.Zombie, err error) { target.Config = zombie.Config{ LootPath: options.LootPath, } switch name { case "github": z = github.NewGithub(target) case "dropbox": z = dropbox.NewDrobox(target) case "vultr": z = vultr.NewVultr(target) case "slack": z = slack.NewSlack(target) case "owa2016": z = owa.NewOWA(target) case "gsuite": z = gsuite.NewGSuite(target) } // Update loot path lp := GetZombieLootPath(options.LootPath, zombie.GetTag(z.Name())) if _, err := CheckLoot(lp); err != nil { return nil, err } z.SetLootPath(lp) return } func GetZombieLootPath(loot string, tag string) string { return fmt.Sprintf("%s/%s/", loot, tag) } func IsValidZombie(name string) bool { for _, value := range zombies { if value == name { return true } } return false }
Pecherskiy88/UpPeople
src/components/Users/Users.js
// Core import React, { Component } from "react"; import PropTypes from "prop-types"; import { Button, Col, Row, TabPane } from "reactstrap"; // Components import Tabs from "../shared/Tabs/Tabs"; import UsersTable from "./Table"; import UserCreateForm from "../User/CreateForm"; // HOC import withModal from "../hoc/withModal"; // Context import { ModalConsumer } from "../../providers/ModalProvider"; // Instruments import { getUsers } from "../../utils/api/users"; import { addNewUser, deleteUserById } from "../../utils/api/user"; import { updateRecruiterForFreelancer } from "../../utils/api/freelancer"; const WithModalUserCreateForm = withModal(UserCreateForm); const tabsForAdmin = [ { id: "1", name: "Staff" }, { id: "2", name: "Partners" }, { id: "3", name: "Freelancers" } ]; const tabsForManager = [{ id: "1", name: "Freelancers" }]; export default class Users extends Component { static propTypes = { user: PropTypes.shape({ id: PropTypes.number.isRequired, role: PropTypes.number.isRequired }).isRequired }; state = { staff: [], partners: [], freelancers: [] }; componentDidMount() { getUsers().then(users => { this.setState({ ...users }); }); } addUser = user => { addNewUser(user).then(createdUser => { if (createdUser.role_id) { this.checkUserRole(createdUser, createdUser.role_id); } }); }; checkUserRole = (user, role) => { console.log("user: ", user, "role: ", role); switch (role) { case 4: return this.setState(state => ({ freelancers: [user, ...state.freelancers] })); case 5: return this.setState(state => ({ partners: [user, ...state.partners] })); default: return this.setState(state => ({ staff: [user, ...state.staff] })); } }; deleteUser = id => { deleteUserById(id).then(users => { this.setState({ ...users }); }); }; changeRecruiterForFreelancer = content => { updateRecruiterForFreelancer(content); }; render() { const { freelancers } = this.state; const { user: { role } } = this.props; const users = this.state; const tabPanesForAdmin = Object.keys(users).map((key, 
idx) => { const index = (idx + 1).toString(); return key !== undefined ? ( <TabPane tabId={index} key={key}> <UsersTable users={users[key]} usersGroup={key} onChangeRecruiter={this.changeRecruiterForFreelancer} onDeleteUser={this.deleteUser} /> </TabPane> ) : ( <p>`There are no ${key} yet. You could add a new one.`</p> ); }); return ( <> <Row style={{ marginBottom: "1rem" }}> <Col> <ModalConsumer> {({ showModal }) => ( <Button color="success" onClick={() => showModal(WithModalUserCreateForm, { isOpenModal: true, title: "New user", userRole: role, onAddUser: this.addUser }) } > <i style={{ marginRight: "0.2rem" }} className="fa fa-plus-circle" /> Create </Button> )} </ModalConsumer> </Col> </Row> <Row> <Col> {role === 1 ? ( <Tabs tabs={tabsForAdmin}>{tabPanesForAdmin}</Tabs> ) : ( <Tabs tabs={tabsForManager}> <TabPane tabId="1"> {freelancers !== undefined ? ( <UsersTable users={freelancers} onDeleteUser={this.deleteUser} /> ) : ( <p> There are no freelancers yet. Click the button "Create" above to add a new one. </p> )} </TabPane> </Tabs> )} </Col> </Row> </> ); } }
DirectXceriD/gridgain
modules/ml/src/test/java/org/apache/ignite/ml/tree/randomforest/data/impurity/MSEHistogramTest.java
/* * GridGain Community Edition Licensing * Copyright 2019 GridGain Systems, Inc. * * Licensed under the Apache License, Version 2.0 (the "License") modified with Commons Clause * Restriction; you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. * * Commons Clause Restriction * * The Software is provided to you by the Licensor under the License, as defined below, subject to * the following condition. * * Without limiting other conditions in the License, the grant of rights under the License will not * include, and the License does not grant to you, the right to Sell the Software. * For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you * under the License to provide to third parties, for a fee or other consideration (including without * limitation fees for hosting or consulting/ support services related to the Software), a product or * service whose value derives, entirely or substantially, from the functionality of the Software. * Any license notice or attribution required by the License must also include this Commons Clause * License Condition notice. * * For purposes of the clause above, the “Licensor” is Copyright 2019 GridGain Systems, Inc., * the “License” is the Apache License, Version 2.0, and the Software is the GridGain Community * Edition software provided with this notice. 
 */
package org.apache.ignite.ml.tree.randomforest.data.impurity;

import java.util.ArrayList;
import java.util.List;
import org.apache.ignite.ml.dataset.feature.BucketMeta;
import org.apache.ignite.ml.dataset.feature.FeatureMeta;
import org.apache.ignite.ml.dataset.impl.bootstrapping.BootstrappedVector;
import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
import org.junit.Before;
import org.junit.Test;

import static org.junit.Assert.assertTrue;

/**
 * Tests for MSE impurity histograms: per-bucket counters and label sums
 * accumulated over a small hand-crafted bootstrapped dataset, plus the
 * additivity of histograms computed on partitions.
 */
public class MSEHistogramTest extends ImpurityHistogramTest {
    /** Feature 1 meta (categorical — third FeatureMeta argument is true). */
    private BucketMeta feature1Meta = new BucketMeta(new FeatureMeta("", 0, true));

    /** Feature 2 meta (continuous; bucketed via min value and bucket size). */
    private BucketMeta feature2Meta = new BucketMeta(new FeatureMeta("", 1, false));

    /** */
    @Before
    public void setUp() throws Exception {
        // Continuous feature values in [-5, ...) fall into unit-width buckets.
        feature2Meta.setMinVal(-5);
        feature2Meta.setBucketSize(1);
    }

    /**
     * Adds every vector of {@link #dataset} to histograms for two samples
     * (sample 0 and sample 1) and checks bucket ids, per-bucket counts,
     * label sums and squared-label sums. Expected values are hand-computed
     * from the dataset's labels and per-sample repetition counts.
     */
    @Test
    public void testAdd() {
        // Histograms over the categorical (cat*) and continuous (cont*)
        // features, for sample 0 and sample 1 respectively.
        MSEHistogram catHist1 = new MSEHistogram(0, feature1Meta);
        MSEHistogram contHist1 = new MSEHistogram(0, feature2Meta);

        MSEHistogram catHist2 = new MSEHistogram(1, feature1Meta);
        MSEHistogram contHist2 = new MSEHistogram(1, feature2Meta);

        for (BootstrappedVector vec : dataset) {
            catHist1.addElement(vec);
            catHist2.addElement(vec);
            contHist1.addElement(vec);
            contHist2.addElement(vec);
        }

        checkBucketIds(catHist1.buckets(), new Integer[] {0, 1});
        checkBucketIds(catHist2.buckets(), new Integer[] {0, 1});
        checkBucketIds(contHist1.buckets(), new Integer[] {1, 4, 6, 7, 8});
        checkBucketIds(contHist2.buckets(), new Integer[] {1, 4, 6, 7, 8});

        //counters
        checkCounters(catHist1.getCounters(), new double[] {4, 4});
        checkCounters(catHist2.getCounters(), new double[] {1, 5});
        checkCounters(contHist1.getCounters(), new double[] {1, 1, 2, 2, 2});
        checkCounters(contHist2.getCounters(), new double[] {2, 2, 1, 1, 0});

        //ys (per-bucket sums of labels, weighted by the sample's repetition count)
        checkCounters(catHist1.getSumOfLabels(), new double[] {2 * 4 + 2 * 3, 5 + 1 + 2 * 2});
        checkCounters(catHist2.getSumOfLabels(), new double[] {4, 2 * 5 + 2 * 1 + 2});
        checkCounters(contHist1.getSumOfLabels(), new double[] {5 * 1, 1 * 1, 4 * 2, 2 * 2, 3 * 2});
        checkCounters(contHist2.getSumOfLabels(), new double[]{ 2 * 5, 2 * 1, 1 * 4, 2 * 1, 0 * 3 });

        //y2s (per-bucket sums of squared labels, same weighting)
        checkCounters(catHist1.getSumOfSquaredLabels(), new double[] {2 * 4 * 4 + 2 * 3 * 3, 5 * 5 + 1 + 2 * 2 * 2});
        checkCounters(catHist2.getSumOfSquaredLabels(), new double[] {4 * 4, 2 * 5 * 5 + 2 * 1 * 1 + 2 * 2});
        checkCounters(contHist1.getSumOfSquaredLabels(), new double[] {1 * 5 * 5, 1 * 1 * 1, 2 * 4 * 4, 2 * 2 * 2, 2 * 3 * 3});
        checkCounters(contHist2.getSumOfSquaredLabels(), new double[]{ 2 * 5 * 5, 2 * 1 * 1, 1 * 4 * 4, 1 * 2 * 2, 0 * 3 * 3 });
    }

    /**
     * Checks that a histogram built over a whole random dataset equals the
     * sum of histograms built over random partitions of it, and that adding
     * an empty histogram is an identity in both argument orders.
     */
    @Test
    public void testOfSums() {
        int sampleId = 0;
        BucketMeta bucketMeta1 = new BucketMeta(new FeatureMeta("", 0, false));
        bucketMeta1.setMinVal(0.);
        bucketMeta1.setBucketSize(0.1);
        BucketMeta bucketMeta2 = new BucketMeta(new FeatureMeta("", 1, true));

        MSEHistogram forAllHist1 = new MSEHistogram(sampleId, bucketMeta1);
        MSEHistogram forAllHist2 = new MSEHistogram(sampleId, bucketMeta2);

        List<MSEHistogram> partitions1 = new ArrayList<>();
        List<MSEHistogram> partitions2 = new ArrayList<>();
        int cntOfPartitions = rnd.nextInt(100);
        for (int i = 0; i < cntOfPartitions; i++) {
            partitions1.add(new MSEHistogram(sampleId, bucketMeta1));
            partitions2.add(new MSEHistogram(sampleId, bucketMeta2));
        }

        int datasetSize = rnd.nextInt(1000);
        for(int i = 0; i < datasetSize; i++) {
            BootstrappedVector vec = randomVector(false);
            // Map feature 1 into [0, 100) so it hits the categorical buckets.
            vec.features().set(1, (vec.features().get(1) * 100) % 100);

            forAllHist1.addElement(vec);
            forAllHist2.addElement(vec);
            int partId = rnd.nextInt(cntOfPartitions);
            partitions1.get(partId).addElement(vec);
            partitions2.get(partId).addElement(vec);
        }

        checkSums(forAllHist1, partitions1);
        checkSums(forAllHist2, partitions2);

        MSEHistogram emptyHist1 = new MSEHistogram(sampleId, bucketMeta1);
        MSEHistogram emptyHist2 = new MSEHistogram(sampleId, bucketMeta2);
        assertTrue(forAllHist1.isEqualTo(forAllHist1.plus(emptyHist1)));
        assertTrue(forAllHist2.isEqualTo(forAllHist2.plus(emptyHist2)));
        assertTrue(forAllHist1.isEqualTo(emptyHist1.plus(forAllHist1)));
        assertTrue(forAllHist2.isEqualTo(emptyHist2.plus(forAllHist2)));
    }

    /**
     * Dataset: five vectors of (feature0, feature1) with a label and a
     * per-sample repetition-count array {count in sample 0, count in sample 1}.
     */
    private BootstrappedVector[] dataset = new BootstrappedVector[] {
        new BootstrappedVector(VectorUtils.of(1, -4), 5, new int[] {1, 2}),
        new BootstrappedVector(VectorUtils.of(1, -1), 1, new int[] {1, 2}),
        new BootstrappedVector(VectorUtils.of(0, 1), 4, new int[] {2, 1}),
        new BootstrappedVector(VectorUtils.of(1, 2), 2, new int[] {2, 1}),
        new BootstrappedVector(VectorUtils.of(0, 3), 3, new int[] {2, 0}),
    };
}
gublan24/umpleSPLFull
testbed/src-gen-umple/cruise/associations/specializations/Pp.java
/*PLEASE DO NOT EDIT THIS CODE*/ /*This code was generated using the UMPLE 1.31.1.5860.78bb27cc6 modeling language!*/ package cruise.associations.specializations; import java.util.*; // line 210 "../../../../src/TestHarnessAssociationSpecializations.ump" public class Pp { //------------------------ // MEMBER VARIABLES //------------------------ //Pp Associations private List<Oo> o; //------------------------ // CONSTRUCTOR //------------------------ public Pp() { o = new ArrayList<Oo>(); } //------------------------ // INTERFACE //------------------------ /* Code from template association_GetMany */ public Oo getO(int index) { Oo aO = o.get(index); return aO; } public List<Oo> getO() { List<Oo> newO = Collections.unmodifiableList(o); return newO; } public int numberOfO() { int number = o.size(); return number; } public boolean hasO() { boolean has = o.size() > 0; return has; } public int indexOfO(Oo aO) { int index = o.indexOf(aO); return index; } /* Code from template association_GetMany_clear */ protected void clear_o() { o.clear(); } /* Code from template association_MinimumNumberOfMethod */ public static int minimumNumberOfO() { return 0; } /* Code from template association_AddManyToManyMethod */ public boolean addO(Oo aO) { boolean wasAdded = false; if (o.contains(aO)) { return false; } o.add(aO); if (aO.indexOfP(this) != -1) { wasAdded = true; } else { wasAdded = aO.addP(this); if (!wasAdded) { o.remove(aO); } } return wasAdded; } /* Code from template association_RemoveMany */ public boolean removeO(Oo aO) { boolean wasRemoved = false; if (!o.contains(aO)) { return wasRemoved; } int oldIndex = o.indexOf(aO); o.remove(oldIndex); if (aO.indexOfP(this) == -1) { wasRemoved = true; } else { wasRemoved = aO.removeP(this); if (!wasRemoved) { o.add(oldIndex,aO); } } return wasRemoved; } /* Code from template association_AddIndexControlFunctions */ public boolean addOAt(Oo aO, int index) { boolean wasAdded = false; if(addO(aO)) { if(index < 0 ) { index = 0; } 
if(index > numberOfO()) { index = numberOfO() - 1; } o.remove(aO); o.add(index, aO); wasAdded = true; } return wasAdded; } public boolean addOrMoveOAt(Oo aO, int index) { boolean wasAdded = false; if(o.contains(aO)) { if(index < 0 ) { index = 0; } if(index > numberOfO()) { index = numberOfO() - 1; } o.remove(aO); o.add(index, aO); wasAdded = true; } else { wasAdded = addOAt(aO, index); } return wasAdded; } public void delete() { ArrayList<Oo> copyOfO = new ArrayList<Oo>(o); o.clear(); for(Oo aO : copyOfO) { if (aO.numberOfP() <= Oo.minimumNumberOfP()) { aO.delete(); } else { aO.removeP(this); } } } }
protimient/Glyphs-Scripts
Anchors/alignBetweenNodes_Centre.py
# MenuTitle: Align Anchor Between Selected Nodes - Centre
# -*- coding: utf-8 -*-
__doc__ = """
Aligns the selected anchor centrally between the selected Nodes.
"""

Glyphs.clearLog()
# Glyphs.showMacroWindow()

current_selection = Glyphs.font.selectedLayers[0].selection

# Pick the first selected anchor, if any.
# Fix: indexing an empty list raises IndexError, not KeyError, so the
# original `except KeyError` guard never fired when no anchor was selected.
anchors = [x for x in current_selection if isinstance(x, GSAnchor)]
anchor = anchors[0] if anchors else None

nodes = [x for x in current_selection if isinstance(x, GSNode)]

if anchor is not None and len(nodes) > 1:
    # Centre the anchor horizontally between the outermost selected nodes.
    leftmost_node = min(nodes, key=lambda n: n.x)
    rightmost_node = max(nodes, key=lambda n: n.x)
    middle_x = (leftmost_node.x + rightmost_node.x) / 2
    anchor.x = middle_x
srimadha/opencourseware-algo-6.006
algos/src/com/leetcode/hard/BurstBallons.java
<gh_stars>0 package com.leetcode.hard; /** * Created by Sri on 4/27/2019. * * https://leetcode.com/problems/burst-balloons/ * Given n balloons, indexed from 0 to n-1. Each balloon is painted with a number on it represented by array nums. You are asked to burst all the balloons. If the you burst balloon i you will get nums[left] * nums[i] * nums[right] coins. Here left and right are adjacent indices of i. After the burst, the left and right then becomes adjacent. Find the maximum coins you can collect by bursting the balloons wisely. Note: You may imagine nums[-1] = nums[n] = 1. They are not real therefore you can not burst them. 0 ≤ n ≤ 500, 0 ≤ nums[i] ≤ 100 Example: Input: [3,1,5,8] Output: 167 Explanation: nums = [3,1,5,8] --> [3,5,8] --> [3,8] --> [8] --> [] coins = 3*1*5 + 3*5*8 + 1*3*8 + 1*8*1 = 167 */ public class BurstBallons { public int maxCoins(int[] iNums) { int[] nums = new int[iNums.length + 2]; int n = 1; for (int x : iNums) if (x > 0) nums[n++] = x; nums[0] = nums[n++] = 1; int[][] memo = new int[n][n]; return burst(memo, nums, 0, n - 1); } public int burst(int[][] memo, int[] nums, int left, int right) { if (left + 1 == right) return 0; if (memo[left][right] > 0) return memo[left][right]; int ans = 0; for (int i = left + 1; i < right; i++) ans = Math.max(ans, nums[left] * nums[i] * nums[right] + burst(memo, nums, left, i) + burst(memo, nums, i, right)); memo[left][right] = ans; return ans; } }
zjukk/LeetCode-is-my-son
21-merge2List.cpp
#include <iostream> #include <string> #include <algorithm> #include <vector> #include <map> #include <limits> using namespace std; ListNode* mergeTwoLists(ListNode* l1, ListNode* l2) { ListNode* ans = new ListNode(0); ListNode* res = ans; while (l1 != NULL && l2 != NULL) { if (l1->val < l2->val) { ans->next = l1; l1 = l1->next; ans = ans->next; } else { ans->next = l2; l2 = l2->next; ans = ans->next; } } while (l1 != NULL) { ans->next = l1; l1 = l1->next; ans = ans->next; } while (l2 != NULL) { ans->next = l2; l2 = l2->next; ans = ans->next; } return res->next; }
kami-lang/madex-r8
src/test/java/com/android/tools/r8/ir/optimize/library/ObjectsToStringWithNullDefaultTest.java
// Copyright (c) 2021, the R8 project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

package com.android.tools.r8.ir.optimize.library;

import static com.android.tools.r8.utils.codeinspector.CodeMatchers.invokesMethodWithName;
import static com.android.tools.r8.utils.codeinspector.Matchers.isPresent;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.MatcherAssert.assertThat;

import com.android.tools.r8.NeverInline;
import com.android.tools.r8.TestBase;
import com.android.tools.r8.TestParameters;
import com.android.tools.r8.TestParametersCollection;
import com.android.tools.r8.utils.AndroidApiLevel;
import com.android.tools.r8.utils.codeinspector.ClassSubject;
import com.android.tools.r8.utils.codeinspector.MethodSubject;
import java.util.Objects;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

/**
 * Verifies R8's modeling of {@link Objects#toString(Object, String)}: after
 * compilation, neither caller method should contain an invoke of
 * {@code toString} (the library-method optimization removes/simplifies it),
 * while runtime behavior is unchanged ("Foo" for the non-null argument,
 * the "Bar" default for the null argument).
 */
@RunWith(Parameterized.class)
public class ObjectsToStringWithNullDefaultTest extends TestBase {

  private final TestParameters parameters;

  // Objects.toString(Object, String) requires API level K on Android, hence
  // the lower bound on the tested API levels.
  @Parameterized.Parameters(name = "{0}")
  public static TestParametersCollection data() {
    return getTestParameters()
        .withCfRuntimes()
        .withDexRuntimes()
        .withApiLevelsStartingAtIncluding(AndroidApiLevel.K)
        .build();
  }

  public ObjectsToStringWithNullDefaultTest(TestParameters parameters) {
    this.parameters = parameters;
  }

  @Test
  public void test() throws Exception {
    testForR8(parameters.getBackend())
        .addInnerClasses(getClass())
        .addKeepMainRule(Main.class)
        .enableInliningAnnotations()
        .setMinApi(parameters.getApiLevel())
        .compile()
        .inspect(
            inspector -> {
              ClassSubject mainClassSubject = inspector.clazz(Main.class);
              assertThat(mainClassSubject, isPresent());

              // The Objects.toString call with a constant non-null receiver
              // should be optimized away entirely.
              MethodSubject testNonNullArgumentMethodSubject =
                  mainClassSubject.uniqueMethodWithName("testNonNullArgument");
              assertThat(testNonNullArgumentMethodSubject, isPresent());
              assertThat(testNonNullArgumentMethodSubject, not(invokesMethodWithName("toString")));

              // Likewise for the null receiver: the default string is used
              // directly, with no toString invoke remaining.
              MethodSubject testNullArgumentMethodSubject =
                  mainClassSubject.uniqueMethodWithName("testNullArgument");
              assertThat(testNullArgumentMethodSubject, isPresent());
              assertThat(testNullArgumentMethodSubject, not(invokesMethodWithName("toString")));
            })
        .run(parameters.getRuntime(), Main.class)
        .assertSuccessWithOutputLines("Foo", "Bar");
  }

  // Input program compiled by R8 in the test above.
  static class Main {

    public static void main(String[] args) {
      testNonNullArgument();
      testNullArgument();
    }

    // @NeverInline keeps each call site in its own method so the inspector
    // can examine the optimized code per scenario.
    @NeverInline
    static void testNonNullArgument() {
      // Non-null argument: prints "Foo"; the ":-(" default is dead.
      System.out.println(Objects.toString("Foo", ":-("));
    }

    @NeverInline
    static void testNullArgument() {
      // Null argument: prints the "Bar" default.
      System.out.println(Objects.toString(null, "Bar"));
    }
  }
}
kubukoz/tapir
core/src/main/scala/sttp/tapir/generic/internal/Debug.scala
package sttp.tapir.generic.internal

/** Debug aid for tapir's macro derivation: optionally prints the code a
  * macro generated. Output is enabled by setting the environment variable
  * `TAPIR_LOG_GENERATED_CODE` to the string "true"; otherwise the helper
  * is a no-op.
  */
object Debug {
  import scala.reflect.macros.blackbox

  private val macroDebugEnabled = System.getenv("TAPIR_LOG_GENERATED_CODE") == "true"

  /** Prints the tree generated for `typeName`, framed by start/end marker
    * lines, when debug logging is enabled.
    */
  def logGeneratedCode(c: blackbox.Context)(typeName: String, tree: c.universe.Tree): Unit =
    if (macroDebugEnabled) {
      import c.universe._
      println(s"$typeName macro output start:")
      println(showCode(tree))
      println(s"$typeName macro output end.")
    }
}
ADFC-Hamburg/adfc-tempo30
js/tempo30/view/polizeireview_ermitteln_fehler_dialog.js
define('tempo30/view/polizeireview_ermitteln_fehler_dialog', [
    'jquery',
    'tempo30/model/version',
    'bootstrap',
    'bootstrap-dialog',
    'gettext!tempo30',
], function ($, version, bootstrap, BootstrapDialog, gt) {
    'use strict';

    // Link to Hamburg's official authority finder, offered so the user can
    // look up the responsible police station manually.
    var url = "https://www.hamburg.de/behoerdenfinder/hamburg/11262156/";

    /**
     * Builds a warning dialog shown when the responsible police station
     * ("Polizeirevier") could not be determined automatically.
     *
     * @param data        the search input that failed; serialized into the
     *                    error report for debugging
     * @param errorDialog factory function returning an error-report dialog;
     *                    invoked when the user chooses to report the problem
     * @returns {BootstrapDialog} the configured dialog (caller must open it)
     */
    function getDialog(data, errorDialog) {
        var buttons = [
            {
                id: 'btn-err',
                cssClass: 'btn-primary',
                label: gt('Fehler/Problem melden'),
                action: function (dialogRef) {
                    // Close this dialog and hand off to the error-report
                    // dialog, embedding the failed search data for diagnosis.
                    dialogRef.close();
                    errorDialog('Polizeireview ermitteln.', '(Polizei suche:' + JSON.stringify(data) + ')').open();
                }
            },
            {
                id: 'cancel-btn',
                label: gt('schließen'),
                cssClass: 'btn-warning',
                action: function (dialogRef) {
                    dialogRef.close();
                }
            }];
        // Message explains that a placeholder will be inserted into the
        // application form and points to the authority-finder link above.
        var dialog = new BootstrapDialog({
            'type': BootstrapDialog.TYPE_WARNING,
            'title': gt('Konnten das Polizeirevier nicht ermitteln'),
            'message': gt('Leider konnten wir das Polizeirevier nicht ermitteln. Wir werden an dieser Stelle im Antrag einen Platzhalter einfügen, den sie manuell ersetzen müssen. Sie finden das Polizeirevier z.B. über den ') + '<a href="' + url + '" target="_blank">' + gt('Behördenfinder der Stadt Hamburg') + '</a>. ' + gt('Wenn Ihre Adresse wirklich in Hamburg liegt, melden Sie uns gerne einen Fehler, damit wir unser Tool verbessern können.') + gt('Vielen Dank!'),
            'buttons': buttons
        });
        return dialog;
    }

    return getDialog;
});
Ressmann/starthinker
examples/ga360_segmentology_example.py
###########################################################################
#
#  Copyright 2021 Google LLC
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#      https://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
#
###########################################################################
#
#  This code generated (see scripts folder for possible source):
#    - Command: "python starthinker_ui/manage.py example"
#
###########################################################################

import argparse
import textwrap

from starthinker.util.configuration import Configuration
from starthinker.task.dataset.run import dataset
from starthinker.task.bigquery.run import bigquery
from starthinker.task.ga.run import ga
from starthinker.task.census.run import census


def recipe_ga360_segmentology(config, auth_write, auth_read, view, recipe_slug):
  """GA360 funnel analysis using Census data.

     Args:
       auth_write (authentication) - Authorization used for writing data.
       auth_read (authentication) - Authorization for reading GA360.
       view (string) - View Id
       recipe_slug (string) - Name of Google BigQuery dataset to create.
  """

  # Step 1: create the destination BigQuery dataset for all outputs.
  dataset(config, {
    'description': 'Create a dataset for bigquery tables.',
    'hour': [4],
    'auth': auth_write,
    'dataset': recipe_slug
  })

  # Step 2: install the Pearson significance-test function into the dataset.
  bigquery(config, {
    'auth': auth_write,
    'function': 'Pearson Significance Test',
    'to': {
      'dataset': recipe_slug
    }
  })

  # Step 3: pull 90 days of GA360 KPIs (by user type/value and lat/long)
  # into the GA360_KPI table.
  ga(config, {
    'auth': auth_read,
    'kwargs': {
      'reportRequests': [
        {
          'viewId': view,
          'dateRanges': [
            {
              'startDate': '90daysAgo',
              'endDate': 'today'
            }
          ],
          'dimensions': [
            {'name': 'ga:userType'},
            {'name': 'ga:userDefinedValue'},
            {'name': 'ga:latitude'},
            {'name': 'ga:longitude'}
          ],
          'metrics': [
            {'expression': 'ga:users'},
            {'expression': 'ga:sessionsPerUser'},
            {'expression': 'ga:bounces'},
            {'expression': 'ga:timeOnPage'},
            {'expression': 'ga:pageviews'}
          ]
        }
      ],
      'useResourceQuotas': False
    },
    'out': {
      'bigquery': {
        'dataset': recipe_slug,
        'table': 'GA360_KPI'
      }
    }
  })

  # Step 4: map lat/long to ZIP codes via the public geo boundaries table
  # and normalize each KPI to a share of the overall total.
  bigquery(config, {
    'auth': auth_write,
    'from': {
      'query': '''WITH GA360_SUM AS (
        SELECT
          A.Dimensions.userType AS User_Type,
          A.Dimensions.userDefinedValue AS User_Value,
          B.zip_code AS Zip,
          SUM(Metrics.users) AS Users,
          SUM(Metrics.sessionsPerUser) AS Sessions,
          SUM(Metrics.timeOnPage) AS Time_On_Site,
          SUM(Metrics.bounces) AS Bounces,
          SUM(Metrics.pageviews) AS Page_Views
        FROM `{dataset}.GA360_KPI` AS A
        JOIN `bigquery-public-data.geo_us_boundaries.zip_codes` AS B
        ON ST_WITHIN(ST_GEOGPOINT(A.Dimensions.longitude, A.Dimensions.latitude), B.zip_code_geom)
        GROUP BY 1,2,3
      )
      SELECT
        User_Type,
        User_Value,
        Zip,
        Users,
        SAFE_DIVIDE(Users, SUM(Users) OVER()) AS User_Percent,
        SAFE_DIVIDE(Sessions, SUM(Sessions) OVER()) AS Impression_Percent,
        SAFE_DIVIDE(Time_On_Site, SUM(Time_On_Site) OVER()) AS Time_On_Site_Percent,
        SAFE_DIVIDE(Bounces, SUM(Bounces) OVER()) AS Bounce_Percent,
        SAFE_DIVIDE(Page_Views, SUM(Page_Views) OVER()) AS Page_View_Percent
      FROM GA360_SUM
      ''',
      'parameters': {
        'dataset': recipe_slug
      },
      'legacy': False
    },
    'to': {
      'dataset': recipe_slug,
      'view': 'GA360_KPI_Normalized'
    }
  })

  # Step 5: load and normalize 2018 5-year Census data at ZIP granularity.
  census(config, {
    'auth': auth_write,
    'normalize': {
      'census_geography': 'zip_codes',
      'census_year': '2018',
      'census_span': '5yr'
    },
    'to': {
      'dataset': recipe_slug,
      'type': 'view'
    }
  })

  # Step 6: correlate the normalized GA360 KPIs with Census data per ZIP.
  census(config, {
    'auth': auth_write,
    'correlate': {
      'join': 'Zip',
      # NOTE(review): '<PASSWORD>' looks like scrubbed placeholder values left
      # by a data-sanitizing step (likely the pass-through column names,
      # e.g. User_Type / User_Value) — confirm against the original recipe.
      'pass': ['<PASSWORD>', '<PASSWORD>'],
      'sum': ['Users'],
      'correlate': ['User_Percent', 'Impression_Percent', 'Time_On_Site_Percent', 'Bounce_Percent', 'Page_View_Percent'],
      'dataset': recipe_slug,
      'table': 'GA360_KPI_Normalized',
      'significance': 80
    },
    'to': {
      'dataset': recipe_slug,
      'type': 'view'
    }
  })


if __name__ == "__main__":
  # Command-line entry point: collect credentials and recipe parameters,
  # then run the recipe once.
  parser = argparse.ArgumentParser(
    formatter_class=argparse.RawDescriptionHelpFormatter,
    description=textwrap.dedent("""
      GA360 funnel analysis using Census data.

        1. Wait for <b>BigQuery->->->Census_Join</b> to be created.
        2. Join the <a href='https://groups.google.com/d/forum/starthinker-assets' target='_blank'>StarThinker Assets Group</a> to access the following assets
        3. Copy <a href='https://datastudio.google.com/c/u/0/reporting/3673497b-f36f-4448-8fb9-3e05ea51842f/' target='_blank'>GA360 Segmentology Sample</a>. Leave the Data Source as is, you will change it in the next step.
        4. Click Edit Connection, and change to <b>BigQuery->->->Census_Join</b>.
        5. Or give these intructions to the client.
  """))

  parser.add_argument("-project", help="Cloud ID of Google Cloud Project.", default=None)
  parser.add_argument("-key", help="API Key of Google Cloud Project.", default=None)
  parser.add_argument("-client", help="Path to CLIENT credentials json file.", default=None)
  parser.add_argument("-user", help="Path to USER credentials json file.", default=None)
  parser.add_argument("-service", help="Path to SERVICE credentials json file.", default=None)
  parser.add_argument("-verbose", help="Print all the steps as they happen.", action="store_true")

  parser.add_argument("-auth_write", help="Authorization used for writing data.", default='service')
  parser.add_argument("-auth_read", help="Authorization for reading GA360.", default='service')
  parser.add_argument("-view", help="View Id", default='service')
  parser.add_argument("-recipe_slug", help="Name of Google BigQuery dataset to create.", default='')

  args = parser.parse_args()

  config = Configuration(
    project=args.project,
    user=args.user,
    service=args.service,
    client=args.client,
    key=args.key,
    verbose=args.verbose
  )

  recipe_ga360_segmentology(config, args.auth_write, args.auth_read, args.view, args.recipe_slug)
louis-ver/CARMIN
carmin-server/server/resources/models/path.py
import os
from pathlib import PurePath
import mimetypes
from server import app
from flask_restful import request
from marshmallow import Schema, fields, post_load, post_dump


class Path():
    """Path represents a filesystem resource (file or directory).

    Attributes:
        platform_path (str): Pathname, relative to the root data directory.
        last_modification_date (int): Date of last modification, in seconds
            since the Epoch (UNIX timestamp).
        is_directory (bool): True if the path represents a directory.
        size (int): For a file, size in bytes. For a directory, sum of all
            the sizes of the files contained in the directory (recursively).
        execution_id (str): ID of the execution that produced the Path.
        mime_type (str): MIME type based on RFC 6838.
    """

    def __init__(self,
                 platform_path: str,
                 last_modification_date: int,
                 is_directory: bool,
                 size: int = None,
                 execution_id: str = None,
                 mime_type: str = None):
        self.platform_path = platform_path
        self.last_modification_date = last_modification_date
        self.is_directory = is_directory
        self.size = size
        self.execution_id = execution_id
        self.mime_type = mime_type

    def __eq__(self, other):
        # Structural equality over all attributes.
        return self.__dict__ == other.__dict__

    @classmethod
    def object_from_pathname(cls, absolute_path_to_resource: str):
        """Build a Path object describing the file or directory at
        `absolute_path_to_resource`.

        The platform_path is the resource's location relative to the
        application's DATA_DIRECTORY, exposed as a URL under the current
        request root. (The original docstring described a two-argument
        signature that this method never had.)

        Args:
            absolute_path_to_resource (str): Absolute filesystem path of
                the resource.

        Returns:
            Path: the populated Path object (execution_id is not yet set).
        """
        is_directory = os.path.isdir(absolute_path_to_resource)
        # TODO: Add execution_id to Path object
        rel_path = PurePath(
            os.path.relpath(absolute_path_to_resource,
                            app.config['DATA_DIRECTORY'])).as_posix()
        return Path(
            platform_path='{}path/{}'.format(request.url_root, rel_path),
            last_modification_date=os.path.getmtime(absolute_path_to_resource),
            # Reuse the result computed above instead of calling
            # os.path.isdir a second time (the original re-stat'ed the path).
            is_directory=is_directory,
            size=Path.get_path_size(absolute_path_to_resource, is_directory),
            mime_type=mimetypes.guess_type(absolute_path_to_resource)[0])

    @classmethod
    def get_path_size(cls, absolute_path: str, is_dir: bool) -> int:
        """get_path_size returns the size of the resource.

        Attributes:
            absolute_path (str): Absolute path to the resource.
            is_dir (bool): True if the resource is a directory.

        Returns:
            (int): Size of the resource (recursive file-size sum for a
                directory, plain file size otherwise).
        """
        size = 0
        if is_dir:
            for dirpath, _, filenames in os.walk(absolute_path):
                for f in filenames:
                    fp = os.path.join(dirpath, f)
                    size += os.path.getsize(fp)
        else:
            size = os.path.getsize(absolute_path)
        return size


class PathSchema(Schema):
    """Marshmallow schema mapping Path attributes to/from the camelCase
    JSON representation used by the CARMIN API."""

    # Values dropped from serialized output (see remove_skip_values).
    SKIP_VALUES = set([None])

    class Meta:
        ordered = True

    platform_path = fields.Str(
        required=True, dump_to='platformPath', load_from='platformPath')
    last_modification_date = fields.Int(
        required=True,
        dump_to='lastModificationDate',
        load_from='lastModificationDate')
    is_directory = fields.Bool(
        required=True, dump_to='isDirectory', load_from='isDirectory')
    size = fields.Int()
    execution_id = fields.Str(dump_to='executionId', load_from='executionId')
    mime_type = fields.Str(dump_to='mimeType', load_from='mimeType')

    @post_load
    def to_model(self, data):
        # Deserialize straight into the model class.
        return Path(**data)

    @post_dump
    def remove_skip_values(self, data):
        """remove_skip_values removes all values specified in the
        SKIP_VALUES set from appearing in the 'dumped' JSON.
        """
        return {
            key: value
            for key, value in data.items() if value not in self.SKIP_VALUES
        }
slipstream/cimi-mf2c
jar-service/src/main/java/com/sixsq/slipstream/initialstartup/FileLoader.java
package com.sixsq.slipstream.initialstartup;

import com.sixsq.slipstream.connector.ConnectorFactory;
import com.sixsq.slipstream.exceptions.ConfigurationException;
import com.sixsq.slipstream.persistence.Parameter;
import com.sixsq.slipstream.persistence.ServiceConfiguration;

import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;

/**
 * Helpers for reading SlipStream configuration files from disk.
 */
public class FileLoader {

    // Recognized configuration file extensions.
    private static final String[] FILE_EXTENSIONS = {".conf", ".xml", ".json"};

    /**
     * Load configuration files, assumed to be in /etc/slipstream for
     * system installation.
     *
     * @param configDir directory containing the config files (may be null)
     * @return the files directly inside configDir whose names end with a
     *         recognized extension; an empty list when configDir is null,
     *         does not exist, is not a directory, or cannot be read
     * @throws ConfigurationException kept for API compatibility
     */
    public static List<File> loadConfigurationFiles(File configDir) throws ConfigurationException {
        List<File> filteredFiles = new ArrayList<File>();

        if (configDir == null || !configDir.exists()) {
            return filteredFiles;
        }

        File[] files = configDir.listFiles();
        // BUG FIX: listFiles() returns null on I/O error or when configDir
        // is not a directory; the original dereferenced it unconditionally
        // and would throw a NullPointerException.
        if (files == null) {
            return filteredFiles;
        }

        for (File f : files) {
            for (String ext : FILE_EXTENSIONS) {
                if (f.getName().endsWith(ext)) {
                    filteredFiles.add(f);
                    break; // a name can match at most one extension
                }
            }
        }
        return filteredFiles;
    }

    /**
     * Read the entire contents of a file into a String using the platform
     * default charset.
     *
     * @param f file to read
     * @return the file contents
     * @throws IOException if the file cannot be read
     */
    public static String fileToString(File f) throws IOException {
        return new String(Files.readAllBytes(Paths.get(f.getPath())));
    }
}
yazad3/atlas
src/main/java/org/openstreetmap/atlas/geography/boundary/CountryBoundary.java
package org.openstreetmap.atlas.geography.boundary; import java.io.Serializable; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; import org.openstreetmap.atlas.geography.Located; import org.openstreetmap.atlas.geography.MultiPolygon; import org.openstreetmap.atlas.geography.Polygon; import org.openstreetmap.atlas.geography.Rectangle; import org.openstreetmap.atlas.geography.sharding.SlippyTile; import org.openstreetmap.atlas.utilities.collections.Iterables; /** * This {@link CountryBoundary} holds country name and country boundary, and will be stored in * spatial index directly for best query performance * * @author tony */ public class CountryBoundary implements Located, Serializable { private static final long serialVersionUID = 4728303272397434187L; private final String countryName; private final MultiPolygon boundary; public CountryBoundary(final String name, final MultiPolygon boundary) { this.countryName = name; this.boundary = boundary; } @Override public Rectangle bounds() { return this.boundary.bounds(); } public boolean covers(final Rectangle bound) { boolean covers = false; for (final Polygon outer : this.boundary.outers()) { if (outer.fullyGeometricallyEncloses(bound)) { covers = true; break; } if (outer.overlaps(bound)) { covers = true; break; } } return covers; } public MultiPolygon getBoundary() { return this.boundary; } public String getCountryName() { return this.countryName; } /** * Iterate through outers of country boundary to avoid unnecessary overlap checks * * @param zoom * The zoom level of slippy tiles * @return A set of slippy tiles */ public Set<SlippyTile> tiles(final int zoom) { final Set<SlippyTile> validTiles = new HashSet<>(); for (final Polygon subBoundary : this.boundary.outers()) { final List<SlippyTile> tiles = Iterables .asList(SlippyTile.allTiles(zoom, subBoundary.bounds())); validTiles.addAll(tiles.stream().filter(tile -> subBoundary.overlaps(tile.bounds())) 
.collect(Collectors.toList())); } return validTiles; } }
i-want-a-plus/rn
RNIwap/src/reducers/professor.js
import * as types from '../actions/types';
import _ from 'lodash';

// Default shape for the professor-search reducer state.
let initialState = {
  error: false,
  isPending: false,
  data: null
};

// Tracks the lifecycle of a professor search request
// (pending -> fulfilled | rejected).
export function professorSearch (state = initialState, action) {
  switch (action.type) {
    case `${types.PROFESSOR_SEARCH}_PENDING`:
      return { ...state, isPending: true };
    case `${types.PROFESSOR_SEARCH}_FULFILLED`:
      return { ...state, error: false, data: action.payload, isPending: false };
    case `${types.PROFESSOR_SEARCH}_REJECTED`:
      return { ...state, error: action.payload, isPending: false };
    default:
      return state;
  }
};

// Per-professor request state, keyed by professor id taken from the payload.
export function professor (state = {}, action) {
  // Ignore actions that carry no identifiable professor.
  if (!action.payload || !action.payload.id) return state;
  const id = action.payload.id;

  switch (action.type) {
    case `${types.PROFESSOR}_PENDING`:
      return { ...state, [id]: { isPending: true } };
    case `${types.PROFESSOR}_FULFILLED`:
      return { ...state, [id]: { error: false, isPending: false, ...action.payload } };
    case `${types.PROFESSOR}_REJECTED`:
      return { ...state, [id]: { error: action.payload, isPending: false } };
    default:
      return state;
  }
};
PAChain/android
app/src/main/java/com/pachain/android/util/Toast.java
package com.pachain.android.util; import android.content.Context; import android.os.Handler; import android.os.Looper; public class Toast { private final Context context; private final Handler handler = new Handler(Looper.getMainLooper()); public Toast(final Context context) { this.context = context; } public final void postToast(final int textResId, final Object... formatArgs) { handler.post(() -> toast(textResId, formatArgs)); } public final void toast(final int textResId, final Object... formatArgs) { customToast(textResId, android.widget.Toast.LENGTH_SHORT, formatArgs); } public final void postToast(final CharSequence text) { handler.post(() -> toast(text)); } public final void toast(final CharSequence text) { customToast(text, android.widget.Toast.LENGTH_SHORT); } public final void postLongToast(final int textResId, final Object... formatArgs) { handler.post(() -> longToast(textResId, formatArgs)); } public final void longToast(final int textResId, final Object... formatArgs) { customToast(textResId, android.widget.Toast.LENGTH_LONG, formatArgs); } public final void postLongToast(final CharSequence text) { handler.post(() -> longToast(text)); } public final void longToast(final CharSequence text) { customToast(text, android.widget.Toast.LENGTH_LONG); } private void customToast(final int textResId, final int duration, final Object... formatArgs) { customToast(context.getString(textResId, formatArgs), duration); } private void customToast(final CharSequence text, final int duration) { android.widget.Toast.makeText(context, text, duration).show(); } }
anthonySchafrik/gift-giving-elephant
src/proxies/updateUser.js
import api from '../proxies/api';

/**
 * Persists partial changes to the current user record.
 *
 * @param {Object} user - fields to patch on the user
 * @returns {Promise} the API call's promise
 */
export function updateUser(user) {
  const endpoint = '/user';
  return api.patch(endpoint, user);
}
anon-submis1/nisq-analyzer
org.planqk.nisq.analyzer.core/src/main/java/org/planqk/nisq/analyzer/core/prioritization/promethee/PrometheeIIMethod.java
/*******************************************************************************
 * Copyright (c) 2021 University of Stuttgart
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/

package org.planqk.nisq.analyzer.core.prioritization.promethee;

import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.UUID;

import javax.xml.bind.JAXBElement;

import org.planqk.nisq.analyzer.core.model.ExecutionResultStatus;
import org.planqk.nisq.analyzer.core.model.McdaJob;
import org.planqk.nisq.analyzer.core.model.McdaResult;
import org.planqk.nisq.analyzer.core.prioritization.JobDataExtractor;
import org.planqk.nisq.analyzer.core.prioritization.McdaConstants;
import org.planqk.nisq.analyzer.core.prioritization.McdaInformation;
import org.planqk.nisq.analyzer.core.prioritization.McdaMethod;
import org.planqk.nisq.analyzer.core.prioritization.McdaWebServiceHandler;
import org.planqk.nisq.analyzer.core.prioritization.XmlUtils;
import org.planqk.nisq.analyzer.core.repository.McdaJobRepository;
import org.planqk.nisq.analyzer.core.repository.McdaResultRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.xmcda.v2.AlternativeValue;
import org.xmcda.v2.MethodParameters;
import org.xmcda.v2.ObjectFactory;
import org.xmcda.v2.XMCDA;

import lombok.RequiredArgsConstructor;

/**
 * Service implementing the Promethee II method to prioritize analysis results of the NISQ Analyzer.
 *
 * The method is executed by orchestrating two remote XMCDA web services:
 * first a preference service, then a flows service invoked twice (positive
 * and negative flows). Results are ranked by net flow (positive - negative).
 */
@Service
@RequiredArgsConstructor
public class PrometheeIIMethod implements McdaMethod {

    private final static Logger LOG = LoggerFactory.getLogger(PrometheeIIMethod.class);

    // Collaborators injected via the Lombok-generated constructor.
    private final JobDataExtractor jobDataExtractor;

    private final McdaJobRepository mcdaJobRepository;

    private final McdaResultRepository mcdaResultRepository;

    private final McdaWebServiceHandler mcdaWebServiceHandler;

    private final XmlUtils xmlUtils;

    // Base URL of the MCDA web services, configured externally.
    @Value("${org.planqk.nisq.analyzer.mcda.url}")
    private String baseURL;

    @Override
    public String getName() {
        return "promethee-II";
    }

    @Override
    public String getDescription() {
        return "TODO";
    }

    /**
     * Runs Promethee II for the given job: extracts the job's XMCDA input,
     * calls the preference service, then the flows service for positive and
     * negative flows, ranks alternatives by net flow, and persists the
     * ranked results on the job. On any failure the job is marked FAILED.
     */
    @Override
    public void executeMcdaMethod(McdaJob mcdaJob) {
        LOG.debug("Starting Promethee II MCDA method to prioritize job with ID: {}", mcdaJob.getJobId());
        McdaInformation mcdaInformation = jobDataExtractor.getJobInformationFromUuid(mcdaJob);

        // abort if job can not be found and therefore no information available
        if (Objects.isNull(mcdaInformation)) {
            setJobToFailed(mcdaJob, "Unable to retrieve information about job with ID: " + mcdaJob.getJobId());
            return;
        }

        try {
            // invoke the preference service for Promethee-II
            LOG.debug("Invoking preference service for Promethee-II!");
            URL url = new URL((baseURL.endsWith("/") ? baseURL : baseURL + "/") + McdaConstants.WEB_SERVICE_NAME_PROMETHEEII_PREFERENCE);
            HashMap<String, String> bodyFields = new HashMap<>();
            bodyFields.put(McdaConstants.WEB_SERVICE_DATA_CRITERIA, createVersionedXMCDAString(mcdaInformation.getCriteria()));
            bodyFields.put(McdaConstants.WEB_SERVICE_DATA_ALTERNATIVES, createVersionedXMCDAString(mcdaInformation.getAlternatives()));
            bodyFields.put(McdaConstants.WEB_SERVICE_DATA_PERFORMANCES, createVersionedXMCDAString(mcdaInformation.getPerformances()));
            bodyFields.put(McdaConstants.WEB_SERVICE_DATA_WEIGHTS, createVersionedXMCDAString(mcdaInformation.getWeights()));
            Map<String, String> resultsPreferences =
                mcdaWebServiceHandler.invokeMcdaOperation(url, McdaConstants.WEB_SERVICE_OPERATIONS_INVOKE, bodyFields);
            LOG.debug("Invoked preference service successfully and retrieved {} results!", resultsPreferences.size());

            // check for required results
            if (!resultsPreferences.containsKey(McdaConstants.WEB_SERVICE_DATA_PREFERENCE)) {
                setJobToFailed(mcdaJob,
                    "Invocation must contain " + McdaConstants.WEB_SERVICE_DATA_PREFERENCE + " in the results but doesn´t! Aborting!");
                return;
            }

            // invoke the flows service for Promethee-II to calculate positive flows
            LOG.debug("Invoking flows service for Promethee-II to calculate positive flows!");
            url = new URL((baseURL.endsWith("/") ? baseURL : baseURL + "/") + McdaConstants.WEB_SERVICE_NAME_PROMETHEEII_FLOWS);
            bodyFields = new HashMap<>();
            bodyFields.put(McdaConstants.WEB_SERVICE_DATA_PREFERENCE, resultsPreferences.get(McdaConstants.WEB_SERVICE_DATA_PREFERENCE));
            bodyFields.put(McdaConstants.WEB_SERVICE_DATA_ALTERNATIVES, createVersionedXMCDAString(mcdaInformation.getAlternatives()));
            bodyFields.put(McdaConstants.WEB_SERVICE_DATA_FLOW_TYPE, createFlowTypeParameter("POSITIVE"));
            Map<String, String> resultsPositiveFlows =
                mcdaWebServiceHandler.invokeMcdaOperation(url, McdaConstants.WEB_SERVICE_OPERATIONS_INVOKE, bodyFields);
            LOG.debug("Invoked flows service successfully and retrieved {} results for positive flows!", resultsPositiveFlows.size());

            // invoke the flows service for Promethee-II to calculate negative flows
            // (same body as above; only the flow-type parameter is replaced)
            LOG.debug("Invoking flows service for Promethee-II to calculate negative flows!");
            url = new URL((baseURL.endsWith("/") ? baseURL : baseURL + "/") + McdaConstants.WEB_SERVICE_NAME_PROMETHEEII_FLOWS);
            bodyFields.put(McdaConstants.WEB_SERVICE_DATA_FLOW_TYPE, createFlowTypeParameter("NEGATIVE"));
            Map<String, String> resultsNegativeFlows =
                mcdaWebServiceHandler.invokeMcdaOperation(url, McdaConstants.WEB_SERVICE_OPERATIONS_INVOKE, bodyFields);
            LOG.debug("Invoked flows service successfully and retrieved {} results for negative flows!", resultsNegativeFlows.size());

            // check for required results
            if (!resultsPositiveFlows.containsKey(McdaConstants.WEB_SERVICE_DATA_FLOWS) ||
                !resultsNegativeFlows.containsKey(McdaConstants.WEB_SERVICE_DATA_FLOWS)) {
                setJobToFailed(mcdaJob,
                    "Invocation must contain " + McdaConstants.WEB_SERVICE_DATA_FLOWS + " in the results but doesn´t! Aborting!");
                return;
            }

            // parse results to use the namespace required by the XMCDA library
            String updatedPositiveFlows = xmlUtils.changeXMCDAVersion(resultsPositiveFlows.get(McdaConstants.WEB_SERVICE_DATA_FLOWS),
                McdaConstants.WEB_SERVICE_NAMESPACE_2_1_0,
                McdaConstants.WEB_SERVICE_NAMESPACE_DEFAULT);
            String updatedNegativeFlows = xmlUtils.changeXMCDAVersion(resultsNegativeFlows.get(McdaConstants.WEB_SERVICE_DATA_FLOWS),
                McdaConstants.WEB_SERVICE_NAMESPACE_2_1_0,
                McdaConstants.WEB_SERVICE_NAMESPACE_DEFAULT);

            // rank the results based on the flows and update the job with the ranking
            List<McdaResult> rankResultsByFlows =
                rankResultsByFlows(xmlUtils.stringToXmcda(updatedPositiveFlows), xmlUtils.stringToXmcda(updatedNegativeFlows));
            if (Objects.isNull(rankResultsByFlows)) {
                setJobToFailed(mcdaJob, "Unable to rank results by given positive and negative flows!");
                return;
            }
            mcdaJob.setRankedResults(rankResultsByFlows);
            mcdaJob.setState(ExecutionResultStatus.FINISHED.toString());
            mcdaJob.setReady(true);
            mcdaJobRepository.save(mcdaJob);
        } catch (MalformedURLException e) {
            setJobToFailed(mcdaJob, "Unable to create URL for invoking the web services!");
        }
    }

    /**
     * Rank the results of the job by calculating the net flow and sorting them accordingly
     *
     * @param positiveFlows the positive flows calculated by promethee
     * @param negativeFlows the negative flows calculated by promethee
     * @return the ranked results based on the net flow
     */
    private List<McdaResult> rankResultsByFlows(XMCDA positiveFlows, XMCDA negativeFlows) {
        // get alternative values for the two XMCDA documents
        List<AlternativeValue> alternativeValuesPositive = xmlUtils.getAlternativeValues(positiveFlows);
        List<AlternativeValue> alternativeValuesNegative = xmlUtils.getAlternativeValues(negativeFlows);
        if (Objects.isNull(alternativeValuesPositive) || Objects.isNull(alternativeValuesNegative)) {
            LOG.error("Unable to retrieve alternative values for positive and negative flows!");
            return null;
        }
        LOG.debug("Found {} alternatives in positive flows and {} in negative flows!", alternativeValuesPositive.size(),
            alternativeValuesNegative.size());

        if (alternativeValuesPositive.size() != alternativeValuesNegative.size()) {
            LOG.error("Positive and negative flows must contain the same number of alternatives!");
            return null;
        }

        // calculate the net flows for the alternatives by matching alternative
        // IDs across the two documents (O(n^2) pairing over small lists)
        List<McdaResult> rankedResults = new ArrayList<>();
        for (AlternativeValue alternativeValuePositive : alternativeValuesPositive) {
            boolean found = false;
            for (AlternativeValue alternativeValueNegative : alternativeValuesNegative) {
                if (alternativeValuePositive.getAlternativeID().equals(alternativeValueNegative.getAlternativeID())) {
                    // net flow = positive flow - negative flow
                    double netFlow = xmlUtils.getValue(alternativeValuePositive) - xmlUtils.getValue(alternativeValueNegative);
                    UUID resultId = UUID.fromString(alternativeValuePositive.getAlternativeID());
                    LOG.debug("Adding alternative with ID {} and net flow: {}", resultId, netFlow);

                    McdaResult mcdaResult = new McdaResult();
                    mcdaResult.setScore(netFlow);
                    mcdaResult.setResultId(resultId);
                    mcdaResult = mcdaResultRepository.save(mcdaResult);
                    rankedResults.add(mcdaResult);
                    found = true;
                    break;
                }
            }
            if (!found) {
                LOG.error("Unable to find matching alternative in negative flows for ID: {}!",
                    alternativeValuePositive.getAlternativeID());
                return null;
            }
        }

        // sort the list using the scores (descending: highest net flow first)
        // and persist the resulting 1-based positions
        Collections.sort(rankedResults, (o1, o2) -> Double.compare(o2.getScore(), o1.getScore()));
        for (int i = 0; i < rankedResults.size(); i++) {
            McdaResult mcdaResult = rankedResults.get(i);
            mcdaResult.setPosition(i + 1);
            mcdaResultRepository.save(mcdaResult);
        }

        return rankedResults;
    }

    // Marks the job as FAILED and persists it; the error is only logged.
    private void setJobToFailed(McdaJob mcdaJob, String errorMessage) {
        LOG.error(errorMessage);
        mcdaJob.setState(ExecutionResultStatus.FAILED.toString());
        mcdaJob.setReady(true);
        mcdaJobRepository.save(mcdaJob);
    }

    // Builds the XMCDA methodParameters document carrying the flow type
    // ("POSITIVE"/"NEGATIVE") expected by the flows service.
    // NOTE(review): the nested raw-typed JAXBElement construction below is
    // unusual — confirm the receiving service accepts this exact structure.
    private String createFlowTypeParameter(String value) {
        ObjectFactory objectFactory = new ObjectFactory();
        MethodParameters methodParameters = new MethodParameters();
        methodParameters.getDescriptionOrApproachOrProblematique()
            .add(new JAXBElement(McdaConstants.WEB_SERVICE_QNAMES_PARAMETER, JAXBElement.class,
                new JAXBElement(McdaConstants.WEB_SERVICE_QNAMES_VALUE, JAXBElement.class,
                    new JAXBElement(McdaConstants.WEB_SERVICE_QNAMES_LABEL, String.class, value))));
        XMCDA methodParametersWrapper = objectFactory.createXMCDA();
        methodParametersWrapper.getProjectReferenceOrMethodMessagesOrMethodParameters()
            .add(objectFactory.createXMCDAMethodParameters(methodParameters));
        return createVersionedXMCDAString(methodParametersWrapper);
    }

    // Serializes an XMCDA document and rewrites it to the 2.1.0 namespace
    // the remote web services expect.
    private String createVersionedXMCDAString(XMCDA xmcda) {
        return xmlUtils.changeXMCDAVersion(xmlUtils.xmcdaToString(xmcda),
            McdaConstants.WEB_SERVICE_NAMESPACE_DEFAULT,
            McdaConstants.WEB_SERVICE_NAMESPACE_2_1_0);
    }
}
declarativitydotnet/p2
oni/stasis.c
#include "stasis.h" #include <stdlib.h> ONI_TUP_STAGE(stasis_init) { Tinit(); Tup_Push(ret,Val_Tup(tup,2)); // request return 0; } END_ONI_STAGE ONI_TUP_STAGE(stasis_deinit) { Tdeinit(); Tup_Push(ret,Val_Tup(tup,2)); // request return 0; } END_ONI_STAGE ONI_TUP_STAGE(stasis_begin) { int xid = Tbegin(); Tup_Push(ret,Val_Tup(tup,2)); // request Tup_Push_int(ret,xid); return 0; } END_ONI_STAGE ONI_TUP_STAGE(stasis_commit) { int xid = int_Val(Val_Tup(tup,3)); Tcommit(xid); Tup_Push(ret,Val_Tup(tup,2)); // request return 0; } END_ONI_STAGE ONI_TUP_STAGE(stasis_abort) { int xid = int_Val(Val_Tup(tup,3)); Tabort(xid); Tup_Push(ret,Val_Tup(tup,2)); // request return 0; } END_ONI_STAGE ONI_TUP_STAGE(stasis_prepare) { int xid = int_Val(Val_Tup(tup,3)); Tprepare(xid,NULLRID); Tup_Push(ret,Val_Tup(tup,2)); // request return 0; } END_ONI_STAGE ONI_TUP_STAGE(stasis_create_hash) { int xid = int_Val(Val_Tup(tup,3)); recordid rid = ThashCreate(xid, VARIABLE_LENGTH, VARIABLE_LENGTH); Tup_Push(ret,Val_Tup(tup,2)); // request Tup_Push_recordid(ret,rid); return 0; } END_ONI_STAGE ONI_TUP_STAGE(stasis_insert_hash) { int xid = int_Val(Val_Tup(tup,3)); recordid rid = recordid_Val(Val_Tup(tup,4)); int cnt = int_Val(Val_Tup(tup,5)); size_t key_len; byte* key = colsBytes_Tup(tup,6,cnt,&key_len); size_t val_len; byte* val = colsBytes_Tup(tup,6+cnt,colCount_Tup(tup)-(6+cnt),&val_len); // retval is 1 if key was already defined... 
int retval = ThashInsert(xid,rid,key,key_len,val,val_len); Tup_Push(ret,Val_Tup(tup,2)); // request Tup_Push_int(ret,retval); return 0; } END_ONI_STAGE ONI_TUP_STAGE(stasis_lookup_hash) { int xid = int_Val(Val_Tup(tup,3)); recordid rid = recordid_Val(Val_Tup(tup,4)); int cnt = int_Val(Val_Tup(tup,5)); size_t key_len; byte* key = colsBytes_Tup(tup,6,cnt,&key_len); byte* val; size_t val_len = ThashLookup(xid,rid,key,key_len,&val); if(val_len != -1) { Tup_Push(ret,Val_Tup(tup,2)); // request Tup_Push_int(ret,cnt); Tup_Push_colsBytes(ret,key,key_len); Tup_Push_colsBytes(ret,val,val_len); free(val); } else { int i; Tup_Push(ret,Val_Tup(tup,2)); // request // 5 is the column in tup that should contain a count. P2 wants // us to return a consistent number of columns, so we pad the // output with extra nulls. for(i = 5; i < colCount_Tup(tup) - 6; i++) { Tup_Push_Null(ret); } } return 0; } END_ONI_STAGE
best08618/asylo
gcc-gcc-7_3_0-release/gcc/testsuite/gcc.dg/pr47555.c
<reponame>best08618/asylo /* { dg-do compile } */ /* { dg-options "-O2" } */ #define FILL_BACK *ptrRGB++=0; *ptrRGB++=0; *ptrRGB++=0; void uncompact(unsigned char* ptrRGB, const unsigned int* ptrSrc, const unsigned char* ptrRGBcompact, int line, int nbPixLeft) { #define BIT_2_RGB32 \ if ((v & 0x00000001)){ nbPixLeft--; *ptrRGB++ = *ptrRGBcompact++; *ptrRGB++ = *ptrRGBcompact++; *ptrRGB++ = *ptrRGBcompact++; } \ else{ FILL_BACK } \ v >>= 1; #define BIT_2_RGB16 \ if ((v16 & 0x0001)){ nbPixLeft--; *ptrRGB++ = *ptrRGBcompact++; *ptrRGB++ = *ptrRGBcompact++; *ptrRGB++ = *ptrRGBcompact++; } \ else{ FILL_BACK } \ v16 >>= 1; int x; unsigned int v, *ptrSrc32bits=(unsigned int*)ptrSrc; unsigned short v16,*ptrSrc16bits; for(x=0; x<line; x++) { v = *ptrSrc32bits++; BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 BIT_2_RGB32 } ptrSrc16bits=(unsigned short *)ptrSrc32bits; v16 = *ptrSrc16bits++; BIT_2_RGB16 BIT_2_RGB16 BIT_2_RGB16 BIT_2_RGB16 BIT_2_RGB16 BIT_2_RGB16 BIT_2_RGB16 BIT_2_RGB16 BIT_2_RGB16 BIT_2_RGB16 BIT_2_RGB16 BIT_2_RGB16 }
MoysheBenRabi/setp
java/src/mxp/message/MessageSerializationTest.java
/* * Copyright (c) 2009-2010 Tyrell Corporation. * * The contents of this file are subject to the Mozilla Public License * Version 1.1 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * http://www.mozilla.org/MPL/ * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the * License for the specific language governing rights and limitations * under the License. * * The Original Code is an implementation of the Metaverse eXchange Protocol. * * The Initial Developer of the Original Code is <NAME>. * All Rights Reserved. * * Contributor(s): <NAME>. * * Alternatively, the contents of this file may be used under the terms * of the Affero General Public License (the "AGPL"), in which case the * provisions of the AGPL are applicable instead of those * above. If you wish to allow use of your version of this file only * under the terms of the AGPL and not to allow others to use * your version of this file under the MPL, indicate your decision by * deleting the provisions above and replace them with the notice and * other provisions required by the AGPL. If you do not delete * the provisions above, a recipient may use your version of this file * under either the MPL or the AGPL. */ package mxp.message; import mxp.serialization.SerializationInputStream; import mxp.serialization.SerializationOutputStream; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import junit.framework.TestCase; /** * Unit test to check for correct serialization of messages. */ public class MessageSerializationTest extends TestCase { /** * Check serialization of the Acknowledge message. 
* * @throws IOException on I/O errors */ public void testAcknowledge() throws IOException { Acknowledge ack = MessageTest.generateAcknowledge(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = ack.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); Acknowledge aack = new Acknowledge(); aack.deserialize(in, counter); assertEquals(ack, aack); } /** * Check serialization of the Keepalive message. * * @throws IOException on I/O errors */ public void testKeepalive() throws IOException { Keepalive ka = MessageTest.generateKeepalive(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = ka.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); Keepalive kka = new Keepalive(); kka.deserialize(in, counter); assertEquals(ka, kka); } /** * Check serialization of the Throttle message. * * @throws IOException on I/O errors */ public void testThrottle() throws IOException { Throttle throttle = MessageTest.generateThrottle(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = throttle.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); Throttle tthrottle = new Throttle(); tthrottle.deserialize(in, counter); assertEquals(throttle, tthrottle); } /** * Check serialization of the Challenge request message. 
* * @throws IOException on I/O errors */ public void testChallengeRequest() throws IOException { ChallengeRequest cr = MessageTest.generateChallengeRequest(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = cr.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); ChallengeRequest ccr = new ChallengeRequest(); ccr.deserialize(in, counter); assertEquals(cr, ccr); } /** * Check serialization of the Challenge response message. * * @throws IOException on I/O errors */ public void testChallengeResponse() throws IOException { ChallengeResponse cr = MessageTest.generateChallengeResponse(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = cr.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); ChallengeResponse ccr = new ChallengeResponse(); ccr.deserialize(in, counter); assertEquals(cr, ccr); } /** * Check serialization of a message program fragment. * * @throws IOException on I/O errors */ public void testProgramFragment() throws IOException { ProgramFragment pf = MessageTest.generateProgramFragment(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = pf.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); ProgramFragment ppf = new ProgramFragment(); ppf.deserialize(in, counter); assertEquals(pf, ppf); } /** * Check serialization of a join request message. 
* * @throws IOException on I/O errors */ public void testJoinRequest() throws IOException { JoinRequest jr = MessageTest.generateJoinRequest(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = jr.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); JoinRequest jjr = new JoinRequest(); jjr.deserialize(in, counter); assertEquals(jr, jjr); } /** * Check serialization of a join response message. * * @throws IOException on I/O errors */ public void testJoinResponse() throws IOException { JoinResponse jr = MessageTest.generateJoinResponse(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = jr.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); JoinResponse jjr = new JoinResponse(); jjr.deserialize(in, counter); assertEquals(jr, jjr); } /** * Check serialization of the Leave Request message. * * @throws IOException on I/O errors */ public void testLeaveRequest() throws IOException { LeaveRequest lr = MessageTest.generateLeaveRequest(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = lr.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); LeaveRequest llr = new LeaveRequest(); llr.deserialize(in, counter); assertEquals(lr, llr); } /** * Check serialization of the Leave Response message. 
* * @throws IOException on I/O errors */ public void testLeaveResponse() throws IOException { LeaveResponse lr = MessageTest.generateLeaveResponse(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = lr.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); LeaveResponse llr = new LeaveResponse(); llr.deserialize(in, counter); assertEquals(lr, llr); } /** * Check serialization of a message object fragment. * * @throws IOException on I/O errors */ public void testObjectFragment() throws IOException { ObjectFragment of = MessageTest.generateObjectFragment(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = of.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); ObjectFragment oof = new ObjectFragment(); oof.deserialize(in, counter); assertEquals(of, oof); } /** * Check serialization of an inject request message. * * @throws IOException on I/O errors */ public void testInjectRequest() throws IOException { InjectRequest ir = MessageTest.generateInjectRequest(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = ir.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); InjectRequest iir = new InjectRequest(); iir.deserialize(in, counter); assertEquals(ir, iir); } /** * Check serialization of the Inject Response message. 
* * @throws IOException on I/O errors */ public void testInjectResponse() throws IOException { InjectResponse ir = MessageTest.generateInjectResponse(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = ir.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); InjectResponse iir = new InjectResponse(); iir.deserialize(in, counter); assertEquals(ir, iir); } /** * Check serialization of a modify request message. * * @throws IOException on I/O errors */ public void testModifyRequest() throws IOException { ModifyRequest mr = MessageTest.generateModifyRequest(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = mr.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); ModifyRequest mmr = new ModifyRequest(); mmr.deserialize(in, counter); assertEquals(mr, mmr); } /** * Check serialization of the Modify Response message. * * @throws IOException on I/O errors */ public void testModifyResponse() throws IOException { ModifyResponse mr = MessageTest.generateModifyResponse(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = mr.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); ModifyResponse mmr = new ModifyResponse(); mmr.deserialize(in, counter); assertEquals(mr, mmr); } /** * Check serialization of an eject request message. 
* * @throws IOException on I/O errors */ public void testEjectRequest() throws IOException { EjectRequest er = MessageTest.generateEjectRequest(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = er.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); EjectRequest eer = new EjectRequest(); eer.deserialize(in, counter); assertEquals(er, eer); } /** * Check serialization of the Eject Response message. * * @throws IOException on I/O errors */ public void testEjectResponse() throws IOException { EjectResponse er = MessageTest.generateEjectResponse(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = er.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); EjectResponse eer = new EjectResponse(); eer.deserialize(in, counter); assertEquals(er, eer); } /** * Check serialization of a message interaction fragment. * * @throws IOException on I/O errors */ public void testInteractionFragment() throws IOException { InteractionFragment iaf = MessageTest.generateInteractionFragment(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = iaf.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); InteractionFragment iiaf = new InteractionFragment(); iiaf.deserialize(in, counter); assertEquals(iaf, iiaf); } /** * Check serialization of an interaction request message. 
* * @throws IOException on I/O errors */ public void testInteractRequest() throws IOException { InteractRequest ir = MessageTest.generateInteractRequest(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = ir.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); InteractRequest iir = new InteractRequest(); iir.deserialize(in, counter); assertEquals(ir, iir); } /** * Check serialization of an interaction response message. * * @throws IOException on I/O errors */ public void testInteractResponse() throws IOException { InteractResponse ir = MessageTest.generateInteractResponse(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = ir.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); InteractResponse iir = new InteractResponse(); iir.deserialize(in, counter); assertEquals(ir, iir); } /** * Check serialization of an examine request message. * * @throws IOException on I/O errors */ public void testExamineRequest() throws IOException { ExamineRequest er = MessageTest.generateExamineRequest(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = er.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); ExamineRequest eer = new ExamineRequest(); eer.deserialize(in, counter); assertEquals(er, eer); } /** * Check serialization of an examine response message. 
* * @throws IOException on I/O errors */ public void testExamineResponse() throws IOException { ExamineResponse er = MessageTest.generateExamineResponse(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = er.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); ExamineResponse eer = new ExamineResponse(); eer.deserialize(in, counter); assertEquals(er, eer); } /** * Check serialization of a message bubble fragment. * * @throws IOException on I/O errors */ public void testBubbleFragment() throws IOException { BubbleFragment bf = MessageTest.generateBubbleFragment(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = bf.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); BubbleFragment bbf = new BubbleFragment(); bbf.deserialize(in, counter); assertEquals(bf, bbf); } /** * Check serialization of a bubble attach request. * * @throws IOException on I/O errors */ public void testAttachRequest() throws IOException { AttachRequest ar = MessageTest.generateAttachRequest(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = ar.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); AttachRequest aar = new AttachRequest(); aar.deserialize(in, counter); assertEquals(ar, aar); } /** * Check serialization of a bubble attach response. 
* * @throws IOException on I/O errors */ public void testAttachResponse() throws IOException { AttachResponse ar = MessageTest.generateAttachResponse(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = ar.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); AttachResponse aar = new AttachResponse(); aar.deserialize(in, counter); assertEquals(ar, aar); } /** * Check serialization of a bubble detach request. * * @throws IOException on I/O errors */ public void testDetachRequest() throws IOException { DetachRequest dr = MessageTest.generateDetachRequest(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = dr.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); DetachRequest ddr = new DetachRequest(); ddr.deserialize(in, counter); assertEquals(dr, ddr); } /** * Check serialization of a bubble detach response. * * @throws IOException on I/O errors */ public void testDetachResponse() throws IOException { DetachResponse dr = MessageTest.generateDetachResponse(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = dr.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); DetachResponse ddr = new DetachResponse(); ddr.deserialize(in, counter); assertEquals(dr, ddr); } /** * Check serialization of an object handover request from bubble to bubble. 
* * @throws IOException on I/O errors */ public void testHandoverRequest() throws IOException { HandoverRequest hr = MessageTest.generateHandoverRequest(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = hr.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); HandoverRequest hhr = new HandoverRequest(); hhr.deserialize(in, counter); assertEquals(hr, hhr); } /** * Check serialization of a bubble to bubble object handover response. * * @throws IOException on I/O errors */ public void testHandoverResponse() throws IOException { HandoverResponse hr = MessageTest.generateHandoverResponse(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = hr.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); HandoverResponse hhr = new HandoverResponse(); hhr.deserialize(in, counter); assertEquals(hr, hhr); } /** * Check serialization of a bubble list request message. * * @throws IOException on I/O errors */ public void testListBubbleRequest() throws IOException { ListBubblesRequest lbr = MessageTest.generateListBubblesRequest(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = lbr.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); ListBubblesRequest llbr = new ListBubblesRequest(); llbr.deserialize(in, counter); assertEquals(lbr, llbr); } /** * Check serialization of a bubble list response message. 
* * @throws IOException on I/O errors */ public void testListBubbleResponse() throws IOException { ListBubblesResponse lbr = MessageTest.generateListBubblesResponse(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = lbr.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); ListBubblesResponse llbr = new ListBubblesResponse(); llbr.deserialize(in, counter); assertEquals(lbr, llbr); } /** * Check serialization of a perception event message. * * @throws IOException on I/O errors */ public void testPerceptionEvent() throws IOException { PerceptionEvent pe = MessageTest.generatePerceptionEvent(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = pe.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); PerceptionEvent ppe = new PerceptionEvent(); ppe.deserialize(in, counter); assertEquals(pe, ppe); } /** * Check serialization of a disappearance event message. * * @throws IOException on I/O errors */ public void testDisappearanceEvent() throws IOException { DisappearanceEvent de = MessageTest.generateDisappearanceEvent(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = de.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); DisappearanceEvent dde = new DisappearanceEvent(); dde.deserialize(in, counter); assertEquals(de, dde); } /** * Check serialization of a movement event message. 
* * @throws IOException on I/O errors */ public void testMovementEvent() throws IOException { MovementEvent me = MessageTest.generateMovementEvent(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = me.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); MovementEvent mme = new MovementEvent(); mme.deserialize(in, counter); assertEquals(me, mme); } /** * Check serialization of an action event message. * * @throws IOException on I/O errors */ public void testActionEvent() throws IOException { ActionEvent ae = MessageTest.generateActionEvent(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = ae.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); ActionEvent aae = new ActionEvent(); aae.deserialize(in, counter); assertEquals(ae, aae); } /** * Check serialization of an object handover event from bubble to bubble. * This is the same message as the handover request. * * @throws IOException on I/O errors */ public void testHandoverEvent() throws IOException { HandoverEvent he = MessageTest.generateHandoverEvent(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = he.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); HandoverEvent hhe = new HandoverEvent(); hhe.deserialize(in, counter); assertEquals(he, hhe); } /** * Check serialization of a synchronization begin event message. 
* * @throws IOException on I/O errors */ public void testSynchronizationBeginEvent() throws IOException { SynchronizationBeginEvent sbe = MessageTest.generateSynchronizationBeginEvent(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = sbe.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); SynchronizationBeginEvent ssbe = new SynchronizationBeginEvent(); ssbe.deserialize(in, counter); assertEquals(sbe, ssbe); } /** * Check serialization of a synchronization end event message. * * @throws IOException on I/O errors */ public void testSynchronizationEndEvent() throws IOException { SynchronizationEndEvent see = MessageTest.generateSynchronizationEndEvent(); ByteArrayOutputStream baOut = new ByteArrayOutputStream(); SerializationOutputStream out = new SerializationOutputStream(baOut); int counter = 0; counter = see.serialize(out); out.flush(); ByteArrayInputStream baIn = new ByteArrayInputStream( baOut.toByteArray()); SerializationInputStream in = new SerializationInputStream(baIn); SynchronizationEndEvent ssee = new SynchronizationEndEvent(); ssee.deserialize(in, counter); assertEquals(see, ssee); } }
nghiapt1112/microservices-libraries
email-utils/src/main/java/com/nghia/libraries/email/infrustructure/mail/Template.java
package com.nghia.libraries.email.infrustructure.mail;

import org.springframework.stereotype.Component;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Marks a class as a mail template definition.
 *
 * <p>Because this annotation is itself meta-annotated with {@link Component},
 * Spring's component scanning treats every {@code @Template}-annotated class as
 * a managed bean (the standard custom-stereotype pattern).</p>
 */
@Target(ElementType.TYPE)       // applicable to types (classes) only
@Retention(RetentionPolicy.RUNTIME) // must be readable via reflection at runtime
@Component
public @interface Template {

    /**
     * Identifier of the template.
     * NOTE(review): presumably used to look the template up (and, per Spring's
     * stereotype convention, may double as the bean name) — confirm against the
     * code that consumes this annotation.
     */
    String value();
}
xwu99/oneCCL
src/common/event/impls/host_event.cpp
<reponame>xwu99/oneCCL<gh_stars>1-10 /* Copyright 2016-2020 Intel Corporation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ #include "common/request/request.hpp" #include "common/event/impls/host_event.hpp" #include "exec/exec.hpp" namespace ccl { host_event_impl::host_event_impl(ccl_request* r) : req(r) { if (!req) { // If the user calls collective with coll_attr->synchronous=1 then it will be progressed // in place and API will return null event. In this case mark cpp wrapper as completed, // all calls to wait() or test() will do nothing completed = true; } } host_event_impl::~host_event_impl() { if (!completed) { LOG_ERROR("not completed event is destroyed"); } } void host_event_impl::wait() { if (!completed) { ccl_wait_impl(ccl::global_data::get().executor.get(), req); completed = true; } } bool host_event_impl::test() { if (!completed) { completed = ccl_test_impl(ccl::global_data::get().executor.get(), req); } return completed; } bool host_event_impl::cancel() { throw ccl::exception(std::string(__FUNCTION__) + " - is not implemented"); } event::native_t& host_event_impl::get_native() { throw ccl::exception(std::string(__FUNCTION__) + " - is not implemented"); } } // namespace ccl
mrkienkptn/job-management-gui
src/pages/job_group/kanban/ColumnAdder.js
<reponame>mrkienkptn/job-management-gui import * as React from 'react'; import Button from '@mui/material/Button'; import AddIcon from '@mui/icons-material/Add'; import Popover from '@mui/material/Popover'; import TextField from '@mui/material/TextField'; import Card from '@mui/material/Card'; import CardActions from '@mui/material/CardActions'; import CardContent from '@mui/material/CardContent'; export default function IconLabelButtons(props) { const [anchorEl, setAnchorEl] = React.useState(null); const [columnTitle, setColumnTitle] = React.useState('') const [addStatus, setAddStatus] = React.useState() const handleClick = (event) => { setAnchorEl(event.currentTarget); }; const handleClose = () => { setAnchorEl(null); }; const open = Boolean(anchorEl); const id = open ? 'simple-popover' : undefined; const onAddColumn = () => { if (columnTitle === ''){ return } props.addNewColumn({ name: columnTitle }) setColumnTitle('') handleClose() } const onCancel = () => { handleClose() setColumnTitle('') } return ( <div> <Button onClick={handleClick} variant="outlined" startIcon={<AddIcon />} style={{ width: 250, marginTop: 5 }} > Add onther list </Button> <Popover id={id} open={open} anchorEl={anchorEl} onClose={handleClose} anchorOrigin={{ vertical: 'bottom', horizontal: 'left', }} > <Card sx={{ maxWidth: 345 }}> <CardContent> <TextField label="Title" id="outlined-size-small" size="small" value={columnTitle} onChange={t => setColumnTitle(t.target.value)} /> </CardContent> <CardActions> <Button onClick={onAddColumn} size="small" variant="contained">Add List</Button> <Button onClick={onCancel} size="small" variant="outlined">Cancel</Button> </CardActions> </Card> </Popover> </div> ); }
Nohysiwe/FastAPIBlogBackend
PythonBlog/configs/constant/__init__.py
"""Aggregates the constant classes of the package into one import surface.

Lets callers write ``from configs.constant import LoginConst`` instead of
reaching into the individual ``*Const`` modules.
"""
from .articleConst import ArticleConst
from .deleteConst import DeleteConst
from .loginConst import LoginConst
from .pathConst import PathConst
from .statusConst import StatusConst
from .userConst import UserConst

# Explicit public API: controls what `from configs.constant import *` exports.
__all__ = [
    "ArticleConst",
    "DeleteConst",
    "LoginConst",
    "PathConst",
    "StatusConst",
    "UserConst",
]
haggi/OpenMaya
src/mayaToKray/src/kray/krayEnvironment.cpp
#include "kraysdk/symbol/bitmap.h"
#include "../mtkr_common/mtkr_mayaObject.h"
#include "../mtkr_common/mtkr_renderGlobals.h"
#include "maya/MFnMesh.h"
#include "maya/MItMeshPolygon.h"
#include <maya/MPointArray.h>
#include <maya/MFloatPointArray.h>
#include <maya/MFloatArray.h>
#include <maya/MFloatVectorArray.h>
#include <maya/MTransformationMatrix.h>
#include "krayRenderer.h"
#include "krayUtils.h"
#include "utilities/tools.h"
#include "utilities/logging.h"

static Logging logger;

namespace krayRender{

// Translate the render-globals environment settings into Kray environment
// commands. All environment types are currently unimplemented placeholders.
void KrayRenderer::defineEnvironment()
{
	// Kray environment commands (kept for reference):
	//   environment colorFade, <rgb> color, <double> amount;
	//   fade, <double> amount;
	//   gVolume, <texture> t2, <int> max_recurse, <double> adaptive_threshold, <double> max_distance, <double> probe_per_length, <double> photon_per_lenght, <double> photon_radius;
	//   phySky, <sharedObject> s2, <int> flags;
	//   vEffect, <double> intensity, <double> start, <double> step1, <double> step2, <int> n;
	//   volume, <texture> t2, <int> max_recurse, <double> adaptive_threshold, <double> max_distance, <double> probe_per_length, <double> photon_per_lenght, <double> photon_radius;

	// Explicit breaks added: the original empty cases silently fell through,
	// which was harmless only because every branch was empty.
	switch( this->mtkr_renderGlobals->environmentType )
	{
	case 0: // colorFade
		// TODO: not yet implemented
		break;
	case 1: // fade
		// TODO: not yet implemented
		break;
	case 2: // gVolume
		// TODO: not yet implemented
		break;
	case 3: // physSky
		// TODO: not yet implemented
		break;
	case 4: // vEffekt
		// TODO: not yet implemented
		break;
	case 5: // volume
		// TODO: not yet implemented
		break;
	}
}

// Translate the render-globals background settings into the matching Kray
// background command (constant color, physical sky, gradient sky, bitmaps, ...).
void KrayRenderer::defineBackground()
{
	// Kray background commands (kept for reference):
	//   background bitmap, <bitmap> b2, <xyz> ax, <xyz> az;
	//   bitmap2, <bitmap> b2, <bitmap> b3, <hpb> a4;
	//   color, <rgba> color;
	//   directionsMap;
	//   lightMap, <bitmap> b2, <hpb> a3;
	//   phySky, <sharedObject> s2;
	//   phySkyEnv, <sharedObject> s2;
	//   sky, <rgba> horizon, <rgba> zenith, <rgba> nadir, <xyz> zenith_direction;
	//   sky2, <rgba> horizon_zenith, <rgba> horizon_nadir, <rgba> zenith, <rgba> nadir, <xyz> zenith_direction, <double> skyGamma, <double> groundGamma;
	//   sphereMap, <bitmap> b2, <hpb> a3;
	//   toneMap;
	//   toneMapperFilter, <toneMapper> t2;

	switch( this->mtkr_renderGlobals->backgroundType )
	{
	case 0:
	{
		// constant color background
		Kray::Vector bgColor;
		bgColor.setRgb(this->mtkr_renderGlobals->environmentColor.r, this->mtkr_renderGlobals->environmentColor.g, this->mtkr_renderGlobals->environmentColor.b);
		this->pro->background_color(bgColor);
		break;
	}
	case 1:
	{
		// physical sky driven by sun direction / turbidity / exposure
		Kray::Symbol psky(*this->pro, "psky");
		MVector sunDirV = this->mtkr_renderGlobals->sunDir;
		MVector orientV = this->mtkr_renderGlobals->orientation;
		Kray::Vector sunDir(sunDirV.x, sunDirV.y, sunDirV.z);
		Kray::AxesHpb orientationH(orientV.x, orientV.y, orientV.z);
		Kray::Axes orientation(orientationH);
		this->pro->phySky(psky, sunDir, this->mtkr_renderGlobals->turbidity, this->mtkr_renderGlobals->exposure, orientation);
		this->pro->phySkyParam_solidAngle(psky, this->mtkr_renderGlobals->solidAngle);
		this->pro->phySkyParam_sunIntensity(psky, this->mtkr_renderGlobals->sunIntensity);
		this->pro->phySkyParam_sunSpotAngle(psky, this->mtkr_renderGlobals->sunSpotAngle);
		this->pro->background_phySky(psky);
		break;
	}
	case 2:
	{
		// physical sky, version 2 (adds ground albedo)
		Kray::Symbol psky(*this->pro, "psky");
		MVector sunDirV = this->mtkr_renderGlobals->sunDir;
		MVector orientV = this->mtkr_renderGlobals->orientation;
		Kray::Vector sunDir(sunDirV.x, sunDirV.y, sunDirV.z);
		Kray::AxesHpb orientationH(orientV.x, orientV.y, orientV.z);
		Kray::Axes orientation(orientationH);
		// NOTE(review): groundAlbedo is passed default-constructed — presumably
		// it should come from the render globals; confirm and wire it up.
		Kray::Vector groundAlbedo;
		this->pro->phySky2(psky, sunDir, this->mtkr_renderGlobals->turbidity, this->mtkr_renderGlobals->exposure, orientation, groundAlbedo);
		this->pro->background_phySky(psky);
		break;
	}
	case 3:
	{
		// gradient sky: horizon / zenith / nadir colors
		Kray::Vector zenithDir(this->mtkr_renderGlobals->zenithDir.x, this->mtkr_renderGlobals->zenithDir.y, this->mtkr_renderGlobals->zenithDir.z);
		Kray::Vector nadir, horizonColor, zenithColor;
		horizonColor.setRgb(this->mtkr_renderGlobals->gradientHorizon.r, this->mtkr_renderGlobals->gradientHorizon.g, this->mtkr_renderGlobals->gradientHorizon.b);
		zenithColor.setRgb(this->mtkr_renderGlobals->gradientZenit.r, this->mtkr_renderGlobals->gradientZenit.g, this->mtkr_renderGlobals->gradientZenit.b);
		nadir.setRgb(this->mtkr_renderGlobals->nadir.r, this->mtkr_renderGlobals->nadir.g, this->mtkr_renderGlobals->nadir.b);
		this->pro->background_sky(horizonColor, zenithColor, nadir, zenithDir);
		//prot.background_sky(Kray::Vector(0.4,0.8,0.9),Kray::Vector(0,0,0),Kray::Vector(1,1,1),Kray::Vector(0,1,0)); // gradient background
		break;
	}
	case 4:
	{
		// gradient sky, version 2 (separate gammas for sky and ground)
		Kray::Vector zenithDir(this->mtkr_renderGlobals->zenithDir.x, this->mtkr_renderGlobals->zenithDir.y, this->mtkr_renderGlobals->zenithDir.z);
		Kray::Vector nadir, horizon_zenith, horizon_nadir, zenith;
		horizon_zenith.setRgb(this->mtkr_renderGlobals->gradientHorizon.r, this->mtkr_renderGlobals->gradientHorizon.g, this->mtkr_renderGlobals->gradientHorizon.b);
		// NOTE(review): horizon_nadir is filled from gradientZenit while `zenith`
		// stays default-constructed — this looks like a copy/paste slip; confirm
		// the intended globals mapping before changing it.
		horizon_nadir.setRgb(this->mtkr_renderGlobals->gradientZenit.r, this->mtkr_renderGlobals->gradientZenit.g, this->mtkr_renderGlobals->gradientZenit.b);
		nadir.setRgb(this->mtkr_renderGlobals->nadir.r, this->mtkr_renderGlobals->nadir.g, this->mtkr_renderGlobals->nadir.b);
		this->pro->background_sky2(horizon_zenith, horizon_nadir, zenith, nadir, zenithDir, this->mtkr_renderGlobals->skyGamma, this->mtkr_renderGlobals->groundGamma);
		break;
	}
	case 5:
	{
		// single environment bitmap
		MString imagePath;
		if(getConnectedFileTexturePath(MString("environmentMap"), MString("krayGlobals"), imagePath))
		{
			Kray::BitmapSymbol bmp(imagePath.asChar(), *this->pro);
			Kray::Vector ax, az;
			this->pro->background_bitmap(bmp, ax, az);
		}else{
			logger.error("Could not set fileTextureNode.");
		}
		break;
	}
	case 6:
	{
		// two-bitmap environment
		Kray::Axes axes;
		MString imagePath1, imagePath2;
		if(getConnectedFileTexturePath(MString("environmentMap"), MString("krayGlobals"), imagePath1))
		{
			// BUGFIX: the second lookup previously wrote into imagePath1,
			// overwriting the first map and leaving imagePath2 empty when
			// background_bitmap2 was invoked.
			if(getConnectedFileTexturePath(MString("environmentMap2"), MString("krayGlobals"), imagePath2))
			{
				Kray::BitmapSymbol bmp1(imagePath1.asChar(), *this->pro);
				Kray::BitmapSymbol bmp2(imagePath2.asChar(), *this->pro);
				this->pro->background_bitmap2(bmp1, bmp2, axes);
			}else{
				logger.error("Could not set fileTextureNode.");
			}
		}else{
			logger.error("Could not set fileTextureNode.");
		}
		break;
	}
	case 7:
		// directions map
		this->pro->background_directionsMap();
		break;
	case 8:
		// light map — not yet implemented
		// Kray::Axes axes;
		// Kray::BitmapSymbol lmap("../Bitmaps/kray-logo.png", *this->pro);
		// this->pro->background_lightMap(lmap, axes);
		break;
	case 9:
	{
		// spherical environment map
		MString imagePath;
		if(getConnectedFileTexturePath(MString("environmentMap"), MString("krayGlobals"), imagePath))
		{
			Kray::BitmapSymbol bmp(imagePath.asChar(), *this->pro);
			Kray::Axes axes;
			this->pro->background_sphereMap(bmp, axes);
		}else{
			logger.error("Could not set spherical fileTextureNode.");
		}
		break;
	}
	}
}
}
Vfuryx/fagin
app/responses/admin/admin_menus_list.go
package admin_responses import ( "fagin/app" "fagin/app/models/admin_menu" "fagin/pkg/response" ) type adminMenusList struct { ms []admin_menu.AdminMenu response.Collect } var _ response.Response = &adminMenusList{} func AdminMenusList(models ...admin_menu.AdminMenu) *adminMenusList { res := adminMenusList{ms: models} res.SetCollect(&res) return &res } func (res *adminMenusList) Serialize() []map[string]interface{} { return res.getMenuTree(res.ms, 0) } func (res *adminMenusList) getMenuTree(data []admin_menu.AdminMenu, pid uint) []map[string]interface{} { result := make([]map[string]interface{}, 0, 10) for index := range data { if data[index].ParentID == pid { m := map[string]interface{}{ "id": data[index].ID, "parent_id": data[index].ParentID, "icon": data[index].Icon, "title": data[index].Title, "permission": data[index].Permission, "path": data[index].Path, "component": data[index].Component, "sort": data[index].Sort, "status": data[index].Status, "created_at": app.TimeToStr(data[index].CreatedAt), } if children := res.getMenuTree(data, data[index].ID); len(children) > 0 { m["children"] = children } result = append(result, m) } } return result }
golemiso/playframework
framework/src/play-test/src/main/scala/play/api/test/package.scala
/*
 * Copyright (C) 2009-2019 Lightbend Inc. <https://www.lightbend.com>
 */
package play.api

/**
 * Contains test helpers.
 */
package object test {

  /**
   * The TCP port a test server is bound to.
   * Provided as an implicit by WithServer and WithBrowser.
   */
  type Port = Int

  /**
   * A structural type indicating there is an application.
   * Any value exposing a `def app: Application` member conforms
   * (resolved reflectively at the call site).
   */
  type HasApp = {
    def app: Application
  }
}
raviranjan1996/Java14
ProblemSolving/src/records/BreakingRecords.java
package records;

import java.util.Scanner;

/**
 * "Breaking the Records": given a sequence of game scores, count how many
 * times the running maximum and the running minimum are strictly broken.
 *
 * <p>Replaces the previous LinkedHashSet-based implementation with the obvious
 * single pass. The two are equivalent: a score breaks a record only when it is
 * strictly beyond every previous score, so duplicate values and values between
 * the first score and the current record can never contribute a break.</p>
 *
 * <p>Input: first token is the count n, followed by n scores.
 * Output: {@code "<maxBreaks> <minBreaks>"} (no trailing newline).</p>
 */
public class BreakingRecords {

    /**
     * Counts record breaks in one pass over the scores.
     *
     * @param scores game scores in play order; must contain at least one element
     * @return a two-element array {maxBreaks, minBreaks}
     */
    public static int[] breakingRecords(int[] scores) {
        int max = scores[0];
        int min = scores[0];
        int maxBreaks = 0;
        int minBreaks = 0;
        // A single score can break at most one of the two records.
        for (int i = 1; i < scores.length; i++) {
            if (scores[i] > max) {
                max = scores[i];
                maxBreaks++;
            } else if (scores[i] < min) {
                min = scores[i];
                minBreaks++;
            }
        }
        return new int[] { maxBreaks, minBreaks };
    }

    public static void main(String[] args) {
        Scanner sc = new Scanner(System.in);
        int n = sc.nextInt();
        int[] scores = new int[n];
        for (int i = 0; i < n; i++) {
            scores[i] = sc.nextInt();
        }
        sc.close(); // the original leaked the Scanner

        int[] counts = breakingRecords(scores);
        // Same output format as before: "max min" with no trailing newline.
        System.out.print(counts[0] + " " + counts[1]);
    }
}
bukka/fpmi
fpmi/fpmi_php_trace.h
/* (c) 2007,2008 <NAME> */

#ifndef FPMI_PHP_TRACE_H
#define FPMI_PHP_TRACE_H 1

/* Opaque forward declaration; the full definition lives with the
 * child-process management code. */
struct fpmi_child_s;

/* Traces the PHP engine state of the given FPM(i) child process.
 * NOTE(review): presumably dumps the child's PHP call stack for diagnostics —
 * confirm against the implementation in fpmi_php_trace.c. */
void fpmi_php_trace(struct fpmi_child_s *);

#endif
misTrasteos/micro-integrator
components/business-adaptors/org.wso2.micro.integrator.business.messaging.hl7/src/main/java/org/wso2/micro.integrator/business/messaging/hl7/common/data/conf/HL7MessagePublisherConfig.java
/* * Copyright (c) 2020, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.micro.integrator.business.messaging.hl7.common.data.conf; import ca.uhn.hl7v2.HL7Exception; import ca.uhn.hl7v2.model.Composite; import ca.uhn.hl7v2.model.GenericPrimitive; import ca.uhn.hl7v2.model.Message; import ca.uhn.hl7v2.model.Primitive; import ca.uhn.hl7v2.model.Segment; import ca.uhn.hl7v2.model.Structure; import ca.uhn.hl7v2.model.Type; import ca.uhn.hl7v2.model.Varies; import ca.uhn.hl7v2.util.Terser; import org.apache.axis2.context.MessageContext; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.micro.integrator.analytics.data.publisher.util.PublisherUtil; import org.wso2.micro.integrator.business.messaging.hl7.common.HL7Constants; import org.wso2.micro.integrator.business.messaging.hl7.common.data.MessageData; import org.wso2.micro.integrator.core.services.CarbonServerConfigurationService; import java.util.HashMap; import java.util.Map; /** * This class holds HL7 message extraction operations */ public class HL7MessagePublisherConfig { private static final Log log = LogFactory.getLog(HL7MessagePublisherConfig.class); private static final String NAME = "Name"; private String serverName = null; public HL7MessagePublisherConfig() { } /** * This extracts the incoming HL7 message * * @param message * @return MessageData * @throws HL7Exception */ 
public MessageData createMessage(Message message, MessageContext msgCtx) throws HL7Exception { MessageData messageData = new MessageData(); Map<String, String> extractedValues = createCustomMap(message); messageData.setExtractedValues(extractedValues); messageData.setPayload(message.encode()); messageData.setOpName(msgCtx.getAxisOperation().getName().getLocalPart()); messageData.setServiceName(msgCtx.getAxisService().getName()); messageData.setMsgDirection(HL7Constants.OUT_DIRECTION); messageData.setServerName(getServerName()); Terser terser = new Terser(message); String activityId = terser.get("/MSH-10"); if (activityId != null) { messageData.setActivityId(activityId); } else { messageData.setActivityId( String.valueOf(System.nanoTime()) + Math.round(Math.random() * HL7Constants.ACTIVITY_ID_GEN)); } messageData.setStatus((String) msgCtx.getProperty(HL7Constants.HL7_DEFAULT_VALIDATION_PASSED)); messageData.setHost(PublisherUtil.getHostAddress()); messageData.setTimestamp(System.currentTimeMillis()); messageData.setType(HL7Constants.TRANSPORT_NAME); return messageData; } /** * This creates arbitrary data map which contains extracted values of the HL7 message * * @param message * @return Map * @throws HL7Exception */ private Map<String, String> createCustomMap(Message message) throws HL7Exception { String[] segments; HashMap<String, String> elements = new HashMap<String, String>(); if (message != null) { segments = message.getNames(); } else { return null; } for (String segmentName : segments) { try { Structure[] structures = message.getAll(segmentName); String keyA = segmentName; for (int y = 0; y < structures.length; y++) { if (y > 0) { keyA = segmentName + "[" + y + "]"; } Structure structure = structures[y]; if (structure instanceof Segment) { Segment segment = (Segment) structure; String[] fieldNames = segment.getNames(); for (int i = 1; i < segment.numFields(); i++) { Type[] fields = segment.getField(i); for (int x = 0; x < fields.length; x++) { String value = 
null; String keyB = keyA + "." + fieldNames[i - 1].replaceAll("\\s+", ""); if (x > 0) { keyB = keyA + "." + fieldNames[i - 1].replaceAll("\\s+", "") + "[" + x + "]"; } Type field = fields[x]; if (field instanceof Composite) { evaluateComposite((Composite) field, elements, keyB); } else if (field instanceof Varies) { value = ((GenericPrimitive) ((Varies) field).getData()).getValue(); } else { value = ((Primitive) field).getValue(); } if (value != null) { elements.put(keyB, value); } } } } } } catch (HL7Exception e) { throw new HL7Exception("Error creating publisher message : " + e.getMessage(), e); } } return elements; } /** * This extracts the Composite * * @param composite * @param elements * @param key */ private void evaluateComposite(Composite composite, Map<String, String> elements, String key) { Type[] types = composite.getComponents(); for (int z = 0; z < types.length; z++) { Type type = types[z]; String value = null; String keyC = key; if (z > 0) { keyC = key + "[" + z + "]"; } if (type instanceof Composite) { evaluateComposite((Composite) type, elements, keyC); } else if (type instanceof Varies) { value = ((GenericPrimitive) ((Varies) type).getData()).getValue(); } else { value = ((Primitive) type).getValue(); } if (value != null) { elements.put(keyC, value); } } } public String getServerName() { if (serverName == null) { String[] properties = CarbonServerConfigurationService.getInstance().getProperties(NAME); if (properties != null && properties.length > 0) { serverName = properties[0]; } } return serverName; } }
michaellperry/CorrespondenceAndroid
library/correspondence/src/com/updatecontrols/correspondence/strategy/SubscriptionStrategy.java
package com.updatecontrols.correspondence.strategy;

import com.updatecontrols.correspondence.CorrespondenceFact;

/**
 * Strategy for enumerating the facts that should be subscribed to.
 * Implementations supply the set of {@link CorrespondenceFact}s of interest;
 * the framework consumes this to drive its subscription handling.
 */
public interface SubscriptionStrategy {

    /**
     * Returns the facts to subscribe to.
     *
     * @return an iterable over the subscription facts; NOTE(review): whether an
     *         empty iterable (vs. null) is expected for "no subscriptions" is
     *         not visible here — confirm against callers.
     */
    Iterable<CorrespondenceFact> getSubscriptions();
}
ronpandolfi/Xi-cam
xicam/plugins/tomography/config.py
<gh_stars>10-100 __author__ = "<NAME>, <NAME>" __copyright__ = "Copyright 2016, CAMERA, LBL, ALS" __credits__ = ["<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>"] __license__ = "" __version__ = "1.2.1" __maintainer__ = "<NAME>" __email__ = "<EMAIL>" __status__ = "Beta" import os import numpy as np import inspect from collections import OrderedDict import yaml from pipeline import msg from modpkgs import yamlmod PARAM_TYPES = {'int': int, 'float': float} # Load yaml with names of all available functions in pipeline with open('xicam/yaml/tomography/functions.yml','r') as stream: funcs=yaml.load(stream) # load various function dictionaries from function_info.yml file parameters = {}; als832defaults = {}; aps_defaults = {}; names = {}; function_defaults = {} with open('xicam/yaml/tomography/functions_info.yml', 'r') as stream: info = yaml.load(stream) for key in info.keys(): # load parameter data for available functions if 'parameters' in info[key].keys(): parameters[key] = info[key]['parameters'] # load dictionary with function parameters to be retrieved from metadata try: als832defaults[key] = info[key]['conversions']['als'] except KeyError: pass try: aps_defaults[key] = info[key]['conversions']['aps'] except KeyError: pass # load dictionary with pipeline names and function names if 'name' in info[key].keys(): names[key] = info[key]['name'] # load dictionary of set defaults if 'defaults' in info[key].keys(): function_defaults[key] = info[key]['defaults'] # Add reconstruction methods to function name dictionary, but include the package the method is in for algorithm in funcs['Functions']['Reconstruction']['TomoPy']: names[algorithm] = ['recon', 'tomopy'] for algorithm in funcs['Functions']['Reconstruction']['Astra']: names[algorithm] = ['recon', 'astra'] for algorithm in funcs['Functions']['Reconstruction']['TomoCam']: names[algorithm] = ['recon','mbir'] def load_pipeline(yaml_file): """ Load a workflow pipeline from a yaml file """ with open(yaml_file, 'r') 
as y: pipeline = yamlmod.ordered_load(y) return pipeline def save_function_pipeline(pipeline, file_name): """ Save a workflow pipeline from dict Parameters ---------- pipeline : dict dictionary specifying the workflow pipeline file_name : str file name to save as yml """ if file_name != '': file_name = file_name.split('.')[0] + '.yml' with open(file_name, 'w') as y: yamlmod.ordered_dump(pipeline, y) def set_als832_defaults(mdata, funcwidget_list, path, shape): """ Set defaults for ALS Beamline 8.3.2 from dataset metadata Parameters ---------- mdata : dict dataset metadata funcwidget_list : list of FunctionWidgets list of FunctionWidgets exposed in the UI workflow pipeline path: str path to dataset shape: tuple tuple containing dataset shape """ from psutil import cpu_count for f in funcwidget_list: if f is None: continue if f.subfunc_name in als832defaults: for p in f.params.children(): if p.name() in als832defaults[f.subfunc_name]: try: v = mdata[als832defaults[f.subfunc_name][p.name()]['name']] t = PARAM_TYPES[als832defaults[f.subfunc_name][p.name()]['type']] v = t(v) if t is not int else t(float(v)) # String literals for ints should not have 0's if 'conversion' in als832defaults[f.subfunc_name][p.name()]: v *= als832defaults[f.subfunc_name][p.name()]['conversion'] p.setDefault(v) p.setValue(v) except KeyError as e: msg.logMessage('Key {} not found in metadata. 
Error: {}'.format(p.name(), e.message), level=40) elif f.func_name == 'Reader': #dataset specific read values set_reader_defaults(f, shape, cpu_count()) elif f.func_name == 'Write': #dataset specific write values data_folders = {'bl832data-raw':'bl832data-scratch', 'data-raw':'data-scratch'} file_name = path.split("/")[-1].split(".")[0] working_dir = path.split(file_name)[0] for key in data_folders.keys(): if key in working_dir: user = working_dir.split('/' + key)[-1].split('/')[1] mount = working_dir.split(key)[0] working_dir = os.path.join(mount, data_folders[key], user) outname = os.path.join(working_dir, *2*('RECON_' + file_name,)) f.params.child('parent folder').setValue(working_dir) f.params.child('parent folder').setDefault(working_dir) f.params.child('folder name').setValue('RECON_' + file_name) f.params.child('folder name').setDefault('RECON_' + file_name) f.params.child('file name').setValue('RECON_' + file_name) f.params.child('file name').setDefault('RECON_' + file_name) f.params.child('fname').setValue(outname) f.params.child('fname').setDefault(outname) if f.input_functions: set_als832_defaults(mdata, funcwidget_list=f.input_functions.values(), path=path, shape=shape) def set_aps_defaults(mdata, funcwidget_list, path, shape): """ Set defaults for ALS Beamline 8.3.2 from dataset metadata Parameters ---------- mdata : dict dataset metadata funcwidget_list : list of FunctionWidgets list of FunctionWidgets exposed in the UI workflow pipeline path: str path to dataset shape: tuple tuple containing dataset shape """ from psutil import cpu_count for f in funcwidget_list: if f is None: continue if aps_defaults and f.subfunc_name in aps_defaults: for p in f.params.children(): if p.name() in aps_defaults[f.subfunc_name]: try: v = mdata[aps_defaults[f.subfunc_name][p.name()]['name']] t = PARAM_TYPES[aps_defaults[f.subfunc_name][p.name()]['type']] v = t(v) if t is not int else t( float(v)) # String literals for ints should not have 0's if 'conversion' in 
aps_defaults[f.subfunc_name][p.name()]: v *= aps_defaults[f.subfunc_name][p.name()]['conversion'] p.setDefault(v) p.setValue(v) except KeyError as e: msg.logMessage('Key {} not found in metadata. Error: {}'.format(p.name(), e.message), level=40) elif f.func_name == 'Reader': # dataset specific read values set_reader_defaults(f, shape, cpu_count()) elif f.func_name == 'Padding': pad = int(np.ceil((shape[2] * np.sqrt(2) - shape[2]) / 2)) f.params.child('npad').setValue(pad) f.params.child('npad').setDefault(pad) elif f.func_name == 'Crop': pad = int(np.ceil((shape[2] * np.sqrt(2) - shape[2]) / 2)) f.params.child('p11').setValue(pad) f.params.child('p11').setDefault(pad) f.params.child('p12').setValue(pad) f.params.child('p12').setDefault(pad) f.params.child('p21').setValue(pad) f.params.child('p21').setDefault(pad) f.params.child('p22').setValue(pad) f.params.child('p22').setDefault(pad) elif f.func_name == 'Write': # dataset specific write values data_folders = {'bl832data-raw': 'bl832data-scratch', 'data-raw': 'data-scratch'} file_name = path.split("/")[-1].split(".")[0] working_dir = path.split(file_name)[0] for key in data_folders.keys(): if key in working_dir: user = working_dir.split('/' + key)[-1].split('/')[1] mount = working_dir.split(key)[0] working_dir = os.path.join(mount, data_folders[key], user) outname = os.path.join(working_dir, *2 * ('RECON_' + file_name,)) f.params.child('parent folder').setValue(working_dir) f.params.child('parent folder').setDefault(working_dir) f.params.child('folder name').setValue('RECON_' + file_name) f.params.child('folder name').setDefault('RECON_' + file_name) f.params.child('file name').setValue('RECON_' + file_name) f.params.child('file name').setDefault('RECON_' + file_name) f.params.child('fname').setValue(outname) f.params.child('fname').setDefault(outname) if f.input_functions: set_als832_defaults(mdata, funcwidget_list=f.input_functions.values(), path=path, shape=shape) def set_reader_defaults(reader_widget, shape, 
cpu): """ Sets defaults for reader widget based on dataset size """ reader_widget.params.child('start_sinogram').setLimits([0, shape[2]]) reader_widget.params.child('end_sinogram').setLimits([0, shape[2]]) reader_widget.params.child('step_sinogram').setLimits([1, shape[2] + 1]) reader_widget.params.child('start_projection').setLimits([0, shape[0]]) reader_widget.params.child('end_projection').setLimits([0, shape[0]]) reader_widget.params.child('step_projection').setLimits([1, shape[0] + 1]) reader_widget.params.child('start_width').setLimits([0, shape[1]]) reader_widget.params.child('end_width').setLimits([0, shape[1]]) reader_widget.params.child('step_width').setLimits([1, shape[2] + 1]) reader_widget.params.child('end_sinogram').setValue(shape[2]) reader_widget.params.child('end_sinogram').setDefault(shape[2]) reader_widget.params.child('end_projection').setValue(shape[0]) reader_widget.params.child('end_projection').setDefault(shape[0]) reader_widget.params.child('end_width').setValue(shape[1]) reader_widget.params.child('end_width').setDefault(shape[1]) reader_widget.params.child('sinograms_per_chunk').setValue(cpu * 5) reader_widget.params.child('projections_per_chunk').setValue(cpu * 5) reader_widget.params.child('sinograms_per_chunk').setDefault(cpu * 5) reader_widget.params.child('projections_per_chunk').setDefault(cpu * 5) def extract_pipeline_dict(funwidget_list): """ Extract a dictionary from a FunctionWidget list in the appropriate format to save as a yml file Parameters ---------- funwidget_list : list of FunctionWidgets list of FunctionWidgets exposed in the UI workflow pipeline Returns ------- dict dictionary specifying the workflow pipeline """ # list of parameter name exceptions, for the "Write" function ex_lst = ['file name', 'fname', 'folder name', 'parent folder'] d = OrderedDict() count = 1 for f in funwidget_list: # a bunch of special cases for the write function func_name = str(count) + ". 
" + f.func_name if "Write" in f.func_name: write_dict = OrderedDict() d[func_name] = OrderedDict({f.subfunc_name: write_dict}) for child in f.params.children(): if child.name() in ex_lst: d[func_name][f.subfunc_name][child.name()] = str(child.value()) elif child.name() == "Browse": pass else: d[func_name][f.subfunc_name][child.name()] = child.value() else: d[func_name] = {f.subfunc_name: {'Parameters': {p.name(): p.value() for p in f.params.children()}}} d[func_name][f.subfunc_name]['Enabled'] = f.enabled if f.func_name == 'Reconstruction': d[func_name][f.subfunc_name].update({'Package': f.packagename}) for param, ipf in f.input_functions.iteritems(): if 'Input Functions' not in d[func_name][f.subfunc_name]: d[func_name][f.subfunc_name]['Input Functions'] = {} id = {ipf.func_name: {ipf.subfunc_name: {'Parameters': {p.name(): p.value() for p in ipf.params.children()}}}} d[func_name][f.subfunc_name]['Input Functions'][param] = id count += 1 return d def extract_runnable_dict(funwidget_list): """ Extract a dictionary from a FunctionWidget list in the appropriate format to save as a python runnable. 
Parameters ---------- funwidget_list : list of FunctionWidgets list of FunctionWidgets exposed in the UI workflow pipeline Returns ------- dict dictionary specifying the workflow pipeline and important parameters """ center_functions = {'find_center_pc': {'proj1': 'tomo[0]', 'proj2': 'tomo[-1]'}, 'find_center': {'tomo': 'tomo', 'theta': 'theta'}, 'find_center_vo': {'tomo': 'tomo'}} d = OrderedDict() func_dict = OrderedDict(); subfuncs = OrderedDict() count = 1 for f in funwidget_list: keywords = {} if not f.enabled or 'Reader' in f.name: continue func = "{}.{}".format(f.package, f._function.func_name) if 'xicam' in func: func = func.split(".")[-1] fpartial = f.partial for key, val in fpartial.keywords.iteritems(): keywords[key] = val for arg in inspect.getargspec(f._function)[0]: if arg not in f.partial.keywords.iterkeys() or 'center' in arg: keywords[arg] = arg # get rid of degenerate keyword arguments if 'arr' in keywords and 'tomo' in keywords: keywords['tomo'] = keywords['arr'] keywords.pop('arr', None) # special cases for the 'write' function if 'start' in keywords: keywords['start'] = 'start' if 'Write' in f.name: keywords.pop('parent folder', None) keywords.pop('folder name', None) keywords.pop('file name', None) if 'Reconstruction' in f.name: for param, ipf in f.input_functions.iteritems(): if 'theta' in param or 'center' in param: subfunc = "{}.{}(".format(ipf.package,ipf._function.func_name) for key, val in ipf.partial.keywords.iteritems(): subfunc += "{}={},".format(key, val) if not isinstance(val, str) \ else '{}=\'{}\','.format(key, val) for cor_func in center_functions.iterkeys(): if ipf._function.func_name == cor_func: for k, v in center_functions[cor_func].iteritems(): subfunc += "{}={},".format(k, v) subfunc += ")" subfuncs[param] = subfunc if 'astra' in keywords['algorithm']: keywords['algorithm'] = 'tomopy.astra' func_dict[str(count) + ". " + func] = keywords count += 1 d['func'] = func_dict d['subfunc'] = subfuncs return d
Mannan2812/azure-cli-extensions
src/confluent/azext_confluent/tests/latest/test_confluent_scenario.py
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
"""Scenario test for the `az confluent` extension.

Exercises the full organization lifecycle: terms listing, create, show,
list (by resource group and by subscription), update, and delete.
"""
import os

from azure.cli.testsdk import ScenarioTest
from azure.cli.testsdk import ResourceGroupPreparer
from .example_steps import step_terms_list
from .example_steps import step_organization_create
from .example_steps import step_organization_show
from .example_steps import step_organization_list
from .example_steps import step_organization_update
from .example_steps import step_organization_delete
from .. import (
    try_manual,
    raise_if,
    calc_coverage
)


TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), '..'))


# Env setup_scenario
@try_manual
def setup_scenario(test, rg):
    """Prepare environment state before the scenario.

    Intentionally empty: the ResourceGroupPreparer supplies the only
    required fixture (the resource group named by ``rg``).
    """
    pass


# Env cleanup_scenario
@try_manual
def cleanup_scenario(test, rg):
    """Tear down environment state after the scenario. Intentionally empty."""
    pass


# Testcase: Scenario
@try_manual
def call_scenario(test, rg):
    """Drive the organization lifecycle end-to-end.

    Each ``step_*`` helper issues one CLI command; ``checks`` are JMESPath
    assertions evaluated against the command's JSON output.
    """
    setup_scenario(test, rg)
    # Marketplace terms must be available before an organization is created.
    step_terms_list(test, rg, checks=[
        test.greater_than('length(@)', 1)
    ])
    step_organization_create(test, rg, checks=[
        test.check("location", "eastus2euap", case_sensitive=False),
        test.check("userDetail.emailAddress", "<EMAIL>", case_sensitive=False),
        test.check("userDetail.firstName", "contoso", case_sensitive=False),
        test.check("userDetail.lastName", "zhou", case_sensitive=False),
        test.check("tags.environment", "Dev", case_sensitive=False),
        test.check("name", "{myOrganization}", case_sensitive=False),
    ])
    step_organization_show(test, rg, checks=[
        test.check("location", "eastus2euap", case_sensitive=False),
        test.check("userDetail.emailAddress", "<EMAIL>", case_sensitive=False),
        test.check("userDetail.firstName", "contoso", case_sensitive=False),
        test.check("userDetail.lastName", "zhou", case_sensitive=False),
        test.check("tags.environment", "Dev", case_sensitive=False),
        test.check("name", "{myOrganization}", case_sensitive=False),
    ])
    # List scoped to the resource group...
    step_organization_list(test, rg, checks=[
        test.greater_than('length(@)', 0),
    ])
    # ...and list across the subscription (empty rg argument).
    step_organization_list(test, "", checks=[
        test.greater_than('length(@)', 0),
    ])
    step_organization_update(test, rg, checks=[
        test.check("location", "eastus2euap", case_sensitive=False),
        test.check("userDetail.emailAddress", "<EMAIL>", case_sensitive=False),
        test.check("userDetail.firstName", "contoso", case_sensitive=False),
        test.check("userDetail.lastName", "zhou", case_sensitive=False),
        test.check("name", "{myOrganization}", case_sensitive=False),
        test.check("tags.client", "dev-client", case_sensitive=False),
    ])
    step_organization_delete(test, rg, checks=[])
    cleanup_scenario(test, rg)


# Test class for Scenario
@try_manual
class ConfluentScenarioTest(ScenarioTest):
    """ScenarioTest wrapper that provisions a resource group and runs the scenario."""

    @ResourceGroupPreparer(name_prefix='clitestconfluent_myResourceGroup'[:7], key='rg',
                           parameter_name='rg')
    def test_confluent_Scenario(self, rg):
        # Template variables substituted into "{...}" placeholders in checks.
        self.kwargs.update({
            'myOrganization': 'myOrganization',
        })

        call_scenario(self, rg)
        calc_coverage(__file__)
        raise_if()