repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
9d77v/leetcode
internal/leetcode/256.paint-house/main.go
package main import ( "fmt" . "github.com/9d77v/leetcode/pkg/algorithm/math" ) /* 题目:粉刷房子 假如有一排房子,共 n 个,每个房子可以被粉刷成红色、蓝色或者绿色这三种颜色中的一种,你需要粉刷所有的房子并且使其相邻的两个房子颜色不能相同。 当然,因为市场上不同颜色油漆的价格不同,所以房子粉刷成不同颜色的花费成本也是不同的。每个房子粉刷成不同颜色的花费是以一个 n x 3 的矩阵来表示的。 例如,costs[0][0] 表示第 0 号房子粉刷成红色的成本花费;costs[1][2] 表示第 1 号房子粉刷成绿色的花费,以此类推。请你计算出粉刷完所有房子最少的花费成本。 注意: 所有花费均为正整数。 来源:力扣(LeetCode) 链接:https://leetcode-cn.com/problems/paint-house/ */ /* 方法一:记忆化递归 时间复杂度:О(n) 空间复杂度:O(n) 运行时间:4 ms 内存消耗:4.9 MB */ func minCost(costs [][]int) int { memo := map[string]int{} var dfs func(n, color int) int dfs = func(n, color int) int { key := fmt.Sprintf("%d_%d", n, color) if v, ok := memo[key]; ok { return v } totalCost := costs[n][color] if n == len(costs)-1 { } else if color == 0 { totalCost += Min(dfs(n+1, 1), dfs(n+1, 2)) } else if color == 1 { totalCost += Min(dfs(n+1, 0), dfs(n+1, 2)) } else { totalCost += Min(dfs(n+1, 0), dfs(n+1, 1)) } memo[key] = totalCost return totalCost } return MinArr(dfs(0, 0), dfs(0, 1), dfs(0, 2)) } /* 方法二:动态规划(使用已有数组) 时间复杂度:О(n) 空间复杂度:O(1) 运行时间:4 ms 内存消耗:3 MB */ func minCostFunc2(costs [][]int) int { for i := len(costs) - 2; i >= 0; i-- { costs[i][0] += Min(costs[i+1][1], costs[i+1][2]) costs[i][1] += Min(costs[i+1][0], costs[i+1][2]) costs[i][2] += Min(costs[i+1][1], costs[i+1][0]) } return MinArr(costs[0][0], costs[0][1], costs[0][2]) } /* 方法三:动态规划(不修改已有数组) 时间复杂度:О(n) 空间复杂度:O(n) 运行时间:4 ms 内存消耗:3.3 MB */ func minCostFunc3(costs [][]int) int { n := len(costs) f := make([][]int, n) for i := range f { f[i] = make([]int, 3) } f[0][0], f[0][1], f[0][2] = costs[0][0], costs[0][1], costs[0][2] for i := range f[1:] { f[i+1][0] = Min(f[i][1], f[i][2]) + costs[i+1][0] f[i+1][1] = Min(f[i][0], f[i][2]) + costs[i+1][1] f[i+1][2] = Min(f[i][1], f[i][0]) + costs[i+1][2] } return MinArr(f[n-1][0], f[n-1][1], f[n-1][2]) } /* 方法四:动态规划(不修改已有数组,优化) 时间复杂度:О(n) 空间复杂度:O(n) 运行时间:4 ms 内存消耗:3 MB */ func minCostFunc4(costs [][]int) int { f0, f1, f2 := costs[0][0], costs[0][1], costs[0][2] for i := range 
costs[1:] { f0, f1, f2 = Min(f1, f2)+costs[i+1][0], Min(f0, f2)+costs[i+1][1], Min(f1, f0)+costs[i+1][2] } return MinArr(f0, f1, f2) }
WarMachineSwe/IronWorks
server/utils/io.js
<gh_stars>0 var fs = require('fs') var path = require('path') /** * Buffers file from path * @param relPath {string} path to buffer * @return {Buffer | string} content of file */ function bufferFile (relPath) { return fs.readFileSync(path.join(__dirname, relPath)) } module.exports = { 'bufferFile': bufferFile }
pablrod/cppplotly
include/CppPlotly/Trace/Isosurface/Slices.h
<filename>include/CppPlotly/Trace/Isosurface/Slices.h<gh_stars>1-10 /** * @file Slices.h This attribute is part of the possible options for the trace isosurface. This file has been autogenerated from the official plotly.js source. If you like Plotly, please support them: L<https://plot.ly/> Open source announcement: L<https://plot.ly/javascript/open-source-announcement/> Full reference: L<https://plot.ly/javascript/reference/#isosurface> DISCLAIMER This is an unofficial Plotly Perl module. Currently I'm not affiliated in any way with Plotly. But I think plotly.js is a great library and I want to use it with C++. */ #include <vector> #include <string> #include <sstream> #include <json11.hpp> #include "CppPlotly/Trace/Isosurface/Slices/X.h" #include "CppPlotly/Trace/Isosurface/Slices/Y.h" #include "CppPlotly/Trace/Isosurface/Slices/Z.h" namespace CppPlotly { namespace Trace { namespace isosurface { /** * * * */ class Slices { public: Slices & X(const CppPlotly::Trace::isosurface::slices::X &x ) { _slices.insert({"x", x}); return *this; } Slices & Y(const CppPlotly::Trace::isosurface::slices::Y &y ) { _slices.insert({"y", y}); return *this; } Slices & Z(const CppPlotly::Trace::isosurface::slices::Z &z ) { _slices.insert({"z", z}); return *this; } json11::Json to_json() const { return _slices; } private: json11::Json::object _slices; }; } } }
rdubois/tcommon-studio-se
main/plugins/org.talend.cwm.mip/src/orgomg/cwm/analysis/businessnomenclature/Concept.java
/**
 * <copyright> </copyright>
 *
 * $Id$
 */
package orgomg.cwm.analysis.businessnomenclature;

import org.eclipse.emf.common.util.EList;

/**
 * <!-- begin-user-doc -->
 * A representation of the model object '<em><b>Concept</b></em>'.
 * <!-- end-user-doc -->
 *
 * <!-- begin-model-doc -->
 * This represents a business idea or notion.
 *
 * Concepts are represented by Terms. Users use Terms that are familiar to
 * them in their business environment to refer to Concepts.
 * <!-- end-model-doc -->
 *
 * <p>
 * The following features are supported:
 * <ul>
 *   <li>{@link orgomg.cwm.analysis.businessnomenclature.Concept#getTerm <em>Term</em>}</li>
 *   <li>{@link orgomg.cwm.analysis.businessnomenclature.Concept#getRelatedConcept <em>Related Concept</em>}</li>
 *   <li>{@link orgomg.cwm.analysis.businessnomenclature.Concept#getConcept <em>Concept</em>}</li>
 * </ul>
 * </p>
 *
 * @see orgomg.cwm.analysis.businessnomenclature.BusinessnomenclaturePackage#getConcept()
 * @model
 * @generated
 */
public interface Concept extends VocabularyElement {

    /**
     * Returns the value of the '<em><b>Term</b></em>' reference list.
     * The list contents are of type {@link orgomg.cwm.analysis.businessnomenclature.Term}.
     * It is bidirectional and its opposite is
     * '{@link orgomg.cwm.analysis.businessnomenclature.Term#getConcept <em>Concept</em>}'.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * <!-- begin-model-doc -->
     * Identifies a Term.
     * <!-- end-model-doc -->
     *
     * @return the value of the '<em>Term</em>' reference list.
     * @see orgomg.cwm.analysis.businessnomenclature.BusinessnomenclaturePackage#getConcept_Term()
     * @see orgomg.cwm.analysis.businessnomenclature.Term#getConcept
     * @model opposite="concept"
     * @generated
     */
    EList<Term> getTerm();

    /**
     * Returns the value of the '<em><b>Related Concept</b></em>' reference list.
     * The list contents are of type {@link orgomg.cwm.analysis.businessnomenclature.Concept}.
     * It is bidirectional and its opposite is
     * '{@link orgomg.cwm.analysis.businessnomenclature.Concept#getConcept <em>Concept</em>}'.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * <!-- begin-model-doc -->
     * Identifies the related Concepts.
     * <!-- end-model-doc -->
     *
     * @return the value of the '<em>Related Concept</em>' reference list.
     * @see orgomg.cwm.analysis.businessnomenclature.BusinessnomenclaturePackage#getConcept_RelatedConcept()
     * @see orgomg.cwm.analysis.businessnomenclature.Concept#getConcept
     * @model opposite="concept"
     * @generated
     */
    EList<Concept> getRelatedConcept();

    /**
     * Returns the value of the '<em><b>Concept</b></em>' reference list.
     * The list contents are of type {@link orgomg.cwm.analysis.businessnomenclature.Concept}.
     * It is bidirectional and its opposite is
     * '{@link orgomg.cwm.analysis.businessnomenclature.Concept#getRelatedConcept <em>Related Concept</em>}'.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * <!-- begin-model-doc -->
     * Identifies a Concept.
     * <!-- end-model-doc -->
     *
     * @return the value of the '<em>Concept</em>' reference list.
     * @see orgomg.cwm.analysis.businessnomenclature.BusinessnomenclaturePackage#getConcept_Concept()
     * @see orgomg.cwm.analysis.businessnomenclature.Concept#getRelatedConcept
     * @model opposite="relatedConcept"
     * @generated
     */
    EList<Concept> getConcept();

} // Concept
adixonn/SVT-AV1
Source/Lib/Common/Codec/EbEntropyCoding.h
<reponame>adixonn/SVT-AV1
/*
* Copyright(c) 2019 Intel Corporation
* SPDX - License - Identifier: BSD - 2 - Clause - Patent
*/
/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#ifndef EbEntropyCoding_h
#define EbEntropyCoding_h

#include "EbDefinitions.h"
#include "EbEntropyCodingObject.h"
#include "EbEntropyCodingUtil.h"
#include "EbEntropyCodingProcess.h"
#include "EbCodingUnit.h"
#include "EbPredictionUnit.h"
#include "EbPictureBufferDesc.h"
#include "EbSequenceControlSet.h"
#include "EbPictureControlSet.h"
#include "EbCabacContextModel.h"
#include "EbModeDecision.h"
#include "EbIntraPrediction.h"
#include "EbBitstreamUnit.h"
#include "EbPacketizationProcess.h"

#ifdef __cplusplus
extern "C" {
#endif

/*!\brief OBU types. */
typedef enum ATTRIBUTE_PACKED {
    OBU_SEQUENCE_HEADER = 1,
    OBU_TEMPORAL_DELIMITER = 2,
    OBU_FRAME_HEADER = 3,
    OBU_TILE_GROUP = 4,
    OBU_METADATA = 5,
    OBU_FRAME = 6,
    OBU_REDUNDANT_FRAME_HEADER = 7,
    OBU_PADDING = 15,
} obuType;

/**************************************
 * Extern Function Declarations
 **************************************/
struct EntropyCodingContext_s;

/* Entropy-codes one superblock into the bitstream. */
extern EbErrorType write_sb(
    struct EntropyCodingContext_s *context_ptr,
    LargestCodingUnit_t *tbPtr,
    PictureControlSet_t *picture_control_set_ptr,
    EntropyCoder_t *entropy_coder_ptr,
    EbPictureBufferDesc_t *coeffPtr);

extern EbErrorType EncodeSliceFinish(
    EntropyCoder_t *entropy_coder_ptr);

extern EbErrorType ResetBitstream(
    EbPtr bitstreamPtr);

extern EbErrorType ResetEntropyCoder(
    EncodeContext_t *encode_context_ptr,
    EntropyCoder_t *entropy_coder_ptr,
    uint32_t qp,
    EB_SLICE slice_type);

/* Estimates the bit cost of the Y/Cb/Cr transform coefficients of a TU. */
extern EbErrorType Av1TuEstimateCoeffBits(
    PictureControlSet_t *picture_control_set_ptr,
    struct ModeDecisionCandidateBuffer_s *candidate_buffer_ptr,
    CodingUnit_t *cu_ptr,
    uint32_t tuOriginIndex,
    uint32_t tuChromaOriginIndex,
    EntropyCoder_t *entropy_coder_ptr,
    EbPictureBufferDesc_t *coeff_buffer_sb,
    uint32_t yEob,
    uint32_t cbEob,
    uint32_t crEob,
    uint64_t *y_tu_coeff_bits,
    uint64_t *cb_tu_coeff_bits,
    uint64_t *cr_tu_coeff_bits,
    TxSize txsize,
    TxSize txsize_uv,
    COMPONENT_TYPE component_type,
    EbAsm asm_type);

extern EbErrorType CopyRbspBitstreamToPayload(
    Bitstream_t *bitstreamPtr,
    EbByte outputBuffer,
    uint32_t *outputBufferIndex,
    uint32_t *outputBufferSize,
    EncodeContext_t *encode_context_ptr);

//**********************************************************************************************************//
//onyxc_int.h
// Returns 1 when the frame type carries no inter prediction (key or intra-only).
static INLINE int32_t frame_is_intra_only(const PictureParentControlSet_t *const pcsPtr) {
    return pcsPtr->av1FrameType == KEY_FRAME || pcsPtr->av1FrameType == INTRA_ONLY_FRAME;
}

static INLINE int32_t frame_is_sframe(const PictureParentControlSet_t *pcsPtr) {
    return pcsPtr->av1FrameType == S_FRAME;
}

// Returns 1 if this frame might allow mvs from some reference frame.
static INLINE int32_t frame_might_allow_ref_frame_mvs(const PictureParentControlSet_t *pcsPtr,
    SequenceControlSet *scsPtr) {
    return !pcsPtr->error_resilient_mode && scsPtr->enable_ref_frame_mvs &&
        scsPtr->enable_order_hint && !frame_is_intra_only(pcsPtr);
}

// Returns 1 if this frame might use warped_motion
static INLINE int32_t frame_might_allow_warped_motion(const PictureParentControlSet_t *pcsPtr,
    SequenceControlSet *scsPtr) {
    return !pcsPtr->error_resilient_mode && !frame_is_intra_only(pcsPtr) &&
        scsPtr->static_config.enable_warped_motion;
}

// Packs a major/minor bitstream level into the sequence-header level index.
static INLINE uint8_t major_minor_to_seq_level_idx(BitstreamLevel bl) {
    assert(bl.major >= LEVEL_MAJOR_MIN && bl.major <= LEVEL_MAJOR_MAX);
    //assert(bl.minor >= LEVEL_MINOR_MIN && bl.minor <= LEVEL_MINOR_MAX);
    return ((bl.major - LEVEL_MAJOR_MIN) << LEVEL_MINOR_BITS) + bl.minor;
}

//**********************************************************************************************************//
//encoder.h
// Maps a reference frame enum to its DPB index for this picture.
static INLINE int32_t get_ref_frame_map_idx(const PictureParentControlSet_t *pcsPtr,
    MvReferenceFrame ref_frame) {
    // (void)(*pcsPtr);
    // (void)ref_frame;
    // return 0;
    return pcsPtr->av1RefSignal.refDpbIndex[ref_frame - LAST_FRAME];//LAST-LAST2-LAST3-GOLDEN-BWD-ALT2-ALT
    //if (ref_frame >= LAST_FRAME && ref_frame <= LAST3_FRAME)
    //    return pcsPtr->lst_fb_idxes[ref_frame - 1];
    //else if (ref_frame == GOLDEN_FRAME)
    //    return pcsPtr->gld_fb_idx;
    //else if (ref_frame == BWDREF_FRAME)
    //    return pcsPtr->bwd_fb_idx;
    //else if (ref_frame == ALTREF2_FRAME)
    //    return pcsPtr->alt2_fb_idx;
    //else
    //    return pcsPtr->alt_fb_idx;
}

//*******************************************************************************************//
// bitwriter_buffer.h
// A simple bit-oriented writer over a byte buffer.
struct aom_write_bit_buffer {
    uint8_t *bit_buffer;   // destination bytes
    uint32_t bit_offset;   // number of bits already written
};

int32_t aom_wb_is_byte_aligned(const struct aom_write_bit_buffer *wb);
uint32_t aom_wb_bytes_written(const struct aom_write_bit_buffer *wb);
void aom_wb_write_bit(struct aom_write_bit_buffer *wb, int32_t bit);
void aom_wb_overwrite_bit(struct aom_write_bit_buffer *wb, int32_t bit);
void aom_wb_write_literal(struct aom_write_bit_buffer *wb, int32_t data, int32_t bits);
void aom_wb_write_inv_signed_literal(struct aom_write_bit_buffer *wb, int32_t data, int32_t bits);

//*******************************************************************************************//
// bitstream.h
struct aom_write_bit_buffer;

//void WriteSequenceHeader(/*AV1_COMP *cpi, */struct aom_write_bit_buffer *wb);
void WriteSequenceHeader(SequenceControlSet *scsPtr/*AV1_COMP *cpi*/, struct aom_write_bit_buffer *wb);

uint32_t WriteObuHeader(obuType obuType, int32_t obuExtension, uint8_t *const dst);

int32_t WriteUlebObuSize(uint32_t obuHeaderSize, uint32_t obuPayloadSize, uint8_t *dest);

/*int32_t av1_pack_bitstream(AV1_COMP *const cpi, uint8_t *dest, size_t *size);

static INLINE int32_t av1_preserve_existing_gf(AV1_COMP *cpi) {
    // Do not swap gf and arf indices for internal overlay frames
    return !cpi->multi_arf_allowed && cpi->rc.is_src_frame_alt_ref &&
        !cpi->rc.is_src_frame_ext_arf;
}

void av1_write_tx_type(const Av1Common *const cm, const MacroBlockD *xd,
    int32_t blk_row, int32_t blk_col, int32_t plane, TxSize tx_size,
    aom_writer *w);
*/

//*******************************************************************************************//
// blockd.h
static INLINE uint32_t have_nearmv_in_inter_mode(PredictionMode mode) {
    return (mode == NEARMV || mode == NEAR_NEARMV || mode == NEAR_NEWMV ||
        mode == NEW_NEARMV);
}

void GetTxbCtx(
    const int32_t plane,
    NeighborArrayUnit_t *dcSignLevelCoeffNeighborArray,
    uint32_t cu_origin_x,
    uint32_t cu_origin_y,
    const block_size plane_bsize,
    const TxSize tx_size,
    int16_t *const txb_skip_ctx,
    int16_t *const dc_sign_ctx);

extern int32_t Av1GetReferenceModeContext(
    uint32_t cu_origin_x,
    uint32_t cu_origin_y,
    NeighborArrayUnit_t *mode_type_neighbor_array,
    NeighborArrayUnit_t *inter_pred_dir_neighbor_array);

extern int32_t Av1GetCompReferenceTypeContext(
    uint32_t cu_origin_x,
    uint32_t cu_origin_y,
    NeighborArrayUnit_t *mode_type_neighbor_array,
    NeighborArrayUnit_t *inter_pred_dir_neighbor_array);

extern void Av1CollectNeighborsRefCounts(
    CodingUnit_t *cu_ptr,
    uint32_t cu_origin_x,
    uint32_t cu_origin_y,
    NeighborArrayUnit_t *mode_type_neighbor_array,
    NeighborArrayUnit_t *inter_pred_dir_neighbor_array,
    NeighborArrayUnit_t *ref_frame_type_neighbor_array);

// Obtain contexts to signal a reference frame be either BWDREF/ALTREF2, or
// ALTREF.
//extern int32_t get_pred_context_brfarf2_or_arf(const MacroBlockD *xd);
// Obtain contexts to signal a reference frame be either BWDREF or ALTREF2.
//extern int32_t get_pred_context_brf_or_arf2(const MacroBlockD *xd);

// == Context functions for comp ref ==
//
// Returns a context number for the given MB prediction signal
// Signal the first reference frame for a compound mode be either
// GOLDEN/LAST3, or LAST/LAST2.
extern int32_t av1_get_pred_context_comp_ref_p(const MacroBlockD *xd);

// Returns a context number for the given MB prediction signal
// Signal the first reference frame for a compound mode be LAST,
// conditioning on that it is known either LAST/LAST2.
extern int32_t av1_get_pred_context_comp_ref_p1(const MacroBlockD *xd);

// Returns a context number for the given MB prediction signal
// Signal the first reference frame for a compound mode be GOLDEN,
// conditioning on that it is known either GOLDEN or LAST3.
extern int32_t av1_get_pred_context_comp_ref_p2(const MacroBlockD *xd);

// Signal the 2nd reference frame for a compound mode be either
// ALTREF, or ALTREF2/BWDREF.
extern int32_t av1_get_pred_context_comp_bwdref_p(const MacroBlockD *xd);

// Signal the 2nd reference frame for a compound mode be either
// ALTREF2 or BWDREF.
extern int32_t av1_get_pred_context_comp_bwdref_p1(const MacroBlockD *xd);

// == Context functions for single ref ==
//
// For the bit to signal whether the single reference is a forward reference
// frame or a backward reference frame.
extern int32_t av1_get_pred_context_single_ref_p1(const MacroBlockD *xd);

// For the bit to signal whether the single reference is ALTREF_FRAME or
// non-ALTREF backward reference frame, knowing that it shall be either of
// these 2 choices.
extern int32_t av1_get_pred_context_single_ref_p2(const MacroBlockD *xd);

// For the bit to signal whether the single reference is LAST3/GOLDEN or
// LAST2/LAST, knowing that it shall be either of these 2 choices.
extern int32_t av1_get_pred_context_single_ref_p3(const MacroBlockD *xd);

// For the bit to signal whether the single reference is LAST2_FRAME or
// LAST_FRAME, knowing that it shall be either of these 2 choices.
extern int32_t av1_get_pred_context_single_ref_p4(const MacroBlockD *xd);

// For the bit to signal whether the single reference is GOLDEN_FRAME or
// LAST3_FRAME, knowing that it shall be either of these 2 choices.
extern int32_t av1_get_pred_context_single_ref_p5(const MacroBlockD *xd);

// For the bit to signal whether the single reference is ALTREF2_FRAME or
// BWDREF_FRAME, knowing that it shall be either of these 2 choices.
extern int32_t av1_get_pred_context_single_ref_p6(const MacroBlockD *xd);

extern EbErrorType WriteFrameHeaderAv1(
    Bitstream_t *bitstreamPtr,
    SequenceControlSet *scsPtr,
    PictureControlSet_t *pcsPtr,
    uint8_t showExisting);

extern EbErrorType encode_td_av1(
    uint8_t *bitstreamPtr);

extern EbErrorType EncodeSPSAv1(
    Bitstream_t *bitstreamPtr,
    SequenceControlSet *scsPtr);

//*******************************************************************************************//
MOTION_MODE motion_mode_allowed(
    const PictureControlSet_t *picture_control_set_ptr,
    const CodingUnit_t *cu_ptr,
    const block_size bsize,
    MvReferenceFrame rf0,
    MvReferenceFrame rf1,
    PredictionMode mode);

#ifdef __cplusplus
}
#endif
#endif //EbEntropyCoding_h
kevinbarabash/mobile-mathipulator
src/transforms/evaluate.js
import { generateId } from '../ast/node';
import Selection from '../ui/selection';
import eliminateZero from './eliminate-zero';
import params from '../params'

// Binary arithmetic operations keyed by the AST operator symbol.
const operations = {
    '+': (a, b) => a + b,
    '-': (a, b) => a - b,
    '*': (a, b) => a * b,
    '/': (a, b) => a / b,
};

// Returns true when the single selection is an evaluatable run of literals:
// at least literal-op-literal (length >= 3), starting and ending on a
// Literal, inside an Expression or Product node. A selection that wraps a
// whole Expression/Product is first unwrapped to its children.
// Rejects runs preceded by a '-' operator (the sign belongs to the first
// literal and evaluating would drop it).
function canTransform(selections) {
    if (selections.length !== 1) return false;
    let selection = selections[0];
    if (['Expression', 'Product'].includes(selection.first.type) && selection.length === 1) {
        selection = selection.first.children;
    }
    if (selection.length >= 3 && selection.first.type === 'Literal' &&
            selection.last.type == 'Literal' &&
            ['Expression', 'Product'].includes(selection.first.parent.type)) {
        if (selection.first.prev && selection.first.prev.operator === '-') {
            return false;
        }
        return true;
    }
    return false;
}

// Replaces the selected literal run with either the user-supplied node
// (userInput) or the computed result. After the replacement, a now-singleton
// parent expression is collapsed into its grandparent, and the eliminate-zero
// transform is chained when enabled in params.
// NOTE(review): assumes the selection alternates literal/operator nodes
// (rest has even length) — guaranteed only if canTransform's length >= 3
// check always sees odd-length runs; confirm against Selection semantics.
function doTransform(selections, userInput) {
    if (canTransform(selections)) {
        if (selections.length !== 1) return false;
        let selection = selections[0];
        // Same unwrapping as canTransform: operate on the node run itself.
        if (['Expression', 'Product'].includes(selection.first.type) && selection.length === 1) {
            selection = selection.first.children;
        }
        const [first, ...rest] = selection;
        const parent = first.parent;
        // Drop everything after the first node; `first` becomes the anchor
        // that is replaced below.
        rest.forEach(node => parent.remove(node));
        if (userInput) {
            parent.replace(first, userInput);
            // collapse if there is only one node in the expression
            if (userInput.prev == null && userInput.next == null) {
                if (parent.parent) {
                    parent.parent.replace(parent, userInput);
                }
            }
            if (params.eliminateZero) {
                const selections = [];
                selections.push(new Selection(userInput));
                if (eliminateZero.canTransform(selections)) {
                    eliminateZero.doTransform(selections);
                }
            }
        } else {
            // Auto-evaluate: fold operator/operand pairs left to right.
            const replacement = first.clone();
            for (let i = 0; i < rest.length; i += 2) {
                const operator = rest[i].operator;
                // TODO: moving parsing of the number into the operation
                const operand = parseFloat(rest[i + 1].value);
                replacement.value = operations[operator](parseFloat(replacement.value), operand);
                replacement.id = generateId();
            }
            parent.replace(first, replacement);
            // collapse if there is only one node in the expression
            if (replacement.prev == null && replacement.next == null) {
                if (parent.parent) {
                    parent.parent.replace(parent, replacement);
                }
            }
            if (params.eliminateZero) {
                const selections = [];
                selections.push(new Selection(replacement));
                if (eliminateZero.canTransform(selections)) {
                    eliminateZero.doTransform(selections);
                }
            }
        }
    }
}

module.exports = {
    label: 'evaluate',
    canTransform,
    doTransform,
    // When autoeval is on, the result is computed instead of prompted for.
    needsUserInput: !params.autoeval,
};
antopen/alipay-sdk-java-all
src/main/java/com/alipay/api/response/KoubeiQualityTestShieldOrderCreateResponse.java
package com.alipay.api.response;

import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.AlipayResponse;

/**
 * ALIPAY API: koubei.quality.test.shield.order.create response.
 *
 * @author <NAME>
 * @since 1.0, 2019-08-30 20:12:07
 */
public class KoubeiQualityTestShieldOrderCreateResponse extends AlipayResponse {

    private static final long serialVersionUID = 1718588391836426124L;

    /** Batch number of the shield order (加购单批次号). */
    @ApiField("batch_no")
    private String batchNo;

    /** Extension information (扩展信息). */
    @ApiField("ext_infos")
    private String extInfos;

    /** Koubei order id (口碑订单号). */
    @ApiField("order_id")
    private String orderId;

    public void setBatchNo(String batchNo) {
        this.batchNo = batchNo;
    }

    public String getBatchNo( ) {
        return this.batchNo;
    }

    public void setExtInfos(String extInfos) {
        this.extInfos = extInfos;
    }

    public String getExtInfos( ) {
        return this.extInfos;
    }

    public void setOrderId(String orderId) {
        this.orderId = orderId;
    }

    public String getOrderId( ) {
        return this.orderId;
    }
}
albertosantiago/twitter-posts
src/resources/assets/js/components/tags/twp-post-link.js
'use strict';

import $ from 'jquery';
import React from 'react';
import ReactDOM from 'react-dom';
import interact from 'interact.js';
import App from 'app/app';
import {isMsieBrowser} from 'app/lib/util';

/**
 * Custom element <twp-post-link>: renders an anchor built from the element's
 * data-* attributes and, when the page is in edit mode (window.__editing),
 * wires click/double-click/keyboard handlers for selecting, editing and
 * removing the link.
 */
class TwpPostLinkHTMLElement extends HTMLElement {
    constructor(){
        super();
        // Attribute snapshot; populated from the DOM in _renderTag().
        this.data = {
            id : undefined
        };
        // The <span class="twp-post-link-wrapper"> produced by _renderTag();
        // also serves as the "already rendered" flag in connectedCallback().
        this.wrapper = undefined;
        this.connectedCallback = this.connectedCallback.bind(this);
        this._renderTag = this._renderTag.bind(this);
        this._setData = this._setData.bind(this);
        this._setEdit = this._setEdit.bind(this);
        this._autoRemove = this._autoRemove.bind(this);
        // jQuery handle on this element, reused by the edit handlers.
        this.root = $(this);
    }

    // Render once on first attachment; re-attachment is a no-op.
    connectedCallback() {
        if(this.wrapper){
            return;
        }
        this._renderTag();
    }

    // Reads the element attributes and (re)builds the wrapped <a> tag.
    _renderTag(){
        let self = this;
        this.data = {
            id : $(this).attr("id"),
            text : $(this).attr("data-text"),
            color : $(this).attr("data-color"),
            href : $(this).attr("href"),
            title : $(this).attr("title")
        };
        this.innerHTML = "";
        var link = document.createElement('a');
        if(this.data.color!==undefined){
            link.setAttribute('style','color:'+this.data.color);
        }
        link.setAttribute('href', this.data.href);
        link.setAttribute('title', this.data.title);
        link.innerText = this.data.text;
        var wrapper = document.createElement('span');
        wrapper.appendChild(link);
        wrapper.setAttribute('class', 'twp-post-link-wrapper');
        // Replace a previously rendered wrapper instead of stacking a new one.
        var oldWrapper = this.getElementsByClassName("twp-post-link-wrapper");
        if(oldWrapper.length>0){
            oldWrapper = oldWrapper[0];
            this.replaceChild(wrapper, oldWrapper);
        }else{
            this.appendChild(wrapper);
        }
        this.wrapper = wrapper;
        if(window.__editing){
            this._setEdit();
        }
    }

    // Installs edit-mode behavior: click selects (highlight + delete via
    // Backspace/Delete), double-click opens the ModalLink editor, clicking
    // outside deselects.
    // NOTE(review): the document-level "body" click handler is added on every
    // call and never removed — presumably _setEdit runs once per element;
    // confirm there is no handler accumulation on re-render.
    _setEdit(){
        var self = this;
        // Removes the element when Backspace (8) or Delete (46) is pressed
        // while it is selected.
        var removeFunction = function(event) {
            event.preventDefault();
            var key = event.keyCode || event.charCode;
            if( key == 8 || key == 46 ){
                self._autoRemove();
            }
        };
        this.root.find(".twp-post-link-wrapper").on('mousedown', function(e){
            e.preventDefault();
        });
        this.root.find(".twp-post-link-wrapper").click(function(){
            document.activeElement.blur();
            $("html").on('keydown', removeFunction);
            // Selection highlight.
            self.root.find(".twp-post-link-wrapper").css('border','4px solid #ddd');
            self.root.find(".twp-post-link-wrapper").css('padding','5px');
            self.root.find(".twp-post-link-wrapper").css('width','auto');
            self.root.find(".twp-post-link-wrapper").css('display','inline-block');
            var wrapper = self.root.find(".twp-post-link-wrapper").first().get(0);
        });
        this.root.find(".twp-post-link-wrapper").dblclick(function(){
            window.modalManager.create('ModalLink',{
                tag: self,
                callback: function(data){
                    self._setData(data);
                }
            });
        });
        $("body").click(function(event){
            // Click landed outside any <twp-post-link>: clear selection state.
            if($(event.target).parents('twp-post-link').length===0){
                $("html").off('keydown', removeFunction);
                self.root.find(".twp-post-link-wrapper").css('border','0px');
                self.root.find(".twp-post-link-wrapper").css('padding','0px');
                self.root.find(".twp-post-link-wrapper").css('display','inline');
            }
        });
    }

    // Writes the modal's result back onto the element's attributes and
    // re-renders. `data` is expected to carry href/title/text/color.
    _setData(data){
        var attr = {
            'href' : data.href,
            'title' : data.title,
            'data-text' : data.text,
            'data-color' : data.color
        };
        $(this).attr(attr);
        this._renderTag();
    }

    // Detaches this element from the DOM (keeps jQuery data, per detach()).
    _autoRemove(){
        $(this).detach();
    }
}

if(window.customElements!==undefined){
    if(window.customElements.get('twp-post-link')===undefined){
        window.customElements.define('twp-post-link', TwpPostLinkHTMLElement);
    }
}

export default TwpPostLinkHTMLElement;
AkshayPathak/PrivacyStreams
privacystreams-android-sdk/src/main/java/io/github/privacystreams/core/transformations/filter/KeepChangesFilter.java
<filename>privacystreams-android-sdk/src/main/java/io/github/privacystreams/core/transformations/filter/KeepChangesFilter.java package io.github.privacystreams.core.transformations.filter; import io.github.privacystreams.core.Item; /** * Only keep the changed items in the stream. */ final class KeepChangesFilter extends StreamFilter { KeepChangesFilter() { } private transient Item lastItem; @Override public boolean keep(Item item) { if (item.equals(lastItem)) return false; this.lastItem = item; return true; } }
CarolinaAzcona/FreeRTOS
vendors/cypress/MTB/libraries/whd-bsp-integration/docs/html/modules.js
// Doxygen navigation data: one entry per documented module as
// [display title, landing page, subpage tree id].
var modules =
[
  [ "Buffer management", "group__group__bsp__network__buffer.html", "group__group__bsp__network__buffer" ],
  [ "WiFi Initialization", "group__group__bsp__wifi.html", "group__group__bsp__wifi" ]
];
DingdingLuan/PycharmProjects
openingangle/calculate9.29.py
<filename>openingangle/calculate9.29.py from Calculatefunction import * import numpy as np integralnumber=10000 eps=0.4 alpha=-1.1 beta=-2.2 rho=1 samplenumber=300 proz=[] prol=[] zmock=[] #generate the new z-distribution function from N: zpdf=[] for i in range(integralnumber): # z=10**(i*1/integralnumber)-1 # zmax=10**((i+1)*1/integralnumber)-1 z=10/integralnumber*i zmax=10/integralnumber*(i+1) zpdf=np.append(zpdf,N(z,eps,alpha,beta,rho,zmax)) zpdf=zpdf/np.sum(zpdf) #test monte carlo for z: # standard=np.max(zpdf) # i=0 # while i<samplenumber: # a=np.random.uniform(0.,10) # b=np.random.uniform(0,standard) # aa=np.int(np.log10(a+1)*integralnumber) # if aa<=integralnumber: # if zpdf[aa]>=b: # zmock=np.append(zmock,a) # i=i+1 # else: # continue # n=20 # for i in range(n): # counter=0 # for j in range(170): # if 10**(i*1/n)-1<=zmock[j]<=10**((i+1)*1/n)-1: # counter=counter+1 # proz=np.append(proz,counter) # proz=proz/np.sum(proz) # # matplotlib.rcParams['xtick.direction'] = 'in' # matplotlib.rcParams['ytick.direction'] = 'in' # plt.figure(figsize=(7, 7)) # x=np.arange(0,1,1/n) #plot z # plt.bar(x,proz,width=1/len(x),edgecolor='r',facecolor='white',linestyle='--') # # plt.savefig('/Users/dingding/Desktop/calculate/9.18/mockz.eps') # plt.show() #------------------------------------------------------------------------------------------------------ zmocksample=[] lmocksample=[] thetamocksample=[] lmin=46 lmax=52 P_mock=[] i=0 standard=np.max(zpdf) while i<samplenumber: a=random.uniform(10**(-2.),10**(-0.5)) b=random.uniform(0,1.87960) theta=a/np.pi*180 if b<=thetalogdistri(a): # c=random.uniform(0,1-np.cos(10**(-1.27))) # c=random.uniform(0,1) # if eta_a(a)>c: while True: a=random.uniform(0.,10) b=np.random.uniform(0,standard) z=a aa=np.int(a/10*integralnumber) if aa<integralnumber: if zpdf[aa]>=b: break else: continue while True: a=random.uniform(10.**lmin,10.**lmax) b=random.uniform(0,6*10.**(-51)) l=a if b<=Luminosityfunction(a): break else: continue p=P(z,l,theta) if 
p<85: #normalize the probability of p-z a=random.uniform(0,1) if 0<a<eta_t(p): a=random.uniform(0,1) if 0<a<eta_z(p): P_mock=np.append(P_mock,p) zmocksample=np.append(zmocksample,z) thetamocksample=np.append(thetamocksample,theta) lmocksample=np.append(lmocksample,l/(1-np.cos(theta/180*np.pi))) # lmocksample=np.append(lmocksample,l) i=i+1 #------------------------------------------------------------------------------------------------------ # print('pmock:',P_mock) # print('zmock:',zmocksample) # print('lmock:',lmocksample) # print('thetamock:',thetamocksample) zmock= zmocksample lmock=lmocksample thetamock=thetamocksample pmock=P_mock proz=[] prol=[] protheta=[] prothetaz=[] prop=[] #------------------------------------------------------------------------------------------------------ z170num=22 for i in range(z170num): counter=0 for j in range(170): if 10**(i*1/z170num)-1<=zmock[j]<=10**((i+1)*(1/z170num))-1: counter=counter+1 proz=np.append(proz,counter) proz=proz/np.sum(proz) # n=20 # for i in range(n): # proz[i]=proz[i]/(10**((i+1)*1/n)-10**(i*1/n)) # ------------------------------------------------------------------------------------------------------ lnum=22 for i in range(lnum): counter=0 for j in range(170): if 10.**(49+i*6/lnum)<=lmock[j]<=10.**(49+(i+1)*6/lnum): counter=counter+1 prol=np.append(prol,counter) prol=prol/np.sum(prol) # n=20 # for i in range(n): # prol[i]=prol[i]/(10.**(46+(i+1)*(6/n))-10.**(46+i*(6/n))) # calculate the cumulation of the P: num=40 for i in range(num): counter=0 for j in range(samplenumber): if pmock[j]>=10**(-1+3/num*i): counter=counter+1 prop=np.append(prop,counter) prop=prop/prop[0] # print(pmock) #------------------------------------------------------------------------------------------------------ # randomly pick 77 grbs subsample from mock sample with 170 grbs: i=0 subsamplez=[] subsampletheta=[] min=-2.5 max=0 while i<77: a=random.randint(0,169) subsamplez=np.append(subsamplez,zmock[a]) 
subsampletheta=np.append(subsampletheta,thetamock[a]) i=i+1 thetanum=12 for i in range(thetanum): counter=0 for j in range(77): if 10.**(-2.5+2.5/thetanum*i)<=subsampletheta[j]/180*np.pi<=10.**(-2.5+2.5/thetanum*(i+1)): counter=counter+1 protheta=np.append(protheta,counter) protheta=protheta/np.sum(protheta) # n=10 # for i in range(n): # assign the weight # protheta[i]=protheta[i]/(10**(min+(max-min)/n*(i+1))-10**(min+(max-min)/n*i)) z77num=10 for i in range(z77num): counter=0 for j in range(77): if 10**(i*1/z77num)-1<=subsamplez[j]<=10**((i+1)*1/z77num)-1: counter=counter+1 prothetaz=np.append(prothetaz,counter) prothetaz=prothetaz/np.sum(prothetaz) # n=10 # for i in range(n): # prothetaz[i]=prothetaz[i]/(10**((i+1)*1/n)-10**(i*1/n)) #------------------------------------------------------------------------------------------------------ import pandas as pd import numpy as np import matplotlib.pyplot as plt import matplotlib df=pd.read_excel("/Users/dingding/Desktop/calculate/10.7/table1.xlsx") zreal=df['z'] thetareal=df['theta'] proz1=[] protheta1=[] z77num=10 for i in range(z77num): counter=0 for j in range(77): if 10**(i*1/z77num)-1<=zreal[j]<=10**((i+1)*1/z77num)-1: counter=counter+1 proz1=np.append(proz1,counter) proz1=proz1/np.sum(proz1) thetanum=12 for i in range(thetanum): counter=0 for j in range(77): if 10.**(-2.5+2.5/thetanum*i)<=thetareal[j]<=10.**(-2.5+2.5/thetanum*(i+1)): counter=counter+1 protheta1=np.append(protheta1,counter) protheta1=protheta1/np.sum(protheta1) # # from scipy.stats import ks_2samp print(ks_2samp(prothetaz,proz1)[1]) print(ks_2samp(protheta,protheta1)[1]) # # # # # # # #------------------------------------------------------------------------------------------------------ # # plot the cumulation figure of P: matplotlib.rcParams['xtick.direction'] = 'in' matplotlib.rcParams['ytick.direction'] = 'in' plt.figure(figsize=(7, 7)) x=np.arange(-1,2,3/num) #plot z # plt.plot(x,np.log10(prop),linewidth=2,c='red',linestyle='--') 
# --- Cumulative P distribution of the 170-GRB mock sample --------------------
# x holds the log10(P) bin edges computed above; prop is the (normalized)
# cumulative fraction of mock bursts with peak flux above each edge.
plt.bar(x,np.log10(prop),width=3/num,edgecolor='r',facecolor='white',linestyle='--')
plt.title("Mock sample = 170")
plt.xlabel(r'$P(photons cm^{-2}s^{-1})$')
plt.ylabel('$Probability$')
# plt.savefig('/Users/dingding/Desktop/calculate/9.17/p.eps')
plt.show()
#
# plot the result of theta:
# --- Histogram of log10(jet opening angle) for the 77-GRB subsample ----------
matplotlib.rcParams['xtick.direction'] = 'in'
matplotlib.rcParams['ytick.direction'] = 'in'
plt.figure(figsize=(7, 7))
# t: left edges of the log10(theta_j / rad) bins, matching protheta above.
t=np.arange(-2.5,0,2.5/thetanum) #plot theta
plt.bar(t,protheta,width=2.5/thetanum,edgecolor='r',facecolor='w',linestyle='--')
# plt.bar(t,protheta1,width=2.5/thetanum,edgecolor='b',facecolor='yellow',linestyle='-')
plt.title("Mock sample = 77")
plt.xlabel(r'$log(\theta_{j}/rad)$')
plt.ylabel('$Probability$')
# plt.savefig('/Users/dingding/Desktop/calculate/9.17/theta9.17.eps')
plt.show()
#
#
#
#
# plot the result of z:
# (disabled redshift histogram for the full 170-GRB mock sample)
# matplotlib.rcParams['xtick.direction'] = 'in'
# matplotlib.rcParams['ytick.direction'] = 'in'
# plt.figure(figsize=(7, 7))
# x=np.arange(0,1,1/z170num)    #plot z
# plt.bar(x,proz,width=1/z170num,edgecolor='r',facecolor='white',linestyle='--')
# plt.title("Mock sample = 170")
# plt.xlabel('$log(z+1)$')
# plt.ylabel('$Probability$')
# plt.savefig('/Users/dingding/Desktop/calculate/9.17/z9.17.eps')
# plt.show()
#
#
#
#
#
# plot the result of luminosity:
# --- Histogram of log10(L) for the 170-GRB mock sample -----------------------
matplotlib.rcParams['xtick.direction'] = 'in'
matplotlib.rcParams['ytick.direction'] = 'in'
plt.figure(figsize=(7, 7))
# y: left edges of the log10(L) bins (49..55), matching prol computed above.
y=np.arange(49,55,6/lnum) #plot l
plt.bar(y,prol,width=6/lnum,edgecolor='r',facecolor='white',linestyle='--')
plt.title("Mock sample = 170")
plt.xlabel('$log(L/ erg s^{-1})$')
plt.ylabel('$Probability$')
# plt.savefig('/Users/dingding/Desktop/calculate/9.17/luminosity9.17.eps')
plt.show()
#
#
#
#
#
#
# plot z-l distribution
# --- Scatter of log10(z+1) vs log10(L) for the full mock sample --------------
matplotlib.rcParams['xtick.direction'] = 'in'
matplotlib.rcParams['ytick.direction'] = 'in'
plt.figure(figsize=(7, 7))
plt.xlim(0,1)
plt.ylim(49,54.5)
# NOTE(review): c='' with edgecolors='r' draws hollow markers, but matplotlib
# >= 3.x rejects an empty color string; facecolors='none' is the supported
# spelling — confirm the matplotlib version this is run against.
plt.scatter(np.log10(zmock+1),np.log10(lmock),s=77,alpha=1,marker='o',c='',edgecolors='r')
plt.title("Mock sample = 170")
plt.xlabel('$log(z+1)$')
plt.ylabel('$log(L/ erg s^{-1})$')
# plt.savefig('/Users/dingding/Desktop/calculate/9.17/z-ldistribution.eps')
plt.show()
#
#
# plot z-theta distribution
# --- Scatter of log10(z+1) vs log10(theta_j): mock subsample (red, already in
# --- radians after /180*pi) overlaid with the observed sample (blue) ---------
matplotlib.rcParams['xtick.direction'] = 'in'
matplotlib.rcParams['ytick.direction'] = 'in'
plt.figure(figsize=(7, 7))
plt.ylim(-2.5,0)
plt.xlim(0,1)
plt.scatter(np.log10(subsamplez+1),np.log10(subsampletheta/180*np.pi),s=77,alpha=1,marker='o',c='',edgecolors='r')
plt.scatter(np.log10(zreal+1),np.log10(thetareal),s=77,alpha=1,marker='o',c='',edgecolors='b')
plt.title("Mock sample = 77")
plt.xlabel('$log(z+1)$')
plt.ylabel(r'$log(\theta_{j}/rad$)')
# plt.savefig('/Users/dingding/Desktop/calculate/9.17/z-thetadistribution9.17.eps')
plt.show()
#
# plot z distribution in subsample:
# (disabled comparison of mock-subsample vs observed redshift histograms)
# matplotlib.rcParams['xtick.direction'] = 'in'
# matplotlib.rcParams['ytick.direction'] = 'in'
# plt.figure(figsize=(7, 7))
# x=np.arange(0,1,1/z77num)
# plt.bar(x,prothetaz,width=1/z77num,edgecolor='r',facecolor='red',linestyle='--')
# plt.bar(x,proz1,width=1/z77num,edgecolor='b',facecolor='yellow',linestyle='-')
# plt.title("Mock sample = 77")
# plt.xlabel('$log(z+1)$')
# plt.ylabel('$Probability$')
# # plt.savefig('/Users/dingding/Desktop/calculate/9.17/zdistributioninsubsample9.17.eps')
# plt.show()
#------------------------------------------------------------------------------------------------------
greeneries/greeneries.github.io
spring/spring-proxy-aop/src/main/java/com/example/demo/proxy/step2/Hello.java
<filename>spring/spring-proxy-aop/src/main/java/com/example/demo/proxy/step2/Hello.java
package com.example.demo.proxy.step2;

/**
 * Target abstraction for the step2 proxy/AOP example: the type through
 * which a dynamic proxy can intercept calls.
 */
public interface Hello {

    /**
     * Emits a greeting. Interface members are implicitly public, so the
     * redundant {@code public} modifier has been dropped per Java convention.
     */
    void say();
}
kmader/MMdnn
mmdnn/conversion/examples/paddle/extractor.py
<filename>mmdnn/conversion/examples/paddle/extractor.py #---------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. #---------------------------------------------------------------------------------------------- from __future__ import absolute_import from mmdnn.conversion.examples.imagenet_test import TestKit from mmdnn.conversion.examples.extractor import base_extractor from mmdnn.conversion.common.utils import download_file import paddle.v2 as paddle import gzip from paddle.trainer_config_helpers.config_parser_utils import \ reset_parser class paddle_extractor(base_extractor): _base_model_url = 'http://cloud.dlnel.org/filepub/?uuid=' _image_size = 224 architecture_map = { 'resnet50' : {'params' : _base_model_url + 'f63f237a-698e-4a22-9782-baf5bb183019',}, 'resnet101' : {'params' : _base_model_url + '3d5fb996-83d0-4745-8adc-13ee960fc55c',}, 'vgg16' : {'params': _base_model_url + 'aa0e397e-474a-4cc1-bd8f-65a214039c2e',}, } class_dim_map = { 'resnet50' : 1000, 'resnet101' : 1000, 'vgg16' : 1001, # work at 1001, but fail at 1000 'alexnet' : 1001, } @classmethod def dump_v2_config(cls, topology, save_path, binary=False): import collections from paddle.trainer_config_helpers.layers import LayerOutput from paddle.v2.layer import parse_network from paddle.proto import TrainerConfig_pb2 """ Dump the network topology to a specified file. This function is only used to dump network defined by using PaddlePaddle V2 API. :param topology: The output layers in the entire network. :type topology: LayerOutput|List|Tuple :param save_path: The path to save the dump network topology. :type save_path: str :param binary: Whether to dump the serialized network topology. The default value is false. :type binary: bool. 
""" if isinstance(topology, LayerOutput): topology = [topology] elif isinstance(topology, collections.Sequence): for out_layer in topology: assert isinstance(out_layer, LayerOutput), ( "The type of each element in the parameter topology " "should be LayerOutput.") else: raise RuntimeError("Error input type for parameter topology.") model_str = parse_network(topology) with open(save_path, "w") as fout: if binary: fout.write(model_str.SerializeToString()) else: fout.write(str(model_str)) @classmethod def download(cls, architecture, path="./"): if cls.sanity_check(architecture): reset_parser() DATA_DIM = 3 * paddle_extractor._image_size * paddle_extractor._image_size # Use 3 * 331 * 331 or 3 * 299 * 299 for Inception-ResNet-v2. CLASS_DIM = paddle_extractor.class_dim_map[architecture] image = paddle.layer.data( name="image", type=paddle.data_type.dense_vector(DATA_DIM)) if 'resnet' in architecture: from mmdnn.conversion.examples.paddle.models import resnet depth = int(architecture.strip('resnet')) out = resnet.resnet_imagenet(image, class_dim=CLASS_DIM, depth=depth) elif architecture == 'vgg16': from mmdnn.conversion.examples.paddle.models import vgg out = vgg.vgg16(image, class_dim=CLASS_DIM) else: print("Not support for {} yet.", architecture) return None architecture_file = path + architecture + '.bin' paddle_extractor.dump_v2_config(out, architecture_file, True) weight_file = download_file(cls.architecture_map[architecture]['params'], directory=path, local_fname= architecture +'.tar.gz') if not weight_file: return None print("MXNet Model {} saved as [{}] and [{}].".format(architecture, architecture_file, weight_file)) return (architecture_file, weight_file) else: return None @classmethod def inference(cls, architecture, files, path, image_path): import numpy as np if cls.sanity_check(architecture): # refer to https://github.com/PaddlePaddle/Paddle/blob/develop/python/paddle/v2/tests/test_rnn_layer.py#L35 reset_parser() # refer to 
https://github.com/PaddlePaddle/Paddle/issues/7403 paddle.init(use_gpu=False, trainer_count=1) DATA_DIM = 3 * paddle_extractor._image_size * paddle_extractor._image_size # Use 3 * 331 * 331 or 3 * 299 * 299 for Inception-ResNet-v2. CLASS_DIM = paddle_extractor.class_dim_map[architecture] image = paddle.layer.data( name="image", type=paddle.data_type.dense_vector(DATA_DIM)) if 'resnet' in architecture: from mmdnn.conversion.examples.paddle.models import resnet depth = int(architecture.strip('resnet')) out = resnet.resnet_imagenet(image, class_dim=CLASS_DIM, depth=depth) elif architecture == 'vgg16': from mmdnn.conversion.examples.paddle.models import vgg out = vgg.vgg16(image, class_dim=CLASS_DIM) else: print("Not support for {} yet.", architecture) return None _, parameters_file = files with gzip.open(parameters_file, 'r') as f: parameters = paddle.parameters.Parameters.from_tar(f) func = TestKit.preprocess_func['paddle'][architecture] img = func(image_path) img = np.transpose(img, [2, 0, 1]) test_data = [(img.flatten(),)] predict = paddle.infer(output_layer = out, parameters=parameters, input=test_data) predict = np.squeeze(predict) return predict else: return None
Youngho-Oh/Airmail
rf/zb/zdo/zdo_app.c
/*************************************************************************** * ZBOSS ZigBee Pro 2007 stack * * * * Copyright (c) 2012 DSR Corporation Denver CO, USA. * * http://www.dsr-wireless.com * * * * All rights reserved. * * Copyright (c) 2011 ClarIDy Solutions, Inc., Taipei, Taiwan. * * http://www.claridy.com/ * * * * Copyright (c) 2011 Uniband Electronic Corporation (UBEC), * * Hsinchu, Taiwan. * * http://www.ubec.com.tw/ * * * * Copyright (c) 2011 DSR Corporation Denver CO, USA. * * http://www.dsr-wireless.com * * * * All rights reserved. * * * * * * ZigBee Pro 2007 stack, also known as ZBOSS (R) ZB stack is available * * under either the terms of the Commercial License or the GNU General * * Public License version 2.0. As a recipient of ZigBee Pro 2007 stack, you* * may choose which license to receive this code under (except as noted in * * per-module LICENSE files). * * * * ZBOSS is a registered trademark of DSR Corporation AKA Data Storage * * Research LLC. * * * * GNU General Public License Usage * * This file may be used under the terms of the GNU General Public License * * version 2.0 as published by the Free Software Foundation and appearing * * in the file LICENSE.GPL included in the packaging of this file. Please * * review the following information to ensure the GNU General Public * * License version 2.0 requirements will be met: * * http://www.gnu.org/licenses/old-licenses/gpl-2.0.html. * * * * Commercial Usage * * Licensees holding valid ClarIDy/UBEC/DSR Commercial licenses may use * * this file in accordance with the ClarIDy/UBEC/DSR Commercial License * * Agreement provided with the Software or, alternatively, in accordance * * with the terms contained in a written agreement between you and * * ClarIDy/UBEC/DSR. 
* * * **************************************************************************** PURPOSE: Typical ZDO applications: ZC, ZR, ZE */ #include "rf/zb/hdr/zb_common.h" #include "rf/zb/hdr/zb_scheduler.h" #include "rf/zb/hdr/zb_bufpool.h" #include "rf/zb/hdr/zb_nwk.h" #include "rf/zb/hdr/zb_aps.h" #include "rf/zb/hdr/zb_zdo.h" #include "rf/zb/hdr/zb_secur.h" #include "rf/zb/hdr/zb_secur_api.h" #include "hdr/bank_5.h" /*! \addtogroup ZB_ZDO */ /*! @{ */ #ifdef APS_RETRANSMIT_TEST static void send_data(); #endif void zdo_send_device_annce(zb_uint8_t param); void zdo_join_done(zb_uint8_t param); void zb_zdo_force_child_leave(zb_uint8_t param, zb_uint16_t child_addr); static void init_config_attr(); void zb_zdo_init() { ZDO_CTX().conf_attr.nwk_indirect_poll_rate = ZB_ZDO_INDIRECT_POLL_TIMER; ZDO_CTX().max_parent_threshold_retry = ZB_ZDO_MAX_PARENT_THRESHOLD_RETRY; #if 0 /* that values already zeroed by global init */ ZDO_CTX().parent_threshold_retry = 0; ZDO_CTX().system_server_discovery_cb = NULL; ZDO_CTX().long_timer_cb = NULL; #endif ZDO_CTX().end_device_bind_ctx.bind_device_info[ZB_ZDO_BIND_DEV_1].end_device_bind_param = ZB_UNDEFINED_BUFFER; ZDO_CTX().end_device_bind_ctx.bind_device_info[ZB_ZDO_BIND_DEV_2].end_device_bind_param = ZB_UNDEFINED_BUFFER; ZDO_CTX().handle.allow_auth = 1; init_config_attr(); } static void init_config_attr() { ZDO_CTX().conf_attr.nwk_scan_attempts = ZB_ZDO_NWK_SCAN_ATTEMPTS; ZDO_CTX().conf_attr.nwk_time_btwn_scans = ZB_ZDO_NWK_TIME_BTWN_SCANS; ZDO_CTX().conf_attr.enddev_bind_timeout = ZB_ZDO_ENDDEV_BIND_TIMEOUT; ZDO_CTX().conf_attr.permit_join_duration = ZB_DEFAULT_PRMIT_JOINING_DURATION; #ifndef ZB_LIMITED_FEATURES #if defined ZB_COORDINATOR_ROLE zb_set_default_ffd_descriptor_values(ZB_COORDINATOR); #elif defined ZB_ROUTER_ROLE zb_set_default_ffd_descriptor_values(ZB_ROUTER); #else /* ZB_END_DEVICE */ zb_set_default_ed_descriptor_values(); #endif #endif } void zdo_main_loop() { while (1) { zb_sched_loop_iteration(); } } zb_ret_t 
zdo_dev_start() { zb_ret_t ret = RET_OK; /* zb_zdo_init(); it is called in zb_init() */ /* Startup procedure as defined in 2.5.5.5.6.2 Startup Procedure */ #ifdef ZB_USE_NVRAM zb_read_formdesc_data(); #endif if (ZB_EXTPANID_IS_ZERO(ZB_NIB_EXT_PAN_ID())) { /* This call is here to take into account parameters changed after * zb_init() but before zdo_dev_start(). For instance, it can be MAC * address and pan id. */ zb_handle_parms_before_start(); TRACE_MSG(TRACE_APS1, "ext pan id 0 - startup", (FMT__0)); if (ZB_AIB().aps_designated_coordinator) { #ifdef ZB_COORDINATOR_ROLE /* will start as coordinator: Formation */ zb_buf_t *buf = zb_get_out_buf(); zb_nlme_network_formation_request_t *req = ZB_GET_BUF_PARAM(buf, zb_nlme_network_formation_request_t); /* we must set nwkExtendedPanID to aspUseExtendedPanID if any */ if (!ZB_EXTPANID_IS_ZERO(ZB_AIB().aps_use_extended_pan_id)) { ZB_IEEE_ADDR_COPY(ZB_NIB_EXT_PAN_ID(), ZB_AIB().aps_use_extended_pan_id); } req->scan_channels = ZB_AIB().aps_channel_mask; req->scan_duration = ZB_DEFAULT_SCAN_DURATION; /* TODO: configure it somehow? 
*/ /* timeout for every channel is ((1l<<duration) + 1) * 15360 / 1000000 For duration 8 ~ 4s For duration 5 ~0.5s For duration 2 ~0.08s For duration 1 ~0.05s */ ret = ZB_SCHEDULE_CALLBACK(zb_nlme_network_formation_request, ZB_REF_FROM_BUF(buf)); #else TRACE_MSG(TRACE_MAC1, "Coordinator role is not supported", (FMT__0)); ret = RET_NOT_IMPLEMENTED; #endif } #ifndef ZB_LIMITED_FEATURES else if (!ZB_EXTPANID_IS_ZERO(ZB_AIB().aps_use_extended_pan_id)) { /* try to rejoin */ zb_buf_t *buf = zb_get_out_buf(); ret = zdo_initiate_rejoin(buf); } #endif else { /* ZR or ZC: discovery, then join */ zb_buf_t *buf = zb_get_out_buf(); zb_nlme_network_discovery_request_t *req = ZB_GET_BUF_PARAM(buf, zb_nlme_network_discovery_request_t); #ifdef ZB_ROUTER_ROLE if (ZB_NIB_DEVICE_TYPE() == ZB_NWK_DEVICE_TYPE_NONE) { ZB_NIB_DEVICE_TYPE() = ZB_NWK_DEVICE_TYPE_ROUTER; } #endif req->scan_channels = ZB_AIB().aps_channel_mask; req->scan_duration = ZB_DEFAULT_SCAN_DURATION; /* TODO: configure it somehow? */ TRACE_MSG(TRACE_APS1, "disc, then join by assoc", (FMT__0)); ZDO_CTX().zdo_ctx.discovery_ctx.disc_count = ZDO_CTX().conf_attr.nwk_scan_attempts; ret = ZB_SCHEDULE_CALLBACK(zb_nlme_network_discovery_request, ZB_REF_FROM_BUF(buf)); } } else { #ifdef ZB_USE_NVRAM zb_buf_t *buf = zb_get_out_buf(); buf->u.hdr.status = ZB_NWK_STATUS_ALREADY_PRESENT; ZB_SCHEDULE_CALLBACK(zb_zdo_startup_complete, ZB_REF_FROM_BUF(buf)); #endif TRACE_MSG(TRACE_APS1, "already in nw", (FMT__0)); /* TODO: verify that we have right channel ID and, maybe, do active scan to * find the channel. 
*/ } //temp trace TRACE_MSG(TRACE_APS1, "return now", (FMT__0)); return ret; } #ifndef ZB_LIMITED_FEATURES2 zb_ret_t zdo_initiate_rejoin(zb_buf_t *buf) { zb_nlme_join_request_t *req = ZB_GET_BUF_PARAM(buf, zb_nlme_join_request_t); TRACE_MSG(TRACE_APS1, ">>zdo_initiate_rejoin ", (FMT__0)); ZB_BZERO(req, sizeof(*req)); /* all defaults to 0 */ ZG->zdo.handle.started = 0; ZB_EXTPANID_COPY(req->extended_pan_id, ZB_AIB().aps_use_extended_pan_id); #ifdef ZB_ROUTER_ROLE if (ZB_NIB_DEVICE_TYPE() == ZB_NWK_DEVICE_TYPE_NONE) { ZB_NIB_DEVICE_TYPE() = ZB_NWK_DEVICE_TYPE_ROUTER; ZB_MAC_CAP_SET_ROUTER_CAPS(req->capability_information); /* join as ZR */ TRACE_MSG(TRACE_APS1, "Rejoin to pan " TRACE_FORMAT_64 " as ZR", (FMT__A, TRACE_ARG_64(ZB_AIB().aps_use_extended_pan_id))); } else #endif { TRACE_MSG(TRACE_APS1, "Rejoin to pan " TRACE_FORMAT_64 " as ZE", (FMT__A, TRACE_ARG_64(ZB_AIB().aps_use_extended_pan_id))); if (MAC_PIB().mac_rx_on_when_idle) { ZB_MAC_CAP_SET_RX_ON_WHEN_IDLE(req->capability_information, 1); } } /* if join as ZE - all cap to 0 (set by memset) */ ZB_MAC_CAP_SET_ALLOCATE_ADDRESS(req->capability_information, 1); req->rejoin_network = ZB_NLME_REJOIN_METHOD_REJOIN; req->scan_channels = ZB_AIB().aps_channel_mask; req->scan_duration = ZB_DEFAULT_SCAN_DURATION; /* TODO: configure it somehow? 
*/ ZG->zdo.handle.rejoin = 1; ZG->nwk.handle.joined = 0; #ifndef ZB_NS_BUILD //ZB_CLEAR_SHORT_ADDR(); /* to prevent from receiving packets during rejoin */ #endif TRACE_MSG(TRACE_APS1, "<<zdo_initiate_rejoin ", (FMT__0)); return ZB_SCHEDULE_CALLBACK(zb_nlme_join_request, ZB_REF_FROM_BUF(buf)); } #endif #ifdef ZB_COORDINATOR_ROLE void zb_nlme_network_formation_confirm(zb_uint8_t param) { zb_nlme_permit_joining_request_t *request = ZB_GET_BUF_PARAM(ZB_BUF_FROM_REF(param), zb_nlme_permit_joining_request_t); TRACE_MSG(TRACE_NWK1, "formation conf st %hd", (FMT__H, ((zb_buf_t *)ZB_BUF_FROM_REF(param))->u.hdr.status)); #if defined ZB_SECURITY secur_tc_init(); #endif if (ZG->nwk.nib.max_children > 0) { request->permit_duration = ZDO_CTX().conf_attr.permit_join_duration; } else { request->permit_duration = 0; } ZB_SCHEDULE_CALLBACK(zb_nlme_permit_joining_request, param); } void zb_nlme_permit_joining_confirm(zb_uint8_t param) { zb_address_ieee_ref_t addr_ref; TRACE_MSG(TRACE_NWK1, "permit j conf st %hd", (FMT__H, ((zb_buf_t *)ZB_BUF_FROM_REF(param))->u.hdr.status)); ZB_BUF_FROM_REF(param)->u.hdr.status = 0; zb_address_by_short(ZB_PIB_SHORT_ADDRESS(), ZB_TRUE, ZB_FALSE, &addr_ref); ZB_SCHEDULE_CALLBACK(zb_zdo_startup_complete, param); } #endif /* ZB_COORDINATOR_ROLE */ void zb_nlme_join_indication(zb_uint8_t param) { #ifdef ZB_TRACE_LEVEL zb_nlme_join_indication_t *ind = ZB_GET_BUF_PARAM(ZB_BUF_FROM_REF(param), zb_nlme_join_indication_t); TRACE_MSG(TRACE_NWK1, "JOINED (st %hd) dev 0x%x/" TRACE_FORMAT_64 " cap: dev type %hd rx.w.i. 
%hd rejoin %hd secur %hd", (FMT__H_D_A_H_H_H_H, (zb_uint8_t)(((zb_buf_t *)ZB_BUF_FROM_REF(param))->u.hdr.status), (zb_uint16_t)ind->network_address, TRACE_ARG_64(ind->extended_address), ZB_MAC_CAP_GET_DEVICE_TYPE(ind->capability_information), ZB_MAC_CAP_GET_RX_ON_WHEN_IDLE(ind->capability_information), ind->rejoin_network, ind->secure_rejoin, ZB_NIB_SECURITY_LEVEL())); #endif #if defined ZB_SECURITY && defined ZB_COORDINATOR_ROLE if (ZG->nwk.nib.security_level != 0) { /* Authenticate device: send network key to it */ ZB_SCHEDULE_CALLBACK(secur_authenticate_child, param); } else #endif { zb_free_buf(ZB_BUF_FROM_REF(param)); } } void zb_nlme_network_discovery_confirm(zb_uint8_t param) { zb_nlme_network_discovery_confirm_t *cnf; zb_nlme_network_descriptor_t *dsc; zb_ushort_t i; zb_nlme_join_request_t *req; TRACE_MSG(TRACE_NWK1, "disc st %hd", (FMT__H, ((zb_buf_t *)ZB_BUF_FROM_REF(param))->u.hdr.status)); cnf = (zb_nlme_network_discovery_confirm_t *)ZB_BUF_BEGIN((zb_buf_t *)ZB_BUF_FROM_REF(param)); dsc = (zb_nlme_network_descriptor_t *)(cnf + 1); TRACE_MSG(TRACE_NWK1, "Disc res: st %hd, nw_cnt %hd", (FMT__H_H, (int)cnf->status, (int)cnf->network_count)); #ifdef ZB_TRACE_LEVEL for (i = 0 ; i < cnf->network_count ; ++i) { TRACE_MSG(TRACE_NWK1, "net %hd: xpanid " TRACE_FORMAT_64 ", ch %hd, s.prof %hd, zb v %hd, beacon_ord %hd, superf_ord %hd, permit_j %hd, rtr_cap %hd, ed_cap %hd", (FMT__H_A_H_H_H_H_H_H_H_H, i, TRACE_ARG_64(dsc->extended_pan_id), (int)dsc->logical_channel, (int)dsc->stack_profile, (int)dsc->zigbee_version, (int)dsc->beacon_order, (int)dsc->superframe_order, (int)dsc->permit_joining, (int)dsc->router_capacity, (int)dsc->end_device_capacity)); dsc++; } #endif req = ZB_GET_BUF_PARAM(ZB_BUF_FROM_REF(param), zb_nlme_join_request_t); dsc = (zb_nlme_network_descriptor_t *)(cnf + 1); /* Now join thru Association */ for (i = 0 ; i < cnf->network_count ; ++i) { if (ZB_EXTPANID_IS_ZERO(ZB_AIB().aps_use_extended_pan_id) || ZB_EXTPANID_CMP(dsc->extended_pan_id, 
ZB_AIB().aps_use_extended_pan_id)) { /* Now join to the first network or network with desired ext pan id. TODO: find best pan to join to. */ ZB_BZERO(req, sizeof(*req)); /* all defaults to 0 */ ZB_EXTPANID_COPY(req->extended_pan_id, dsc->extended_pan_id); #ifdef ZB_ROUTER_ROLE /* joined_pro, here's one of the key moments */ if ((ZB_NIB_DEVICE_TYPE() == ZB_NWK_DEVICE_TYPE_NONE || ZB_NIB_DEVICE_TYPE() == ZB_NWK_DEVICE_TYPE_ROUTER) #ifdef ZB_PRO_COMPATIBLE &&(dsc->stack_profile == 1) #endif ) { ZB_MAC_CAP_SET_ROUTER_CAPS(req->capability_information); /* join as router */ } else #endif { if (MAC_PIB().mac_rx_on_when_idle) { ZB_MAC_CAP_SET_RX_ON_WHEN_IDLE(req->capability_information, 1); } } ZB_MAC_CAP_SET_ALLOCATE_ADDRESS(req->capability_information, 1); ZB_SCHEDULE_CALLBACK(zb_nlme_join_request, param); break; } } /* for */ if (i == cnf->network_count) { TRACE_MSG(TRACE_APS1, "Can't find PAN to join to!", (FMT__0)); /* Indicate startup failure */ ZB_BUF_FROM_REF(param)->u.hdr.status = ZB_NWK_STATUS_NOT_PERMITTED; ZB_SCHEDULE_CALLBACK(zb_zdo_startup_complete, param); } } void zdo_join_done(zb_uint8_t param) { TRACE_MSG(TRACE_NWK1, ">>join_done %hd", (FMT__H, param)); /* Not sure this is right, but let's send annonce after authentication complete */ #ifdef ZB_SECURITY if (ZG->nwk.nib.security_level != 0) { zb_free_buf(ZB_BUF_FROM_REF(param)); } else #endif { ZB_SCHEDULE_CALLBACK(zdo_send_device_annce, param); } /* clear poll retry count */ ZDO_CTX().parent_threshold_retry = 0; TRACE_MSG(TRACE_NWK1, "mac_rx_on_when_idle %hd", (FMT__H, MAC_PIB().mac_rx_on_when_idle)); /* Start polling function */ TRACE_MSG(TRACE_COMMON1, "Join done, scheduling poll request with appropriate tmout", (FMT__0)); zb_zdo_reschedule_poll_parent(ZG->zdo.conf_attr.nwk_indirect_poll_rate); ZB_P3_ON(); TRACE_MSG(TRACE_NWK1, "<<join_done", (FMT__0)); } #ifndef ZB_ED_ROLE void zb_nlme_start_router_confirm(zb_uint8_t param) ZB_CALLBACK { TRACE_MSG(TRACE_NWK1, ">> start_router_confirm", (FMT__0)); 
ZB_SCHEDULE_CALLBACK(zdo_join_done, param); TRACE_MSG(TRACE_NWK1, "<< start_router_confirm", (FMT__0)); } #endif void zb_nlme_join_confirm(zb_uint8_t param) { zb_nlme_join_confirm_t *confirm = ZB_GET_BUF_PARAM((zb_buf_t *)ZB_BUF_FROM_REF(param), zb_nlme_join_confirm_t); TRACE_MSG(TRACE_NWK1, ">>nlme_join_conf %hd", (FMT__H, param)); if (confirm->status == 0) { TRACE_MSG(TRACE_COMMON1, "CONGRATULATIONS! joined status %hd, addr %d, xpanid " TRACE_FORMAT_64 ", ch %hd, addr 0x%x", (FMT__H_D_A_H_D, confirm->status, confirm->network_address, TRACE_ARG_64(confirm->extended_pan_id), confirm->active_channel, ZB_PIB_SHORT_ADDRESS())); ZG->zdo.handle.started = 0; #ifdef ZB_ROUTER_ROLE if ( ZB_NIB_DEVICE_TYPE() == ZB_NWK_DEVICE_TYPE_ROUTER ) { zb_nlme_start_router_request_t *request; ZB_BUF_REUSE((zb_buf_t *)ZB_BUF_FROM_REF(param)); request = ZB_GET_BUF_PARAM((zb_buf_t *)ZB_BUF_FROM_REF(param), zb_nlme_start_router_request_t); request->beacon_order = ZB_TURN_OFF_ORDER; request->superframe_order = ZB_TURN_OFF_ORDER; request->battery_life_extension = 0; ZB_SCHEDULE_CALLBACK(zb_nlme_start_router_request, param); } else #endif { ZB_SCHEDULE_CALLBACK(zdo_join_done, param); } } else if (ZG->zdo.handle.rejoin && ZB_AIB().aps_insecure_join) { zb_nlme_network_discovery_request_t *req = ZB_GET_BUF_PARAM(ZB_BUF_FROM_REF(param), zb_nlme_network_discovery_request_t); TRACE_MSG(TRACE_ZDO1, "rejoin failed st %hd - try assoc", (FMT__H, (int)confirm->status)); ZG->zdo.handle.rejoin = 0; #ifdef ZB_ROUTER_ROLE if (ZB_NIB_DEVICE_TYPE() == ZB_NWK_DEVICE_TYPE_NONE) { ZB_NIB_DEVICE_TYPE() = ZB_NWK_DEVICE_TYPE_ROUTER; } else #endif { ZB_NIB_DEVICE_TYPE() = ZB_NWK_DEVICE_TYPE_ED; } req->scan_channels = ZB_AIB().aps_channel_mask; req->scan_duration = ZB_DEFAULT_SCAN_DURATION; /* TODO: configure it somehow? 
*/ ZDO_CTX().zdo_ctx.discovery_ctx.disc_count = ZDO_CTX().conf_attr.nwk_scan_attempts; ZB_SCHEDULE_CALLBACK(zb_nlme_network_discovery_request, param); } else { TRACE_MSG(TRACE_ZDO1, "assoc j failed st %hd", (FMT__H, (int)confirm->status)); ZB_BUF_FROM_REF(param)->u.hdr.status = ZB_NWK_STATUS_NOT_PERMITTED; ZB_SCHEDULE_CALLBACK(zb_zdo_startup_complete, param); } TRACE_MSG(TRACE_NWK1, "<<nlme_join_conf", (FMT__0)); } void zdo_send_device_annce(zb_uint8_t param) { TRACE_MSG(TRACE_ZDO1, "device_annce", (FMT__0)); { zb_zdo_device_annce_t *da; ZB_BUF_INITIAL_ALLOC(ZB_BUF_FROM_REF(param), sizeof(*da), da); ZDO_CTX().tsn++; da->tsn = ZDO_CTX().tsn; ZB_HTOLE16(&da->nwk_addr, &ZB_PIB_SHORT_ADDRESS()); ZB_IEEE_ADDR_COPY(da->ieee_addr, ZB_PIB_EXTENDED_ADDRESS()); da->capability = 0; #ifdef ZB_ROUTER_ROLE if (ZB_NIB_DEVICE_TYPE() == ZB_NWK_DEVICE_TYPE_ROUTER) { ZB_MAC_CAP_SET_ROUTER_CAPS(da->capability); /* ZB_MAC_CAP_SET_SECURITY means high security mode - never set it */ } else #endif { if (MAC_PIB().mac_rx_on_when_idle) { ZB_MAC_CAP_SET_RX_ON_WHEN_IDLE(da->capability, 1); } } } { zb_apsde_data_req_t *dreq = ZB_GET_BUF_TAIL(ZB_BUF_FROM_REF(param), sizeof(zb_apsde_data_req_t)); ZB_BZERO(dreq, sizeof(*dreq)); /* Broadcast to all devices for which macRxOnWhenIdle = TRUE. MAC layer in ZE sends unicast to its parent. */ dreq->dst_addr.addr_short = ZB_NWK_BROADCAST_RX_ON_WHEN_IDLE; dreq->addr_mode = ZB_APS_ADDR_MODE_16_ENDP_PRESENT; /* use default radius, max_depth * 2 */ dreq->clusterid = ZDO_DEVICE_ANNCE_CLID; ZG->zdo.handle.dev_annce = param; } ZB_SCHEDULE_CALLBACK(zb_apsde_data_request, param); #if 0 zb_zdo_device_annce(param); #endif } #if 0 /** Device_annce is special primitive: no reply to it. It is part of Discovery primitives section but, indeed, it is very special. Directly fill APS packet here. 
See 2.4.3.1.11 */ void zb_zdo_device_annce(zb_uint8_t param) ZB_SDCC_REENTRANT { { zb_uint8_t *tsn_p; ZB_BUF_ALLOC_LEFT(ZB_BUF_FROM_REF(param), 1, tsn_p); ZDO_CTX().tsn++; *tsn_p = ZDO_CTX().tsn; } { zb_apsde_data_req_t *dreq = ZB_GET_BUF_TAIL(ZB_BUF_FROM_REF(param), sizeof(zb_apsde_data_req_t)); TRACE_MSG(TRACE_ZDO1, "device_annce", (FMT__0)); ZB_BZERO(dreq, sizeof(*dreq)); /* Broadcast to all devices for which macRxOnWhenIdle = TRUE. MAC layer in ZE sends unicast to its parent. */ dreq->dst_addr.addr_short = ZB_NWK_BROADCAST_RX_ON_WHEN_IDLE; dreq->addr_mode = ZB_APS_ADDR_MODE_16_ENDP_PRESENT; /* use default radius, max_depth * 2 */ dreq->clusterid = ZDO_DEVICE_ANNCE_CLID; ZG->zdo.handle.dev_annce = param; } ZB_SCHEDULE_CALLBACK(zb_apsde_data_request, param); } #endif void zb_apsde_data_confirm(zb_uint8_t param) { zb_buf_t *buf = (zb_buf_t *)ZB_BUF_FROM_REF(param); zb_address_ieee_ref_t addr_ref; TRACE_MSG(TRACE_APS3, "apsde_data_conf: param %hd status %hd dev_annce %hd key_sw %hd", (FMT__H_H_H_H, param, buf->u.hdr.status, ZG->zdo.handle.dev_annce, ZG->zdo.handle.key_sw)); if (ZG->zdo.handle.dev_annce == param) { zb_uint8_t status = buf->u.hdr.status; ZB_BUF_REUSE(buf); ZG->zdo.handle.dev_annce = 0; /* Indicate startup complete */ buf->u.hdr.status = status; zb_address_by_short(ZB_PIB_SHORT_ADDRESS(), ZB_TRUE, ZB_TRUE, &addr_ref); TRACE_MSG(TRACE_ZDO1, "was device_annce, start compl, st %hd", (FMT__H, buf->u.hdr.status)); if ( !ZG->zdo.handle.started ) { ZG->zdo.handle.started = 1; ZB_SCHEDULE_CALLBACK(zb_zdo_startup_complete, param); } } #if defined ZB_SECURITY && defined ZB_COORDINATOR_ROLE else if (ZG->zdo.handle.key_sw == param) { TRACE_MSG(TRACE_SECUR3, "switch nwk key after this frame sent", (FMT__0)); ZG->zdo.handle.key_sw = 0; secur_nwk_key_switch(ZG->nwk.nib.active_key_seq_number + 1); } #endif #ifndef ZB_LIMITED_FEATURES else if (!ZG->nwk.leave_context.leave_after_mgmt_leave_rsp_conf || !zdo_try_mgmt_leave_complete(param)) { /* RET_OK and RET_NO_ACK 
statuses mean that confirm was * called from aps_ack_check_handle() */ if (buf->u.hdr.status == 0 || buf->u.hdr.status == (zb_uint8_t)RET_NO_ACK) { TRACE_MSG(TRACE_ZDO1, "buffer status %hd - call zb_apsde_data_acknowledged", (FMT__H, buf->u.hdr.status)); ZB_SCHEDULE_CALLBACK(zb_apsde_data_acknowledged, param); } else { TRACE_MSG(TRACE_ZDO1, "buffer status %hd - free buf", (FMT__H, buf->u.hdr.status)); zb_free_buf(buf); } #ifdef APS_RETRANSMIT_TEST send_data(); #endif } #endif } #ifdef APS_RETRANSMIT_TEST static void send_data() { zb_buf_t *buf = NULL; zb_apsde_data_req_t req; zb_uint8_t *ptr = NULL; zb_short_t i; buf = zb_get_out_buf(); req.dst_addr.addr_short = 0; /* send to ZC */ req.addr_mode = ZB_APS_ADDR_MODE_16_ENDP_PRESENT; req.tx_options = ZB_APSDE_TX_OPT_ACK_TX; req.radius = 1; req.profileid = 2; req.src_endpoint = 10; req.dst_endpoint = 10; buf->u.hdr.handle = 0x11; ZB_BUF_INITIAL_ALLOC(buf, 80, ptr); for (i = 0 ; i < ZB_TEST_DATA_SIZE ; ++i) { ptr[i] = i % 32 + '0'; } ZB_MEMCPY( ZB_GET_BUF_TAIL(buf, sizeof(req)), &req, sizeof(req)); TRACE_MSG(TRACE_APS3, "Sending apsde_data.request", (FMT__0)); ZB_SCHEDULE_CALLBACK(zb_apsde_data_request, ZB_REF_FROM_BUF(buf)); } #endif void zb_nlme_sync_confirm(zb_uint8_t param) { zb_bool_t sched_poll = (zb_bool_t)ZG->nwk.handle.joined; TRACE_MSG(TRACE_NWK1, ">>zb_nlme_sync_confirm %hd", (FMT__H, param)); ZDO_CTX().inside_poll = 0; TRACE_MSG(TRACE_NWK1, "status %hd", (FMT__H, ZB_BUF_FROM_REF(param)->u.hdr.status)); #ifndef ZB_LIMITED_FEATURES if ( ZB_BUF_FROM_REF(param)->u.hdr.status == MAC_SUCCESS || ZB_BUF_FROM_REF(param)->u.hdr.status == MAC_NO_DATA ) { /* nothing to do */ } else { ZDO_CTX().parent_threshold_retry++; if ( ZDO_CTX().parent_threshold_retry >= ZDO_CTX().max_parent_threshold_retry ) { sched_poll = ZB_FALSE; /* rejoin to current pan */ ZB_EXTPANID_COPY(ZB_AIB().aps_use_extended_pan_id, ZB_NIB_EXT_PAN_ID()); /* rejoin */ zdo_initiate_rejoin(ZB_BUF_FROM_REF(param)); /* prevent buffer from being free */ param 
/* ---------------------------------------------------------------------------
 * ZDO-level NLME indication/confirm handlers.
 * NOTE(review): this span begins inside zb_nlme_sync_confirm(); the opening
 * lines of that function are outside this chunk.
 * ------------------------------------------------------------------------- */

/* Tail of zb_nlme_sync_confirm(): optionally re-arm parent polling, then
   release the confirm buffer. */
= 0;
    }
  }
#endif
  if (sched_poll)
  {
    TRACE_MSG(TRACE_NWK1, "schedule poll if needed rx_on_when_idle %hd", (FMT__H, MAC_PIB().mac_rx_on_when_idle));
    /* Start polling function if necessary */
    zb_zdo_reschedule_poll_parent(ZG->zdo.conf_attr.nwk_indirect_poll_rate);
  }
  if ( param )
  {
    /* Done with the confirm - return the buffer to the pool. */
    zb_free_buf(ZB_BUF_FROM_REF(param));
  }
  TRACE_MSG(TRACE_NWK1, "<<zb_nlme_sync_confirm", (FMT__0));
}

/* Re-arm the poll-parent alarm.
 * @param timeout  poll period passed to the scheduler alarm (ZDO indirect
 *                 poll rate units; exact unit not visible here - TODO confirm
 *                 against the scheduler documentation).
 * Skipped entirely when the radio is kept on while idle or a poll is already
 * in flight (inside_poll). */
void zb_zdo_reschedule_poll_parent(zb_uint16_t timeout)
{
  /* reschedule alarm only if we are not waiting for the poll confirm and
   * really need polls */
  if (!(MAC_PIB().mac_rx_on_when_idle || ZDO_CTX().inside_poll))
  {
    ZB_SCHEDULE_ALARM_CANCEL(zb_zdo_poll_parent, 0);
    /* If FFD have some pending data for us, we schedule poll w/o timeout */
    ZB_SCHEDULE_ALARM(zb_zdo_poll_parent, 0, ZB_MAC_GET_PENDING_DATA()? 1 : timeout);
  }
}

/* Poll the parent for pending indirect data.
 * @param param  buffer reference, or 0 on the alarm-triggered first pass.
 * Two-phase: with param == 0 it only requests an output buffer (re-entering
 * itself via ZB_GET_OUT_BUF_DELAYED once one is available, and only while
 * joined); with a buffer it fills in a sync request and schedules
 * zb_nlme_sync_request, marking inside_poll so reschedule is suppressed. */
void zb_zdo_poll_parent(zb_uint8_t param)
{
  TRACE_MSG(TRACE_NWK1, ">>poll_prnt %hd", (FMT__H, param));
  if ( !param )
  {
    if (ZG->nwk.handle.joined)
    {
      ZB_GET_OUT_BUF_DELAYED(zb_zdo_poll_parent);
    }
  }
  else
  {
    zb_nlme_sync_request_t *request = ZB_GET_BUF_PARAM(ZB_BUF_FROM_REF(param), zb_nlme_sync_request_t);
    request->track = ZB_FALSE;
    ZDO_CTX().inside_poll = 1;
    ZB_SCHEDULE_CALLBACK(zb_nlme_sync_request, param);
  }
  TRACE_MSG(TRACE_NWK1, "<<poll_prnt", (FMT__0));
}

/* NLME-NWK-STATUS.indication handler.
   (The original comment's spec reference was garbled into "172.16.17.32" by
   an address scrubber - original section number unknown.)
   End-device build: counts parent link failures and initiates a rejoin once
   the threshold is reached (or immediately on a security status, unless
   rejoin-after-security-failure is disabled).
   Router/coordinator build: on a security status from an unauthenticated
   child, forces that child to leave; on one from the parent, wipes the
   security state and rejoins. The buffer is freed unless handed off. */
void zb_nlme_status_indication(zb_uint8_t param)
{
  zb_buf_t *buf = (zb_buf_t *)ZB_BUF_FROM_REF(param);
  zb_nlme_status_indication_t *status = ZB_GET_BUF_PARAM(buf, zb_nlme_status_indication_t);

  TRACE_MSG(TRACE_NWK1, ">>zb_nlme_status_indication %hd", (FMT__H, param));
  TRACE_MSG(TRACE_NWK1, "Got nwk status indication: status %hd address %d", (FMT__H_D, status->status, status->network_addr));

#ifdef ZB_ED_ROLE
  if ( status->status == ZB_NWK_COMMAND_STATUS_PARENT_LINK_FAILURE )
  {
    ZDO_CTX().parent_link_failure++;
  }
  TRACE_MSG(TRACE_NWK1, "parent link failure %hd", (FMT__H, ZDO_CTX().parent_link_failure));
#ifndef ZB_LIMITED_FEATURES
  if ( ZDO_CTX().parent_link_failure >= ZB_ZDO_PARENT_LINK_FAILURE_CNT
       /* ED must rejoin at first failed unsecure: that is key miss, probably */
#ifndef ZB_DISABLE_REJOIN_AFTER_SEC_FAIL
       || ZB_NWK_COMMAND_STATUS_IS_SECURE(status->status)
#endif
     )
  {
    ZB_MAC_CLEAR_CHANNEL_ERROR_TEST();
    ZDO_CTX().parent_link_failure = 0;
    /* rejoin to current pan */
    ZB_EXTPANID_COPY(ZB_AIB().aps_use_extended_pan_id, ZB_NIB_EXT_PAN_ID());
    /* Buffer ownership passes to the rejoin logic here. */
    zdo_initiate_rejoin(buf);
  }
  else
#endif
  {
    zb_free_buf(buf);
  }
#else /* ZR/ZC*/
#ifdef ZB_SECURITY
#ifndef ZB_DISABLE_REJOIN_AFTER_SEC_FAIL
  if (ZB_NWK_COMMAND_STATUS_IS_SECURE(status->status))
  {
    zb_neighbor_tbl_ent_t *nbe;
    if (zb_nwk_neighbor_get_by_short(status->network_addr, &nbe) == RET_OK)
    {
      TRACE_MSG(TRACE_SECUR3, "nwk status %hd addr %d relationship %hd", (FMT__H_D_H, status->status, status->network_addr, nbe->relationship));
      if (nbe->relationship == ZB_NWK_RELATIONSHIP_UNAUTHENTICATED_CHILD)
      {
        TRACE_MSG(TRACE_SECUR3, "Child %d security error - force its leave", (FMT__D, status->network_addr));
        zb_zdo_force_child_leave(param, status->network_addr);
        /* param handed off - prevent the free below. */
        param = 0;
      }
      else if (nbe->relationship == ZB_NWK_RELATIONSHIP_PARENT)
      {
        TRACE_MSG(TRACE_SECUR3, "Security error with my parent - rejoin", (FMT__0));
        /* rejoin to current pan */
        ZB_EXTPANID_COPY(ZB_AIB().aps_use_extended_pan_id, ZB_NIB_EXT_PAN_ID());
        ZB_NIB_DEVICE_TYPE() = ZB_NWK_DEVICE_TYPE_NONE;
        secur_clear_preconfigured_key();
        ZG->aps.authenticated = 0;
        zdo_initiate_rejoin(buf);
        param = 0;
      }
      /* Don't care about security errors not from my child or parent */
    }
  }
#endif
  if (param)
#endif /* ZB_SECURITY */
  {
    zb_free_buf(buf);
  }
#endif /* role */
  TRACE_MSG(TRACE_NWK1, "<<zb_nlme_status_indication", (FMT__0));
}

#ifdef ZB_ROUTER_ROLE
/* Force a (misbehaving/unauthenticated) child out of the network by issuing
 * an NLME-LEAVE.request for its IEEE address with rejoin allowed and
 * remove_children not set.
 * @param param       buffer reference used to carry the leave request
 * @param child_addr  short address of the child to evict
 * NOTE(review): if the short->IEEE address lookup fails, the buffer is
 * neither scheduled nor freed here - confirm the caller handles that path. */
void zb_zdo_force_child_leave(zb_uint8_t param, zb_uint16_t child_addr)
{
  zb_buf_t *buf = ZB_BUF_FROM_REF(param);
  zb_nlme_leave_request_t *lr = NULL;
  zb_address_ieee_ref_t addr_ref;
  zb_ret_t ret = RET_OK;

  TRACE_MSG(TRACE_NWK1, ">>zb_zdo_force_child_leave", (FMT__0));
  lr = ZB_GET_BUF_PARAM(buf, zb_nlme_leave_request_t);
  ret = zb_address_by_short(child_addr, ZB_FALSE, ZB_FALSE, &addr_ref);
  if (ret == RET_OK)
  {
    zb_ieee_addr_t ieee_addr;
    zb_address_ieee_by_ref(ieee_addr, addr_ref);
    ZB_MEMCPY(lr->device_address, ieee_addr, sizeof(zb_ieee_addr_t));
    lr->remove_children = ZB_FALSE;
    lr->rejoin = ZB_TRUE;
    ZB_SCHEDULE_CALLBACK(zb_nlme_leave_request, param);
  }
  TRACE_MSG(TRACE_NWK1, "<<zb_zdo_force_child_leave status %d", (FMT__D, ret));
}
#endif /* ZB_ROUTER_ROLE */

#ifndef ZB_LIMITED_FEATURES
/* NLME-RESET.confirm: forward the confirm to the callback registered via
 * zb_zdo_reset(), or free the buffer if none is set. */
void zb_nlme_reset_confirm(zb_uint8_t param)
{
  zb_buf_t *buf = (zb_buf_t *)ZB_BUF_FROM_REF(param);

  TRACE_MSG(TRACE_NWK1, ">>zb_nlme_reset_confirm %p", (FMT__P, buf));
  if ( ZDO_CTX().reset_confirm_cb )
  {
    ZB_SCHEDULE_CALLBACK(ZDO_CTX().reset_confirm_cb, param);
    /* One-shot callback - clear it so a stale pointer is never re-fired. */
    ZDO_CTX().reset_confirm_cb = NULL;
  }
  else
  {
    TRACE_MSG(TRACE_NWK1, "Reset confirm callback is not set", (FMT__0));
    zb_free_buf(buf);
  }
  TRACE_MSG(TRACE_NWK1, "<<zb_nlme_reset_confirm", (FMT__0));
}

/* Issue an NLME-RESET.request.
 * @param param       buffer reference carrying the request
 * @param warm_start  passed through to the NWK reset request
 * @param cb          callback invoked from zb_nlme_reset_confirm() */
void zb_zdo_reset(zb_uint8_t param, zb_uint8_t warm_start, zb_callback_t cb)
{
  zb_buf_t *buf = (zb_buf_t *)ZB_BUF_FROM_REF(param);
  zb_nlme_reset_request_t *request = ZB_GET_BUF_PARAM(buf, zb_nlme_reset_request_t);

  ZDO_CTX().reset_confirm_cb = cb;
  /* schedule reset request */
  request->warm_start = warm_start;
  ZB_SCHEDULE_CALLBACK(zb_nlme_reset_request, param);
}

/**
   NLME-LEAVE.indication primitive
   (review: the original comment said ".confirm", but this is the handler for
   the indication - see the function name below.)

   Called when device got LEAVE command from the net. It can be a request for
   us to leave or an indication that another device has left.
 */
void zb_nlme_leave_indication(zb_uint8_t param)
{
  zb_nlme_leave_indication_t *request = ZB_GET_BUF_PARAM(ZB_BUF_FROM_REF(param), zb_nlme_leave_indication_t);

  if (ZB_IEEE_ADDR_IS_ZERO(request->device_address))
  {
    /* it is for us */
    TRACE_MSG(TRACE_ZDO2, "do leave", (FMT__0));
    zb_nwk_do_leave(param, request->rejoin);
  }
  else
  {
    zb_neighbor_tbl_ent_t *nbt;
    if (zb_nwk_neighbor_get_by_ieee(request->device_address, &nbt) == RET_OK)
    {
#ifdef ZB_SECURITY
#ifdef ZB_ROUTER_ROLE
      if (!ZG->nwk.handle.is_tc
          && (nbt->relationship == ZB_NWK_RELATIONSHIP_CHILD
              || nbt->relationship == ZB_NWK_RELATIONSHIP_UNAUTHENTICATED_CHILD))
      {
        /* My child has left, I must inform TC */
        zb_apsme_update_device_req_t *req = ZB_GET_BUF_PARAM(ZB_BUF_FROM_REF(param), zb_apsme_update_device_req_t);

        TRACE_MSG(TRACE_SECUR3, "sending update-device.request (device left) to TC", (FMT__0));
        req->status = ZB_DEVICE_LEFT;
        ZB_IEEE_ADDR_COPY(req->dest_address, ZB_AIB().trust_center_address);
        zb_address_short_by_ref(&req->device_short_address, nbt->addr_ref);
        zb_address_ieee_by_ref(req->device_address, nbt->addr_ref);
        ZB_SCHEDULE_CALLBACK(zb_apsme_update_device_request, param);
        /* Buffer reused for the update-device request - don't free below. */
        param = 0;
      }
#endif
#endif /* security */
      /* forget this device */
      TRACE_MSG(TRACE_ZDO2, "forget device by addr ref %hd", (FMT__H, nbt->addr_ref));
      zb_nwk_forget_device(nbt->addr_ref);
    }
    if (param)
    {
      zb_free_buf(ZB_BUF_FROM_REF(param));
    }
  }
}

/**
   NLME-LEAVE.confirm primitive

   Called when LEAVE initiated by LEAVE.REQUEST sent LEAVE command to net.
 */
void zb_nlme_leave_confirm(zb_uint8_t param)
{
  zb_nlme_leave_confirm_t *lc = ZB_GET_BUF_PARAM(ZB_BUF_FROM_REF(param), zb_nlme_leave_confirm_t);
  /* A zero device address in the confirm means the leave was for us. */
  zb_uint8_t will_leave = (!lc->status && ZB_IEEE_ADDR_IS_ZERO(lc->device_address));
  zb_neighbor_tbl_ent_t *ent = NULL;

  TRACE_MSG(TRACE_ZDO2, "LEAVE.CONFIRM satus %hd will_leave %hd", (FMT__H_H, lc->status, will_leave));
#ifndef ZB_LIMITED_FEATURES
  if (!zdo_try_send_mgmt_leave_rsp(param, lc->status, will_leave))
  {
    /* no need to send a response - maybe leave now */
    if (will_leave)
    {
      zb_nwk_do_leave(param, ZG->nwk.leave_context.rejoin_after_leave);
    }
    else
    {
      /* From 3.6.1.10.2 "Method for a Device to Remove Its Child from the
         Network": when a device leaves the network we must clear the
         neighbor table entry. */
      TRACE_MSG(TRACE_ZDO2, "removing device from nbt", (FMT__0));
      /* NOTE(review): the lookup result is not checked here; if the entry is
         missing, ent stays NULL and ent->addr_ref dereferences NULL -
         confirm the entry is guaranteed to exist on this path. */
      zb_nwk_neighbor_get_by_ieee(lc->device_address, &ent);
      zb_nwk_neighbor_delete(ent->addr_ref);
      zb_free_buf(ZB_BUF_FROM_REF(param));
    }
  }
  else
  {
    /* leave after mgmt resp will be sent */
    ZG->nwk.leave_context.leave_after_mgmt_leave_rsp_conf = will_leave;
  }
#endif
}

/* MLME-SET.confirm: trace the status (trace builds only) and release the
   buffer. */
void zb_mlme_set_confirm(zb_uint8_t param)
{
  zb_buf_t *buf = ZB_BUF_FROM_REF(param);

#ifdef ZB_TRACE_LEVEL
  {
    zb_mlme_set_confirm_t *conf = ( zb_mlme_set_confirm_t *)ZB_BUF_BEGIN(buf);
    TRACE_MSG(TRACE_APS2, "<<zb_mlme_set_confirm status %hd", (FMT__H, conf->status));
  }
#endif
  zb_free_buf(buf);
}
#endif /* ZB_LIMITED_FEATURES */

/*! @} */
Spooner/pixel-table
pixel_table/sprites/sprite.py
from __future__ import absolute_import, division, print_function, unicode_literals

from ..mixins.handles_events import HandlesEvents


class Sprite(HandlesEvents):
    """A positioned, colored rectangle rendered on the pixel grid.

    Position is stored as floats so fractional movement accumulates; the
    displayed position is the rounded ``int_position``.  Event behavior
    (``emit``, ``initialize_event_handlers``) comes from the HandlesEvents
    mixin, whose semantics are not visible in this file.
    """

    def __init__(self, x, y, width=1, height=1, color=(1.0, 1.0, 1.0)):
        self._x, self._y = x, y
        self._width, self._height = width, height
        # Color as an RGB triple of floats in [0, 1] (default is white).
        self._color = color
        # Guard flag so destroy() only emits the event once.
        self._is_destroyed = False
        self.initialize_event_handlers()  # provided by HandlesEvents mixin

    @property
    def int_position(self):
        """Position rounded to the nearest integer pixel: (x, y) tuple."""
        return int(round(self._x)), int(round(self._y))

    def collide_point(self, x, y):
        """Return True if integer point (x, y) lies inside the sprite's
        displayed rectangle (based on int_position, not the float position)."""
        display_x, display_y = self.int_position
        return (display_x <= x < display_x + self._width) and (display_y <= y < display_y + self._height)

    @property
    def x(self):
        # Float x position (may be fractional).
        return self._x

    @property
    def y(self):
        # Float y position (may be fractional).
        return self._y

    @property
    def width(self):
        return self._width

    @property
    def height(self):
        return self._height

    def move_by(self, dx, dy, constrain=None):
        """Translate by (dx, dy); optionally clamp within `constrain`
        (see _constrain for the expected tuple shape)."""
        self._x += dx
        self._y += dy

        if constrain is not None:
            self._constrain(constrain)

    def move_to(self, dx, dy, constrain=None):
        """Move to an absolute position; optionally clamp within `constrain`.

        Note: the parameters are named dx/dy but are absolute coordinates,
        not deltas (contrast with move_by).
        """
        self._x = dx
        self._y = dy

        if constrain is not None:
            self._constrain(constrain)

    def _constrain(self, rect):
        """Constrain the position of the sprite within a rectangular area

        @param rect tuple (x, y, width, height)

        NOTE(review): the code clamps x into [rect[0], rect[2] - width] and
        y into [rect[1], rect[3] - height], i.e. it treats rect[2]/rect[3]
        as absolute max coordinates.  That only matches the documented
        (x, y, width, height) shape when rect[0] == rect[1] == 0 — confirm
        against callers before relying on either interpretation.
        """
        self._x = min(max(self._x, rect[0]), rect[2] - self._width)
        self._y = min(max(self._y, rect[1]), rect[3] - self._height)

    @property
    def color(self):
        return self._color

    @property
    def rect(self):
        """Displayed rectangle as (x, y, width, height) with integer x, y."""
        x, y = self.int_position
        return x, y, self._width, self._height

    def render(self, pixel_grid):
        """Draw onto pixel_grid.  No-op here; subclasses override."""
        pass

    def update(self, pixel_grid, dt):
        """Advance state by dt.  No-op here; subclasses override."""
        pass

    @classmethod
    def create(cls, *args, **kwargs):
        """Construct an instance and announce it via a "create_object" event."""
        obj = cls(*args, **kwargs)
        cls.emit("create_object", obj)
        return obj

    def destroy(self):
        """Emit "destroy_object" exactly once; later calls are ignored."""
        if not self._is_destroyed:
            self._is_destroyed = True
            self.emit("destroy_object", self)

    def __str__(self):
        x, y = self.int_position
        return "<{} ({}, {}) {}x{} {}>".format(type(self).__name__, x, y, self.width, self.height, self.color)
sungrade/sungrade_rails_toolkit
lib/sungrade_rails_toolkit/workflow_role.rb
<reponame>sungrade/sungrade_rails_toolkit require "json" require "sungrade_rails_toolkit/workflow_role/v0" module SungradeRailsToolkit module WorkflowRole class << self def v0 WorkflowRole::V0 end end end end
EddLabs/eddington-static
src/statue/runner.py
<reponame>EddLabs/eddington-static
"""Command map runner."""
import abc
import asyncio
import time
from enum import Enum, auto
from pathlib import Path
from typing import List

import tqdm

from statue.command import Command
from statue.commands_map import CommandsMap
from statue.constants import BAR_FORMAT, MAIN_BAR_COLOR, SECONDARY_BAR_COLOR
from statue.evaluation import Evaluation, SourceEvaluation


class RunnerMode(Enum):
    """Enum indicating in which mode are we running evaluation."""

    SYNC = auto()
    ASYNC = auto()
    # Alias member: in an Enum, assigning an existing member creates an
    # alias, so RunnerMode.DEFAULT_MODE is RunnerMode.SYNC.
    DEFAULT_MODE = SYNC


class EvaluationRunner:  # pylint: disable=too-few-public-methods
    """Evaluation runner interface."""

    @abc.abstractmethod
    def evaluate(
        self,
        commands_map: CommandsMap,
    ) -> Evaluation:
        """
        Abstract evaluation method.  # noqa: DAR202

        :param commands_map: map from source file to list of commands to run on it
        :type commands_map: CommandsMap
        :return: Total evaluation after running all commands.
        :rtype: Evaluation
        """


class SynchronousEvaluationRunner(  # pylint: disable=too-few-public-methods
    EvaluationRunner
):
    """Runner class for running commands synchronously."""

    def evaluate(
        self,
        commands_map: CommandsMap,
    ) -> Evaluation:
        """
        Run commands map and return evaluation report.

        Sources and their commands run strictly one after another; the outer
        bar tracks total commands, the inner (transient) bar tracks the
        current source.  Wall-clock durations are recorded per source and
        for the whole run.

        :param commands_map: map from source file to list of commands to run on it
        :type commands_map: CommandsMap
        :return: Total evaluation after running all commands.
        :rtype: Evaluation
        """
        evaluation = Evaluation()
        total_start_time = time.time()
        with tqdm.trange(
            commands_map.total_commands_count,
            bar_format=BAR_FORMAT,
            colour=MAIN_BAR_COLOR,
        ) as main_bar:
            for source, commands in commands_map.items():
                source_start_time = time.time()
                evaluation[source] = SourceEvaluation()
                for command in tqdm.tqdm(
                    commands,
                    bar_format=BAR_FORMAT,
                    colour=SECONDARY_BAR_COLOR,
                    leave=False,
                    desc=str(source),
                ):
                    evaluation[source].append(command.execute(source))
                    main_bar.update(1)
                source_end_time = time.time()
                evaluation[source].source_execution_duration = (
                    source_end_time - source_start_time
                )
        total_end_time = time.time()
        evaluation.total_execution_duration = total_end_time - total_start_time
        return evaluation


class AsynchronousEvaluationRunner(EvaluationRunner):
    """Runner class for running commands asynchronously."""

    def __init__(self):
        """Initialize runner."""
        # NOTE(review): asyncio.Lock() is created here, outside any running
        # event loop; on Python < 3.10 this binds the default loop at
        # construction time — confirm the runner is only used with
        # asyncio.run() as in evaluate() below.
        self.update_lock = asyncio.Lock()

    def evaluate(
        self,
        commands_map: CommandsMap,
    ) -> Evaluation:
        """
        Run commands map asynchronously and return evaluation report.

        :param commands_map: map from source file to list of commands to run on it
        :type commands_map: CommandsMap
        :return: Total evaluation after running all commands.
        :rtype: Evaluation
        """
        return asyncio.run(self.evaluate_commands_map(commands_map))

    async def evaluate_commands_map(
        self,
        commands_map: CommandsMap,
    ):
        """
        Main async function to run commands map and return evaluation report.

        All sources are evaluated concurrently via asyncio.gather; each
        source gets its own transient progress bar positioned below the
        main bar.

        :param commands_map: map from source file to list of commands to run on it
        :type commands_map: CommandsMap
        :return: Evaluation
        """
        evaluation = Evaluation()
        start_time = time.time()
        # Pad source names so the per-source bars line up vertically.
        max_source_name_length = max(
            [len(source.as_posix()) for source in commands_map.keys()]
        )
        with tqdm.trange(
            commands_map.total_commands_count,
            bar_format=BAR_FORMAT,
            colour=MAIN_BAR_COLOR,
        ) as main_bar:
            coros = [
                self.evaluate_source(
                    source_bar_pos=pos,
                    source=source,
                    commands=commands,
                    evaluation=evaluation,
                    main_bar=main_bar,
                    max_source_name_length=max_source_name_length,
                )
                for pos, (source, commands) in enumerate(commands_map.items(), start=1)
            ]
            await asyncio.gather(*coros)
        end_time = time.time()
        evaluation.total_execution_duration = end_time - start_time
        return evaluation

    async def evaluate_source(  # pylint: disable=too-many-arguments
        self,
        source: Path,
        commands: List[Command],
        evaluation: Evaluation,
        main_bar: tqdm.tqdm,
        source_bar_pos: int,
        max_source_name_length: int,
    ):
        """
        Evaluate commands on source and return source evaluation report.

        :param source_bar_pos: Position of the source bar to print
        :type source_bar_pos: int
        :param source: Path of the desired source.
        :type source: Path
        :param commands: List of commands to run on the source.
        :type commands: List[Command]
        :param evaluation: Evaluation instance to be updated after commands
            are running.
        :type evaluation: Evaluation
        :param main_bar: progress bar that shows how far are we in evaluating the source
        :type main_bar: tqdm.tqdm
        :param max_source_name_length: Maximum source name length
        :type max_source_name_length: int
        """
        evaluation[source] = SourceEvaluation()
        start_time = time.time()
        with tqdm.trange(
            len(commands),
            bar_format=BAR_FORMAT,
            position=source_bar_pos,
            leave=False,
            colour=SECONDARY_BAR_COLOR,
            desc=f"{source.as_posix():{max_source_name_length}}",
        ) as source_bar:
            coros = [
                self.evaluate_command(
                    command=command,
                    source=source,
                    evaluation=evaluation,
                    source_bar=source_bar,
                    main_bar=main_bar,
                )
                for command in commands
            ]
            await asyncio.gather(*coros)
        end_time = time.time()
        evaluation[source].source_execution_duration = end_time - start_time
        # Acquire/release with nothing in between: acts as a barrier so any
        # in-flight evaluation/bar update holding the lock finishes before
        # this coroutine returns.  NOTE(review): not wrapped in try/finally —
        # confirm no exception path can leave the lock held.
        await self.update_lock.acquire()
        self.update_lock.release()

    async def evaluate_command(  # pylint: disable=too-many-arguments
        self,
        command: Command,
        source: Path,
        evaluation: Evaluation,
        source_bar: tqdm.tqdm,
        main_bar: tqdm.tqdm,
    ):
        """
        Evaluate command on source and return command evaluation report.

        :param source: Path of the desired source.
        :type source: Path
        :param command: Command to run on the source.
        :type command: Command
        :param evaluation: Evaluation instance to be updated after commands
            are running.
        :type evaluation: Evaluation
        :param source_bar: tqdm progress bar to show the progress of evaluating
            this specific source.
        :type source_bar: tqdm.tqdm
        :param main_bar: tqdm progress bar to show total progress
        :type main_bar: tqdm.tqdm
        """
        command_evaluation = await command.execute_async(source)
        # Lock serializes the shared-evaluation append and both bar updates.
        # NOTE(review): acquire/release is not in try/finally; an exception
        # in append/update would leave the lock held.
        await self.update_lock.acquire()
        evaluation[source].append(command_evaluation)
        source_bar.update(1)
        main_bar.update(1)
        self.update_lock.release()


# Maps RunnerMode *names* (strings) to runner classes — build_runner takes
# the name, not the enum member.
MODE_TO_RUNNER_DICT = {
    RunnerMode.SYNC.name: SynchronousEvaluationRunner,
    RunnerMode.ASYNC.name: AsynchronousEvaluationRunner,
}


def build_runner(runner_mode: str) -> EvaluationRunner:
    """
    Build commands runner.

    :param runner_mode: Which mode should the runner work in
    :type runner_mode: str
    :return: Runner instance.
    :rtype: EvaluationRunner
    """
    return MODE_TO_RUNNER_DICT[runner_mode]()
BillionaireDY/tui.grid
test/unit/js/common/i18n.spec.js
'use strict';

var i18n = require('common/i18n');

describe('i18n', function() {
    describe('setLanguage', function() {
        // NOTE(review): these specs share the module-level i18n singleton and
        // are order-dependent — the "unknown locale throws" spec below relies
        // on 'fr' not having been registered yet, while the last spec
        // registers 'fr'. Reordering or running specs in isolation would
        // change outcomes; confirm the runner preserves declaration order.

        // Switching between built-in locales swaps the active message set.
        it('when setting the locale code that the grid has aleady, ' +
            'the locale messages are set and changed.', function() {
            i18n.setLanguage('en');
            expect(i18n.get('display.noData')).toBe('No data.');

            i18n.setLanguage('ko');
            expect(i18n.get('display.noData')).toBe('데이터가 존재하지 않습니다.');
        });

        // Selecting a locale with no registered messages must throw.
        it('when setting the locale code that the grid does not have, the error is thrown.', function() {
            function setLocaleCodeWithNoMessage() {
                i18n.setLanguage('fr');
            }
            expect(setLocaleCodeWithNoMessage).toThrow();
        });

        // Passing a message object for a known locale overrides its messages.
        it('when setting messages for the existing locale code, ' +
            'the locale messages are changed.', function() {
            i18n.setLanguage('en', {
                display: {
                    noData: 'empty'
                }
            });
            expect(i18n.get('display.noData')).toBe('empty');
        });

        // Passing a message object for a brand-new locale registers it.
        it('when setting messages for the new locale code, ' +
            'the locale messages are set and changed.', function() {
            i18n.setLanguage('fr', {
                display: {
                    noData: 'empty2'
                }
            });
            expect(i18n.get('display.noData')).toBe('empty2');
        });
    });
});
Fusion-Rom/android_external_chromium_org
mojo/aura/context_factory_mojo.h
// Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef MOJO_AURA_CONTEXT_FACTORY_MOJO_H_ #define MOJO_AURA_CONTEXT_FACTORY_MOJO_H_ #include "ui/compositor/compositor.h" namespace mojo { class ContextFactoryMojo : public ui::ContextFactory { public: ContextFactoryMojo(); virtual ~ContextFactoryMojo(); private: // ContextFactory: virtual scoped_ptr<cc::OutputSurface> CreateOutputSurface( ui::Compositor* compositor, bool software_fallback) OVERRIDE; virtual scoped_refptr<ui::Reflector> CreateReflector( ui::Compositor* mirrored_compositor, ui::Layer* mirroring_layer) OVERRIDE; virtual void RemoveReflector(scoped_refptr<ui::Reflector> reflector) OVERRIDE; virtual scoped_refptr<cc::ContextProvider> SharedMainThreadContextProvider() OVERRIDE; virtual void RemoveCompositor(ui::Compositor* compositor) OVERRIDE; virtual bool DoesCreateTestContexts() OVERRIDE; virtual cc::SharedBitmapManager* GetSharedBitmapManager() OVERRIDE; virtual base::MessageLoopProxy* GetCompositorMessageLoop() OVERRIDE; scoped_ptr<cc::SharedBitmapManager> shared_bitmap_manager_; DISALLOW_COPY_AND_ASSIGN(ContextFactoryMojo); }; } // namespace mojo #endif // MOJO_AURA_CONTEXT_FACTORY_MOJO_H_
edwinvautier/go-cli
cmd/create.go
package cmd /* Copyright © 2021 <NAME> <<EMAIL>> Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ import ( "os" "os/signal" "syscall" "github.com/edwinvautier/go-gadgeto/config" "github.com/edwinvautier/go-gadgeto/services/createCommand" log "github.com/sirupsen/logrus" "github.com/spf13/cobra" ) // createCmd represents the create command var createCmd = &cobra.Command{ Use: "create", Short: "This command is used to initialize a new application.", Long: `This command is used to initialize a new application.`, Run: func(cmd *cobra.Command, args []string) { commandConfig := config.CreateCmdConfig{ Args: args, } config.InitCreateCmdConfig(&commandConfig) if err := createCommand.InitProject(&commandConfig); err != nil { log.Error("project initialization failed: ", err) } }, } func init() { rootCmd.AddCommand(createCmd) c := make(chan os.Signal) signal.Notify(c, os.Interrupt, syscall.SIGINT, syscall.SIGTERM) go func() { <-c log.Info("Exiting...") os.Exit(1) }() }
SarveshPwC/VKYCD
node_modules/rxjs-compat/_esm5/operator/buffer.js
<filename>node_modules/rxjs-compat/_esm5/operator/buffer.js import { buffer as higherOrder } from 'rxjs/operators'; export function buffer(closingNotifier) { return higherOrder(closingNotifier)(this); } //# sourceMappingURL=buffer.js.map
flics04/XXXASYBT_CppBase
Chapter4/Practice/1063.cpp
#include <iostream>

// Reads a count n followed by n integers from stdin and prints the
// difference between the largest and smallest of them.
int main()
{
    int count = 0;
    int value = 0;
    std::cin >> count >> value;

    // Seed both extremes with the first value, then fold in the rest.
    int highest = value;
    int lowest = value;

    for (int seen = 2; seen <= count; ++seen) {
        std::cin >> value;
        if (value > highest) {
            highest = value;
        } else if (value < lowest) {
            lowest = value;
        }
    }

    std::cout << highest - lowest << std::endl;
    return 0;
}
mskiitd/MAS-GUI
AgentProxy/src/mas/machineproxy/gui/custompanels/AttributeInputPanel.java
package mas.machineproxy.gui.custompanels;

import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.SwingUtilities;

import mas.jobproxy.JobGNGattribute;
import mas.util.formatter.stringformatter.FormattedStringField;
import uiconstants.Labels;

/**
 * Swing panel with a single text field for entering a {@link JobGNGattribute}
 * name. Validation is a non-empty check; on failure an error dialog is shown
 * asynchronously on the EDT.
 */
public class AttributeInputPanel extends JPanel {

	private static final long serialVersionUID = 1L;

	private JLabel lblTitleHeading;
	private FormattedStringField txtName;
	// Result of the last checkData() call; starts optimistic (true).
	private boolean status = true;

	/**
	 * Builds the panel, pre-filling the text field from the given attribute.
	 *
	 * @param attribute existing attribute to edit, or null for a blank field
	 */
	public AttributeInputPanel(JobGNGattribute attribute) {
		lblTitleHeading = new JLabel(" Attribute Name ");
		txtName = new FormattedStringField();
		txtName.setColumns(Labels.defaultJTextSize);

		add(lblTitleHeading);
		add(txtName);

		if(attribute != null) {
			txtName.setText(attribute.getName());
		}
	}

	/** Builds an empty panel (no pre-filled attribute). */
	public AttributeInputPanel() {
		this(null);
	}

	/**
	 * Re-validates the field and returns the result.
	 *
	 * @return true if the field is non-empty
	 */
	public boolean isDataOk() {
		checkData();
		return status;
	}

	/**
	 * Sets {@code status} from the field contents. On invalid input the
	 * error dialog is queued via invokeLater, so it may appear after this
	 * method (and the caller) has already returned — the synchronous
	 * contract is only the {@code status} flag.
	 */
	private void checkData() {
		if(txtName.getText().isEmpty()) {
			SwingUtilities.invokeLater(new Runnable() {
				@Override
				public void run() {
					JOptionPane.showMessageDialog(AttributeInputPanel.this,
							"Invalid input for attribute Name !!",
							"Error" ,
							JOptionPane.ERROR_MESSAGE
							);
				}
			});
			status = false;
		} else {
			status = true;
		}
	}

	/**
	 * Validates and builds the attribute from the field.
	 *
	 * @return a new JobGNGattribute with the entered name, or null when the
	 *         field is empty (validation failed)
	 */
	public JobGNGattribute getAttribute() {
		JobGNGattribute att = new JobGNGattribute();
		checkData();
		if(status) {
			att.setName(txtName.getText());
			return att;
		}
		return null;
	}
}
kagemeka/atcoder-submissions
jp.atcoder/abc030/abc030_b/11017314.js
<reponame>kagemeka/atcoder-submissions "use strict"; const fs = require('fs'); function int(n) { return parseInt(n, 10); } function main(input) { var n, m; input = input.trim().split(' ').map(n => int(n)) ; n = input[0]; m = input[1]; var a, b; b = m * 6; n = n % 12; a = 30 * (n + m / 60); var d = Math.abs(b - a); var ans = Math.min(360 - d, d); console.log(ans); } var input = fs.readFileSync('/dev/stdin', 'utf8'); main(input);
icred/base
model/src/model-generated/eu/icred/model/node/entity/Account.java
<filename>model/src/model-generated/eu/icred/model/node/entity/Account.java
package eu.icred.model.node.entity;

import eu.icred.model.annotation.ChildList;
import eu.icred.model.annotation.DataField;
import eu.icred.model.annotation.Node;
import eu.icred.model.datatype.Amount;
import eu.icred.model.datatype.enumeration.AccountingStandard;
import eu.icred.model.node.entity.AbstractEntityNode;
import java.lang.String;
import java.util.Map;

/**
 * Entity node for an account: an accounting standard, a balance position,
 * a monetary value, plus child maps of records and book entries.
 */
@Node
public class Account extends AbstractEntityNode {

    /** Accounting standard this account is kept under. */
    @DataField
    protected AccountingStandard accountingStandard;

    public AccountingStandard getAccountingStandard() {
        return accountingStandard;
    }

    public void setAccountingStandard(AccountingStandard accountingStandard) {
        this.accountingStandard = accountingStandard;
    }

    /** Position of this account within the balance sheet. */
    @DataField
    protected String balancePosition;

    public String getBalancePosition() {
        return balancePosition;
    }

    public void setBalancePosition(String balancePosition) {
        this.balancePosition = balancePosition;
    }

    /** Monetary value of the account. */
    @DataField
    protected Amount value;

    public Amount getValue() {
        return value;
    }

    public void setValue(Amount value) {
        this.value = value;
    }

    /** Child records, keyed by identifier. */
    @ChildList
    protected Map<String, Record> records;

    public Map<String, Record> getRecords() {
        return records;
    }

    public void setRecords(Map<String, Record> records) {
        this.records = records;
    }

    /** Child book entries, keyed by identifier. */
    @ChildList
    protected Map<String, BookEntry> bookEntries;

    public Map<String, BookEntry> getBookEntries() {
        return bookEntries;
    }

    public void setBookEntries(Map<String, BookEntry> bookEntries) {
        this.bookEntries = bookEntries;
    }
}
floppyMike/CustomLibrary
include/CustomLibrary/utility.h
#pragma once

#include <array>
#include <string_view>
#include <string>
#include <bitset>
#include <fstream>
#include <tuple>

#include "Traits.h"
#include "Error.h"

#include <cmath>

// NOTE(review): fast_remove/fast_stream_extract use assert() but no
// <cassert> include is visible here — presumably pulled in via Traits.h or
// Error.h; confirm.

namespace ctl
{
	// -----------------------------------------------------------------------------
	// Math
	// -----------------------------------------------------------------------------

	static constexpr double PI = 3.1415926535897932;

	/**
	 * @brief Maps values from its previous range to new range
	 *
	 * @param val value
	 * @param old_min previous range minimum
	 * @param old_max previous range maximum
	 * @param new_min new range minimum
	 * @param new_max new range maximum
	 * @return Value type
	 */
	template<arithmetic T, arithmetic U, arithmetic Z, arithmetic I, arithmetic O>
	constexpr auto map_val(T val, U old_min, Z old_max, I new_min, O new_max) noexcept -> T
	{
		return static_cast<T>(new_min + (new_max - new_min) * val / (old_max - old_min));
	}

	/**
	 * @brief Maps values from its previous range to a new range
	 * (both ranges starting at 0)
	 *
	 * @param v value to map
	 * @param old_max previous max of range
	 * @param new_max new max of range
	 * @return Value type
	 */
	template<arithmetic T, arithmetic U, arithmetic Z>
	constexpr auto map_val(T v, U old_max, Z new_max) noexcept -> T
	{
		return static_cast<T>(new_max * v / old_max);
	}

	/**
	 * @brief Turn radians to degrees
	 *
	 * @param val Value to turn
	 * @return Value type
	 */
	template<arithmetic _T>
	constexpr auto rad_to_deg(const _T &val) noexcept
	{
		return val / PI * 180;
	}

	/**
	 * @brief Turn degrees to radians
	 *
	 * @param val Value to turn
	 * @return Value type
	 */
	template<arithmetic _T>
	constexpr auto deg_to_rad(const _T &val) noexcept
	{
		return val / 180. * PI;
	}

	/**
	 * @brief Check if char represents a number
	 * (accepts digits and '.', so '.' alone also passes)
	 *
	 * @param ch Character to look at
	 * @return bool
	 */
	constexpr auto is_number(char ch) -> bool { return (ch >= '0' && ch <= '9') || ch == '.'; }

	/**
	 * @brief Checks if string is a number
	 *
	 * Note: only checks each char with is_number(char); a string with
	 * multiple '.' (e.g. "1.2.3") still passes.
	 *
	 * @tparam _Str String type
	 * @param s String
	 * @return bool
	 */
	template<string _Str>
	constexpr auto is_number(const _Str &s) -> bool
	{
		for (auto i = std::begin(s), end = std::end(s); i != end; ++i)
			if (!is_number(*i))
				return false;

		return true;
	}

	/**
	 * @brief Sigmoid function: 1 / (1 + e^-x)
	 *
	 * @tparam _T Arithmetic number
	 * @param x number
	 * @return result
	 */
	template<arithmetic _T>
	auto sigmoid(_T x) noexcept -> _T
	{
		return 1 / (1 + std::exp(-x));
	}

	/**
	 * @brief Converts integer to binary
	 *
	 * @tparam _T integer type
	 * @param num number
	 * @return bitset (one bit per bit of _T)
	 */
	template<std::integral _T>
	auto to_binary(const _T &num)
	{
		return std::bitset<sizeof(_T) * 8>(num);
	}

	/**
	 * @brief Rounds number to multiple
	 *
	 * NOTE(review): despite the name, std::round rounds to the NEAREST
	 * multiple, not up. Also, for integral _T the two return statements
	 * deduce different types (_T vs the double from std::round), which
	 * fails to compile on instantiation — confirm this is only used with
	 * floating-point _T.
	 *
	 * @param num number to round
	 * @param multiple multiple to use
	 * @return result
	 */
	template<typename _T, typename _U>
	auto round_up(_T num, _U multiple)
	{
		if (multiple == 0)
			return num;

		return std::round(num / multiple) * multiple;
	}

	// -----------------------------------------------------------------------------
	// Iteratable Adaptor
	// -----------------------------------------------------------------------------

	/**
	 * @brief Exposes begin()/end() on a container adaptor (stack/queue/...)
	 * by reaching into its protected member `c`.
	 * @tparam Adaptor Adaptor type
	 */
	template<typename Adaptor>
	class IteratableAdaptor : public Adaptor
	{
	public:
		using iterator	     = typename Adaptor::container_type::iterator;
		using const_iterator = typename Adaptor::container_type::const_iterator;

		auto begin() { return this->c.begin(); }
		auto end() { return this->c.end(); }

		auto begin() const { return this->c.begin(); }
		auto end() const { return this->c.end(); }
	};

	// -----------------------------------------------------------------------------
	// Array Cast
	// -----------------------------------------------------------------------------

	// Compile-time index pack carrier for array_cast.
	template<std::size_t... Is>
	struct _indices_
	{
	};

	// Recursively builds the index pack 0..N-1.
	template<std::size_t N, std::size_t... Is>
	struct _build_indices_ : _build_indices_<N - 1, N - 1, Is...>
	{
	};

	template<std::size_t... Is>
	struct _build_indices_<0, Is...> : _indices_<Is...>
	{
	};

	// Expands the index pack to static_cast every element.
	template<typename T, typename _U, size_t _i, size_t... Is>
	constexpr auto _array_cast_helper_(const std::array<_U, _i> &a, _indices_<Is...>) -> std::array<T, _i>
	{
		return { static_cast<T>(std::get<Is>(a))... };
	}

	/**
	 * @brief Cast whole std::array
	 *
	 * @tparam T New type
	 * @param a Array to cast
	 * @return std::array<T, i>
	 */
	template<typename T, typename _U, size_t _i>
	constexpr auto array_cast(const std::array<_U, _i> &a) -> std::array<T, _i>
	{
		return _array_cast_helper_<T>(a, _build_indices_<_i>());
	}

	// -----------------------------------------------------------------------------
	// General
	// -----------------------------------------------------------------------------

	/**
	 * @brief Dereference ptr
	 *
	 * @param ptr Pointer to dereference
	 * @return _T&
	 */
	template<typename _T>
	constexpr auto deref(_T *ptr) -> _T &
	{
		return *ptr;
	}

	/**
	 * @brief Return reference back (overload so deref works uniformly on
	 * pointers and references)
	 *
	 * @param ptr Reference to return
	 * @return _T&
	 */
	template<typename _T>
	constexpr auto deref(_T &ptr) -> _T &
	{
		return ptr;
	}

	/**
	 * @brief Turn r-value reference to l-value reference
	 *
	 * @param t r-value to turn
	 * @return _T&
	 */
	template<typename _T>
	constexpr auto unmove(_T &&t) noexcept -> auto &
	{
		return t;
	}

	/**
	 * @brief Optimizes removal from continous container by swapping the
	 * element with the last one and erasing the tail (O(1), does NOT keep
	 * element order).
	 *
	 * @tparam Container
	 * @param c Container value
	 * @param i Container iterator
	 */
	template<typename Container>
	requires requires(Container c)
	{
		{ std::begin(c) } ->std::contiguous_iterator;
	}
	void fast_remove(Container &c, decltype(std::begin(std::declval<Container>())) i)
	{
		assert(i != std::end(c) && "End iterator cannot be deleted.");
		std::iter_swap(i, std::end(c) - 1);
		c.erase(std::end(c) - 1);
	}

	/**
	 * @brief Assign val2 to val1 iff predicate p(val1, val2) holds.
	 * @return whether the assignment happened
	 */
	template<typename _T, typename _U, std::predicate Pred>
	constexpr auto if_true(_T &val1, const _U &val2, Pred p)
	{
		const bool cond = p(val1, val2);

		if (cond)
			val1 = val2;

		return cond;
	}

	/**
	 * @brief Quickly turn istream to string by seeking to the end to size
	 * the buffer, then reading in one call.
	 *
	 * @param in istream object
	 * @return string
	 */
	template<typename _Ele, typename _Traits>
	auto fast_stream_extract(std::basic_istream<_Ele, _Traits> &in)
	{
		std::string content;

		assert(in && "Stream is empty.");

		in.seekg(0, std::ios::end);
		content.resize(static_cast<size_t>(in.tellg()));
		in.seekg(0, std::ios::beg);

		in.read(&content[0], content.size());

		return content;
	}

	/**
	 * @brief Turn value into hex string
	 *
	 * Implemented with a self-referencing static function pointer so the
	 * capture-less lambda can recurse (most-significant digit first).
	 *
	 * NOTE(review): this is a non-inline, non-template function defined in
	 * a header — including it from more than one TU violates the ODR;
	 * confirm it is only included once or mark it inline.
	 *
	 * @param val value to turn
	 * @return std::string
	 */
	auto to_hex(int val) -> std::string
	{
		static constexpr std::string &(*hex)(int &, std::string &) =
			[](int &val, std::string &str) constexpr->std::string &
		{
			if (val == 0)
				return str;

			const auto rem = val % 16;
			val /= 16;

			hex(val, str);

			if (rem > 9)
				str.push_back(rem - 10 + 'a');
			else
				str.push_back(rem + '0');

			return str;
		};

		if (val == 0)
			return std::string(1, '0');

		std::string buf;
		return hex(val, buf);
	}

} // namespace ctl
zhiming-shen/Xen-Blanket-NG
linux-kernel/linux-3.4.53-blanket/include/linux/migrate.h
#ifndef _LINUX_MIGRATE_H
#define _LINUX_MIGRATE_H

#include <linux/mm.h>
#include <linux/mempolicy.h>
#include <linux/migrate_mode.h>

/*
 * Page-migration API.  Callbacks of this type allocate the destination
 * page for a migration: they receive the page being migrated plus
 * caller-private data, and may report a result through the int**.
 */
typedef struct page *new_page_t(struct page *, unsigned long private, int **);

#ifdef CONFIG_MIGRATION
/* Put isolated pages on list @l back onto the LRU lists. */
extern void putback_lru_pages(struct list_head *l);
/* Migrate one page between mappings under the given migrate mode. */
extern int migrate_page(struct address_space *,
			struct page *, struct page *, enum migrate_mode);
/* Migrate every page on list @l, allocating targets via @x. */
extern int migrate_pages(struct list_head *l, new_page_t x,
			unsigned long private, bool offlining,
			enum migrate_mode mode);
/* Same as migrate_pages() but for huge pages. */
extern int migrate_huge_pages(struct list_head *l, new_page_t x,
			unsigned long private, bool offlining,
			enum migrate_mode mode);

/* migratepage callback that always fails; for mappings that cannot migrate. */
extern int fail_migrate_page(struct address_space *,
			struct page *, struct page *);

/* Per-CPU/global preparation hooks run before a migration pass. */
extern int migrate_prep(void);
extern int migrate_prep_local(void);
/* Notify VMAs of @mm about migration between the two nodemasks. */
extern int migrate_vmas(struct mm_struct *mm,
		const nodemask_t *from, const nodemask_t *to,
		unsigned long flags);
/* Copy page contents and relevant state from @page to @newpage. */
extern void migrate_page_copy(struct page *newpage, struct page *page);
/* Replace @page with @newpage in @mapping's radix tree (huge pages). */
extern int migrate_huge_page_move_mapping(struct address_space *mapping,
				  struct page *newpage, struct page *page);
#else
/*
 * CONFIG_MIGRATION disabled: provide no-op/stub versions so callers
 * compile unchanged; operational stubs report -ENOSYS.
 */
static inline void putback_lru_pages(struct list_head *l) {}
static inline int migrate_pages(struct list_head *l, new_page_t x,
		unsigned long private, bool offlining,
		enum migrate_mode mode) { return -ENOSYS; }
static inline int migrate_huge_pages(struct list_head *l, new_page_t x,
		unsigned long private, bool offlining,
		enum migrate_mode mode) { return -ENOSYS; }

static inline int migrate_prep(void) { return -ENOSYS; }
static inline int migrate_prep_local(void) { return -ENOSYS; }

static inline int migrate_vmas(struct mm_struct *mm,
		const nodemask_t *from, const nodemask_t *to,
		unsigned long flags)
{
	return -ENOSYS;
}

static inline void migrate_page_copy(struct page *newpage,
				     struct page *page) {}

static inline int migrate_huge_page_move_mapping(struct address_space *mapping,
				  struct page *newpage, struct page *page)
{
	return -ENOSYS;
}

/* Possible settings for the migrate_page() method in address_space_operations */
#define migrate_page NULL
#define fail_migrate_page NULL

#endif /* CONFIG_MIGRATION */
#endif /* _LINUX_MIGRATE_H */
wanliyun/xLua
build/3rd/luniq/luniq.c
<filename>build/3rd/luniq/luniq.c
/*
 * Lua binding around handlemap: hands out process-unique integer handles.
 * Lua API: luniq.new() -> uniq object with :new(), :release(id),
 * :inuse(id) and :list() methods; the handlemap is freed on __gc.
 */
#include <stdlib.h>
#include <stdio.h>
#include "handlemap.h"

/* Lua 5.1 compatibility: emulate 5.2's luaL_newlib via luaL_register. */
#if LUA_VERSION_NUM < 502 && (!defined(luaL_newlib))
# define luaL_newlib(L,l) (lua_newtable(L), luaL_register(L,NULL,l))
#endif

/* Registry/metatable name for the handlemap userdata. */
#define CLS_UNIQ "uniq{cls}"
/* Growth cap for the id-list buffer (in element count).
 * NOTE(review): macro body is unparenthesized; wrap in () if it is ever
 * used inside a larger expression. */
#define LIST_MAX_SIZE 1024 * 1024 * 1024

/* Fetch and dereference the handlemap pointer stored in the userdata. */
#define CHECK_UNIQ(L, idx)\
	(*(struct handlemap **) luaL_checkudata(L, idx, CLS_UNIQ))

/* Box a C pointer into a full userdata and attach metatable `mname`. */
#define LUNIQ_LUA_BIND_META(L, type_t, ptr, mname) do { \
	type_t **my__p = lua_newuserdata(L, sizeof(void *)); \
	*my__p = ptr; \
	luaL_getmetatable(L, mname); \
	lua_setmetatable(L, -2); \
} while(0)

/* #define ENABLE_XXX_DEBUG */

#ifdef ENABLE_XXX_DEBUG
# define UNIQ_DLOG(fmt, ...) fprintf(stderr, "<luniq>" fmt "\n", ##__VA_ARGS__)
#else
# define UNIQ_DLOG(...)
#endif

/* luniq.new() -> userdata | nothing.  Allocates a fresh handlemap;
 * returns no value (nil to the caller) if allocation fails. */
static int lua__uniq_new(lua_State *L)
{
	struct handlemap * h = handlemap_init();
	if (h == NULL) {
		return 0;
	}
	LUNIQ_LUA_BIND_META(L, struct handlemap, h, CLS_UNIQ);
	return 1;
}

/* uniq:new() -> integer.  Allocates a new unique id (no payload). */
static int lua__uniq_new_id(lua_State *L)
{
	struct handlemap * h = CHECK_UNIQ(L, 1);
	handleid id = handlemap_new(h, HANDLE_UD_NULL);
	lua_pushinteger(L, id);
	return 1;
}

/* uniq:release(id).  Returns the id to the map for reuse. */
static int lua__uniq_release(lua_State *L)
{
	struct handlemap * h = CHECK_UNIQ(L, 1);
	handleid id = (handleid)luaL_checkinteger(L, 2);
	handlemap_release(h, id);
	return 0;
}

/* uniq:inuse(id) -> boolean.  True while the id is still allocated. */
static int lua__uniq_inuse(lua_State *L)
{
	struct handlemap * h = CHECK_UNIQ(L, 1);
	handleid id = (handleid)luaL_checkinteger(L, 2);
	void *p = handlemap_grab(h, id);
	lua_pushboolean(L, p != NULL);
	return 1;
}

/* uniq:list() -> array of live ids.
 * Starts with a 1024-entry stack buffer and doubles a heap buffer until
 * handlemap_list fits; gives up (returns nothing) past LIST_MAX_SIZE.
 * NOTE(review): the malloc() return value is not checked; a NULL here
 * would be passed straight to handlemap_list on the next iteration. */
static int lua__uniq_list(lua_State *L)
{
	int i;
	int outsz = 0;
	handleid slist[1024];
	handleid *list = (handleid *)slist;
	int insz = sizeof(slist)/sizeof(slist[0]);
	struct handlemap * h = CHECK_UNIQ(L, 1);
	do {
		outsz = handlemap_list(h, insz, list);
		if (outsz < insz) {
			break;
		}
		if (list != slist) {
			free(list);
		}
		insz *= 2;
		if (insz > LIST_MAX_SIZE) {
			return 0;
		}
		list = malloc(sizeof(handleid) * insz);
	} while(1);
	lua_newtable(L);
	for (i = 0; i < outsz; i++) {
		lua_pushinteger(L, (lua_Integer)list[i]);
		lua_rawseti(L, -2, i + 1);
	}
	if (list != slist) {
		free(list);
	}
	return 1;
}

/* __gc metamethod: frees the underlying handlemap. */
static int lua__uniq_gc(lua_State *L)
{
	struct handlemap * h = CHECK_UNIQ(L, 1);
	handlemap_exit(h);
	UNIQ_DLOG("release handlemap");
	return 0;
}

/* Builds the uniq metatable: methods behind __index, plus __gc. */
static int opencls__luniq(lua_State *L)
{
	luaL_Reg lmethods[] = {
		{"new", lua__uniq_new_id},
		{"release", lua__uniq_release},
		{"list", lua__uniq_list},
		{"inuse", lua__uniq_inuse},
		{NULL, NULL},
	};
	luaL_newmetatable(L, CLS_UNIQ);
	lua_newtable(L);
	/* NOTE(review): direct luaL_register call is Lua 5.1 API; under 5.2+
	 * this needs luaL_setfuncs (only luaL_newlib is shimmed above). */
	luaL_register(L, NULL, lmethods);
	lua_setfield(L, -2, "__index");
	lua_pushcfunction (L, lua__uniq_gc);
	lua_setfield (L, -2, "__gc");
	return 1;
}

/* Module entry point: require "luniq" -> { new = ... }. */
int luaopen_luniq(lua_State* L)
{
	luaL_Reg lfuncs[] = {
		{"new", lua__uniq_new},
		{NULL, NULL},
	};
	opencls__luniq(L);
	luaL_newlib(L, lfuncs);
	return 1;
}
jiangTaoQuite/SpringMVC-Architecture
src/main/java/tech/jiangtao/backstage/mapper/BroadcastMsgsRecipientsMapper.java
<reponame>jiangTaoQuite/SpringMVC-Architecture<filename>src/main/java/tech/jiangtao/backstage/mapper/BroadcastMsgsRecipientsMapper.java<gh_stars>1-10
package tech.jiangtao.backstage.mapper;

import java.util.List;
import org.apache.ibatis.annotations.Param;
import tech.jiangtao.backstage.model.BroadcastMsgsRecipientsExample;
import tech.jiangtao.backstage.model.BroadcastMsgsRecipientsKey;

/**
 * MyBatis mapper for the broadcast-messages/recipients join table.
 * Follows the standard MyBatis-Generator contract: {@code *ByExample}
 * methods take dynamic criteria, {@code *ByPrimaryKey} methods take the
 * composite key, and {@code *Selective} variants skip null fields.
 */
public interface BroadcastMsgsRecipientsMapper {
    /** Counts rows matching the example's criteria. */
    int countByExample(BroadcastMsgsRecipientsExample example);

    /** Deletes all rows matching the example's criteria; returns rows affected. */
    int deleteByExample(BroadcastMsgsRecipientsExample example);

    /** Deletes the single row identified by the composite key. */
    int deleteByPrimaryKey(BroadcastMsgsRecipientsKey key);

    /** Inserts a row with every column populated from the record. */
    int insert(BroadcastMsgsRecipientsKey record);

    /** Inserts a row, including only the record's non-null columns. */
    int insertSelective(BroadcastMsgsRecipientsKey record);

    /** Returns all rows matching the example's criteria. */
    List<BroadcastMsgsRecipientsKey> selectByExample(BroadcastMsgsRecipientsExample example);

    /** Updates non-null fields of matching rows; returns rows affected. */
    int updateByExampleSelective(@Param("record") BroadcastMsgsRecipientsKey record, @Param("example") BroadcastMsgsRecipientsExample example);

    /** Updates all fields of matching rows; returns rows affected. */
    int updateByExample(@Param("record") BroadcastMsgsRecipientsKey record, @Param("example") BroadcastMsgsRecipientsExample example);
}
jwilger/racing_on_rails
lib/acts_as_tree/validation.rb
<filename>lib/acts_as_tree/validation.rb
# frozen_string_literal: true

module ActsAsTree
  # Mixin that guards tree integrity: a node must never be its own parent.
  module Validation
    extend ActiveSupport::Concern

    included do
      validate :valid_parent
    end

    # Adds a validation error when the record's parent is the record itself.
    # Returns false in that case, true otherwise.
    def valid_parent
      return true unless parent == self

      errors.add :parent, "can't be own parent"
      false
    end
  end
end
CoprHD/sds-controller
controllersvc/src/main/java/com/emc/storageos/volumecontroller/impl/utils/attrmatchers/RaidLevelMatcher.java
<filename>controllersvc/src/main/java/com/emc/storageos/volumecontroller/impl/utils/attrmatchers/RaidLevelMatcher.java
/*
 * Copyright (c) 2015 EMC Corporation
 * All Rights Reserved
 */
package com.emc.storageos.volumecontroller.impl.utils.attrmatchers;

import java.net.URI;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.CollectionUtils;

import com.emc.storageos.db.client.model.StoragePool;
import com.emc.storageos.db.client.model.StringSet;
import com.emc.storageos.db.client.model.VirtualPool.RaidLevel;
import com.emc.storageos.volumecontroller.AttributeMatcher;
import com.google.common.base.Joiner;

/**
 * Attribute matcher that keeps only the storage pools supporting at least
 * one of the RAID levels requested in the attribute map.
 */
public class RaidLevelMatcher extends ConditionalAttributeMatcher {
    private static final Logger _logger = LoggerFactory.getLogger(RaidLevelMatcher.class);

    /**
     * Decides whether this matcher should run: skipped when a VMAX FAST
     * (auto-tiering) policy is selected without placement matchers, and
     * needed only when a non-empty raid_levels set is present.
     *
     * NOTE(review): attributeMap is dereferenced by the auto-tiering check
     * before the null guard below; relies on callers never passing null.
     */
    @Override
    protected boolean isAttributeOn(Map<String, Object> attributeMap) {
        boolean isMatcherNeeded = false;
        if (isAutoTieringPolicyOn(attributeMap)
                && !attributeMap.containsKey(AttributeMatcher.PLACEMENT_MATCHERS)) {
            _logger.info("Skipping RaidLevel matcher, as VMAX FAST Policy is chosen");
            return isMatcherNeeded;
        }
        if (attributeMap != null
                && attributeMap.get(Attributes.raid_levels.name()) != null) {
            HashSet<String> raidLevels = (HashSet<String>) attributeMap.get(Attributes.raid_levels.name());
            if (!raidLevels.isEmpty()) {
                isMatcherNeeded = true;
            }
        }
        return isMatcherNeeded;
    }

    /**
     * Filters the candidate pools, removing any pool that either reports no
     * supported RAID levels or shares none with the requested set.
     * Appends to errorMessage when the result is empty.
     */
    @SuppressWarnings("unchecked")
    @Override
    protected List<StoragePool> matchStoragePoolsWithAttributeOn(List<StoragePool> pools, Map<String, Object> attributeMap,
            StringBuffer errorMessage) {
        // Work on a copy so removals don't disturb the iteration over `pools`.
        List<StoragePool> filteredPoolList = new ArrayList<StoragePool>(pools);
        Set<String> raidLevels = null;
        raidLevels = (Set<String>) attributeMap.get(Attributes.raid_levels.toString());
        _logger.info("Pools Matching RaidLevels {} Started:{}", raidLevels,
                Joiner.on("\t").join(getNativeGuidFromPools(pools)));
        Iterator<StoragePool> poolIterator = pools.iterator();
        while (poolIterator.hasNext()) {
            StoragePool pool = poolIterator.next();
            StringSet poolSupportedRaidLevels = pool.getSupportedRaidLevels();
            if (null != poolSupportedRaidLevels) {
                _logger.info("Supported Raid Levels {} for Pool {} ", Joiner.on("\t")
                        .join(poolSupportedRaidLevels), pool.getNativeGuid());
            } else {
                _logger.info("Supported Raid Levels Empty for Pool {} ", pool.getNativeGuid());
            }
            // if Pool doesn't have any details on Raid Level, remove that Pool
            if (null == poolSupportedRaidLevels) {
                _logger.info("Ignoring pool {} as it doesn't have raid levels", pool.getNativeGuid());
                filteredPoolList.remove(pool);
                continue;
            }
            // Intersect the pool's levels with the requested ones on a copy,
            // so the pool's own StringSet is left untouched.
            Set<String> copies = new HashSet<String>(poolSupportedRaidLevels);
            copies.retainAll(raidLevels);
            if (copies.isEmpty()) {
                _logger.info("Ignoring pool {} as it is not supporting the raid levels.", pool.getNativeGuid());
                filteredPoolList.remove(pool);
            }
        }
        if (CollectionUtils.isEmpty(filteredPoolList)) {
            errorMessage.append(String.format("No matching storage pool found for the given raid levels : %s. ", raidLevels));
            _logger.error(errorMessage.toString());
        }
        _logger.info("Pools Matching RaidLevels Ended :{}",
                Joiner.on("\t").join(getNativeGuidFromPools(filteredPoolList)));
        return filteredPoolList;
    }

    /**
     * Collects the distinct, recognized RAID levels offered by the given
     * pools, keyed by the raid_levels attribute name. Unknown level strings
     * (not in the RaidLevel enum) are dropped; returns an empty map when
     * nothing valid is found or on error.
     */
    @Override
    public Map<String, Set<String>> getAvailableAttribute(List<StoragePool> neighborhoodPools,
            URI vArrayId) {
        try {
            Map<String, Set<String>> availableAttrMap = new HashMap<String, Set<String>>(1);
            Set<String> availableAttrValues = new HashSet<String>();
            for (StoragePool pool : neighborhoodPools) {
                StringSet raidLevels = pool.getSupportedRaidLevels();
                if (null != raidLevels && !raidLevels.isEmpty()) {
                    for (String raidLevel : raidLevels) {
                        // Filter out level strings that aren't valid enum values.
                        if (RaidLevel.lookup(raidLevel) != null) {
                            availableAttrValues.add(raidLevel);
                        }
                    }
                }
            }
            if (!availableAttrValues.isEmpty()) {
                availableAttrMap.put(Attributes.raid_levels.name(), availableAttrValues);
                return availableAttrMap;
            }
        } catch (Exception e) {
            _logger.error("Exception occurred while getting available attributes using RaidLevelMatcher.", e);
        }
        return Collections.emptyMap();
    }
}
sug5806/TIL
Python/OOP/game2/monsters.py
<reponame>sug5806/TIL
from character import Monster

# The two monster types from the game's initial development
class FireMonster(Monster):
    """Fire monster; drops 10 gold."""
    def generate_gold(self):
        return 10

class IceMonster(Monster):
    """Ice monster; tougher than the base Monster (100 HP), drops 20 gold."""
    def __init__(self):
        super().__init__()
        # Override the HP set by the Monster base class.
        self.hp=100

    def generate_gold(self):
        return 20

# Monsters added as the game grew in scope
class StoneMonster(Monster):
    """Stone monster; drops no gold."""
    def generate_gold(self):
        return 0

class KungfuMonster(Monster):
    """Kungfu monster; drops 1000 gold."""
    def generate_gold(self):
        return 1000

# Extension classes:
# add new monster types below this line.
atomrq/simulab
lib/bio/_mp.py
import os, sys, time
import numpy as np
import pandas as pd
import linecache
import multiprocessing as mp


class MPI_tackle(object):
    """Wrapper that fans work items out over a local multiprocessing pool.

    Subclasses must implement ``deliverWork(item)`` (invoked once per element
    of ``data`` in worker processes) and may implement ``preWork(**kwargs)``
    for one-time setup.

    Note: the multiprocessing package cannot be used across distributed
    nodes; this is single-host parallelism only.
    """

    def __init__(self, data, nps=None, chunksize_data=None):
        # Sequence of work items, one per deliverWork call.
        self.data = data
        self.count = 0
        # Default to 8 worker processes when not specified.
        self.nps = nps if nps is not None else 8
        # When set, data is processed in sequential batches of this size.
        self.chunksize_data = chunksize_data

    def start(self):
        """Run deliverWork over all of self.data, optionally in batches."""
        if self.chunksize_data is None:
            self._run_batch(self.data)
            return
        # Ceil division so the trailing partial batch is processed too.
        # (The previous divmod-based loop silently dropped the remainder
        # when len(data) was not a multiple of chunksize_data.)
        n = -(-len(self.data) // self.chunksize_data)
        for i in range(n):
            s = self.chunksize_data * i
            e = self.chunksize_data * (i + 1)
            self._run_batch(self.data[s:e])
            # Brief pause between batches to let workers wind down.
            time.sleep(3)

    def _run_batch(self, items):
        # One pool per batch; map() blocks until every item has finished.
        self.pool = mp.Pool(self.nps)
        self.pool.map(self.deliverWork, items)
        self.pool.close()
        self.pool.join()

    def __getstate__(self):
        # Pool objects cannot be pickled: strip the pool when self is sent
        # to workers.  pop() with a default avoids a KeyError when the
        # instance is pickled before start() ever created a pool.
        self_dict = self.__dict__.copy()
        self_dict.pop('pool', None)
        return self_dict

    def __setstate__(self, state):
        self.__dict__.update(state)


class GetSimilarity(MPI_tackle):
    """Per-file molecular-similarity computation against a reference set.

    NOTE(review): this class calls ``Chem`` and ``DataStructs`` (RDKit) but
    the module never imports them -- running it raises NameError until
    ``from rdkit import Chem, DataStructs`` is added where RDKit is
    available.
    """

    def deliverWork(self, fn):
        # Each input file holds rows of "<id> <smiles>" separated by spaces.
        targets = pd.read_csv(fn, header=None, sep=' ').values
        rr = [_[0] + ',' + self.getSimilarity(_[1]) for _ in targets]
        if self.outpath == 'origin':
            # 'origin' means: write the .fsi next to the input file.
            outname = os.path.normpath(os.path.splitext(fn)[0] + '.fsi')
        else:
            outname = os.path.normpath(self.outpath + os.path.basename(fn).split('.')[0] + '.fsi')
        np.savetxt(outname, rr, fmt='%s')

    def preWork(self, **kwargs):
        """Load the reference molecules and precompute their fingerprints.

        kwargs: 'B2AR' (required) -- path to the reference SMILES file;
                'outpath' (optional, default 'origin') -- output directory.
        """
        assert 'B2AR' in kwargs, "Error! wrong parameters"
        # Fix: deliverWork reads self.outpath, but it was never stored here,
        # which raised AttributeError at runtime; default mirrors the
        # 'origin' convention used by GetSmilesFromPdbqt.
        self.outpath = kwargs.get('outpath', 'origin')
        suppl = Chem.SmilesMolSupplier(kwargs['B2AR'], ' ', 0, 1, False, True)
        mols = [x for x in suppl if x is not None]
        self.fps = [Chem.RDKFingerprint(x) for x in mols]

    def getSimilarity(self, _smi):
        """Return 'max,argmax(1-based),mean,smiles' vs. self.fps, or 'nan'
        when the SMILES string cannot be parsed/fingerprinted."""
        try:
            mol = Chem.MolFromSmiles(_smi)
            fp = Chem.RDKFingerprint(mol)
            similarity = [DataStructs.FingerprintSimilarity(fp, _) for _ in self.fps]
            maxv = max(similarity)
            maxv_arg = similarity.index(maxv) + 1
            meanv = sum(similarity) / len(similarity)
            return str(maxv) + "," + str(maxv_arg) + "," + str(meanv) + "," + _smi
        except Exception:
            # Data-dependent failures (unparsable SMILES) collapse to 'nan';
            # narrowed from a bare except so Ctrl-C still propagates.
            return 'nan'


class GetSmilesFromPdbqt(MPI_tackle):
    """Extract SMILES strings from pdbqt files found under each input path.

    The SMILES is taken from line 3 of each pdbqt file, after the first ':'.
    Be careful: too many mp processes doing heavy I/O may cause
    output/input errors.
    """

    def deliverWork(self, path):
        fnames = os.popen('ls %s/*/*.pdbqt' % (path)).read().strip().split('\n')
        rr = []
        for _ in fnames:
            try:
                rr.append(_ + " " + linecache.getline(_, 3).split(':')[1].strip())
            except Exception:
                # Best effort: skip files whose third line lacks the field.
                pass
        if self.outpath == 'origin':
            outname = os.path.normpath(path + os.path.basename(path) + '.smi')
        else:
            outname = os.path.normpath(self.outpath + os.path.basename(path) + '.smi')
        np.savetxt(outname, rr, fmt='%s')

    def preWork(self, **kwargs):
        """kwargs: 'outpath' (required) -- output directory or 'origin'."""
        assert 'outpath' in kwargs, "Error! wrong parameters"
        self.outpath = kwargs['outpath']


def ComputeSimilarity():
    """Driver: similarity of ../input-files/* against the B2AR reference."""
    data_path = '../input-files/'
    names = [data_path + _ for _ in os.listdir(data_path)]
    outpath = 'origin'
    kwargs = {'B2AR': 'B2AR-small-molecule.smi', 'outpath': outpath}
    S = GetSimilarity(names, nps=int(sys.argv[1]))
    S.preWork(**kwargs)
    S.start()


def ComputeGetInfo():
    """Driver: SMILES extraction from pdbqt trees under ../input-files/."""
    data_path = '../input-files/'
    names = [data_path + _ for _ in os.listdir(data_path)]
    outpath = 'origin'
    kwargs = {'outpath': outpath}
    S = GetSmilesFromPdbqt(names, nps=int(sys.argv[1]))
    S.preWork(**kwargs)
    S.start()


if __name__ == '__main__':
    # ComputeSimilarity()
    ComputeGetInfo()
joshua-xia/noip
code-bible/src/com/yunlongstudio/algorithm/ProjectEuler.java
<filename>code-bible/src/com/yunlongstudio/algorithm/ProjectEuler.java package com.yunlongstudio.algorithm; import java.math.BigInteger; import java.util.Calendar; import java.util.Date; import java.util.Stack; public class ProjectEuler { public static void main(String[] args) { /* System.out.println("Problem #1 = " + Problem1()); System.out.println("Problem #2 = " + Problem2()); System.out.println("Problem #3 = " + Problem3()); System.out.println("Problem #4 = " + Problem4()); System.out.println("Problem #5 = " + Problem5()); System.out.println("Problem #6 = " + Problem6()); System.out.println("Problem #7 = " + Problem7()); System.out.println("Problem #8 = " + Problem8()); System.out.println("Problem #9 = " + Problem9()); System.out.println("Problem #10 = " + Problem10()); System.out.println("Problem #11 = " + Problem11()); System.out.println("Problem #12 = " + Problem12()); System.out.println("Problem #13 = " + Problem13()); System.out.println("Problem #14 = " + Problem14()); System.out.println("Problem #15 = " + Problem15()); System.out.println("Problem #16 = " + Problem16()); System.out.println("Problem #17 = " + Problem17()); System.out.println("Problem #18 = " + Problem18()); System.out.println("Problem #19 = " + Problem19()); System.out.println("Problem #20 = " + Problem20()); */ System.out.println("Problem #21 = " + Problem21()); } public static long Problem1() { long ret = 0; for (int i = 1; i < 1000; i++) { if (i % 3 == 0) { ret += i; } else if (i % 5 == 0) { ret += i; } } return ret; } public static long Problem2() { long ret = 0; int pre_fibonacci = 1; int fibonacci = 2; while (fibonacci < 4000000) { if (fibonacci % 2 == 0) { ret += fibonacci; } int temp = fibonacci; fibonacci += pre_fibonacci; pre_fibonacci = temp; } return ret; } private static boolean isPrime(long d) { for(int j = 2; j <= Math.sqrt(d); j++) { if (d%j == 0) { return false; } } return true; } public static long Problem3() { long ret = 0; long i = 1; while (i < Math.sqrt(600851475143L)) 
{ if (600851475143L % i == 0 && isPrime(i)) { ret = i; System.out.println("i = " + i); } i++; } return ret; } public static int Problem4() { int ret = 0; for (int i = 999; i > 0; i--) { for (int j = 999; j > 0; j--) { int value = i * j; char[] chars = Integer.toString(value).toCharArray(); boolean isPalindrome = true; for (int k = 0; k < chars.length / 2; k++) { if (chars[k] != chars[chars.length - 1 - k]) { isPalindrome = false; break; } } if (isPalindrome) { System.out.println("i = " + i); System.out.println("j = " + j); if (value > ret) { ret = value; } } } } return ret; } public static int Problem5() { int i; for (i = 1; ;i++) { boolean ok = true; for (int j = 1; j < 20; j++) { if (i%j != 0) { ok = false; break; } } if (ok) { break; } } return i; } public static int Problem6() { int a = 0; int b = 0; for (int i = 1; i <= 100; i++) { a += i; } a = a * a; for (int i = 1; i <= 100; i++) { b += i * i; } return a - b; } public static int Problem7() { int count = 0; int i; for (i = 2; ;i++) { if (isPrime(i)) { count++; } if (count == 10001) { break; } } return i; } public static int Problem8() { String numbers = "73167176531330624919225119674426574742355349194934" + "96983520312774506326239578318016984801869478851843" + "85861560789112949495459501737958331952853208805511" + "12540698747158523863050715693290963295227443043557" + "66896648950445244523161731856403098711121722383113" + "62229893423380308135336276614282806444486645238749" + "30358907296290491560440772390713810515859307960866" + "70172427121883998797908792274921901699720888093776" + "65727333001053367881220235421809751254540594752243" + "52584907711670556013604839586446706324415722155397" + "53697817977846174064955149290862569321978468622482" + "83972241375657056057490261407972968652414535100474" + "82166370484403199890008895243450658541227588666881" + "16427171479924442928230863465674813919123162824586" + "17866458359124566529476545682848912883142607690042" + 
"24219022671055626321111109370544217506941658960408" + "07198403850962455444362981230987879927244284909188" + "84580156166097919133875499200524063689912560717606" + "05886116467109405077541002256983155200055935729725" + "71636269561882670428252483600823257530420752963450"; char[] chars = numbers.toCharArray(); int ret = 0; for (int i = 0; i < chars.length - 6; i++) { int temp = (chars[i] - '0') * (chars[i+1] - '0') * (chars[i+2] - '0') * (chars[i+3] - '0') * (chars[i+4] - '0'); if (temp > ret) { ret = temp; } } return ret; } public static int Problem9() { int aa, bb, cc; for (int a = 1; a < 1000 / 3; a++) { for (int b = a; b < 1000 - a; b++) { if (a * a + b * b == (1000 - a - b) * (1000 - a - b)) { aa = a; bb = b; cc = 1000 - a - b; return aa * bb * cc; } } } return 0; } public static double Problem10() { double ret = 0; for (int i = 2; i < 200 * 10000; i++) { if (isPrime(i)) { ret += i; } } return ret; } public static double Problem11() { double ret = 0; int[][] num = { {8,02,22,97,38,15,00,40,00,75,04,05,07,78,52,12,50,77,91,8}, {49, 49, 99, 40, 17, 81, 18, 57 ,60, 87 ,17, 40 ,98 ,43, 69 ,48, 04 ,56, 62 ,00}, {81, 49, 31, 73, 55, 79, 14, 29 ,93, 71 ,40 ,67 ,53 ,88, 30 ,03 ,49 ,13, 36 ,65}, {52, 70, 95, 23, 04, 60, 11, 42 ,69 ,24 ,68 ,56 ,01 ,32, 56 ,71, 37 ,02, 36 ,91}, {22, 31, 16, 71, 51, 67, 63, 89 ,41 ,92 ,36 ,54 ,22 ,40, 40 ,28 ,66 ,33, 13 ,80}, {24, 47, 32, 60, 99, 03, 45, 02 ,44, 75 ,33 ,53 ,78 ,36, 84 ,20 ,35 ,17, 12 ,50}, {32, 98, 81, 28, 64, 23, 67, 10 ,26, 38 ,40 ,67 ,59 ,54, 70 ,66 ,18 ,38, 64 ,70}, {67, 26, 20, 68, 02, 62, 12, 20 ,95, 63 ,94 ,39 ,63 ,8 ,40 ,91 ,66 ,49, 94 ,21}, {24, 55, 58, 5, 66, 73, 99, 26 ,97, 17 ,78 ,78 ,96 ,83 ,14 ,88 ,34 ,89, 63 ,72}, {21, 36, 23, 9, 75, 00, 76, 44, 20, 45, 35 ,14 ,00 ,61 ,33 ,97 ,34 ,31, 33 ,95}, {78, 17, 53, 28, 22, 75, 31, 67, 15, 94, 03 ,80 ,04 ,62 ,16 ,14 ,9 ,53, 56 ,92}, {16, 39, 5, 42, 96, 35, 31, 47, 55, 58, 88 ,24 ,00 ,17 ,54 ,24 ,36 ,29, 85 ,57}, {86, 56, 00, 48, 35, 71, 89, 07, 05, 44, 44 ,37, 44 ,60 
,21 ,58 ,51 ,54, 17 ,58}, {19, 80, 81, 68, 05, 94, 47, 69, 28, 73, 92 ,13, 86 ,52 ,17 ,77 ,04 ,89, 55 ,40}, {04, 52, 8, 83, 97, 35, 99, 16, 07, 97, 57, 32, 16, 26, 26, 79, 33, 27, 98, 66}, {88, 36, 68, 87, 57, 62, 20, 72, 03, 46, 33 ,67 ,46 ,55 ,12 ,32 ,63 ,93, 53 ,69}, {04, 42, 16, 73, 38, 25, 39, 11, 24, 94, 72, 18 ,8 ,46 ,29 ,32 ,40 ,62 ,76 ,36}, {20, 69, 36, 41, 72, 30, 23, 88, 34, 62, 99, 69 ,82 ,67 ,59 ,85 ,74, 04 ,36 ,16}, {20, 73, 35, 29, 78, 31, 90, 01, 74, 31, 49, 71 ,48, 86, 81 ,16 ,23, 57, 05, 54}, {01, 70, 54, 71, 83, 51, 54, 69, 16, 92, 33, 48 ,61, 43 ,52 ,01 ,89, 19, 67, 48} }; for (int i = 0; i < 20; i++) { for (int j = 0; j < 20 - 3; j++) { int temp = num[i][j] * num[i][j + 1] * num[i][j + 2] * num[i][j + 3]; if (temp > ret) { ret = temp; } } } for (int i = 0; i < 20 - 3; i++) { for (int j = 0; j < 20; j++) { int temp = num[i][j] * num[i + 1][j] * num[i + 2][j] * num[i + 3][j]; if (temp > ret) { ret = temp; } } } for (int i = 0; i < 20 - 3; i++) { for (int j = 0; j < 20 - 3; j++) { int temp = num[i][j] * num[i + 1][j + 1] * num[i + 2][j + 2] * num[i + 3][j + 3]; if (temp > ret) { ret = temp; } } } for (int i = 0; i < 20 - 3; i++) { for (int j = 3; j < 20; j++) { int temp = num[i][j] * num[i + 1][j - 1] * num[i + 2][j - 2] * num[i + 3][j - 3]; if (temp > ret) { ret = temp; } } } return ret; } public static int Problem12() { int triangle_number = 0; for (int i = 1; ;i++) { triangle_number += i; int count_divisors = 0; for (int j = 1; j <= triangle_number; j++) { if (triangle_number % j == 0) { count_divisors++; } } System.out.println("i = " + i); System.out.println("triangle number = " + triangle_number + ", divisors = " + count_divisors); if (count_divisors > 500) { break; } } return triangle_number; } public static int Problem13() { String[] strings = { "37107287533902102798797998220837590246510135740250", "46376937677490009712648124896970078050417018260538", "74324986199524741059474233309513058123726617309629", 
"91942213363574161572522430563301811072406154908250", "23067588207539346171171980310421047513778063246676", "89261670696623633820136378418383684178734361726757", "28112879812849979408065481931592621691275889832738", "44274228917432520321923589422876796487670272189318", "47451445736001306439091167216856844588711603153276", "70386486105843025439939619828917593665686757934951", "62176457141856560629502157223196586755079324193331", "64906352462741904929101432445813822663347944758178", "92575867718337217661963751590579239728245598838407", "58203565325359399008402633568948830189458628227828", "80181199384826282014278194139940567587151170094390", "35398664372827112653829987240784473053190104293586", "86515506006295864861532075273371959191420517255829", "71693888707715466499115593487603532921714970056938", "54370070576826684624621495650076471787294438377604", "53282654108756828443191190634694037855217779295145", "36123272525000296071075082563815656710885258350721", "45876576172410976447339110607218265236877223636045", "17423706905851860660448207621209813287860733969412", "81142660418086830619328460811191061556940512689692", "51934325451728388641918047049293215058642563049483", "62467221648435076201727918039944693004732956340691", "15732444386908125794514089057706229429197107928209", "55037687525678773091862540744969844508330393682126", "18336384825330154686196124348767681297534375946515", "80386287592878490201521685554828717201219257766954", "78182833757993103614740356856449095527097864797581", "16726320100436897842553539920931837441497806860984", "48403098129077791799088218795327364475675590848030", "87086987551392711854517078544161852424320693150332", "59959406895756536782107074926966537676326235447210", "69793950679652694742597709739166693763042633987085", "41052684708299085211399427365734116182760315001271", "65378607361501080857009149939512557028198746004375", "35829035317434717326932123578154982629742552737307", "94953759765105305946966067683156574377167401875275", 
"88902802571733229619176668713819931811048770190271", "25267680276078003013678680992525463401061632866526", "36270218540497705585629946580636237993140746255962", "24074486908231174977792365466257246923322810917141", "91430288197103288597806669760892938638285025333403", "34413065578016127815921815005561868836468420090470", "23053081172816430487623791969842487255036638784583", "11487696932154902810424020138335124462181441773470", "63783299490636259666498587618221225225512486764533", "67720186971698544312419572409913959008952310058822", "95548255300263520781532296796249481641953868218774", "76085327132285723110424803456124867697064507995236", "37774242535411291684276865538926205024910326572967", "23701913275725675285653248258265463092207058596522", "29798860272258331913126375147341994889534765745501", "18495701454879288984856827726077713721403798879715", "38298203783031473527721580348144513491373226651381", "34829543829199918180278916522431027392251122869539", "40957953066405232632538044100059654939159879593635", "29746152185502371307642255121183693803580388584903", "41698116222072977186158236678424689157993532961922", "62467957194401269043877107275048102390895523597457", "23189706772547915061505504953922979530901129967519", "86188088225875314529584099251203829009407770775672", "11306739708304724483816533873502340845647058077308", "82959174767140363198008187129011875491310547126581", "97623331044818386269515456334926366572897563400500", "42846280183517070527831839425882145521227251250327", "55121603546981200581762165212827652751691296897789", "32238195734329339946437501907836945765883352399886", "75506164965184775180738168837861091527357929701337", "62177842752192623401942399639168044983993173312731", "32924185707147349566916674687634660915035914677504", "99518671430235219628894890102423325116913619626622", "73267460800591547471830798392868535206946944540724", "76841822524674417161514036427982273348055556214818", "97142617910342598647204516893989422179826088076852", 
"87783646182799346313767754307809363333018982642090", "10848802521674670883215120185883543223812876952786", "71329612474782464538636993009049310363619763878039", "62184073572399794223406235393808339651327408011116", "66627891981488087797941876876144230030984490851411", "60661826293682836764744779239180335110989069790714", "85786944089552990653640447425576083659976645795096", "66024396409905389607120198219976047599490197230297", "64913982680032973156037120041377903785566085089252", "16730939319872750275468906903707539413042652315011", "94809377245048795150954100921645863754710598436791", "78639167021187492431995700641917969777599028300699", "15368713711936614952811305876380278410754449733078", "40789923115535562561142322423255033685442488917353", "44889911501440648020369068063960672322193204149535", "41503128880339536053299340368006977710650566631954", "81234880673210146739058568557934581403627822703280", "82616570773948327592232845941706525094512325230608", "22918802058777319719839450180888072429661980811197", "77158542502016545090413245809786882778948721859617", "72107838435069186155435662884062257473692284509516", "20849603980134001723930671666823555245252804609722", "53503534226472524250874054075591789781264330331690" }; int[][] numbers = new int[strings.length][strings[0].length()]; for (int i = 0; i < strings.length; i++) { char[] chars = strings[i].toCharArray(); for (int j = 0; j < chars.length; j++) { numbers[i][j] = chars[j] - '0'; } } Stack<Integer> result = new Stack<Integer>(); int r = sum(result, numbers, numbers[0].length - 1, 0); System.out.print(r); for (int i = 0; i < 10; i++) { System.out.print(result.pop()); } return 0; } private static int sum(Stack<Integer> result, int[][] numbers, int pos, int increase) { if (numbers == null || numbers.length < 1 || numbers[0].length < pos) { return 0; } if (pos < 0) { return increase; } int temp = 0; for (int i = 0; i < numbers.length; i++) { temp += numbers[i][pos]; } temp += increase; result.push(temp % 
10); return sum(result, numbers, pos - 1, temp / 10); } public static long Problem14() { long sequenceLength = 0; long startingNumber = 0; long sequence; for (long i = 2; i <= 1000000; i++) { int length = 1; sequence = i; while (sequence != 1) { if ((sequence % 2) == 0) { sequence = sequence / 2; } else { sequence = sequence * 3 + 1; } length++; } //Check if sequence is the best solution if (length > sequenceLength) { sequenceLength = length; startingNumber = i; } } return startingNumber; } public static long Problem15() { int num = 20; long[][] grids = new long[num + 1] [num + 1]; for (int i = 0; i < num + 1; i++) { grids[i][0] = 1; grids[0][i] = 1; } for (int i = 1; i < num + 1; i++) { for (int j = 1; j < num + 1; j++) { grids[i][j] = grids[i-1][j] + grids[i][j-1]; } } return grids[num][num]; } public static int Problem16() { byte[] num = {1}; BigInteger ret = new BigInteger(num); for (int i = 0; i < 1000; i++) { ret = ret.add(ret); } char[] digit = ret.toString().toCharArray(); int sum = 0; for (int i = 0; i < digit.length; i++) { sum += digit[i] - '0'; } return sum; } public static long Problem17() { return 0; } private static String translate(int num) { if (num < 1) { throw new IllegalArgumentException("unsupported parameter"); } final String[] ONETONINE = {"one", "two", "three", "four", "five", "six", "seven", "eight", "nine"}; final String[] ELEVENTONINETEEN = {"eleven", "twelve", "thirteen", "fourteen", "fifteen", "sixteen", "seventeen", "eighteen", "nineteen"}; final String[] TENTONINTY = {"ten", "twenty", "thirty", "forty", "fifty", "sixty", "seventy", "eighty", "ninety"}; final String HUNDRED = "hundred"; final String THOUSAND = "thousand"; final String MILLION = "million"; final String BILLION = "billion"; final String AND = "and"; final String HYPHEN = "-"; int i = num / 10; int j = num % 10; if (i == 0) { return ONETONINE[j - 1]; } else if (i == 1) { return ELEVENTONINETEEN[j - 1]; } return ""; } public static int Problem18() { int[][] numbers = { 
{75}, {95, 64}, {17, 47, 82}, {18, 35, 87, 10}, {20, 04, 82, 47, 65}, {19, 01, 23, 75, 03, 34}, {88, 02, 77, 73, 07, 63, 67}, {99, 65, 04, 28, 06, 16, 70, 92}, {41, 41, 26, 56, 83, 40, 80, 70, 33}, {41, 48, 72, 33, 47, 32, 37, 16, 94, 29}, {53, 71, 44, 65, 25, 43, 91, 52, 97, 51, 14}, {70, 11, 33, 28, 77, 73, 17, 78, 39, 68, 17, 57}, {91, 71, 52, 38, 17, 14, 91, 43, 58, 50, 27, 29, 48}, {63, 66, 04, 68, 89, 53, 67, 30, 73, 16, 69, 87, 40, 31}, {04, 62, 98, 27, 23, 9, 70, 98, 73, 93, 38, 53, 60, 4, 23}}; return maxpath(numbers, 0, 0); } private static int maxpath(int[][] numbers, int i, int j) { if (i == numbers.length - 1) { if (j == numbers.length - 1) { return Math.max(numbers[i][j], 0); } else { return Math.max(numbers[i][j], numbers[i][j + 1]); } } else { return numbers[i][j] + Math.max(maxpath(numbers, i+1, j), maxpath(numbers, i+1, j+1)); } } public static int Problem19() { int sundays = 0; for (int year = 1901; year <= 2000; year++) { for (int month = 1; month <= 12; month++) { Calendar ca = Calendar.getInstance(); ca.set(Calendar.YEAR, year); ca.set(Calendar.MONTH, month); ca.set(Calendar.DATE, 1); if (ca.get(Calendar.DAY_OF_WEEK) == Calendar.SUNDAY) { sundays++; } } } return sundays; } public static int Problem20() { byte[] num = {1}; BigInteger ret = new BigInteger(num); for (int i = 1; i <= 100; i++) { ret = ret.multiply(new BigInteger("" + i)); } char[] digit = ret.toString().toCharArray(); int sum = 0; for (int i = 0; i < digit.length; i++) { sum += digit[i] - '0'; } return sum; } public static int Problem21() { int sumAmicible = 0; int factorsi, factorsj; for (int i = 2; i <= 10000; i++) { factorsi = sumOfFactors(i); if (factorsi > i && factorsi <= 10000) { factorsj = sumOfFactors(factorsi); if (factorsj == i) { sumAmicible += i + factorsi; } } } return sumAmicible; } private static int sumOfFactors(int number) { int sqrtOfNumber = (int)Math.sqrt(number); int sum = 1; //If the number is a perfect square //Count the squareroot once in the sum of 
factors if (number == sqrtOfNumber * sqrtOfNumber) { sum += sqrtOfNumber; sqrtOfNumber--; } for (int i = 2; i <= sqrtOfNumber; i++) { if (number % i == 0) { sum = sum + i + (number / i); } } return sum; } }
wilebeast/FireFox-OS
B2G/gecko/gfx/2d/DrawTargetRecording.h
/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef MOZILLA_GFX_DRAWTARGETRECORDING_H_
#define MOZILLA_GFX_DRAWTARGETRECORDING_H_

#include "2D.h"
#include "DrawEventRecorder.h"

namespace mozilla {
namespace gfx {

/*
 * A DrawTarget wrapper that records drawing commands to a DrawEventRecorder
 * while forwarding every call to an underlying real DrawTarget (mFinalDT).
 * Trivial queries (type, size, flush, native surface) delegate directly to
 * the wrapped target.
 */
class DrawTargetRecording : public DrawTarget
{
public:
  DrawTargetRecording(DrawEventRecorder *aRecorder, DrawTarget *aDT);
  ~DrawTargetRecording();

  virtual BackendType GetType() const { return mFinalDT->GetType(); }

  virtual TemporaryRef<SourceSurface> Snapshot();

  virtual IntSize GetSize() { return mFinalDT->GetSize(); }

  /* Ensure that the DrawTarget backend has flushed all drawing operations to
   * this draw target. This must be called before using the backing surface of
   * this draw target outside of GFX 2D code.
   */
  virtual void Flush() { mFinalDT->Flush(); }

  /*
   * Draw a surface to the draw target. Possibly doing partial drawing or
   * applying scaling. No sampling happens outside the source.
   *
   * aSurface Source surface to draw
   * aDest Destination rectangle that this drawing operation should draw to
   * aSource Source rectangle in aSurface coordinates, this area of aSurface
   *         will be stretched to the size of aDest.
   * aOptions General draw options that are applied to the operation
   * aSurfOptions DrawSurface options that are applied
   */
  virtual void DrawSurface(SourceSurface *aSurface,
                           const Rect &aDest,
                           const Rect &aSource,
                           const DrawSurfaceOptions &aSurfOptions = DrawSurfaceOptions(),
                           const DrawOptions &aOptions = DrawOptions());

  /*
   * Blend a surface to the draw target with a shadow. The shadow is drawn as a
   * gaussian blur using a specified sigma. The shadow is clipped to the size
   * of the input surface, so the input surface should contain a transparent
   * border the size of the approximate coverage of the blur (3 * aSigma).
   * NOTE: This function works in device space!
   *
   * aSurface Source surface to draw.
   * aDest Destination point that this drawing operation should draw to.
   * aColor Color of the drawn shadow
   * aOffset Offset of the shadow
   * aSigma Sigma used for the gaussian filter kernel
   * aOperator Composition operator used
   */
  virtual void DrawSurfaceWithShadow(SourceSurface *aSurface,
                                     const Point &aDest,
                                     const Color &aColor,
                                     const Point &aOffset,
                                     Float aSigma,
                                     CompositionOp aOperator);

  /*
   * Clear a rectangle on the draw target to transparent black. This will
   * respect the clipping region and transform.
   *
   * aRect Rectangle to clear
   */
  virtual void ClearRect(const Rect &aRect);

  /*
   * This is essentially a 'memcpy' between two surfaces. It moves a pixel
   * aligned area from the source surface unscaled directly onto the
   * drawtarget. This ignores both transform and clip.
   *
   * aSurface Surface to copy from
   * aSourceRect Source rectangle to be copied
   * aDest Destination point to copy the surface to
   */
  virtual void CopySurface(SourceSurface *aSurface,
                           const IntRect &aSourceRect,
                           const IntPoint &aDestination);

  /*
   * Fill a rectangle on the DrawTarget with a certain source pattern.
   *
   * aRect Rectangle that forms the mask of this filling operation
   * aPattern Pattern that forms the source of this filling operation
   * aOptions Options that are applied to this operation
   */
  virtual void FillRect(const Rect &aRect,
                        const Pattern &aPattern,
                        const DrawOptions &aOptions = DrawOptions());

  /*
   * Stroke a rectangle on the DrawTarget with a certain source pattern.
   *
   * aRect Rectangle that forms the mask of this stroking operation
   * aPattern Pattern that forms the source of this stroking operation
   * aOptions Options that are applied to this operation
   */
  virtual void StrokeRect(const Rect &aRect,
                          const Pattern &aPattern,
                          const StrokeOptions &aStrokeOptions = StrokeOptions(),
                          const DrawOptions &aOptions = DrawOptions());

  /*
   * Stroke a line on the DrawTarget with a certain source pattern.
   *
   * aStart Starting point of the line
   * aEnd End point of the line
   * aPattern Pattern that forms the source of this stroking operation
   * aOptions Options that are applied to this operation
   */
  virtual void StrokeLine(const Point &aStart,
                          const Point &aEnd,
                          const Pattern &aPattern,
                          const StrokeOptions &aStrokeOptions = StrokeOptions(),
                          const DrawOptions &aOptions = DrawOptions());

  /*
   * Stroke a path on the draw target with a certain source pattern.
   *
   * aPath Path that is to be stroked
   * aPattern Pattern that should be used for the stroke
   * aStrokeOptions Stroke options used for this operation
   * aOptions Draw options used for this operation
   */
  virtual void Stroke(const Path *aPath,
                      const Pattern &aPattern,
                      const StrokeOptions &aStrokeOptions = StrokeOptions(),
                      const DrawOptions &aOptions = DrawOptions());

  /*
   * Fill a path on the draw target with a certain source pattern.
   *
   * aPath Path that is to be filled
   * aPattern Pattern that should be used for the fill
   * aOptions Draw options used for this operation
   */
  virtual void Fill(const Path *aPath,
                    const Pattern &aPattern,
                    const DrawOptions &aOptions = DrawOptions());

  /*
   * Fill a series of glyphs on the draw target with a certain source pattern.
   */
  virtual void FillGlyphs(ScaledFont *aFont,
                          const GlyphBuffer &aBuffer,
                          const Pattern &aPattern,
                          const DrawOptions &aOptions = DrawOptions(),
                          const GlyphRenderingOptions *aRenderingOptions = NULL);

  /*
   * This takes a source pattern and a mask, and composites the source pattern
   * onto the destination surface using the alpha channel of the mask pattern
   * as a mask for the operation.
   *
   * aSource Source pattern
   * aMask Mask pattern
   * aOptions Drawing options
   */
  virtual void Mask(const Pattern &aSource,
                    const Pattern &aMask,
                    const DrawOptions &aOptions = DrawOptions());

  /*
   * Push a clip to the DrawTarget.
   *
   * aPath The path to clip to
   */
  virtual void PushClip(const Path *aPath);

  /*
   * Push an axis-aligned rectangular clip to the DrawTarget. This rectangle
   * is specified in user space.
   *
   * aRect The rect to clip to
   */
  virtual void PushClipRect(const Rect &aRect);

  /* Pop a clip from the DrawTarget. A pop without a corresponding push will
   * be ignored.
   */
  virtual void PopClip();

  /*
   * Create a SourceSurface optimized for use with this DrawTarget from
   * existing bitmap data in memory.
   *
   * The SourceSurface does not take ownership of aData, and may be freed at any time.
   */
  virtual TemporaryRef<SourceSurface> CreateSourceSurfaceFromData(unsigned char *aData,
                                                                  const IntSize &aSize,
                                                                  int32_t aStride,
                                                                  SurfaceFormat aFormat) const;

  /*
   * Create a SourceSurface optimized for use with this DrawTarget from
   * an arbitrary other SourceSurface. This may return aSourceSurface or some
   * other existing surface.
   */
  virtual TemporaryRef<SourceSurface> OptimizeSourceSurface(SourceSurface *aSurface) const;

  /*
   * Create a SourceSurface for a type of NativeSurface. This may fail if the
   * draw target does not know how to deal with the type of NativeSurface passed
   * in.
   */
  virtual TemporaryRef<SourceSurface>
    CreateSourceSurfaceFromNativeSurface(const NativeSurface &aSurface) const;

  /*
   * Create a DrawTarget whose snapshot is optimized for use with this DrawTarget.
   */
  virtual TemporaryRef<DrawTarget>
    CreateSimilarDrawTarget(const IntSize &aSize, SurfaceFormat aFormat) const;

  /*
   * Create a path builder with the specified fillmode.
   *
   * We need the fill mode up front because of Direct2D.
   * ID2D1SimplifiedGeometrySink requires the fill mode
   * to be set before calling BeginFigure().
   */
  virtual TemporaryRef<PathBuilder> CreatePathBuilder(FillRule aFillRule = FILL_WINDING) const;

  /*
   * Create a GradientStops object that holds information about a set of
   * gradient stops, this object is required for linear or radial gradient
   * patterns to represent the color stops in the gradient.
   *
   * aStops An array of gradient stops
   * aNumStops Number of stops in the array aStops
   * aExtendMode This describes how to extend the stop color outside of the
   *             gradient area.
   */
  virtual TemporaryRef<GradientStops>
    CreateGradientStops(GradientStop *aStops,
                        uint32_t aNumStops,
                        ExtendMode aExtendMode = EXTEND_CLAMP) const;

  /*
   * Set a transform on the surface, this transform is applied at drawing time
   * to both the mask and source of the operation.
   */
  virtual void SetTransform(const Matrix &aTransform);

  /* Tries to get a native surface for a DrawTarget, this may fail if the
   * draw target cannot convert to this surface type.
   */
  virtual void *GetNativeSurface(NativeSurfaceType aType) { return mFinalDT->GetNativeSurface(aType); }

private:
  // Translates a caller-supplied Path into the recorder's stored path object.
  Path *GetPathForPathRecording(const Path *aPath) const;
  // Makes sure the recorder has stored aPath before it is referenced by an event.
  void EnsureStored(const Path *aPath);

  RefPtr<DrawEventRecorderPrivate> mRecorder; // receives the recorded events
  RefPtr<DrawTarget> mFinalDT;                // real target all calls forward to
};

}
}

#endif /* MOZILLA_GFX_DRAWTARGETRECORDING_H_ */
DingosGotMyBaby/pagermon
server/knex/migrations/20200510210059_create_users_table.js
var bcrypt = require('bcryptjs'); var nconf = require('nconf'); var confFile = './config/config.json'; nconf.file({ file: confFile }); var user = nconf.get('auth:user') var pwd = nconf.get('auth:encPass') exports.up = function(db, Promise) { return db.schema.hasTable('users').then(function(exists) { if (!exists) { return db.schema.createTable('users', table => { table.charset('utf8'); table.collate('utf8_general_ci'); table.increments('id').primary().unique().notNullable(); table.string('givenname', [255]).notNullable(); table.string('surname',[255]) table.string('username',[32]).notNullable().unique(); table.string('password').notNullable() table.string('email').notNullable().unique(); table.enu('role', ['admin', 'user']).notNullable().defaultTo('user') table.enu('status', ['active', 'disabled']).notNullable().defaultTo('disabled') table.datetime('lastlogondate') }) .then(function (){ //Migrate the current admin user. return db('users') .insert({ givenname: 'Admin', surname: '', username: user, password: <PASSWORD>, email: '<EMAIL>', role: 'admin', status: 'active', lastlogondate: null }) .then (function () { }); }); } else { return Promise.resolve('Not Required') } }) }; exports.down = function(db, Promise) { return db.schema.dropTable('users'); };
ipaddr/Capstone-Project
Rereso/app/src/main/java/id/ipaddr/android/rereso/presentation/view/widget/ReresoWidgetDataProvider.java
package id.ipaddr.android.rereso.presentation.view.widget;

import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.Color;
import android.util.Log;
import android.widget.RemoteViews;
import android.widget.RemoteViewsService;

import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.ValueEventListener;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import id.ipaddr.android.rereso.R;
import id.ipaddr.android.rereso.data.entity.CertificateOfBirthDataEntity;
import id.ipaddr.android.rereso.domain.model.CertificateOfBirthData;
import id.ipaddr.android.rereso.domain.model.ECertificateOfBirthState;
import id.ipaddr.android.rereso.util.ImageUtil;

/**
 * Created by iip on 5/11/17.
 *
 * RemoteViewsFactory backing the Rereso home-screen widget list. Each row
 * shows one certificate-of-birth application (photo, father name, baby name,
 * application status) loaded from the Firebase Realtime Database.
 */
public class ReresoWidgetDataProvider implements RemoteViewsService.RemoteViewsFactory {

    private static final String TAG = ReresoWidgetDataProvider.class.getSimpleName();

    private Context mContext;
    // Rows currently shown by the widget; repopulated on each Firebase callback.
    private List<WidgetItem> mCollection = new ArrayList();

    public ReresoWidgetDataProvider(Context context, Intent intent){
        this.mContext = context;
    }

    /**
     * Clears the row cache and attaches a Firebase listener that refills it
     * from the CertificateOfBirthData node.
     *
     * NOTE(review): this registers a NEW ValueEventListener on every call
     * (onCreate and every onDataSetChanged) without removing the previous
     * one — listeners accumulate over the widget's lifetime; verify whether
     * addListenerForSingleValueEvent (or removal) was intended.
     * NOTE(review): the listener fires asynchronously, so getCount() may see
     * an empty collection until the first onDataChange arrives.
     */
    private void initData(){
        mCollection.clear();
        DatabaseReference mDatabaseReference = FirebaseDatabase.getInstance().getReference(CertificateOfBirthData.class.getSimpleName());
        mDatabaseReference.addValueEventListener(new ValueEventListener() {
            @Override
            public void onDataChange(DataSnapshot dataSnapshot) {
                // Cleared again here: a later snapshot fully replaces the rows.
                mCollection.clear();
                Log.d(TAG, "onDataChange");
                Iterator<DataSnapshot> iterator = dataSnapshot.getChildren().iterator();
                while (iterator.hasNext()){
                    DataSnapshot ds = iterator.next();
                    String id = ds.getKey();
                    CertificateOfBirthDataEntity entity = ds.getValue(CertificateOfBirthDataEntity.class);
                    entity.setId(id);
                    // The document photo is stored in the DB as base64 text.
                    String base64Image = entity.getImgOfCertificateOfBirthForm().getDocumentImageBase64();
                    Bitmap bitmap = ImageUtil.fromStringBase64toBitmap(base64Image);
                    String fatherName = entity.getCertificateOfBirthForm().getFather().getFullName();
                    String babyName = entity.getCertificateOfBirthForm().getBaby().getName();
                    // Missing state is shown as "Unknow" (sic — enum constant name).
                    String status = entity.geteCertificateOfBirthState() == null ? ECertificateOfBirthState.Unknow.toString():entity.geteCertificateOfBirthState().toString();
                    WidgetItem widgetItem = new WidgetItem(bitmap, fatherName, babyName, status);
                    mCollection.add(widgetItem);
                }
            }

            @Override
            public void onCancelled(DatabaseError databaseError) {
                Log.d(TAG, "onCancelled");
            }
        });
    }

    @Override
    public void onCreate() {
        initData();
    }

    @Override
    public void onDataSetChanged() {
        initData();
    }

    @Override
    public void onDestroy() {
        Log.d(TAG, "onDestroy");
    }

    @Override
    public int getCount() {
        return mCollection.size();
    }

    /** Binds one cached WidgetItem into the row layout. */
    @Override
    public RemoteViews getViewAt(int position) {
        WidgetItem wi = mCollection.get(position);
        RemoteViews views = new RemoteViews(mContext.getPackageName(), R.layout.row_widget);
        views.setImageViewBitmap(R.id.avatar, wi.getBitmap());
        views.setTextViewText(R.id.father_name, wi.getFatherName());
        views.setTextViewText(R.id.baby_name, wi.getBabyName());
        views.setTextViewText(R.id.status, wi.getApplicationStatus());
        views.setTextColor(R.id.father_name, Color.WHITE);
        views.setTextColor(R.id.baby_name, Color.WHITE);
        views.setTextColor(R.id.status, Color.WHITE);
        return views;
    }

    @Override
    public RemoteViews getLoadingView() {
        // null -> platform default loading view.
        return null;
    }

    @Override
    public int getViewTypeCount() {
        return 1;
    }

    @Override
    public long getItemId(int position) {
        return position;
    }

    @Override
    public boolean hasStableIds() {
        return true;
    }
}
ramonlopz1/UDEMY---Fullstack-Web-Developer
exercicios-js/4 - Funcao/callback1.js
// Demonstrates callbacks with Array#forEach.
// (FIX: removed a stray "<reponame>..." dataset artifact that had been fused
// onto the first line and made the file invalid JavaScript.)
const fabricantes = ["Mercedes", "Audi", "BMW"]

// `imprimir` is the callback: forEach invokes it once per element,
// passing the element value and its index.
function imprimir(nome, indice) {
    console.log(`${indice + 1}. ${nome}`)
}

// forEach is an Array method; it calls `imprimir` for each value of
// `fabricantes`, supplying the arguments itself.
fabricantes.forEach(imprimir)

// Same idea with an anonymous function: declared with a single parameter,
// it receives just the element value.
fabricantes.forEach(function(fabricantes) {
    console.log(fabricantes)
})

// And the same thing again as an arrow function.
fabricantes.forEach(fabricantes => console.log(fabricantes))
aanacleto/erp-
core/objs/gap_timeAtendimento.py
<reponame>aanacleto/erp- # !/usr/bin/env python3 # -*- encoding: utf-8 -*- """ ERP+ """ __author__ = 'CV<NAME>' __credits__ = [] __version__ = "1.0" __maintainer__ = "CV<NAME>" __status__ = "Development" __model_name__ = 'gap_timeAtendimento.GAPTimeAtendimento' import auth, base_models from orm import * from form import * try: from my_gap_servico import GAPServico except: from gap_servico import GAPServico class GAPTimeAtendimento(Model, View): def __init__(self, **kargs): Model.__init__(self, **kargs) self.__name__ = 'gap_timeAtendimento' self.__title__ ='Gap Time Atendimento' self.__model_name__ = __model_name__ self.__list_edit_mode__ = 'edit' self.__order_by__ = 'gap_timeAtendimento.senha' self.__auth__ = { 'read':['All'], 'write':['Atendedor'], 'create':['Gestor de Loja'], 'delete':['Gestor de Atendimento'], 'full_access':['Gestor de Atendimento'] } self.nome_atendedor = string_field(view_order = 1, name = 'Nome Atendedor', args='readonly', size = 80) self.senha = string_field(view_order = 2, name = 'senha', args='readonly', size = 50) self.servico = string_field(view_order = 3, name = 'servico', args='readonly',size = 50) self.hora_entrada= time_field(view_order=4, name ='Hora Pedido', args='readonly', size=40, onlist=True) self.data = date_field(view_order = 5, name = 'Data', size=40, args='readonly') self.tempo_atendimento= time_field(view_order=6, name ='Tempo Atendimento', size=40, args='readonly', onlist=True) self.estado = combo_field(view_order = 7, name = 'Estado', size = 50, args='readonly', default = 'Espera', options = [('espera','Espera'), ('espera_atendedor','Espera Atendedor'), ('atendido','Atendido'), ('desistiu','Desistiu'), ('transferido','Transferido'), ('para_atendimento','Para Atendimento')]) self.observacao = text_field(view_order=8, name='Observação', size=100, args="rows=30", onlist=False, search=False) self.loja = string_field(view_order = 9, name = 'Loja', size = 50, args='readonly') self.senha_id = string_field(view_order = 10, name 
= 'sen<NAME>', onlist=False, hidden=True, args='readonly', size = 50) def get_opts(self, get_str): """ Este get_opts em todos os modelos serve para alimentar os choice e combo deste modelo e não chama as funções get_options deste modelo quando chamadas a partir de um outro! """ return eval(get_str) def getTimeAtendimento(self, senha=None,servico=None, loja=None): try: self.where = "nome_atendedor = '{name}' and senha='{<PASSWORD>ha}' and servico='{servico}' and data='{data}' and loja='{loja}'".format(name=bottle.request.session['user_name'],senha=senha,servico=servico,data=str(datetime.date.today()), loja=loja) self.kargs = self.get() if self.kargs: self.kargs = self.kargs[0] return self.kargs['tempo_atendimento'] return None except: return None def setTimeAtendimento(self,senha_id=None,tempo_atendimento=None, loja=None): try: self.where = "senha_id ='{senha_id}' and data='{data}' and loja='{loja}'".format(senha_id=senha_id,data=str(datetime.date.today()), loja=loja) self.kargs = self.get() if self.kargs: self.kargs = self.kargs[0] self.kargs['user'] = bottle.request.session['user'] self.kargs['tempo_atendimento'] = tempo_atendimento self.put() return True except: return False def setEstadoAtendimento(self,senha_id=None, estado=None, loja=None): try: self.where = "senha_id = '{senha_id}' and data='{data}' and loja='{loja}'".format(senha_id=senha_id,data=str(datetime.date.today()), loja=loja) self.kargs = self.get() if self.kargs: self.kargs = self.kargs[0] self.kargs['user'] = bottle.request.session['user'] self.kargs['estado'] = estado self.put() return True except: return False def setServicoAtendimento(self,senha_id=None, newservico=None, loja=None): try: self.where = "senha_id='{senha_id}' and data='{data}' and loja='{loja}'".format(senha_id=senha_id,data=str(datetime.date.today()), loja=loja) self.kargs = self.get() if self.kargs: self.kargs = self.kargs[0] self.kargs['user'] = bottle.request.session['user'] self.kargs['servico'] = newservico self.put() return 
True except: return False def setObservacao(self,senha_id=None,comentario=None, loja=None): try: self.where = "senha_id='{senha_id}' and data='{data}' and loja='{loja}'".format(senha_id=senha_id,data=str(datetime.date.today()), loja=loja) self.kargs = self.get() if self.kargs: self.kargs = self.kargs[0] self.kargs['user'] = bottle.request.session['user'] self.kargs['observacao'] = comentario self.put() return True except: return False #Faz o get das senhas em espera pelo atendedor xtpo na loja y (a filtragem e feita no proprio formulario devido a alguns factos...) def getClienteEspera(self, loja=None): options = [] self.where = "nome_atendedor = '{name}' and estado='espera_atendedor' and loja='{loja}'".format(name=bottle.request.session['user_name'], loja=loja) opts = self.get() for option in opts: options.append(str(option['senha_id'])+";"+option['servico']+";"+str(option['senha'])+';'+str(option['observacao'])+';'+str(option['tempo_atendimento'])) return options #Faz o get do servico actualmente em atendimento pelo atendedor def getServicoAtendimento(self, loja=None): try: self.where = "nome_atendedor = '{name}' and estado='para_atendimento' and data='{data}' and loja='{loja}'".format(name=bottle.request.session['user_name'],data=str(datetime.date.today()), loja=loja) self.kargs = self.get() if self.kargs: self.kargs = self.kargs[0] return self.kargs['servico'] return None except: return None #em caso de ter ocorrido algum erro e por algum engano o atendedor nao atender o cliente automaticamente e mudado para desistir def checkTimeAtendimento(self, loja=None): try: self.where = "nome_atendedor = '{name}' and estado='para_atendimento' and data='{data}' and loja='{loja}'".format(name=bottle.request.session['user_name'],data=str(datetime.date.today()), loja=loja) args = self.get() for self.kargs in args: if (self.kargs['estado'] == 'para_atendimento'): from gap_senha import GAPSenha GAPSenha().Desistiu(id=self.kargs['senha_id']) self.kargs['user'] = 
bottle.request.session['user'] self.kargs['estado'] = 'desistiu' self.put() return True except: return False #em caso de ter ocorrido algum erro e por algum engano o atendedor nao atender o cliente em dia ou dias anteriores automaticamente e mudado para desistir def checkTimeAtendimentoLater(self, senha_id=None, loja=None): try: self.where = "senha_id = '{senha_id}' and loja='{loja}'".format(senha_id=senha_id, loja=loja) self.kargs = self.get() data_hoje = datetime.date.today() #para_atendimento ou espera atendedor de dias anteriores sao alterados para desistiriu if self.kargs: self.kargs = self.kargs[0] data_senha = str(self.kargs['data']).split("-") if (datetime.date(int(data_senha[0]),int(data_senha[1]), int(data_senha[2])) < data_hoje): if (self.kargs['estado'] == 'para_atendimento') or (self.kargs['estado'] == 'espera_atendedor'): self.kargs['user'] = bottle.request.session['user'] self.kargs['estado'] = 'desistiu' self.put() return True except: return False #aqui eu fasso o get do numero de pessoas que foram atendidas pelo atendedor xpto def get_clienteAtendido(self, nome=None, dataInicio=None, dataFim=None, loja=None, servico=None): try: dataInicio = str(dataInicio).split("-") dataFim = str(dataFim).split("-") count = 0 if servico == None: self.where = "nome_atendedor = '{name}' and estado='atendido' and loja='{loja}'".format(name=nome, loja=loja) else: self.where = "nome_atendedor = '{name}' and estado='atendido' and loja='{loja}' and servico='{servico}' ".format(name=nome, loja=loja, servico=servico) opts = self.get() for option in opts: data_senha = str(option['data']).split("-") if (datetime.date(int(data_senha[0]),int(data_senha[1]), int(data_senha[2]))>=datetime.date(int(dataInicio[0]),int(dataInicio[1]), int(dataInicio[2]))) and (datetime.date(int(data_senha[0]),int(data_senha[1]), int(data_senha[2]))<=datetime.date(int(dataFim[0]),int(dataFim[1]), int(dataFim[2]))): count+=1 return count except: return 0 #aqui eu fasso o get do numero de pessoas que 
desistiram pelo atendedor xpto def get_clienteDesistiram(self, nome=None, dataInicio=None, dataFim=None, loja=None, servico=None): try: dataInicio = str(dataInicio).split("-") dataFim = str(dataFim).split("-") count = 0 if servico == None: self.where = "nome_atendedor = '{name}' and estado='desistiu' and loja='{loja}'".format(name=nome, loja=loja) else: self.where = "nome_atendedor = '{name}' and estado='desistiu' and loja='{loja}' and servico='{servico}' ".format(name=nome, loja=loja, servico=servico) opts = self.get() for option in opts: data_senha = str(option['data']).split("-") if (datetime.date(int(data_senha[0]),int(data_senha[1]), int(data_senha[2]))>=datetime.date(int(dataInicio[0]),int(dataInicio[1]), int(dataInicio[2]))) and (datetime.date(int(data_senha[0]),int(data_senha[1]), int(data_senha[2]))<=datetime.date(int(dataFim[0]),int(dataFim[1]), int(dataFim[2]))): count+=1 return count except: return 0 #aqui eu fasso o get do numero de pessoas que foram atendidas na loja xpto def get_clienteAtendidoByLoja(self, dataInicio=None, dataFim=None, loja=None, servico=None): try: dataInicio = str(dataInicio).split("-") dataFim = str(dataFim).split("-") count = 0 if servico == None: self.where = "estado='atendido' and loja='{loja}'".format(loja=loja) else: self.where = "estado='atendido' and loja='{loja}' and servico='{servico}' ".format(loja=loja, servico=servico) opts = self.get() for option in opts: data_senha = str(option['data']).split("-") if (datetime.date(int(data_senha[0]),int(data_senha[1]), int(data_senha[2]))>=datetime.date(int(dataInicio[0]),int(dataInicio[1]), int(dataInicio[2]))) and (datetime.date(int(data_senha[0]),int(data_senha[1]), int(data_senha[2]))<=datetime.date(int(dataFim[0]),int(dataFim[1]), int(dataFim[2]))): count+=1 return count except: return 0 #aqui eu fasso o get do numero de pessoas que desistiram na loja xpto def get_clienteDesistiramByLoja(self, dataInicio=None, dataFim=None, loja=None, servico=None): try: dataInicio = 
str(dataInicio).split("-") dataFim = str(dataFim).split("-") count = 0 if servico == None: self.where = "estado='desistiu' and loja='{loja}'".format(loja=loja) else: self.where = "estado='desistiu' and loja='{loja}' and servico='{servico}' ".format(loja=loja, servico=servico) opts = self.get() for option in opts: data_senha = str(option['data']).split("-") if (datetime.date(int(data_senha[0]),int(data_senha[1]), int(data_senha[2]))>=datetime.date(int(dataInicio[0]),int(dataInicio[1]), int(dataInicio[2]))) and (datetime.date(int(data_senha[0]),int(data_senha[1]), int(data_senha[2]))<=datetime.date(int(dataFim[0]),int(dataFim[1]), int(dataFim[2]))): count+=1 return count except: return 0 #faz o get do tempo medio de atendimento def get_mediaTempoAtendido(self, nome=None, dataInicio=None, dataFim=None, loja=None, servico=None): try: hora = 0 minuto = 0 segundos = 0 dataInicio = str(dataInicio).split("-") dataFim = str(dataFim).split("-") count = 0 if servico == None: self.where = "nome_atendedor = '{name}' and estado='atendido' and loja='{loja}'".format(name=nome, loja=loja) else: self.where = "nome_atendedor = '{name}' and estado='atendido' and loja='{loja}' and servico='{servico}' ".format(name=nome, loja=loja, servico=servico) opts = self.get() for option in opts: data_senha = str(option['data']).split("-") time = str(option['tempo_atendimento']).split(":") if (datetime.date(int(data_senha[0]),int(data_senha[1]), int(data_senha[2]))>=datetime.date(int(dataInicio[0]),int(dataInicio[1]), int(dataInicio[2]))) and (datetime.date(int(data_senha[0]),int(data_senha[1]), int(data_senha[2]))<=datetime.date(int(dataFim[0]),int(dataFim[1]), int(dataFim[2]))): hora+=int(time[0]) minuto+=int(time[1]) segundos+=int(time[2]) count+=1 """ if (segundos>60): while(segundos>60): minuto+=1 segundos-=60 if(minuto>60): while(minuto>60): hora+=1 minuto-=60 """ hora = int(hora/count) minuto = int(minuto/count) segundos = int(segundos/count) if (hora< 10): hora = '0'+str(hora) if(minuto < 
10): minuto = '0'+str(minuto) if(segundos < 10): segundos = '0'+str(segundos) return str(hora)+":"+str(minuto)+":"+str(segundos) except: return "00:00:00" #faz o get do tempo maximo de atendimento def get_tempoMaximoAtendido(self, nome=None, dataInicio=None, dataFim=None, loja=None, servico=None): try: dataInicio = str(dataInicio).split("-") dataFim = str(dataFim).split("-") if servico == None: self.where = "nome_atendedor = '{name}' and estado='atendido' and loja='{loja}'".format(name=nome, loja=loja) else: self.where = "nome_atendedor = '{name}' and estado='atendido' and loja='{loja}' and servico='{servico}' ".format(name=nome, loja=loja, servico=servico) opts = self.get(order_by='tempo_atendimento DESC') for option in opts: data_senha = str(option['data']).split("-") if (datetime.date(int(data_senha[0]),int(data_senha[1]), int(data_senha[2]))>=datetime.date(int(dataInicio[0]),int(dataInicio[1]), int(dataInicio[2]))) and (datetime.date(int(data_senha[0]),int(data_senha[1]), int(data_senha[2]))<=datetime.date(int(dataFim[0]),int(dataFim[1]), int(dataFim[2]))): return str(option['tempo_atendimento']) return '00:00:00' except: return '00:00:00' #faz o get do tempo minimo de atendimento def get_tempoMinimoAtendido(self, nome=None, dataInicio=None, dataFim=None, loja=None, servico=None): try: dataInicio = str(dataInicio).split("-") dataFim = str(dataFim).split("-") if servico == None: self.where = "nome_atendedor = '{name}' and estado='atendido' and loja='{loja}'".format(name=nome, loja=loja) else: self.where = "nome_atendedor = '{name}' and estado='atendido' and loja='{loja}' and servico='{servico}' ".format(name=nome, loja=loja, servico=servico) opts = self.get(order_by='tempo_atendimento ASC') for option in opts: data_senha = str(option['data']).split("-") if (datetime.date(int(data_senha[0]),int(data_senha[1]), int(data_senha[2]))>=datetime.date(int(dataInicio[0]),int(dataInicio[1]), int(dataInicio[2]))) and (datetime.date(int(data_senha[0]),int(data_senha[1]), 
int(data_senha[2]))<=datetime.date(int(dataFim[0]),int(dataFim[1]), int(dataFim[2]))): return str(option['tempo_atendimento']) return '00:00:00' except: return '00:00:00' #verifica se o atendedor tem algum cliente em atendimento actualmente def checkAtendedor(self, loja=None): try: self.where = "nome_atendedor = '{name}' and estado='para_atendimento' and data='{data}' and loja='{loja}'".format(name=bottle.request.session['user_name'],data=str(datetime.date.today()), loja=loja) self.kargs = self.get() if self.kargs: return False return True except: return False #Faz o get do ultimo cliente atendido pelo atendedor xpto (essa funçao vai ser utilizada no adicionar opiniao partimos do principio que o cliente que vai dar a sua opiniao e o ultimo atendido pelo atendedor) def getLastClient(self, nome_atendedor=None, loja=None): self.where = "nome_atendedor='{nome_atendedor}' and data='{data}' and loja='{loja}' and estado='atendido' ".format(nome_atendedor=nome_atendedor,data=str(datetime.date.today()), loja=loja) self.kargs = self.get(order_by='hora_entrada DESC') if self.kargs: self.kargs = self.kargs[0] return self.kargs['senha']+";"+self.kargs['servico'] return ";;"
nadeemnazeer3/dremio-oss
sabot/kernel/src/test/java/com/dremio/exec/store/TestRootSchemaAccess.java
/*
 * Copyright (C) 2017-2018 Dremio Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.dremio.exec.store;

/**
 * Disabled test suite for root-schema visibility rules (which sources a user
 * can see, exposure of {@code __internal} sources, special handling of
 * {@code __home}).
 *
 * NOTE(review): every test body below is commented out — presumably parked
 * when the RootSchema API changed — so this class currently verifies nothing.
 * Re-enable or delete once the referenced API stabilizes.
 * (FIX: removed a stray "&lt;filename&gt;..." dataset artifact that preceded
 * the license header and made the file invalid Java.)
 */
public class TestRootSchemaAccess {
//
//  private final NamespaceService ns = mock(NamespaceService.class);
//  final SabotContext sabotContext = mock(SabotContext.class);
//
//  @Test
//  public void asNonSystemUser() throws Exception {
//    when(ns.getSources()).thenReturn(Lists.newArrayList(new SourceConfig().setName("__internal"),
//        new SourceConfig().setName("notinternal")));
//    when(ns.getSpaces()).thenReturn(new ArrayList<SpaceConfig>());
//    when(ns.getHomeSpaces()).thenReturn(new ArrayList<HomeConfig>());
//    RootSchema schema = new RootSchema(ns, sabotContext, SchemaConfig.newBuilder("testuser").build(),
//        new SchemaTreeProvider.MetadataStatsCollector());
//    assertEquals(1, schema.getSubSchemaNames().size());
//    assertTrue(schema.getSubSchemaNames().contains("notinternal"));
//  }
//
//  @Test
//  public void asSystemUser() throws Exception {
//    when(ns.getSources()).thenReturn(Lists.newArrayList(new SourceConfig().setName("__internal"),
//        new SourceConfig().setName("notinternal")));
//    when(ns.getSpaces()).thenReturn(new ArrayList<SpaceConfig>());
//    when(ns.getHomeSpaces()).thenReturn(new ArrayList<HomeConfig>());
//    RootSchema schema = new RootSchema(ns, sabotContext, SchemaConfig.newBuilder(SystemUser.SYSTEM_USERNAME).build(),
//        new SchemaTreeProvider.MetadataStatsCollector());
//    assertEquals(2, schema.getSubSchemaNames().size());
//    assertTrue(schema.getSubSchemaNames().contains("notinternal"));
//    assertTrue(schema.getSubSchemaNames().contains("__internal"));
//  }
//
//  @Test
//  public void asNonSystemUserButExpose() throws Exception {
//    when(ns.getSources()).thenReturn(Lists.newArrayList(new SourceConfig().setName("__internal"),
//        new SourceConfig().setName("notinternal")));
//    when(ns.getSpaces()).thenReturn(new ArrayList<SpaceConfig>());
//    when(ns.getHomeSpaces()).thenReturn(new ArrayList<HomeConfig>());
//    RootSchema schema = new RootSchema(ns, sabotContext,
//        SchemaConfig.newBuilder("testuser").exposeInternalSources(true).build(),
//        new SchemaTreeProvider.MetadataStatsCollector());
//    assertEquals(2, schema.getSubSchemaNames().size());
//    assertTrue(schema.getSubSchemaNames().contains("notinternal"));
//    assertTrue(schema.getSubSchemaNames().contains("__internal"));
//  }
//
//  @Test
//  public void homeIsSpecial() throws Exception {
//    when(ns.getSources()).thenReturn(Lists.newArrayList(new SourceConfig().setName("__internal"),
//        new SourceConfig().setName("notinternal"), new SourceConfig().setName("__home")));
//    when(ns.getSpaces()).thenReturn(new ArrayList<SpaceConfig>());
//    when(ns.getHomeSpaces()).thenReturn(new ArrayList<HomeConfig>());
//    RootSchema schema = new RootSchema(ns, sabotContext,
//        SchemaConfig.newBuilder("testuser").build(),
//        new SchemaTreeProvider.MetadataStatsCollector());
//    assertEquals(2, schema.getSubSchemaNames().size());
//    assertTrue(schema.getSubSchemaNames().contains("notinternal"));
//    assertTrue(schema.getSubSchemaNames().contains("__home"));
//  }
}
Uniandes-isis2603/s3_watchdogs
s3_watchdogs-web/src/main/java/co/edu/uniandes/csw/watchdogs/resources/TransporteResource.java
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package co.edu.uniandes.csw.watchdogs.resources;

import co.edu.uniandes.csw.watchdogs.dtos.TransporteDetailDTO;
import co.edu.uniandes.csw.watchdogs.ejb.TransporteLogic;
import co.edu.uniandes.csw.watchdogs.entities.TransporteEntity;
import co.edu.uniandes.csw.watchdogs.exceptions.BusinessLogicException;
import java.util.ArrayList;
import java.util.List;
import javax.enterprise.context.RequestScoped;
import javax.inject.Inject;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;

/**
 * JAX-RS resource exposing CRUD operations for Transporte under
 * the "transportes" path. All payloads are JSON.
 *
 * @author c.martinezc1
 */
@Path("transportes")
@Produces("application/json")
@Consumes("application/json")
@RequestScoped
public class TransporteResource {

    @Inject
    TransporteLogic transporteLogic;

    /**
     * Lists every transporte known to the logic layer.
     *
     * @return all transportes as detail DTOs (empty list when none exist)
     * @throws BusinessLogicException if the logic layer rejects the request
     */
    @GET
    public List<TransporteDetailDTO> getTransportes() throws BusinessLogicException {
        return listTransporteEntity2DetailDTO(transporteLogic.getTransportes());
    }

    /**
     * Looks up a single transporte by its numeric id.
     *
     * @param id id of the transporte to fetch
     * @return the matching transporte as a detail DTO
     * @throws BusinessLogicException if the logic layer rejects the request
     * @throws WebApplicationException 404 when no transporte has that id
     */
    @GET
    @Path("{id: \\d+}")
    public TransporteDetailDTO getTransporte(@PathParam("id") Long id) throws BusinessLogicException {
        return new TransporteDetailDTO(findEntityOrThrow(id));
    }

    /**
     * Creates a new transporte from the supplied DTO.
     *
     * @param transporte payload describing the transporte to create
     * @return the created transporte, as persisted by the logic layer
     * @throws BusinessLogicException if the logic layer rejects the payload
     */
    @POST
    public TransporteDetailDTO createTransporte(TransporteDetailDTO transporte) throws BusinessLogicException {
        return new TransporteDetailDTO(transporteLogic.createTransporte(transporte.toEntity()));
    }

    /**
     * Replaces the transporte with the given id by the supplied DTO.
     *
     * @param id id of the transporte to update (copied onto the DTO so the
     *           path segment wins over any id inside the payload)
     * @param transporte new state for the transporte
     * @return the updated transporte
     * @throws BusinessLogicException if the logic layer rejects the payload
     * @throws WebApplicationException 404 when no transporte has that id
     */
    @PUT
    @Path("{id: \\d+}")
    public TransporteDetailDTO updateTransporte(@PathParam("id") Long id, TransporteDetailDTO transporte) throws BusinessLogicException {
        transporte.setId(id);
        findEntityOrThrow(id);
        return new TransporteDetailDTO(transporteLogic.updateTransporte(id, transporte.toEntity()));
    }

    /**
     * Deletes the transporte with the given id.
     *
     * @param id id of the transporte to delete
     * @throws BusinessLogicException if the logic layer rejects the request
     * @throws WebApplicationException 404 when no transporte has that id
     */
    @DELETE
    @Path("{id: \\d+}")
    public void deleteTransporte(@PathParam("id") Long id) throws BusinessLogicException {
        findEntityOrThrow(id);
        transporteLogic.deleteTransporte(id);
    }

    /**
     * Fetches the entity with the given id, or throws the 404 the three
     * id-based handlers previously duplicated inline.
     */
    private TransporteEntity findEntityOrThrow(Long id) throws BusinessLogicException {
        TransporteEntity entity = transporteLogic.getTransporte(id);
        if (entity == null) {
            throw new WebApplicationException("El recurso /transportes/" + id + " no existe.", 404);
        }
        return entity;
    }

    /**
     * Maps a list of entities to a list of detail DTOs, preserving order.
     */
    private List<TransporteDetailDTO> listTransporteEntity2DetailDTO(List<TransporteEntity> entityList) {
        // Pre-size: the output has exactly one DTO per entity.
        List<TransporteDetailDTO> list = new ArrayList<>(entityList.size());
        for (TransporteEntity entity : entityList) {
            list.add(new TransporteDetailDTO(entity));
        }
        return list;
    }
}
bjorndm/prebake
code/third_party/junit/org/junit/tests/experimental/theories/ParameterSignatureTest.java
package org.junit.tests.experimental.theories;

import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assume.assumeTrue;
import static org.junit.matchers.JUnitMatchers.hasItem;

import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;

import org.junit.Test;
import org.junit.experimental.theories.DataPoint;
import org.junit.experimental.theories.ParameterSignature;
import org.junit.experimental.theories.Theories;
import org.junit.experimental.theories.Theory;
import org.junit.experimental.theories.suppliers.TestedOn;
import org.junit.runner.RunWith;

/**
 * Tests for {@link ParameterSignature}: that it reports the same parameter
 * types as plain reflection, and that it surfaces parameter annotations.
 */
@RunWith(Theories.class)
public class ParameterSignatureTest {
	// Data point: the getType theory method itself, fed back in as input.
	@DataPoint
	public static Method getType() throws SecurityException,
			NoSuchMethodException {
		return ParameterSignatureTest.class.getMethod("getType", Method.class,
				int.class);
	}

	@DataPoint
	public static int ZERO= 0;

	@DataPoint
	public static int ONE= 1;

	// Theory: for every in-range index, signatures() reports the same
	// parameter type as Method.getParameterTypes().
	@Theory
	public void getType(Method method, int index) {
		assumeTrue(index < method.getParameterTypes().length);
		assertEquals(method.getParameterTypes()[index], ParameterSignature
				.signatures(method).get(index).getType());
	}

	// Fixture method whose sole parameter carries a @TestedOn annotation.
	public void foo(@TestedOn(ints= { 1, 2, 3 }) int x) {
	}

	// The @TestedOn annotation on foo's parameter must appear in the
	// signature's annotation list.
	@Test
	public void getAnnotations() throws SecurityException,
			NoSuchMethodException {
		Method method= ParameterSignatureTest.class.getMethod("foo", int.class);
		List<Annotation> annotations= ParameterSignature.signatures(method)
				.get(0).getAnnotations();
		assertThat(new ArrayList<Object>(annotations),
				hasItem(is(TestedOn.class)));
	}
}
pablormier/jflap-lib
jflaplib-core/src/main/java/edu/duke/cs/jflap/gui/editor/DefaultToolBox.java
/*
 * JFLAP - Formal Languages and Automata Package
 *
 *
 * <NAME>
 * Computer Science Department
 * Duke University
 * August 27, 2009
 * Copyright (c) 2002-2009
 * All rights reserved. JFLAP is open source software. Please see the LICENSE for terms.
 *
 */

package edu.duke.cs.jflap.gui.editor;

import edu.duke.cs.jflap.gui.viewer.AutomatonDrawer;
import edu.duke.cs.jflap.gui.viewer.AutomatonPane;

import java.util.List;

import edu.duke.cs.jflap.automata.turing.TuringMachine;

/**
 * The <CODE>DefaultToolBox</CODE> has all the tools for general editing of an
 * automaton.
 */
public class DefaultToolBox implements ToolBox {
	/**
	 * Returns a list of tools including a <CODE>ArrowTool</CODE>,
	 * <CODE>StateTool</CODE>, <CODE>TransitionTool</CODE>,
	 * <CODE>DeleteTool</CODE>, <CODE>UndoTool</CODE> and
	 * <CODE>RedoTool</CODE>, in that order. For single-tape Turing
	 * machines, a <CODE>BuildingBlockTool</CODE> and
	 * <CODE>BlockTransitionTool</CODE> are appended as well.
	 *
	 * @param view
	 *            the component that the automaton will be drawn in
	 * @param drawer
	 *            the drawer that will draw the automaton in the view
	 * @return a list of <CODE>Tool</CODE> objects.
	 */
	public List tools(AutomatonPane view, AutomatonDrawer drawer) {
		List list = new java.util.ArrayList();
		// Core editing tools, in the order they appear in the toolbar.
		list.add(new ArrowTool(view, drawer));
		list.add(new StateTool(view, drawer));
		list.add(new TransitionTool(view, drawer));
		list.add(new DeleteTool(view, drawer));
		list.add(new UndoTool(view, drawer));
		list.add(new RedoTool(view, drawer));
		// Building-block tools only make sense for single-tape Turing machines.
		if (drawer.getAutomaton() instanceof TuringMachine) {
			TuringMachine turingMachine = (TuringMachine) drawer.getAutomaton();
			if (turingMachine.tapes() == 1) {
				list.add(new BuildingBlockTool(view, drawer));
				list.add(new BlockTransitionTool(view, drawer));
			}
		}
		return list;
	}
}
Papabyte/odex-frontend
src/components/OrderBook/orderBook.test.js
import React from 'react';
import { mount } from 'enzyme';
import OrderBook from './OrderBook';

// Unit tests for OrderBook's instance methods (tab switching and collapse
// toggling), driven through a mounted enzyme wrapper.
describe('Component methods', () => {
  let wrapper, instance;

  // Minimal trading-pair fixture required by the component's props.
  let currentPair = {
    pair: 'DAI_WETH',
    baseTokenSymbol: 'DAI',
    quoteTokenSymbol: 'WETH',
    baseAsset: '0xc838efcb6512a2ca12027ebcdf9e1fc5e4ff7ee3',
    quoteAsset: '0xe8e84ee367bc63ddb38d3d01bccef106c194dc47',
  };

  beforeEach(() => {
    // Fresh mount per test so state mutations don't leak between cases.
    wrapper = mount(<OrderBook asks={[]} bids={[]} currentPair={currentPair} onCollapse={jest.fn()} />);
    instance = wrapper.instance();
  });

  it('changeTab modifies selectedTabId state', () => {
    instance.changeTab('depth-chart');
    expect(wrapper.state('selectedTabId')).toBe('depth-chart');
  });

  it('toggleCollapse modifies isOpen state', () => {
    // isOpen presumably starts true; toggling once should flip it to false.
    instance.toggleCollapse();
    expect(wrapper.state('isOpen')).toBe(false);
  });
});
hugorebelo/gitlabhq
spec/frontend/contributors/utils_spec.js
import * as utils from '~/contributors/utils';

describe('Contributors Util Functions', () => {
  describe('xAxisLabelFormatter', () => {
    // NOTE(review): these date strings are in 'MM-DD-YYYY' form, so
    // '01-12-2019' is presumably January 12, 2019. Parsing of non-ISO
    // strings is implementation-dependent — confirm on target runtimes.
    it('should return year if the date is in January', () => {
      expect(utils.xAxisLabelFormatter(new Date('01-12-2019'))).toEqual('2019');
    });

    it('should return month name otherwise', () => {
      expect(utils.xAxisLabelFormatter(new Date('12-02-2019'))).toEqual('Dec');
      expect(utils.xAxisLabelFormatter(new Date('07-12-2019'))).toEqual('Jul');
    });
  });

  describe('dateFormatter', () => {
    it('should format provided date to YYYY-MM-DD format', () => {
      expect(utils.dateFormatter(new Date('December 17, 1995 03:24:00'))).toEqual('1995-12-17');
      // Epoch-milliseconds input must be handled too.
      expect(utils.dateFormatter(new Date(1565308800000))).toEqual('2019-08-09');
    });
  });
});
RossK1/exchangelib
exchangelib/services/archive_item.py
from .common import EWSAccountService, create_folder_ids_element, create_item_ids_element
from ..util import create_element, MNS
from ..version import EXCHANGE_2013


class ArchiveItem(EWSAccountService):
    """MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/archiveitem-operation"""

    SERVICE_NAME = 'ArchiveItem'
    element_container_name = '{%s}Items' % MNS
    # The ArchiveItem operation only exists on Exchange 2013 and later.
    supported_from = EXCHANGE_2013

    def call(self, items, to_folder):
        """Move a list of items to a specific folder in the archive mailbox.

        :param items: a list of (id, changekey) tuples or Item objects
        :param to_folder: the target folder in the archive mailbox
        :return: a generator yielding one result per item — the item ID on
            success, or an Exception instance for per-item failures
        """
        return self._elems_to_objs(self._chunked_get_elements(self.get_payload, items=items, to_folder=to_folder))

    def _elems_to_objs(self, elems):
        # Convert raw response elements to item IDs, passing per-item
        # Exception instances through unchanged so the caller can handle them.
        from ..items import Item  # local import, presumably to avoid a circular import
        for elem in elems:
            if isinstance(elem, Exception):
                yield elem
                continue
            yield Item.id_from_xml(elem)

    def get_payload(self, items, to_folder):
        # Build the <m:ArchiveItem> request: the archive source folder ID
        # followed by the IDs of the items to move.
        archiveitem = create_element('m:%s' % self.SERVICE_NAME)
        folder_id = create_folder_ids_element(tag='m:ArchiveSourceFolderId', folders=[to_folder],
                                              version=self.account.version)
        item_ids = create_item_ids_element(items=items, version=self.account.version)
        archiveitem.append(folder_id)
        archiveitem.append(item_ids)
        return archiveitem
irico1997/linguee-api
linguee_api/downloaders/httpx_downloader.py
import httpx

from linguee_api.downloaders.interfaces import DownloaderError, IDownloader


class HTTPXDownloader(IDownloader):
    """
    Real downloader.

    Sends request to linguee.com to read the page.
    """

    async def download(self, url: str) -> str:
        """Fetch ``url`` and return the response body as text.

        :param url: absolute URL of the page to download
        :raises DownloaderError: if the request fails at the transport level
            (connection errors, timeouts, protocol errors) or the server
            responds with a non-200 status code
        """
        async with httpx.AsyncClient() as client:
            try:
                response = await client.get(url)
            except httpx.HTTPError as e:
                # httpx.HTTPError is the common base class for all request
                # errors (ConnectError, timeouts, protocol errors). The
                # original code only caught ConnectError, letting timeouts
                # escape as raw httpx exceptions instead of DownloaderError.
                raise DownloaderError(str(e)) from e
            if response.status_code != 200:
                raise DownloaderError(
                    f"The Linguee server returned {response.status_code}"
                )
            return response.text
antoniocarlon/jummyshapefile
src/test/java/test/TestShapefile.java
/*
 * Copyright 2015 <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package test;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.List;

import junit.framework.TestCase;

import com.jummyshapefile.dbf.model.DBFField;
import com.jummyshapefile.dbf.model.DBFFieldDescriptor;
import com.jummyshapefile.shapefile.Shapefile;
import com.jummyshapefile.shapefile.model.Entity;
import com.jummyshapefile.utils.FileUtils;

/**
 * Integration tests for {@link Shapefile} against the bundled Polygon.*
 * classpath fixtures.
 *
 * <p>Every test needs the same four fixture streams (.shp/.shx/.dbf/.prj);
 * the previously copy-pasted open/close boilerplate now lives in
 * {@link #openFixture()} and {@link #closeFixture(Shapefile)}. closeFixture
 * is also null-safe, unlike the original finally blocks which would have
 * thrown a NullPointerException if the Shapefile constructor failed.
 */
public class TestShapefile extends TestCase {

	// Fixture streams, opened by openFixture() and released by closeFixture().
	private FileInputStream shpIS;
	private FileInputStream shxIS;
	private FileInputStream dbfIS;
	private FileInputStream prjIS;

	/** Opens the four Polygon fixture resources and builds a Shapefile. */
	private Shapefile openFixture() throws IOException, URISyntaxException {
		shpIS = openResource("Polygon.shp");
		shxIS = openResource("Polygon.shx");
		dbfIS = openResource("Polygon.dbf");
		prjIS = openResource("Polygon.prj");
		return new Shapefile(shpIS, shxIS, dbfIS, prjIS);
	}

	/** Opens a classpath resource as a FileInputStream. */
	private FileInputStream openResource(final String name) throws IOException,
			URISyntaxException {
		final URL resource = getClass().getClassLoader().getResource(name);
		final File file = new File(resource.toURI());
		return new FileInputStream(file);
	}

	/**
	 * Releases the shapefile (if it was ever constructed) and every fixture
	 * stream, in reverse order of opening. Null-safe on purpose: if
	 * openFixture() threw partway through, the streams opened so far are
	 * still closed and no NPE masks the original failure.
	 */
	private void closeFixture(final Shapefile shapefile) {
		if (shapefile != null) {
			shapefile.close();
		}
		FileUtils.closeInputStream(prjIS);
		FileUtils.closeInputStream(dbfIS);
		FileUtils.closeInputStream(shxIS);
		FileUtils.closeInputStream(shpIS);
	}

	public void testProjection() throws IOException, URISyntaxException {
		Shapefile shapefile = null;
		try {
			shapefile = openFixture();
			final String projection = shapefile.getProjection();
			assertNotNull(projection);
		} finally {
			closeFixture(shapefile);
		}
	}

	public void testNumEntities() throws IOException, URISyntaxException {
		Shapefile shapefile = null;
		try {
			shapefile = openFixture();
			// The Polygon fixture contains exactly 5 records.
			assertEquals(5, shapefile.getNumEntities());
		} finally {
			closeFixture(shapefile);
		}
	}

	public void testFields() throws IOException, URISyntaxException {
		Shapefile shapefile = null;
		try {
			shapefile = openFixture();
			final List<DBFFieldDescriptor> fieldDescriptors = shapefile
					.getDataFieldDescriptors();
			// Third DBF column of the fixture: numeric "Field1", width 4.
			final DBFFieldDescriptor descriptor = fieldDescriptors.get(2);
			assertEquals("Field1", descriptor.getName());
			assertEquals("N", descriptor.getType());
			assertEquals(4, descriptor.getLength());
		} finally {
			closeFixture(shapefile);
		}
	}

	public void testEntityByRecordNumber() throws IOException,
			URISyntaxException {
		Shapefile shapefile = null;
		try {
			shapefile = openFixture();
			// With data loading: geometry and attribute data are populated.
			Entity entity = shapefile.getEntityByRecordNumber(3, true);
			assertEquals(5, entity.getShapeType());
			assertEquals(3, entity.getRecordNum());
			assertNotNull(entity.getGeometry());
			assertTrue(entity.getData().size() > 0);
			// Without data loading: geometry only, no attribute data.
			entity = shapefile.getEntityByRecordNumber(3, false);
			assertEquals(5, entity.getShapeType());
			assertEquals(3, entity.getRecordNum());
			assertNotNull(entity.getGeometry());
			assertTrue(entity.getData().size() == 0);
		} finally {
			closeFixture(shapefile);
		}
	}

	public void testEntityDataByRecordNumber() throws IOException,
			URISyntaxException {
		Shapefile shapefile = null;
		try {
			shapefile = openFixture();
			final List<DBFField> entityData = shapefile
					.getEntityDataByRecordNumber(3);
			assertTrue(entityData.size() > 0);
		} finally {
			closeFixture(shapefile);
		}
	}

	public void testEntitiesInRectangle() throws IOException,
			URISyntaxException {
		Shapefile shapefile = null;
		try {
			shapefile = openFixture();
			// A rectangle known to intersect exactly two fixture polygons.
			List<Entity> entities = shapefile.getEntitiesInRectangle(440539,
					4471192, 448629, 4476120, true);
			assertTrue(entities.size() == 2);
			Entity entity = entities.get(0);
			assertEquals(5, entity.getShapeType());
			assertEquals(1, entity.getRecordNum());
			assertNotNull(entity.getGeometry());
			assertTrue(entity.getData().size() > 0);
			// Same query without attribute data.
			entities = shapefile.getEntitiesInRectangle(440539, 4471192,
					448629, 4476120, false);
			assertTrue(entities.size() == 2);
			entity = entities.get(0);
			assertEquals(5, entity.getShapeType());
			assertEquals(1, entity.getRecordNum());
			assertNotNull(entity.getGeometry());
			assertTrue(entity.getData().size() == 0);
			// A rectangle far outside the data must match nothing.
			entities = shapefile.getEntitiesInRectangle(10, 10, 200, 200, true);
			assertTrue(entities.size() == 0);
		} finally {
			closeFixture(shapefile);
		}
	}
}
danielbui78/reality
src/gui/RealityUI/qtDesignerPlugins/ReModifiersEditorPlugin.cpp
/**
 * Qt Designer plugin.
 *
 * Registers the ReModifiers material-editor widget with Qt Designer so it
 * can be placed from the widget palette. All methods below implement the
 * QDesignerCustomWidgetInterface contract.
 */
#include <QtPlugin>
#include "ReModifiersEditorPlugin.h"
#include "../MaterialEditors/ReModifiers.h"

ReModifiersEditorPlugin::ReModifiersEditorPlugin(QObject* parent) : QObject(parent) {
}

// Class name of the widget this plugin provides.
QString ReModifiersEditorPlugin::name() const {
  return "ReModifiers";
}

// Header Designer should emit in generated code for this widget.
QString ReModifiersEditorPlugin::includeFile() const {
  return "ReModifiers.h";
}

// Palette group the widget appears under in Designer.
QString ReModifiersEditorPlugin::group() const {
  return "Reality";
}

// No custom icon; Designer falls back to its default.
QIcon ReModifiersEditorPlugin::icon() const {
  return QIcon();
}

QString ReModifiersEditorPlugin::toolTip() const {
  return "The Reality Modifiers Editor";
}

QString ReModifiersEditorPlugin::whatsThis() const {
  return toolTip();
}

// The widget does not host child widgets in Designer.
bool ReModifiersEditorPlugin::isContainer() const {
  return false;
}

// Factory method: Designer calls this to instantiate the widget.
QWidget* ReModifiersEditorPlugin::createWidget(QWidget* parent) {
  return new ReModifiers(parent);
}
ThomasAdam/got-portable
include/got_object.h
/* * Copyright (c) 2018 <NAME> <<EMAIL>> * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ struct got_object_id { u_int8_t sha1[SHA1_DIGEST_LENGTH]; }; struct got_blob_object; struct got_tree_object; struct got_tree_entry; struct got_tag_object; struct got_commit_object; struct got_object_qid { STAILQ_ENTRY(got_object_qid) entry; struct got_object_id id; void *data; /* managed by API user */ }; STAILQ_HEAD(got_object_id_queue, got_object_qid); const struct got_error *got_object_qid_alloc(struct got_object_qid **, struct got_object_id *); void got_object_qid_free(struct got_object_qid *); void got_object_id_queue_free(struct got_object_id_queue *); /* * Deep-copy elements from ID queue src to ID queue dest. Do not copy any * qid->data pointers! This is the caller's responsibility if needed. */ const struct got_error *got_object_id_queue_copy( const struct got_object_id_queue *src, struct got_object_id_queue *dest); /* Object types. */ #define GOT_OBJ_TYPE_ANY 0 /* wildcard value at run-time */ #define GOT_OBJ_TYPE_COMMIT 1 #define GOT_OBJ_TYPE_TREE 2 #define GOT_OBJ_TYPE_BLOB 3 #define GOT_OBJ_TYPE_TAG 4 /* 5 is reserved */ #define GOT_OBJ_TYPE_OFFSET_DELTA 6 #define GOT_OBJ_TYPE_REF_DELTA 7 /* * Labels used in object data. 
*/ #define GOT_OBJ_LABEL_COMMIT "commit" #define GOT_OBJ_LABEL_TREE "tree" #define GOT_OBJ_LABEL_BLOB "blob" #define GOT_OBJ_LABEL_TAG "tag" #define GOT_COMMIT_LABEL_TREE "tree " #define GOT_COMMIT_LABEL_PARENT "parent " #define GOT_COMMIT_LABEL_AUTHOR "author " #define GOT_COMMIT_LABEL_COMMITTER "committer " #define GOT_TAG_LABEL_OBJECT "object " #define GOT_TAG_LABEL_TYPE "type " #define GOT_TAG_LABEL_TAG "tag " #define GOT_TAG_LABEL_TAGGER "tagger " struct got_repository; /* * Obtain a string representation of an object ID. The output depends on * the hash function used by the repository format (currently SHA1). */ const struct got_error *got_object_id_str(char **, struct got_object_id *); /* * Compare two object IDs. Return value behaves like memcmp(3). */ int got_object_id_cmp(const struct got_object_id *, const struct got_object_id *); /* * Created a newly allocated copy of an object ID. * The caller should dispose of it with free(3). */ struct got_object_id *got_object_id_dup(struct got_object_id *); /* * Get a newly allocated ID of the object which resides at the specified * path in the specified tree. * The caller should dispose of it with free(3). */ const struct got_error *got_object_tree_find_path(struct got_object_id **id, mode_t *mode, struct got_repository *repo, struct got_tree_object *tree, const char *path); /* * Get a newly allocated ID of the object which resides at the specified * path in the tree of the specified commit. * The caller should dispose of it with free(3). */ const struct got_error *got_object_id_by_path(struct got_object_id **, struct got_repository *, struct got_commit_object *, const char *); /* * Obtain the type of an object. * Returns one of the GOT_OBJ_TYPE_x values (see above). */ const struct got_error *got_object_get_type(int *, struct got_repository *, struct got_object_id *); /* * Attempt to resolve the textual representation of an object ID * to the ID of an existing object in the repository. 
* The caller should dispose of the ID with free(3). */ const struct got_error *got_object_resolve_id_str(struct got_object_id **, struct got_repository *, const char *); /* * Attempt to open a commit object in a repository. * The caller must dispose of the commit with got_object_commit_close(). */ const struct got_error *got_object_open_as_commit(struct got_commit_object **, struct got_repository *, struct got_object_id *); /* Dispose of a commit object. */ void got_object_commit_close(struct got_commit_object *); /* Obtain the ID of the tree created in a commit. */ struct got_object_id *got_object_commit_get_tree_id(struct got_commit_object *); /* Obtain the number of parent commits of a commit. */ int got_object_commit_get_nparents(struct got_commit_object *); /* Obtain the list of parent commits of a commit. */ const struct got_object_id_queue *got_object_commit_get_parent_ids( struct got_commit_object *); /* Get the author's name and email address. */ const char *got_object_commit_get_author(struct got_commit_object *); /* Get an author's commit timestamp in UTC. */ time_t got_object_commit_get_author_time(struct got_commit_object *); /* Get an author's timezone offset. */ time_t got_object_commit_get_author_gmtoff(struct got_commit_object *); /* Get the committer's name and email address. */ const char *got_object_commit_get_committer(struct got_commit_object *); /* Get a committer's commit timestamp in UTC. */ time_t got_object_commit_get_committer_time(struct got_commit_object *); /* Get a committer's timezone offset. */ time_t got_object_commit_get_committer_gmtoff(struct got_commit_object *); /* * Get the commit log message. * PGP-signatures contained in the log message will be stripped. * The caller must dispose of it with free(3). 
*/ const struct got_error *got_object_commit_get_logmsg(char **, struct got_commit_object *); /* Get the raw commit log message.*/ const char *got_object_commit_get_logmsg_raw(struct got_commit_object *); /* * Attempt to open a tree object in a repository. * The caller must dispose of the tree with got_object_tree_close(). */ const struct got_error *got_object_open_as_tree(struct got_tree_object **, struct got_repository *, struct got_object_id *); /* Dispose of a tree object. */ void got_object_tree_close(struct got_tree_object *); /* Get the number of entries in this tree object. */ int got_object_tree_get_nentries(struct got_tree_object *); /* Get the first tree entry from a tree, or NULL if there is none. */ struct got_tree_entry *got_object_tree_get_first_entry( struct got_tree_object *); /* Get the last tree entry from a tree, or NULL if there is none. */ struct got_tree_entry *got_object_tree_get_last_entry(struct got_tree_object *); /* Get the entry with the specified index from a tree object. */ struct got_tree_entry *got_object_tree_get_entry( struct got_tree_object *, int); /* Find a particular entry in a tree by name. */ struct got_tree_entry *got_object_tree_find_entry( struct got_tree_object *, const char *); /* Get the file permission mode of a tree entry. */ mode_t got_tree_entry_get_mode(struct got_tree_entry *); /* Get the name of a tree entry. */ const char *got_tree_entry_get_name(struct got_tree_entry *); /* Get the object ID of a tree entry. */ struct got_object_id *got_tree_entry_get_id(struct got_tree_entry *); /* * Get a string containing the target path of a given a symlink tree entry. * The caller should dispose of it with free(3). */ const struct got_error *got_tree_entry_get_symlink_target(char **, struct got_tree_entry *, struct got_repository *); /* Get the index of a tree entry. */ int got_tree_entry_get_index(struct got_tree_entry *); /* Get the next tree entry from a tree, or NULL if there is none. 
*/ struct got_tree_entry *got_tree_entry_get_next(struct got_tree_object *, struct got_tree_entry *); /* Get the previous tree entry from a tree, or NULL if there is none. */ struct got_tree_entry *got_tree_entry_get_prev(struct got_tree_object *, struct got_tree_entry *); /* Return non-zero if the specified tree entry is a Git submodule. */ int got_object_tree_entry_is_submodule(struct got_tree_entry *); /* Return non-zero if the specified tree entry is a symbolic link. */ int got_object_tree_entry_is_symlink(struct got_tree_entry *); /* * Resolve an in-repository symlink at the specified path in the tree * corresponding to the specified commit. If the specified path is not * a symlink then set *link_target to NULL. * Otherwise, resolve symlinks recursively and return the final link * target path. The caller must dispose of it with free(3). */ const struct got_error *got_object_resolve_symlinks(char **, const char *, struct got_commit_object *, struct got_repository *); /* * Compare two trees and indicate whether the entry at the specified path * differs between them. The path must not be the root path "/"; the function * got_object_id_cmp() should be used instead to compare the tree roots. */ const struct got_error *got_object_tree_path_changed(int *, struct got_tree_object *, struct got_tree_object *, const char *, struct got_repository *); /* * Attempt to open a blob object in a repository. * The size_t argument specifies the block size of an associated read buffer. * The caller must dispose of the blob with got_object_blob_close(). */ const struct got_error *got_object_open_as_blob(struct got_blob_object **, struct got_repository *, struct got_object_id *, size_t); /* Dispose of a blob object. */ const struct got_error *got_object_blob_close(struct got_blob_object *); /* * Get the length of header data at the beginning of the blob's read buffer. 
* Note that header data is only present upon the first invocation of * got_object_blob_read_block() after the blob is opened. */ size_t got_object_blob_get_hdrlen(struct got_blob_object *); /* * Get a pointer to the blob's read buffer. * The read buffer is filled by got_object_blob_read_block(). */ const uint8_t *got_object_blob_get_read_buf(struct got_blob_object *); /* * Read the next chunk of data from a blob, up to the blob's read buffer * block size. The size_t output argument indicates how many bytes have * been read into the blob's read buffer. Zero bytes will be reported if * all data in the blob has been read. */ const struct got_error *got_object_blob_read_block(size_t *, struct got_blob_object *); /* Rewind an open blob's data stream back to the beginning. */ void got_object_blob_rewind(struct got_blob_object *); /* * Read the entire content of a blob and write it to the specified file. * Flush and rewind the file as well. Indicate the amount of bytes * written in the size_t output argument, and the number of lines in the * file in the int argument, and line offsets in the off_t argument * (NULL can be passed for any output argument). */ const struct got_error *got_object_blob_dump_to_file(off_t *, int *, off_t **, FILE *, struct got_blob_object *); /* * Read the entire content of a blob into a newly allocated string buffer * and terminate it with '\0'. This is intended for blobs which contain a * symlink target path. It should not be used to process arbitrary blobs. * Use got_object_blob_dump_to_file() or got_tree_entry_get_symlink_target() * instead if possible. The caller must dispose of the string with free(3). */ const struct got_error *got_object_blob_read_to_str(char **, struct got_blob_object *); /* * Attempt to open a tag object in a repository. * The caller must dispose of the tree with got_tag_object_close(). 
*/ const struct got_error *got_object_open_as_tag(struct got_tag_object **, struct got_repository *, struct got_object_id *); /* Dispose of a tag object. */ void got_object_tag_close(struct got_tag_object *); /* Get the name of a tag. */ const char *got_object_tag_get_name(struct got_tag_object *); /* Get type of the object a tag points to. */ int got_object_tag_get_object_type(struct got_tag_object *); /* * Get ID of the object a tag points to. * This must not be freed by the caller. Use got_object_id_dup() if needed. */ struct got_object_id *got_object_tag_get_object_id(struct got_tag_object *); /* Get the timestamp of the tag. */ time_t got_object_tag_get_tagger_time(struct got_tag_object *); /* Get the tag's timestamp's GMT offset. */ time_t got_object_tag_get_tagger_gmtoff(struct got_tag_object *); /* Get the author of the tag. */ const char *got_object_tag_get_tagger(struct got_tag_object *); /* Get the tag message associated with the tag. */ const char *got_object_tag_get_message(struct got_tag_object *); const struct got_error *got_object_commit_add_parent(struct got_commit_object *, const char *); /* Create a new tag object in the repository. */ const struct got_error *got_object_tag_create(struct got_object_id **, const char *, struct got_object_id *, const char *, time_t, const char *, struct got_repository *);
smart-cow/scow
cow-webapp/src/main/java/org/wiredwidgets/cow/webapp/client/bpm/Template.java
/**
 * Approved for Public Release: 10-4800. Distribution Unlimited.
 * Copyright 2014 The MITRE Corporation,
 * Licensed under the Apache License,
 * Version 2.0 (the "License");
 *
 * You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied.
 *
 * See the License for the specific language governing permissions and limitations under the License.
 */
package org.wiredwidgets.cow.webapp.client.bpm;

import java.util.ArrayList;
import java.util.Date;

import org.wiredwidgets.cow.webapp.client.BpmServiceMain;

import com.smartgwt.client.util.SC;
import com.smartgwt.client.widgets.tree.Tree;

/**
 * Stores a tree of Activity objects for a saved workflow.
 *
 * Workflows always start with an Activities, but the Template
 * uses a special type of Activities called BaseList. This is
 * not required by the XML schema, but it is enforced in the GUI.
 *
 * @author JSTASIK
 */
public class Template {
    // The root node of the workflow tree.
    protected BaseList base;
    // The display name of the Template.
    protected String name;
    // The unique key of the Template (defaults to creation timestamp).
    protected String key;
    // Highest activity id assigned so far; serialized into the process XML.
    protected Integer maxId;

    /**
     * Default constructor. The key defaults to the current time in millis
     * so that freshly created templates have a unique identifier.
     */
    public Template() {
        base = new BaseList();
        name = "";
        key = String.valueOf((new Date()).getTime());
        maxId = 0;
    }

    public Integer getMaxId() {
        return maxId;
    }

    public void setMaxId(Integer maxId) {
        this.maxId = maxId;
    }

    /**
     * Sets maxId from its string form, falling back to 0 when the input
     * is null, empty, or not a valid integer.
     */
    public void setMaxId(String sMaxID) {
        int parsed = 0;
        if (sMaxID != null && !sMaxID.isEmpty()) {
            try {
                parsed = Integer.parseInt(sMaxID);
            } catch (NumberFormatException e) {
                // Keep the safe default of 0 on malformed input.
                parsed = 0;
            }
        }
        this.maxId = parsed;
    }

    public String getName() {
        return name;
    }

    public String getKey() {
        return key;
    }

    public BaseList getBase() {
        return base;
    }

    public void setName(String n) {
        name = n;
    }

    public void setKey(String s) {
        key = s;
    }

    public void setBase(BaseList base) {
        this.base = base;
    }

    public Tree getTree(boolean edit) {
        return base.getTree(edit);
    }

    public void clearTree() {
        base.clearTree();
    }

    /**
     * Checks a workflow for errors and, when any exist, shows them to the
     * user as an HTML bullet list via SC.say().
     *
     * @return true if it has errors, otherwise false
     */
    public boolean hasErrors() {
        ArrayList<String> errors = new ArrayList<String>();
        boolean hasError = base.hasErrors(errors);
        if (name == null || name.equals("")) {
            hasError = true;
            errors.add("Workflow has no name");
        }
        if (hasError) {
            // StringBuilder instead of repeated String += (quadratic).
            StringBuilder output = new StringBuilder();
            for (String s : errors) {
                output.append("- ").append(s).append("<br />");
            }
            SC.say(output.toString());
        }
        return hasError;
    }

    /**
     * Gets the XML description of this workflow.
     */
    @Override
    public String toString() {
        // The key should be the same as the name for now.
        StringBuilder out = new StringBuilder();
        out.append("<process name=\"").append(name)
           .append("\" key=\"").append(name)
           .append("\" maxId=\"").append(maxId.toString())
           .append("\" xmlns=\"").append(BpmServiceMain.modelNamespace)
           .append("\">");
        out.append("<bypassAssignee>admin</bypassAssignee>");
        out.append(base.toString());
        out.append("</process>");
        return out.toString();
    }
}
mpfullstack/coneecta-front-gatsby
src/features/professionalProfile/professionalProfileSagas.js
import { all, takeLatest, put, call, fork } from 'redux-saga/effects'; import { loadProfessionalProfile, initProfile, loadProfessionalProfileReviews, initProfileReviews, showService } from './professionalProfileSlice'; import { showApiError } from '../global/globalSlice'; import api from '../../api'; import { getSlugFromPath } from '../../helpers/helpers'; import Query from '../../helpers/query'; function* onLoadProfessionalProfile() { // We receive the professional id (id) an optionally service slug (sid) in payload yield takeLatest(loadProfessionalProfile, function* ({ payload }) { const result = yield call(api.getProfessionalProfile, payload.id); if (result.error) { yield put(showApiError(result.error.code)); } else { if (payload.sid) { // sid is the service slug, we should find the corresponding service id for this // service slug in the profile result.services = result.services.filter( service => service.slug === payload.sid ); if (result.services.length) { yield put(showService(result.services[0].id)); } } else if (result.services.length === 1) { yield put(showService(Number(result.services[0].id))); } // Get professional slug from URL let slug = getSlugFromPath(Query.getPath(window.location)); if (!slug) { slug = Query.getParams(window.location).slug; } yield put(initProfile({ ...result, slug })); } }); } function* onLoadProfessionalProfileReviews() { yield takeLatest(loadProfessionalProfileReviews, function* ({ payload }) { const result = yield call(api.getProfessionalProfileReviews, payload); if (result.error) { yield put(showApiError(result.error.code)); } else { yield put(initProfileReviews(result)); } }); } export default function* () { yield all([ fork(onLoadProfessionalProfile), fork(onLoadProfessionalProfileReviews) ]) };
statisticsnorway/data-collector-api
src/main/java/no/ssb/dc/api/node/ResponsePredicate.java
<filename>src/main/java/no/ssb/dc/api/node/ResponsePredicate.java package no.ssb.dc.api.node; public interface ResponsePredicate extends Leaf { String RESPONSE_PREDICATE_RESULT = "RESPONSE_PREDICATE_RESULT"; }
adwaithkj/sortandsearchalgos
tree/bfs.py
<reponame>adwaithkj/sortandsearchalgos<gh_stars>0 class Node: def __init__(self, key): self.key = key self.left = None self.right = None def bfs(self, searchitem): if self.root == searchitem: return path
CJoriginal/cjlumberyard
dev/Gems/Boids/Code/source/BugsFlock.h
/* * All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or * its licensors. * * For complete copyright and license terms please see the LICENSE at the root of this * distribution (the "License"). All use of this software is governed by the License, * or, if provided, by the license below or the license accompanying this file. Do not * remove or modify any license notices. This file is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * */ #ifndef CRYINCLUDE_GAMEDLL_BOIDS_BUGSFLOCK_H #define CRYINCLUDE_GAMEDLL_BOIDS_BUGSFLOCK_H #pragma once #include "Flock.h" /*! Single Bug. */ class CBoidBug : public CBoidObject { public: CBoidBug(SBoidContext& bc); void Update(float dt, SBoidContext& bc); void Render(SRendParams& rp, CCamera& cam, SBoidContext& bc); private: void UpdateBugsBehavior(float dt, SBoidContext& bc); void UpdateDragonflyBehavior(float dt, SBoidContext& bc); void UpdateFrogsBehavior(float dt, SBoidContext& bc); // void CalcRandomTarget( const Vec3 &origin,SBoidContext &bc ); friend class CBugsFlock; int m_objectId; // Vec3 m_targetPos; // Flags. unsigned m_onGround : 1; //! True if landed on ground. // unsigned m_landing : 1; //! True if bird wants to land. // unsigned m_takingoff : 1; //! True if bird is just take-off from land. }; /*! Bugs Flock, is a specialized flock type for all kind of small bugs and flies around player. */ class CBugsFlock : public CFlock { public: CBugsFlock(IEntity* pEntity); ~CBugsFlock(); virtual void CreateBoids(SBoidsCreateContext& ctx); protected: friend class CBoidBug; }; #endif // CRYINCLUDE_GAMEDLL_BOIDS_BUGSFLOCK_H
Viniciusalopes/ads20192
ap/ExerciciosN1/src/bll/Exercicio16.java
<gh_stars>1-10 /* * --------------------------------------------------------------------------------------- * Licença : MIT - Copyright 2019 Viniciusalopes (Vovolinux) <<EMAIL>> * Criado em : 20/09/2019 * Projeto : ExerciciosN1 * Finalidade: N1 * --------------------------------------------------------------------------------------- */ package bll; import java.util.Scanner; /** * 16. Construa um algoritmo que, tendo como dados de entrada dois pontos * quaisquer no plano, P(x1,y1) e P(x2,y2), escreva a distância entre eles. A * fórmula que efetua tal cálculo é: */ // ___________________ // d = √(x2-x1)² + (y2-y1)² // public class Exercicio16 { public static void main(String[] args) { vai(); } public static void vai() { // Variáveis Scanner sc = new Scanner(System.in); double x1, x2, y1, y2, d; // Entrada System.out.println(); System.out.print("Coordenada 'x1': "); x1 = sc.nextDouble(); System.out.print("Coordenada 'x2': "); x2 = sc.nextDouble(); System.out.print("Coordenada 'y1': "); y1 = sc.nextDouble(); System.out.print("Coordenada 'y2': "); y2 = sc.nextDouble(); // Processamento d = Math.sqrt((Math.pow(x2 - x1, 2)) + (Math.pow(y2 - y1, 2))); // Saída System.out.println(); System.out.printf("A distância entre os dois pontos é %.2f\n", d); } }
basverweij/aco2016
day16/input.go
<filename>day16/input.go package main var ( inputValue int64 = 96207 inputLen = 17 )
douzhongqiang/GPUFilterEngine
GPUFilterCore/gpufiltercore.h
#ifndef GPUFILTERCORE_H #define GPUFILTERCORE_H #include "GPUFilterCore_global.h" class GPUFILTERCORE_EXPORT GPUFilterCore { public: GPUFilterCore(); }; #endif // GPUFILTERCORE_H
RobertDamerius/GenericTarget
packages/+GT/GenericTargetCode/source/GenericTarget/PeriodicTimer.hpp
#pragma once class PeriodicTimer { public: /** * @brief Create a periodic timer object. */ PeriodicTimer(); /** * @brief Delete the periodic timer object. */ ~PeriodicTimer(); /** * @brief Create the periodic timer. * @param [in] time The time in seconds. * @return True if success, false otherwise. * @details On Windows, time must not be less than 0.001! */ bool Create(double time); /** * @brief Destroy the periodic timer. */ void Destroy(void); /** * @brief Wait for a signal. * @param [in] resetTimeOfStart True if internal time-of-start value should be reset, false otherwise (default value is: false). * @return True if timer signal was received successfully, false otherwise. * @details If timer is not created or was destroyed, false will be returned immediately. */ bool WaitForSignal(bool resetTimeOfStart = false); /** * @brief Get the elapsed time to the start (@ref Create) of the timer. * @return Elapsed time in seconds. */ double GetTimeToStart(void); private: std::chrono::time_point<std::chrono::steady_clock> timeOfStart; #ifdef _WIN32 HANDLE hTimer; #else int fdTimer; #endif };
j3l4ck0ut/UdemyCpp
6_STLContainerUndIteratoren/ForwardList.cc
#include <array> #include <vector> #include <deque> #include <forward_list> #include <iostream> int main() { std::vector<int> my_vector = { 1, 2 }; for (int i = 0; i < my_vector.size(); i++) { std::cout << &my_vector[i] << std::endl; } std::cout << std::endl; std::forward_list<int> my_flist; my_flist.assign({ 1, 2, 3, 4 }); for (auto it = my_flist.begin(); it != my_flist.end(); it++) { std::cout << *it << " " << &*it << std::endl; } std::cout << std::endl; my_flist.pop_front(); my_flist.remove(3); my_flist.push_front(5); auto it = my_flist.begin(); //it++; my_flist.insert_after(it, 42); for (auto it = my_flist.begin(); it != my_flist.end(); it++) { std::cout << *it << " " << &*it << std::endl; } return 0; }
kinnder/processPlanning
application/src/main/java/application/storage/xml/SystemTransformationsXMLFile.java
package application.storage.xml; import planning.method.SystemTransformations; public class SystemTransformationsXMLFile extends XMLFile<SystemTransformations> { public SystemTransformationsXMLFile() { super(new SystemTransformationsXMLSchema()); } }
active-auth/active-auth-iam-core
src/main/java/cn/glogs/activeauth/iamcore/domain/AuthenticationPrincipalSecretKey.java
package cn.glogs.activeauth.iamcore.domain;

import cn.glogs.activeauth.iamcore.config.properties.LocatorConfiguration;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.apache.commons.lang3.StringUtils;

import javax.persistence.*;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.Date;

/**
 * JPA entity for a principal's secret key. The private half ({@code priKey})
 * is {@code @Transient} — it is never persisted and is only carried in memory
 * (e.g. to be shown once on creation).
 */
@Data
@Entity
public class AuthenticationPrincipalSecretKey implements IamResource {
    private static final Base64.Encoder base64Encoder = Base64.getEncoder();
    // Placeholder substituted for the private key when the view is redacted.
    private static final String INFO_SECURED = "INFORMATION IS SECURED AND HIDDEN!";

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;

    // Owning principal.
    @ManyToOne
    private AuthenticationPrincipal principal;

    @Column(unique = true)
    private String keyCode;

    private String description;

    // Public half of the key; stored as a LOB since PEM text can be large.
    @Lob
    private String pubKey;

    // Private half; never persisted (see class comment).
    @Transient
    private String priKey;

    private boolean enabled;

    private SecretKeyType keyType;

    private Date createTime;

    /**
     * Builds this resource's locator from the owning principal's id and
     * this key's own id.
     */
    @Override
    public String resourceLocator(LocatorConfiguration locatorConfiguration) {
        return locatorConfiguration.fullLocator(String.valueOf(principal.getId()), "secret-key", String.valueOf(id));
    }

    public enum SecretKeyType {
        RSA_2048,
        PRIVATE_KEY
    }

    /**
     * Builds the API view object. When a private key is present it is
     * base64-encoded for transport.
     */
    public Vo vo(LocatorConfiguration locatorConfiguration) {
        Vo vo = new Vo();
        vo.id = id;
        vo.locator = resourceLocator(locatorConfiguration);
        vo.keyCode = keyCode;
        if (StringUtils.isNotBlank(priKey)) {
            // FIX: encode the key bytes using an explicit charset instead of
            // the platform default (String.getBytes() is platform-dependent),
            // and use encodeToString to avoid the intermediate byte[] + copy.
            vo.privateKey = base64Encoder.encodeToString(priKey.getBytes(StandardCharsets.UTF_8));
        }
        vo.description = description;
        vo.enabled = enabled;
        vo.createTime = createTime;
        return vo;
    }

    @Data
    @Schema(name = "AuthenticationPrincipalKeyPair.Vo")
    public static class Vo {
        private Long id;

        @Schema(example = "arn:cloudapp:iam::72:key-pair/45")
        private String locator;

        @Schema(defaultValue = "39125471-2164-4ae6-b41c-7a0f2f28f1ae")
        private String keyCode;

        @Schema(defaultValue = "base64Encode('-----BEGIN PRIVATE KEY----- \n ****** \n -----END PRIVATE KEY-----')")
        private String privateKey;

        @Schema(defaultValue = "My Private Key.")
        private String description;

        private boolean enabled;

        private Date createTime;

        /** Redacts the private key from this view and returns it for chaining. */
        public Vo securePrivateKey() {
            this.privateKey = INFO_SECURED;
            return this;
        }
    }

    @Data
    @Schema(name = "AuthenticationPrincipalKeyPair.GenKeyPairForm")
    @AllArgsConstructor
    @NoArgsConstructor
    public static class GenKeyPairForm {
        @Schema(defaultValue = "My Private Key.")
        private String description;
    }
}
vttranlina/james-project
server/data/data-jmap/src/main/java/org/apache/james/jmap/api/change/State.java
/****************************************************************
 * Licensed to the Apache Software Foundation (ASF) under one   *
 * or more contributor license agreements.  See the NOTICE file *
 * distributed with this work for additional information        *
 * regarding copyright ownership.  The ASF licenses this file   *
 * to you under the Apache License, Version 2.0 (the            *
 * "License"); you may not use this file except in compliance   *
 * with the License.  You may obtain a copy of the License at   *
 *                                                              *
 *   http://www.apache.org/licenses/LICENSE-2.0                 *
 *                                                              *
 * Unless required by applicable law or agreed to in writing,   *
 * software distributed under the License is distributed on an  *
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY       *
 * KIND, either express or implied.  See the License for the    *
 * specific language governing permissions and limitations      *
 * under the License.                                           *
 ****************************************************************/

package org.apache.james.jmap.api.change;

import java.util.Objects;
import java.util.UUID;

import com.google.common.base.MoreObjects;
import com.google.common.base.Preconditions;

/**
 * Immutable change-state token backed by a UUID.
 *
 * States are value objects: equality and hash code are defined purely by the
 * wrapped UUID. {@link #INITIAL} is a fixed, well-known starting state.
 */
public class State {
    /** Strategy for producing fresh states. */
    public interface Factory {
        Factory DEFAULT = new DefaultFactory();

        State generate();
    }

    /** Default factory: every generated state wraps a random UUID. */
    public static class DefaultFactory implements Factory {
        @Override
        public State generate() {
            return of(UUID.randomUUID());
        }
    }

    /** Well-known initial state shared by all consumers. */
    public static final State INITIAL = of(UUID.fromString("2c9f1b12-b35a-43e6-9af2-0106fb53a943"));

    /**
     * Wraps the given UUID in a State.
     *
     * @throws NullPointerException if {@code value} is null
     */
    public static State of(UUID value) {
        Preconditions.checkNotNull(value, "State 'value' should not be null.");
        return new State(value);
    }

    private final UUID value;

    private State(UUID value) {
        this.value = value;
    }

    public UUID getValue() {
        return value;
    }

    @Override
    public final boolean equals(Object o) {
        // Guard-clause style: reject non-State values up front.
        if (!(o instanceof State)) {
            return false;
        }
        return Objects.equals(value, ((State) o).value);
    }

    @Override
    public final int hashCode() {
        return Objects.hash(value);
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(this)
            .add("value", value)
            .toString();
    }
}
raquelhortab/ubiquo
ubiquo_design/install/db/migrate/20090224153407_add_plugin_permissions.rb
<filename>ubiquo_design/install/db/migrate/20090224153407_add_plugin_permissions.rb class AddPluginPermissions < ActiveRecord::Migration def self.up if const_defined?(:Permission) Permission.create :key => "design_management", :name => "Design management" Permission.create :key => "sitemap_management", :name => "Sitemap management" end end def self.down if const_defined?(:Permission) Permission.destroy_all(:key => %w[design_management sitemap_management]) end end end
scgbear/azure-cosmos-graph-bulk-executor
java/src/main/java/com/azure/graph/bulk/sample/Main.java
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.azure.graph.bulk.sample;

import com.azure.graph.bulk.impl.model.GremlinEdge;
import com.azure.graph.bulk.impl.model.GremlinVertex;
import com.azure.graph.bulk.sample.model.PersonVertex;
import com.azure.graph.bulk.sample.model.ProcessingResults;
import com.azure.graph.bulk.sample.model.RelationshipEdge;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;

import java.util.List;
import java.util.stream.Stream;

/**
 * Command-line entry point of the bulk-executor sample: generates sample
 * vertices/edges and uploads them, either via domain classes (PersonVertex /
 * RelationshipEdge) or via the raw GremlinVertex / GremlinEdge POJOs,
 * depending on the flags passed.
 */
public class Main {
    // Collects phase transitions, counts and the final outcome of the run.
    private static final ProcessingResults results = new ProcessingResults();

    public static void main(String[] args) {
        try {
            Options options = getOptions();
            CommandLineParser parser = new DefaultParser();
            CommandLine cmd = parser.parse(options, args);

            // -d selects the domain-class path; otherwise use raw POJOs.
            if (cmd.hasOption(ArgNames.DOMAIN_SAMPLE)) {
                runDomainSample(cmd);
            } else {
                runPOJOSample(cmd);
            }
        } catch (Exception e) {
            results.failure(e);
        } finally {
            // Always record the end of the run, then exit explicitly
            // (presumably to stop any non-daemon SDK threads — confirm).
            results.end();
            System.exit(0);
        }
    }

    /** Generates domain-typed sample data and uploads it. */
    private static void runDomainSample(CommandLine cmd) {
        results.transitionState("Build sample vertices");
        List<PersonVertex> vertices = GenerateDomainSamples.getVertices(
                Integer.parseInt(cmd.getOptionValue(ArgNames.VERTEX_COUNT)));

        results.transitionState("Build sample edges");
        List<RelationshipEdge> edges = GenerateDomainSamples.getEdges(
                vertices,
                Integer.parseInt(cmd.getOptionValue(ArgNames.EDGE_MAX)));

        results.setCounts(vertices.size(), edges.size());
        executeWithDomain(vertices.stream(), edges.stream(), cmd.hasOption(ArgNames.CREATE_DOCS));
    }

    /** Generates raw GremlinVertex/GremlinEdge sample data and uploads it. */
    private static void runPOJOSample(CommandLine cmd) {
        results.transitionState("Build sample vertices");
        List<GremlinVertex> vertices = GeneratePOJOSamples.getVertices(
                Integer.parseInt(cmd.getOptionValue(ArgNames.VERTEX_COUNT)));

        results.transitionState("Build sample edges");
        List<GremlinEdge> edges = GeneratePOJOSamples.getEdges(
                vertices,
                Integer.parseInt(cmd.getOptionValue(ArgNames.EDGE_MAX)));

        results.setCounts(vertices.size(), edges.size());
        executeWithPOJO(vertices.stream(), edges.stream(), cmd.hasOption(ArgNames.CREATE_DOCS));
    }

    /** Declares the command-line flags recognized by this sample. */
    private static Options getOptions() {
        Options options = new Options();
        options.addOption(
                "v", ArgNames.VERTEX_COUNT, true,
                "How many vertices to generate for the sample");
        options.addOption(
                "e", ArgNames.EDGE_MAX, true,
                "Max of edges to attach to each vertex");
        options.addOption(
                "d", ArgNames.DOMAIN_SAMPLE, false,
                "Indicates if the bulk executor sample should run the sample using Domain structures. If not present, the sample will use the raw GremlinVertex and GremlinEdge POJOs. This will allow you to compare the two different implementation methods.");
        options.addOption(
                "c", ArgNames.CREATE_DOCS, false,
                "Indicates if the bulk executor sample should run the sample using create item operations. If not preset, the sample will use upsert item operations instead.");
        return options;
    }

    // NOTE(review): the two execute methods below are structurally identical
    // and differ only in stream element types; unifying them would require
    // knowing UploadWithBulkLoader.uploadDocuments' signature (not visible).
    private static void executeWithDomain(Stream<PersonVertex> vertices,
                                          Stream<RelationshipEdge> edges, boolean createDocs) {
        results.transitionState("Configure Database");
        UploadWithBulkLoader loader = new UploadWithBulkLoader();
        results.transitionState("Write Documents");
        loader.uploadDocuments(vertices, edges, createDocs);
    }

    private static void executeWithPOJO(Stream<GremlinVertex> vertices,
                                        Stream<GremlinEdge> edges, boolean createDocs) {
        results.transitionState("Configure Database");
        UploadWithBulkLoader loader = new UploadWithBulkLoader();
        results.transitionState("Write Documents");
        loader.uploadDocuments(vertices, edges, createDocs);
    }
}
storjarn/node-toolbox
src/rpg/RPG.Body.js
<reponame>storjarn/node-toolbox<filename>src/rpg/RPG.Body.js /** * */ (function(){ var Class = require('../base/Base.Class'); var RPG = require('./RPG') /* public */ var Body = Class.extend('Body', { init : function(){ this.parent(); this.CurrentDamage = function() { var dmg = 0; for (var loc in this.HitLocations) { for (var i = 0; i < this.HitLocations[loc].Wounds.length; ++i) { dmg += this.HitLocations[loc].Wounds[i].Value; } } return dmg; }; this.Damage = function(wound, location) { var armor = null; var origValue = wound.Value; for (var itemName in this.Inventory[location]) { if (!!this.Inventory[location][itemName] && !!this.Inventory[location][itemName].Armor) { armor = this.Inventory[location][itemName].Armor; Wound.Value -= (armor[wound.DamageType] || 0) * (armor.Value || 0); } } if (wound.Value < 1) { wound = null; } else if (origValue / 2 > wound.Value) { wound.DamageType = "Blunt"; wound.Shard = null; } if (!!wound) { this.HitLocations[location].Wounds.push(wound); } }; }, LoadBodyDefinition : function(configuration){ var self = this; this.HitLocations = {}; this.Inventory = {}; var lookupIndex = 0; var lastIndex = 0; for(var key in configuration.Locations) { var location = configuration.Locations[key]; this.HitLocations[key] = { Health : (function(location){ return function() { return location.Health.apply(self); } })(location), Wounds : [] }; this.Inventory[key] = location.InventorySlots; for(; lookupIndex < (lastIndex + location.Percentage); ++lookupIndex) { this.LocationLookup[lookupIndex.toString()] = key; } lastIndex = lookupIndex; } // console.log(self.HitLocations); }, HitLocations : {}, Inventory : {}, LocationLookup : {}, LookupLocation : function(index) { var bodyLocation = this.LocationLookup[index.toString()]; return this.HitLocations[bodyLocation]; }, toData : function() { return { Inventory : this.Inventory } } }) module.exports = Body; })()
sacceus/BabylonCpp
src/BabylonCpp/include/babylon/meshes/_instance_data_storage.h
#ifndef BABYLON_MESHES_INSTANCE_DATA_STORAGE_H #define BABYLON_MESHES_INSTANCE_DATA_STORAGE_H #include <unordered_map> #include <babylon/babylon_api.h> #include <babylon/babylon_common.h> #include <babylon/maths/path3d.h> namespace BABYLON { struct _InstancesBatch; struct _VisibleInstances; class Buffer; using _InstancesBatchPtr = std::shared_ptr<_InstancesBatch>; using _VisibleInstancesPtr = std::shared_ptr<_VisibleInstances>; using BufferPtr = std::shared_ptr<Buffer>; /** * @brief Hidden */ struct BABYLON_SHARED_EXPORT _InstanceDataStorage { _VisibleInstancesPtr visibleInstances = nullptr; Int32Array renderIdForInstances; _InstancesBatchPtr batchCache = nullptr; // let's start with a maximum of 32 instances unsigned int instancesBufferSize = 32 * 16 * 4; BufferPtr instancesBuffer = nullptr; Float32Array instancesData; size_t overridenInstanceCount; bool isFrozen = false; _InstancesBatchPtr previousBatch = nullptr; bool hardwareInstancedRendering = false; std::optional<unsigned int> sideOrientation = std::nullopt; bool manualUpdate = false; }; // end of struct _InstanceDataStorage } // end of namespace BABYLON #endif // end of BABYLON_MESHES_INSTANCE_DATA_STORAGE_H
chdiazguerra/final-reality-chdiazguerra
src/main/java/com/github/chdiazguerra/finalreality/model/character/player/classes/WhiteMage.java
package com.github.chdiazguerra.finalreality.model.character.player.classes; import com.github.chdiazguerra.finalreality.model.character.ICharacter; import com.github.chdiazguerra.finalreality.model.character.player.AbstractPlayerCharacter; import com.github.chdiazguerra.finalreality.model.weapon.IWeapon; import org.jetbrains.annotations.NotNull; import java.util.Objects; import java.util.concurrent.BlockingQueue; /** * A class that holds all the information of a White Mage class character. * * @author <NAME> */ public class WhiteMage extends AbstractPlayerCharacter { /** * Creates a new character of White Mage class, with a name and the queue. * * @param name * the character's name * @param turnsQueue * the queue with the characters waiting for their turn */ public WhiteMage(@NotNull String name, @NotNull BlockingQueue<ICharacter> turnsQueue, int life, int defense) { super(turnsQueue, name, life, defense); } @Override public int hashCode() { return Objects.hash(WhiteMage.class, getName()); } @Override public boolean equals(final Object o) { if (this == o) { return true; } if (!(o instanceof WhiteMage)) { return false; } final WhiteMage that = (WhiteMage) o; return getName().equals(that.getName()); } @Override public void equip(IWeapon weapon) { if(this.getIsAlive()) { weapon.equippedByWhiteMage(this); } } }
lechuongit/alibaba-cloud-sdk-go
services/edas/get_k8s_storage_info.go
package edas

//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//
// Code generated by Alibaba Cloud SDK Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
//
// NOTE(review): generated file — do not hand-edit logic here; regenerate
// from the API definition instead.

import (
	"github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests"
	"github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses"
)

// GetK8sStorageInfo invokes the edas.GetK8sStorageInfo API synchronously
func (client *Client) GetK8sStorageInfo(request *GetK8sStorageInfoRequest) (response *GetK8sStorageInfoResponse, err error) {
	response = CreateGetK8sStorageInfoResponse()
	err = client.DoAction(request, response)
	return
}

// GetK8sStorageInfoWithChan invokes the edas.GetK8sStorageInfo API asynchronously
func (client *Client) GetK8sStorageInfoWithChan(request *GetK8sStorageInfoRequest) (<-chan *GetK8sStorageInfoResponse, <-chan error) {
	responseChan := make(chan *GetK8sStorageInfoResponse, 1)
	errChan := make(chan error, 1)
	err := client.AddAsyncTask(func() {
		// Exactly one of the two channels receives a value; both are closed
		// when the task finishes.
		defer close(responseChan)
		defer close(errChan)
		response, err := client.GetK8sStorageInfo(request)
		if err != nil {
			errChan <- err
		} else {
			responseChan <- response
		}
	})
	if err != nil {
		// Task could not be scheduled: report the error and close both
		// channels immediately.
		errChan <- err
		close(responseChan)
		close(errChan)
	}
	return responseChan, errChan
}

// GetK8sStorageInfoWithCallback invokes the edas.GetK8sStorageInfo API asynchronously
func (client *Client) GetK8sStorageInfoWithCallback(request *GetK8sStorageInfoRequest, callback func(response *GetK8sStorageInfoResponse, err error)) <-chan int {
	result := make(chan int, 1)
	err := client.AddAsyncTask(func() {
		var response *GetK8sStorageInfoResponse
		var err error
		defer close(result)
		response, err = client.GetK8sStorageInfo(request)
		callback(response, err)
		// 1 signals the task ran (regardless of the API call's outcome).
		result <- 1
	})
	if err != nil {
		// 0 signals the task could not be scheduled at all.
		defer close(result)
		callback(nil, err)
		result <- 0
	}
	return result
}

// GetK8sStorageInfoRequest is the request struct for api GetK8sStorageInfo
type GetK8sStorageInfoRequest struct {
	*requests.RoaRequest
	ClusterId string `position:"Query" name:"ClusterId"`
}

// GetK8sStorageInfoResponse is the response struct for api GetK8sStorageInfo
type GetK8sStorageInfoResponse struct {
	*responses.BaseResponse
	Code        int         `json:"Code" xml:"Code"`
	Message     string      `json:"Message" xml:"Message"`
	RequestId   string      `json:"RequestId" xml:"RequestId"`
	StorageInfo StorageInfo `json:"StorageInfo" xml:"StorageInfo"`
}

// CreateGetK8sStorageInfoRequest creates a request to invoke GetK8sStorageInfo API
func CreateGetK8sStorageInfoRequest() (request *GetK8sStorageInfoRequest) {
	request = &GetK8sStorageInfoRequest{
		RoaRequest: &requests.RoaRequest{},
	}
	request.InitWithApiInfo("Edas", "2017-08-01", "GetK8sStorageInfo", "/pop/v5/k8s/acs/k8s_storage", "edas", "openAPI")
	request.Method = requests.GET
	return
}

// CreateGetK8sStorageInfoResponse creates a response to parse from GetK8sStorageInfo response
func CreateGetK8sStorageInfoResponse() (response *GetK8sStorageInfoResponse) {
	response = &GetK8sStorageInfoResponse{
		BaseResponse: &responses.BaseResponse{},
	}
	return
}
gisproject2015/Bridge3DModeling
node_modules/npm/lib/test.js
<gh_stars>1-10 module.exports = test var testCmd = require("./utils/lifecycle.js").cmd("test") function test (args, cb) { testCmd(args, function (er) { if (!er) return cb() if (er.code === "ELIFECYCLE") { return cb("Test failed. See above for more details.") } return cb(er) }) }
law27/Programming-Examples
Java/Generics/GenericConstructor.java
/**
 * Demonstrates a generic constructor: the class itself is not generic,
 * but its constructor accepts any {@link Number} subtype.
 */
class Cons {
    double val;

    // Generic constructor: widens any Number argument to double.
    <T extends Number> Cons(T arg) {
        val = arg.doubleValue();
    }

    public void print() {
        System.out.println(val);
    }
}

public class GenericConstructor {
    public static void main(String[] args) {
        // Integer argument — autoboxed, stored as 4.0.
        Cons fromInt = new Cons(4);
        fromInt.print();

        // Float argument — stored as 22.0.
        Cons fromFloat = new Cons(22.0F);
        fromFloat.print();
    }
}
jimfoltz/jf-sketchup-ffi
lib/sketchup_ffi/loop_input.rb
module SketchupFFI
  # FFI bindings for the SketchUp C SDK's SULoopInput* functions (face loop
  # construction input). SULoopInputRef, SUMaterialRef, SULayerRef, SUResult
  # and :bool_ptr are presumably declared elsewhere in this gem before this
  # file is loaded — TODO confirm load order.
  attach_function(:SULoopInputCreate, [SULoopInputRef], SUResult)
  attach_function(:SULoopInputRelease, [SULoopInputRef], SUResult)
  attach_function(:SULoopInputAddVertexIndex, [SULoopInputRef, :size_t], SUResult)
  attach_function(:SULoopInputEdgeSetHidden, [SULoopInputRef, :size_t, :bool], SUResult)
  attach_function(:SULoopInputEdgeSetSoft, [SULoopInputRef, :size_t, :bool], SUResult)
  attach_function(:SULoopInputEdgeSetSmooth, [SULoopInputRef, :size_t, :bool], SUResult)
  attach_function(:SULoopInputEdgeSetMaterial, [SULoopInputRef, :size_t, SUMaterialRef], SUResult)
  attach_function(:SULoopInputEdgeSetLayer, [SULoopInputRef, :size_t, SULayerRef], SUResult)
  attach_function(:SULoopInputAddCurve, [SULoopInputRef, :size_t, :size_t], SUResult)
  attach_function(:SULoopInputIsClosed, [SULoopInputRef, :bool_ptr], SUResult)
end
cobaltblueocean/Fudge-Java
src/org/fudgemsg/ImmutableFudgeMsg.java
<filename>src/org/fudgemsg/ImmutableFudgeMsg.java /** * Copyright (C) 2009 - present by OpenGamma Inc. and other contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.fudgemsg; /** * An immutable message in the Fudge system. * <p> * The message consists of a list of {@link FudgeMsgField Fudge fields}. * This class holds the entire message in memory. * <p> * Applications are recommended to store and manipulate a {@link ImmutableFudgeFieldContainer} * instance rather than this class for future flexibility. * <p> * This class can be created as a copy of an existing {@link FudgeFieldContainer}. * For efficiency, the reference to a {@link FudgeContext} is kept and the context is not copied. * In that scenario, changes made to the context will be made visible through this class, for * example the behavior of {@link #getFieldValue}. If this is not desired, create a * {@link ImmutableFudgeContext} from your underlying {@code FudgeContext} for use in cloning messages. * Message fields are copied at one level deep only. * Any sub-messages, or referenced objects may be still be mutable. * <p> * This class is intended to be immutable but not all contents will necessarily be immutable. */ public class ImmutableFudgeMsg extends FudgeMsgBase implements ImmutableFudgeFieldContainer { /** * Creates a new instance by copying another message. * <p> * The new instance will share the same Fudge context which may be undesirable as * that context may be mutable. 
* * @param fudgeMsg the message to copy, not null */ public ImmutableFudgeMsg(final FudgeMsgBase fudgeMsg) { this(fudgeMsg, fudgeMsg.getFudgeContext()); } /** * Creates a new {@link ImmutableFudgeMsg} by copying fields from another {@link FudgeFieldContainer} using * the specified {@link FudgeContext} for type resolution. * * @param fields the message to copy, not null * @param fudgeContext the context to use for the new message, not null */ public ImmutableFudgeMsg(final FudgeFieldContainer fields, final FudgeContext fudgeContext) { super(fields, fudgeContext); } /** * Creates an immutable empty message. * * @param fudgeContext the context, not null */ protected ImmutableFudgeMsg(final FudgeContext fudgeContext) { super(fudgeContext); } //------------------------------------------------------------------------- /** * {@inheritDoc} */ @Override public boolean equals(final Object obj) { if (obj == this) { return true; } return obj instanceof ImmutableFudgeMsg && super.equals(obj); } }
mbiang/cloudformstest
Automate/CloudFormsPOC/Integration/RedHat/OpenStack/LoadBalancer.class/__methods__/reclaimloadbalancer.rb
# reclaimLoadBalancer: tears down an OpenStack LBaaS stack (members, monitor,
# VIP, pool, floating IP) that was provisioned for a CloudForms service, then
# removes the service from the VMDB.
begin
  def log(level, msg)
    @method = 'reclaimLoadBalancer'
    $evm.log(level, "#{@method}: #{msg}")
  end

  def dump_root()
    log(:info, "Root:<$evm.root> Begin $evm.root.attributes")
    $evm.root.attributes.sort.each { |k, v| log(:info, "Root:<$evm.root> Attribute - #{k}: #{v}")}
    log(:info, "Root:<$evm.root> End $evm.root.attributes")
    log(:info, "")
  end

  # Delete every member of the given LB pool.
  def remove_members(pool_id, netconn)
    log(:info, "Removing members from #{pool_id}")
    members = netconn.list_lb_members(:pool_id => pool_id)[:body]["members"]
    log(:info, "Members #{members.inspect}")
    for member in members
      log(:info, "Deleteing Member: #{member["id"]}")
      netconn.delete_lb_member(member["id"])
      log(:info, "Deleted member #{member["id"]}")
    end
    log(:info, "Done Removing members from #{pool_id}")
  end

  # Detach the health monitor from the pool, then delete it.
  # Monitor deletion is best-effort: a failure is logged and teardown continues.
  def remove_monitor(monitor_id, pool_id, netconn)
    log(:info, "Disassociating Monitor #{monitor_id} from #{pool_id}")
    netconn.disassociate_lb_health_monitor(pool_id, monitor_id)
    log(:info, "Disassociated #{monitor_id}")
    log(:info, "Deleting Monitor #{monitor_id}")
    begin
      netconn.delete_lb_health_monitor(monitor_id)
      log(:info, "Successfully deleted monitor #{monitor_id}")
    rescue => lberr
      # BUG FIX: was `rescue lberr`, which treats the undefined local `lberr`
      # as an exception-class expression and raises NameError instead of
      # capturing the error. `rescue => lberr` captures StandardError.
      log(:error, "Error delete monitor #{monitor_id} #{lberr.class} [#{lberr}]")
      log(:error, "#{lberr.backtrace.join("\n")}")
      log(:error, "Continuing anyway")
    end
  end

  def remove_vip(vip_id, pool_id, netconn)
    log(:info, "Reclaiming VIP #{vip_id} from pool #{pool_id}")
    netconn.delete_lb_vip(vip_id)
    log(:info, "Deleted VIP #{vip_id}")
  end

  def remove_pool(pool_id, netconn)
    log(:info, "Cleaning up pool #{pool_id}")
    netconn.delete_lb_pool(pool_id)
    log(:info, "Deleted LB Pool #{pool_id}")
  end

  def return_floatingip(floatingip_id, netconn)
    log(:info, "Returning floating ip #{floatingip_id} to the available pool")
    netconn.delete_floating_ip(floatingip_id)
    log(:info, "Returned floating ip #{floatingip_id}")
  end

  log(:info, "Begin Automate Method")

  gem 'fog', '>=1.22.0'
  require 'fog'

  dump_root

  log(:info, "Service: #{$evm.root['service'].inspect}")
  service = $evm.root['service']
  raise "Unable to find service in $evm.root['service']" if service.nil?

  # IDs of the OpenStack resources were stashed on the service as custom
  # attributes when the load balancer was provisioned.
  floatingip_id = service.custom_get("FLOATING_IP")
  pool_id = service.custom_get("POOL_ID")
  monitor_id = service.custom_get("MONITOR_ID")
  vip_id = service.custom_get("VIP_ID")
  tenant_tag = service.tags.select { |tag_element| tag_element.starts_with?("cloud_tenants/") }.first.split("/", 2).last

  log(:info, "floatingip_id: #{floatingip_id rescue nil}")
  log(:info, "pool_id: #{pool_id rescue nil}")
  log(:info, "monitor_id: #{monitor_id rescue nil}")
  log(:info, "vip_id: #{vip_id rescue nil}")
  log(:info, "tenant: #{tenant_tag rescue nil}")

  # For now, let's just choose the first one.
  openstack = $evm.vmdb(:ems_openstack).all.first

  log(:info, "Logging with with tenant: #{tenant_tag}")
  netconn = Fog::Network.new({
    :provider => 'OpenStack',
    :openstack_api_key => openstack.authentication_password,
    :openstack_username => openstack.authentication_userid,
    :openstack_auth_url => "http://#{openstack[:hostname]}:#{openstack[:port]}/v2.0/tokens",
    :openstack_tenant => tenant_tag
  })
  log(:info, "Logged into OpenStack successfully")

  # Teardown order matters: members -> monitor -> VIP -> pool -> floating IP.
  remove_members(pool_id, netconn)
  remove_monitor(monitor_id, pool_id, netconn)
  remove_vip(vip_id, pool_id, netconn)
  remove_pool(pool_id, netconn)
  return_floatingip(floatingip_id, netconn) unless floatingip_id.blank?

  log(:info, "Removing Service from the VMDB")
  service.remove_from_vmdb

  log(:info, "End Automate Method")
rescue => err
  log(:error, "Unexpected Exception: [#{err}]\n#{err.backtrace.join("\n")}")
  exit MIQ_ABORT
end
Shinku-Chen/baidu-marketing
model/const.go
package model

const (
	// BASE_URL_SMS is the base URL for the sms/service API.
	BASE_URL_SMS = "https://api.baidu.com/json/sms/service/" // api base url
	// BASE_URL_FEED is the base URL for the feed API.
	// NOTE(review): the previous comment said "feed/v2", but the URL path is
	// /feed/v1/ — comment corrected to match the actual value; confirm which
	// API version is intended before changing the URL itself.
	BASE_URL_FEED = "https://api.baidu.com/json/feed/v1/"
)
Dletta/grappling-gun
node_modules/gitter-faye/javascript/protocol/publication.js
'use strict';

// NOTE(review): `Faye` is never referenced below; presumably required only
// for its load-order side effects — confirm before removing.
var Faye = require('../faye');
var Faye_Class = require('../util/class');
var Faye_Deferrable = require('../mixins/deferrable');

// A Publication is a bare class carrying the Deferrable mixin: it exposes
// the deferred (callback/errback) interface and nothing else.
var Faye_Publication = Faye_Class(Faye_Deferrable);

module.exports = Faye_Publication;
cksspk/ruoyi-vue-pro
yudao-admin-server/src/main/java/cn/iocoder/yudao/adminserver/modules/tool/controller/test/ToolTestDemoController.java
package cn.iocoder.yudao.adminserver.modules.tool.controller.test;

import cn.hutool.core.thread.ThreadUtil;
import cn.iocoder.yudao.framework.common.pojo.CommonResult;
import cn.iocoder.yudao.framework.common.pojo.PageResult;
import cn.iocoder.yudao.framework.excel.core.util.ExcelUtils;
import cn.iocoder.yudao.framework.operatelog.core.annotations.OperateLog;
import cn.iocoder.yudao.framework.tracer.core.annotation.BizTrace;
import cn.iocoder.yudao.adminserver.modules.tool.controller.test.vo.*;
import cn.iocoder.yudao.adminserver.modules.tool.convert.test.ToolTestDemoConvert;
import cn.iocoder.yudao.adminserver.modules.tool.dal.dataobject.test.ToolTestDemoDO;
import cn.iocoder.yudao.adminserver.modules.tool.service.test.ToolTestDemoService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiOperation;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;

import javax.annotation.Resource;
import javax.servlet.http.HttpServletResponse;
import javax.validation.Valid;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.TimeUnit;

import static cn.iocoder.yudao.framework.common.pojo.CommonResult.success;
import static cn.iocoder.yudao.framework.operatelog.core.enums.OperateTypeEnum.EXPORT;

/**
 * REST controller exposing CRUD, paging and Excel export for the "test demo"
 * sample entity. Every endpoint delegates to {@link ToolTestDemoService} and
 * maps DO objects to response VOs via {@link ToolTestDemoConvert}.
 */
@Api(tags = "测试示例")
@RestController
@RequestMapping("/tool/test-demo")
@Validated
public class ToolTestDemoController {

    @Resource
    private ToolTestDemoService testDemoService;

    /** Creates a demo record and returns its generated id. */
    @PostMapping("/create")
    @ApiOperation("创建测试示例")
    @PreAuthorize("@ss.hasPermission('tool:test-demo:create')")
    public CommonResult<Long> createTestDemo(@Valid @RequestBody ToolTestDemoCreateReqVO createReqVO) {
        return success(testDemoService.createTestDemo(createReqVO));
    }

    /** Updates an existing demo record; always returns true on success. */
    @PutMapping("/update")
    @ApiOperation("更新测试示例")
    @PreAuthorize("@ss.hasPermission('tool:test-demo:update')")
    public CommonResult<Boolean> updateTestDemo(@Valid @RequestBody ToolTestDemoUpdateReqVO updateReqVO) {
        testDemoService.updateTestDemo(updateReqVO);
        return success(true);
    }

    /** Deletes a demo record by id. */
    @DeleteMapping("/delete")
    @ApiOperation("删除测试示例")
    @ApiImplicitParam(name = "id", value = "编号", required = true, dataTypeClass = Long.class)
    @PreAuthorize("@ss.hasPermission('tool:test-demo:delete')")
    public CommonResult<Boolean> deleteTestDemo(@RequestParam("id") Long id) {
        testDemoService.deleteTestDemo(id);
        return success(true);
    }

    /** Fetches a single demo record by id. */
    @GetMapping("/get")
    @ApiOperation("获得测试示例")
    @ApiImplicitParam(name = "id", value = "编号", required = true, dataTypeClass = Long.class)
    @PreAuthorize("@ss.hasPermission('tool:test-demo:query')")
//    @Lock4j // distributed lock
    public CommonResult<ToolTestDemoRespVO> getTestDemo(@RequestParam("id") Long id) {
        // Deliberate 5-second sleep left in as demo code for exercising the
        // distributed lock above — not production behavior.
        if (true) { // testing the distributed lock
            ThreadUtil.sleep(5, TimeUnit.SECONDS);
        }
        ToolTestDemoDO testDemo = testDemoService.getTestDemo(id);
        return success(ToolTestDemoConvert.INSTANCE.convert(testDemo));
    }

    /** Fetches a list of demo records by a collection of ids. */
    @GetMapping("/list")
    @ApiOperation("获得测试示例列表")
    @ApiImplicitParam(name = "ids", value = "编号列表", required = true, dataTypeClass = List.class)
    @PreAuthorize("@ss.hasPermission('tool:test-demo:query')")
//    @RateLimiter(name = "backendA")
    @BizTrace(id = "#ids", type = "'user'")
    public CommonResult<List<ToolTestDemoRespVO>> getTestDemoList(@RequestParam("ids") Collection<Long> ids) {
        List<ToolTestDemoDO> list = testDemoService.getTestDemoList(ids);
        return success(ToolTestDemoConvert.INSTANCE.convertList(list));
    }

    /** Returns one page of demo records matching the paging/filter VO. */
    @GetMapping("/page")
    @ApiOperation("获得测试示例分页")
    @PreAuthorize("@ss.hasPermission('tool:test-demo:query')")
    public CommonResult<PageResult<ToolTestDemoRespVO>> getTestDemoPage(@Valid ToolTestDemoPageReqVO pageVO) {
        PageResult<ToolTestDemoDO> pageResult = testDemoService.getTestDemoPage(pageVO);
        return success(ToolTestDemoConvert.INSTANCE.convertPage(pageResult));
    }

    /** Streams the matching demo records to the client as an .xls workbook. */
    @GetMapping("/export-excel")
    @ApiOperation("导出测试示例 Excel")
    @PreAuthorize("@ss.hasPermission('tool:test-demo:export')")
    @OperateLog(type = EXPORT)
    public void exportTestDemoExcel(@Valid ToolTestDemoExportReqVO exportReqVO,
              HttpServletResponse response) throws IOException {
        List<ToolTestDemoDO> list = testDemoService.getTestDemoList(exportReqVO);
        // Export to Excel
        List<ToolTestDemoExcelVO> datas = ToolTestDemoConvert.INSTANCE.convertList02(list);
        ExcelUtils.write(response, "测试示例.xls", "数据", ToolTestDemoExcelVO.class, datas);
    }

}
function2-llx/MONAI
tests/test_focal_loss.py
# Copyright (c) MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#     http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

import torch
import torch.nn as nn
import torch.nn.functional as F

from monai.losses import FocalLoss
from monai.networks import one_hot
from tests.utils import test_script_save


class TestFocalLoss(unittest.TestCase):
    """Unit tests for FocalLoss.

    The first group of tests relies on the identity that focal loss with
    gamma=0 equals binary cross entropy with logits; the rest exercise
    segmentation shapes, class weighting, error handling and TorchScript.
    """

    def test_consistency_with_cross_entropy_2d(self):
        """For gamma=0 the focal loss reduces to the cross entropy loss"""
        focal_loss = FocalLoss(to_onehot_y=False, gamma=0.0, reduction="mean", weight=1.0)
        ce = nn.BCEWithLogitsLoss(reduction="mean")
        max_error = 0
        class_num = 10
        batch_size = 128
        for _ in range(100):
            # Create a random tensor of shape (batch_size, class_num, 8, 4)
            x = torch.rand(batch_size, class_num, 8, 4, requires_grad=True)
            # Create a random batch of classes
            l = torch.randint(low=0, high=2, size=(batch_size, class_num, 8, 4)).float()
            if torch.cuda.is_available():
                x = x.cuda()
                l = l.cuda()
            output0 = focal_loss(x, l)
            output1 = ce(x, l)
            a = float(output0.cpu().detach())
            b = float(output1.cpu().detach())
            if abs(a - b) > max_error:
                max_error = abs(a - b)
        self.assertAlmostEqual(max_error, 0.0, places=3)

    def test_consistency_with_cross_entropy_2d_no_reduction(self):
        """For gamma=0 the focal loss reduces to the cross entropy loss"""
        import numpy as np

        focal_loss = FocalLoss(to_onehot_y=False, gamma=0.0, reduction="none", weight=1.0)
        ce = nn.BCEWithLogitsLoss(reduction="none")
        max_error = 0
        class_num = 10
        batch_size = 128
        for _ in range(100):
            # Create a random tensor of shape (batch_size, class_num, 8, 4)
            x = torch.rand(batch_size, class_num, 8, 4, requires_grad=True)
            # Create a random batch of classes
            l = torch.randint(low=0, high=2, size=(batch_size, class_num, 8, 4)).float()
            if torch.cuda.is_available():
                x = x.cuda()
                l = l.cuda()
            output0 = focal_loss(x, l)
            output1 = ce(x, l)
            a = output0.cpu().detach().numpy()
            b = output1.cpu().detach().numpy()
            # Element-wise comparison since reduction="none" keeps full shape.
            error = np.abs(a - b)
            max_error = np.maximum(error, max_error)
        assert np.allclose(max_error, 0)

    def test_consistency_with_cross_entropy_2d_onehot_label(self):
        """For gamma=0 the focal loss reduces to the cross entropy loss"""
        focal_loss = FocalLoss(to_onehot_y=True, gamma=0.0, reduction="mean")
        ce = nn.BCEWithLogitsLoss(reduction="mean")
        max_error = 0
        class_num = 10
        batch_size = 128
        for _ in range(100):
            # Create a random tensor of shape (batch_size, class_num, 8, 4)
            x = torch.rand(batch_size, class_num, 8, 4, requires_grad=True)
            # Create a random batch of classes
            l = torch.randint(low=0, high=class_num, size=(batch_size, 1, 8, 4))
            if torch.cuda.is_available():
                x = x.cuda()
                l = l.cuda()
            output0 = focal_loss(x, l)
            output1 = ce(x, one_hot(l, num_classes=class_num))
            a = float(output0.cpu().detach())
            b = float(output1.cpu().detach())
            if abs(a - b) > max_error:
                max_error = abs(a - b)
        self.assertAlmostEqual(max_error, 0.0, places=3)

    def test_consistency_with_cross_entropy_classification(self):
        """for gamma=0 the focal loss reduces to the cross entropy loss"""
        focal_loss = FocalLoss(to_onehot_y=True, gamma=0.0, reduction="mean")
        ce = nn.BCEWithLogitsLoss(reduction="mean")
        max_error = 0
        class_num = 10
        batch_size = 128
        for _ in range(100):
            # Create a random scores tensor of shape (batch_size, class_num)
            x = torch.rand(batch_size, class_num, requires_grad=True)
            # Create a random batch of classes
            l = torch.randint(low=0, high=class_num, size=(batch_size, 1))
            l = l.long()
            if torch.cuda.is_available():
                x = x.cuda()
                l = l.cuda()
            output0 = focal_loss(x, l)
            output1 = ce(x, one_hot(l, num_classes=class_num))
            a = float(output0.cpu().detach())
            b = float(output1.cpu().detach())
            if abs(a - b) > max_error:
                max_error = abs(a - b)
        self.assertAlmostEqual(max_error, 0.0, places=3)

    def test_consistency_with_cross_entropy_classification_01(self):
        # for gamma=0.1 the focal loss differs from the cross entropy loss
        focal_loss = FocalLoss(to_onehot_y=True, gamma=0.1, reduction="mean")
        ce = nn.BCEWithLogitsLoss(reduction="mean")
        max_error = 0
        class_num = 10
        batch_size = 128
        for _ in range(100):
            # Create a random scores tensor of shape (batch_size, class_num)
            x = torch.rand(batch_size, class_num, requires_grad=True)
            # Create a random batch of classes
            l = torch.randint(low=0, high=class_num, size=(batch_size, 1))
            l = l.long()
            if torch.cuda.is_available():
                x = x.cuda()
                l = l.cuda()
            output0 = focal_loss(x, l)
            output1 = ce(x, one_hot(l, num_classes=class_num))
            a = float(output0.cpu().detach())
            b = float(output1.cpu().detach())
            if abs(a - b) > max_error:
                max_error = abs(a - b)
        # Losses must NOT coincide once gamma > 0.
        self.assertNotAlmostEqual(max_error, 0.0, places=3)

    def test_bin_seg_2d(self):
        # define 2d examples
        target = torch.tensor([[0, 0, 0, 0], [0, 1, 1, 0], [0, 1, 1, 0], [0, 0, 0, 0]])
        # add another dimension corresponding to the batch (batch size = 1 here)
        target = target.unsqueeze(0)  # shape (1, H, W)
        # Large-magnitude logits (+/-50) make predictions near-certain.
        pred_very_good = 100 * F.one_hot(target, num_classes=2).permute(0, 3, 1, 2).float() - 50.0

        # initialize the mean dice loss
        loss = FocalLoss(to_onehot_y=True)

        # focal loss for pred_very_good should be close to 0
        target = target.unsqueeze(1)  # shape (1, 1, H, W)
        focal_loss_good = float(loss(pred_very_good, target).cpu())
        self.assertAlmostEqual(focal_loss_good, 0.0, places=3)

    def test_empty_class_2d(self):
        num_classes = 2
        # define 2d examples
        target = torch.tensor([[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]])
        # add another dimension corresponding to the batch (batch size = 1 here)
        target = target.unsqueeze(0)  # shape (1, H, W)
        pred_very_good = 1000 * F.one_hot(target, num_classes=num_classes).permute(0, 3, 1, 2).float() - 500.0

        # initialize the mean dice loss
        loss = FocalLoss(to_onehot_y=True)

        # focal loss for pred_very_good should be close to 0
        target = target.unsqueeze(1)  # shape (1, 1, H, W)
        focal_loss_good = float(loss(pred_very_good, target).cpu())
        self.assertAlmostEqual(focal_loss_good, 0.0, places=3)

    def test_multi_class_seg_2d(self):
        num_classes = 6  # labels 0 to 5
        # define 2d examples
        target = torch.tensor([[0, 0, 0, 0], [0, 1, 2, 0], [0, 3, 4, 0], [0, 0, 0, 0]])
        # add another dimension corresponding to the batch (batch size = 1 here)
        target = target.unsqueeze(0)  # shape (1, H, W)
        pred_very_good = 1000 * F.one_hot(target, num_classes=num_classes).permute(0, 3, 1, 2).float() - 500.0
        # initialize the mean dice loss
        loss = FocalLoss(to_onehot_y=True)
        loss_onehot = FocalLoss(to_onehot_y=False)

        # focal loss for pred_very_good should be close to 0
        target_one_hot = F.one_hot(target, num_classes=num_classes).permute(0, 3, 1, 2)  # test one hot
        target = target.unsqueeze(1)  # shape (1, 1, H, W)
        focal_loss_good = float(loss(pred_very_good, target).cpu())
        self.assertAlmostEqual(focal_loss_good, 0.0, places=3)

        focal_loss_good = float(loss_onehot(pred_very_good, target_one_hot).cpu())
        self.assertAlmostEqual(focal_loss_good, 0.0, places=3)

    def test_bin_seg_3d(self):
        num_classes = 2  # labels 0, 1
        # define 3d examples
        target = torch.tensor(
            [
                # raw 0
                [[0, 0, 0, 0], [0, 1, 1, 0], [0, 1, 1, 0], [0, 0, 0, 0]],
                # raw 1
                [[0, 0, 0, 0], [0, 1, 1, 0], [0, 1, 1, 0], [0, 0, 0, 0]],
                # raw 2
                [[0, 0, 0, 0], [0, 1, 1, 0], [0, 1, 1, 0], [0, 0, 0, 0]],
            ]
        )
        # add another dimension corresponding to the batch (batch size = 1 here)
        target = target.unsqueeze(0)  # shape (1, H, W, D)
        target_one_hot = F.one_hot(target, num_classes=num_classes).permute(0, 4, 1, 2, 3)  # test one hot
        pred_very_good = 1000 * F.one_hot(target, num_classes=num_classes).permute(0, 4, 1, 2, 3).float() - 500.0

        # initialize the mean dice loss
        loss = FocalLoss(to_onehot_y=True)
        loss_onehot = FocalLoss(to_onehot_y=False)

        # focal loss for pred_very_good should be close to 0
        target = target.unsqueeze(1)  # shape (1, 1, H, W)
        focal_loss_good = float(loss(pred_very_good, target).cpu())
        self.assertAlmostEqual(focal_loss_good, 0.0, places=3)

        focal_loss_good = float(loss_onehot(pred_very_good, target_one_hot).cpu())
        self.assertAlmostEqual(focal_loss_good, 0.0, places=3)

    def test_foreground(self):
        # Two-channel input where only one pixel differs between channels;
        # compares the loss with and without the background channel included.
        background = torch.ones(1, 1, 5, 5)
        foreground = torch.zeros(1, 1, 5, 5)
        target = torch.cat((background, foreground), dim=1)
        input = torch.cat((background, foreground), dim=1)
        target[:, 0, 2, 2] = 0
        target[:, 1, 2, 2] = 1

        fgbg = FocalLoss(to_onehot_y=False, include_background=True)(input, target)
        fg = FocalLoss(to_onehot_y=False, include_background=False)(input, target)
        # Expected values are regression constants for this fixed input.
        self.assertAlmostEqual(float(fgbg.cpu()), 0.1116, places=3)
        self.assertAlmostEqual(float(fg.cpu()), 0.1733, places=3)

    def test_ill_opts(self):
        # An unknown reduction mode must raise ValueError.
        chn_input = torch.ones((1, 2, 3))
        chn_target = torch.ones((1, 2, 3))
        with self.assertRaisesRegex(ValueError, ""):
            FocalLoss(reduction="unknown")(chn_input, chn_target)

    def test_ill_shape(self):
        # Mismatched input/target ranks must raise ValueError.
        chn_input = torch.ones((1, 2, 3))
        chn_target = torch.ones((1, 3))
        with self.assertRaisesRegex(ValueError, ""):
            FocalLoss(reduction="mean")(chn_input, chn_target)

    def test_ill_class_weight(self):
        # Weight vectors with the wrong length or negative entries must raise.
        chn_input = torch.ones((1, 4, 3, 3))
        chn_target = torch.ones((1, 4, 3, 3))
        with self.assertRaisesRegex(ValueError, ""):
            FocalLoss(include_background=True, weight=(1.0, 1.0, 2.0))(chn_input, chn_target)
        with self.assertRaisesRegex(ValueError, ""):
            FocalLoss(include_background=False, weight=(1.0, 1.0, 1.0, 1.0))(chn_input, chn_target)
        with self.assertRaisesRegex(ValueError, ""):
            FocalLoss(include_background=False, weight=(1.0, 1.0, -1.0))(chn_input, chn_target)

    def test_script(self):
        # The loss must survive a TorchScript save/load round-trip.
        loss = FocalLoss()
        test_input = torch.ones(2, 2, 8, 8)
        test_script_save(loss, test_input, test_input)


if __name__ == "__main__":
    unittest.main()
zhangkn/iOS14Header
System/Library/PrivateFrameworks/AVConference.framework/VCTransportSession.h
/*
* This header is generated by classdump-dyld 1.0
* on Sunday, September 27, 2020 at 11:51:33 AM Mountain Standard Time
* Operating System: Version 14.0 (Build 18A373)
* Image Source: /System/Library/PrivateFrameworks/AVConference.framework/AVConference
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>.
*
* NOTE(review): generated private-API header — declarations reflect the
* shipped binary; do not edit signatures by hand.
*/

@protocol OS_dispatch_queue;

#import <AVConference/AVConference-Structs.h>
@class NSObject, NSString, TimingCollection, VCConnectionManager, NSMutableArray;

@interface VCTransportSession : NSObject {

	BOOL _isCallActive;
	BOOL _requiresWiFi;
	BOOL _useCompressedConnectionData;
	BOOL _didRegisterForBasebandNotifications;
	unsigned _basebandNotificationRegistrationToken;
	NSObject*<OS_dispatch_queue> _stateQueue;
	NSObject*<OS_dispatch_queue> _notificationQueue;
	NSObject* _connectionSetupPiggybackBlob;
	NSString* _localFrameworkVersion;
	NSString* _remoteFrameworkVersion;
	opaqueRTCReportingRef _reportingAgent;
	TimingCollection* _perfTimings;
	VCConnectionManager* _connectionManager;
	/*^block*/id _eventHandler;
	NSMutableArray* _streams;
	opaque_pthread_mutex_t _stateLock;

}

@property (nonatomic,readonly) int detailedErrorCode;
@property (nonatomic,readonly) BOOL isHandoverSupported;
@property (nonatomic,readonly) BOOL isRemoteOSPreLion;
@property (assign,nonatomic) BOOL requiresWiFi;                                              //@synthesize requiresWiFi=_requiresWiFi - In the implementation block
@property (assign,nonatomic) BOOL useCompressedConnectionData;                               //@synthesize useCompressedConnectionData=_useCompressedConnectionData - In the implementation block
@property (nonatomic,readonly) unsigned connectionSetupRTTEstimate;
@property (nonatomic,retain) NSObject * connectionSetupPiggybackBlob;                        //@synthesize connectionSetupPiggybackBlob=_connectionSetupPiggybackBlob - In the implementation block
@property (nonatomic,retain) TimingCollection * perfTimings;                                 //@synthesize perfTimings=_perfTimings - In the implementation block
@property (nonatomic,readonly) VCConnectionManager * connectionManager;                      //@synthesize connectionManager=_connectionManager - In the implementation block
@property (nonatomic,copy) id eventHandler;                                                  //@synthesize eventHandler=_eventHandler - In the implementation block
@property (nonatomic,readonly) unsigned basebandNotificationRegistrationToken;               //@synthesize basebandNotificationRegistrationToken=_basebandNotificationRegistrationToken - In the implementation block
@property (assign,setter=setCallActive:,nonatomic) BOOL isCallActive;
@property (readonly) int networkInterfaceType;
@property (readonly) unsigned networkMTU;
@property (readonly) BOOL isIPv6;
+(int)vtpPacketTypeForStreamType:(unsigned)arg1 ;
+(unsigned)trafficClassForStreamType:(unsigned)arg1 ;
-(id)eventHandler;
-(VCConnectionManager *)connectionManager;
-(void)setRequiresWiFi:(BOOL)arg1 ;
-(void)setEventHandler:(id)arg1 ;
-(void)dealloc;
-(void)start;
-(int)networkInterfaceType;
-(BOOL)isCallActive;
-(BOOL)isIPv6;
-(BOOL)requiresWiFi;
-(void)stop;
-(void)setCallActive:(BOOL)arg1 ;
-(id)initWithNotificationQueue:(id)arg1 reportingAgent:(opaqueRTCReportingRef)arg2 ;
-(void)callEventHandlerWithEvent:(unsigned)arg1 info:(id)arg2 ;
-(int)createVFD:(int*)arg1 forStreamType:(unsigned)arg2 ;
-(unsigned)networkMTU;
-(BOOL)getConnectionSetupData:(id*)arg1 withOptions:(id)arg2 error:(id*)arg3 ;
-(void)setConnectionSetupPiggybackBlob:(NSObject *)arg1 ;
-(NSObject *)connectionSetupPiggybackBlob;
-(int)getSignalStrengthBars:(int*)arg1 displayBars:(int*)arg2 maxDisplayBars:(int*)arg3 ;
-(int)flushBasebandQueueForConnection:(id)arg1 payloadInfoList:(id)arg2 ;
-(int)updateQualityIndicator:(int)arg1 isIPv6:(BOOL)arg2 ;
-(int)registerBasebandNotificationsForConnection:(id)arg1 ;
-(void)deregisterBasebandNotifications;
-(void)setConnectionSelectionVersionWithLocalFrameworkVersion:(id)arg1 remoteFrameworkVersion:(id)arg2 ;
-(void)setPerfTimings:(TimingCollection *)arg1 ;
-(BOOL)useCompressedConnectionData;
-(void)setUseCompressedConnectionData:(BOOL)arg1 ;
-(int)detailedErrorCode;
-(void)reportNetworkConditionsDegraded;
-(BOOL)isRemoteOSPreLion;
-(void)handleMediaReceivedOverPeerToPeerLinkWithConnectionId:(int)arg1 ;
-(void)handleMediaReceivedOverRelayLinkWithConnectionId:(int)arg1 ;
-(int)flushBasebandQueueWithPayloads:(id)arg1 flushCount:(unsigned*)arg2 ;
-(unsigned)basebandNotificationRegistrationToken;
-(BOOL)isHandoverSupported;
-(int)updateBasebandForConnection:(id)arg1 ;
-(unsigned)connectionSetupRTTEstimate;
-(int)deregisterNotificationForTransportStream:(OpaqueVCTransportStreamRef)arg1 ;
-(void)cleanupBaseband;
-(void)handleTransportStreamDeactivated:(OpaqueVCTransportStreamRef)arg1 ;
-(int)createTransportStream:(OpaqueVCTransportStream*)arg1 withType:(unsigned)arg2 options:(id)arg3 ;
-(TimingCollection *)perfTimings;
@end
hungrybluedev/Numerical-Methods
SLE/Gaussian Elimination Method/partialpivot.h
#ifndef PARTIAL_PIVOT_H
#define PARTIAL_PIVOT_H

#include "matrix.h"

/*
 * Solves / reduces the given system via Gaussian elimination with
 * partial pivoting (row swaps on the largest pivot candidate).
 * Returns a string — presumably an error/status message, with NULL (or a
 * result description) on success; TODO confirm against the implementation.
 */
char *ge_part_pivot(Matrix matrix);

#endif // PARTIAL_PIVOT_H
frc2357/robotlib2357
src/main/java/com/systemmeltdown/robotlog/outputs/ZipFileLogWriter.java
package com.systemmeltdown.robotlog.outputs; import java.io.IOException; import java.io.OutputStreamWriter; import java.util.HashMap; import java.util.Map; import java.util.zip.ZipOutputStream; public class ZipFileLogWriter implements LogWriter { public static final int COMPRESSION_LEVEL = 5; public static final String INTERNAL_FILE_NAME = "robotlog-session.json"; private double m_timeSecondsRoundingFactor; private ZipOutputStream m_zipOut; private OutputStreamWriter m_zipWriter; private Map<String, Object> m_header; private Map<String, Class<?>> m_subscribedTopics; private boolean m_needsComma = false; public ZipFileLogWriter(String dirPath, String fileName, Map<String, Object> header, double timeSecondsRoundingFactor) { try { m_zipOut = ZipFileUtils.initZipFile(dirPath, fileName, COMPRESSION_LEVEL, INTERNAL_FILE_NAME); m_zipWriter = new OutputStreamWriter(m_zipOut); m_header = header; m_subscribedTopics = new HashMap<String, Class<?>>(); m_timeSecondsRoundingFactor = timeSecondsRoundingFactor; } catch (Exception e) { System.err.println("ZipFileOutput: Failed to initialize zip file"); e.printStackTrace(); m_zipOut = null; m_zipWriter = null; return; } } @Override public void onStart(long relativeNanos) { println("{"); println(" " + ZipFileUtils.printHeader(m_header) + ","); println(" \"entries\": ["); } @Override public void onStop(long relativeNanos) { println(""); println(" ],"); printSubscribedTopics(); println("}"); completeFile(); } @Override public void onSubscribe(String topicName, Class<?> valueType, long relativeNanos) { m_subscribedTopics.put(topicName, valueType); } @Override public void onUnsubscribe(String topicName, long relativeNanos) { // Don't remove topics. We need to keep a list of all topics for this session. 
} @Override public void onEntry(String topicName, Object value, long relativeNanos) { Class<?> valueType = m_subscribedTopics.get(topicName); String entryStr = ZipFileUtils.printEntry(topicName, value, valueType, relativeNanos, m_timeSecondsRoundingFactor); if (m_needsComma) { println(","); } print(" " + entryStr); m_needsComma = true; } private void printSubscribedTopics() { println(" \"topics\": ["); m_needsComma = false; for (Map.Entry<String, Class<?>> entry : m_subscribedTopics.entrySet()) { printTopic(entry.getKey(), entry.getValue()); } println(""); println(" ]"); } private void printTopic(String name, Class<?> valueType) { if (m_needsComma) { println(","); } print(" " + ZipFileUtils.printTopic(name, valueType)); m_needsComma = true; } private void println(String text) { print(text + '\n'); } private void print(String text) { if (m_zipWriter == null) { // Don't log to stderr, it was already logged in the constructor return; } try { m_zipWriter.write(text); m_zipWriter.flush(); } catch (IOException ioe) { System.err.println("ZipFileLogWriter.print: IO Exception"); ioe.printStackTrace(); } } private void completeFile() { try { ZipFileUtils.completeZipFile(m_zipOut); } catch (IOException e) { System.err.println("ZipFileLogWriter.complete: IO Exception"); e.printStackTrace(); } } }
alb-i986/ALMOctaneJavaRESTSDK
sdk-src/src/main/java/com/hpe/adm/nga/sdk/entities/delete/DeleteEntities.java
<filename>sdk-src/src/main/java/com/hpe/adm/nga/sdk/entities/delete/DeleteEntities.java
/*
 * © Copyright 2016-2021 Micro Focus or one of its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.hpe.adm.nga.sdk.entities.delete;

import com.hpe.adm.nga.sdk.APIMode;
import com.hpe.adm.nga.sdk.entities.OctaneCollection;
import com.hpe.adm.nga.sdk.entities.get.GetEntities;
import com.hpe.adm.nga.sdk.model.EntityModel;
import com.hpe.adm.nga.sdk.network.OctaneHttpClient;
import com.hpe.adm.nga.sdk.network.OctaneRequest;
import com.hpe.adm.nga.sdk.query.Query;

/**
 * Holds the DeleteEntities request state and serves all functions concerning
 * REST delete of entity collections.
 */
public class DeleteEntities {

    private final OctaneRequest octaneRequest;

    /**
     * Creates a delete request bound to the given HTTP client and URL domain.
     *
     * @param octaneHttpClient the HTTP client used to execute the request
     * @param urlDomain        the base URL for the request
     */
    public DeleteEntities(OctaneHttpClient octaneHttpClient, String urlDomain) {
        octaneRequest = new OctaneRequest(octaneHttpClient, urlDomain);
    }

    /**
     * Executes the DeleteEntities request.
     *
     * @return collection of deleted entities
     */
    public OctaneCollection<EntityModel> execute() {
        return DeleteHelper.getInstance().deleteEntityModels(octaneRequest);
    }

    /**
     * Executes the DeleteEntities request using a custom API mode.
     *
     * @param apiMode the API mode header to apply for this execution only
     * @return collection of deleted entities
     */
    public OctaneCollection<EntityModel> execute(APIMode apiMode) {
        octaneRequest.addHeader(apiMode);
        try {
            return execute();
        } finally {
            // BUG FIX: remove the header even when execute() throws; otherwise the
            // api-mode header leaked into every later request made through this
            // instance's shared OctaneRequest.
            octaneRequest.removeHeader(apiMode);
        }
    }

    /**
     * Updates this DeleteEntities with new query parameters.
     *
     * @param query - new Query parameters
     * @return this DeleteEntities object, with the query applied
     */
    public DeleteEntities query(Query query) {
        octaneRequest.getOctaneUrl().setDqlQueryParam(query);
        return this;
    }

    /**
     * Append a new path element, for special cases
     *
     * @param path The new path section to be added
     * @return this
     */
    public DeleteEntities addPath(String path) {
        octaneRequest.getOctaneUrl().getPaths().add(path);
        return this;
    }
}
itplants/coder
coder-base/apps/websendcom/netendcom.js
<gh_stars>0 var mycron = require('cron').CronJob; var moment = require("moment"); var fs = require('fs'); var mustache = require('mustache'); var util = require('util'); var fs = require('fs'); var http = require('http'); var EventEmitter = require('events').EventEmitter; exports.settings={}; //These are dynamically updated by the runtime //settings.appname - the app id (folder) where your app is installed //settings.viewpath - prefix to where your view html files are located //settings.staticurl - base url path to static assets /static/apps/appname //settings.appurl - base url path to this app /app/appname //settings.device_name - name given to this coder by the user, Ie."<NAME>" //settings.coder_owner - name of the user, Ie. "<NAME>." //settings.coder_color - hex css color given to this coder. exports.get_routes = [ { path:'/', handler:'index_handler' }, ]; exports.post_routes = [ ]; exports.index_handler = function( req, res ) { var tmplvars = {}; tmplvars['static_url'] = exports.settings.staticurl; tmplvars['app_name'] = exports.settings.appname; tmplvars['app_url'] = exports.settings.appurl; tmplvars['device_name'] = exports.settings.device_name; res.render( exports.settings.viewpath + '/index', tmplvars ); }; exports.on_destroy = function() { }; var sudoscripts = process.cwd() + '/sudo_scripts'; console.log('sudoscripts: ' + sudoscripts); var tempv=""; var count=0; var portno=9001; function command( com ) { var ev = new EventEmitter(); var spawn = require('child_process').spawn; var scanproc = spawn( sudoscripts + '/sendcom',['-e '+com]); scanproc.stdout.setEncoding("utf8"); scanproc.stdout.addListener('data', function (data) { scanproc.stdoutStr += data; }); scanproc.stdout.on('data', function (data) { // console.log('stdout: ' + data); var temp=data.split("\n"); //console.log('stdout: ' + temp[1]); ev.emit('done', temp[1]); }); return ev; } function oncom(req){ req.on('done',function(arg1) { arg1=arg1.replace("?%",""); str=str+arg1+"\n"; // 
console.log(str); }); } var str="itplanter status\n"; console.log('Server running at http://esison.local:'+portno+'/'); function writeITPLANTER(datas){ var ev = new EventEmitter(); var cline=datas.split('\n'); console.log("command No:"+cline.length); for(i=0;i<cline.length;i++){ var compar=cline[i].split(' '); switch (compar[0]){ case 'PWM': console.log("command:"+'PWM'+'H'+compar[1]); var comH=command('H'+compar[1]); oncom(comH); break; case 'PumpWorkTime': console.log("command:"+'PumpWorkTime'+'U'+compar[1]); var comU=command('U'+compar[1]); oncom(comU); break; case 'nLamp': console.log("command:"+'nLamp'+'f'+compar[1]); var comf=command('f'+compar[1]); oncom(comf); break; case 'nPump': console.log("command:"+'nPump'+'g'+compar[1]); var comg=command('g'+compar[1]); oncom(comg); break; case 'nDuty': console.log("command:"+'nDuty'+'n'+compar[1]); var comn=command('n'+compar[1]); oncom(comn); break; case 'Time':// time set var time=compar[1] console.log("command:"+'G'+time); var comG=command('G'+time); oncom(comG); break; case 'Lamp': // Light console.log("command:"+'Lamp'+'f'+compar[1]); var coml=command('f'); var fn= oncomXW(coml,'W',compar); break; // Pump case 'Pump': console.log("command:"+'Pump'+'g'+compar[1]); var comp=command('g'); var gn= oncomXW(comp,'X',compar); break; // Duty case 'Duty': console.log("command:"+'Duty'+'n'+compar[1]); var comd=command('n'); var nn= oncomXW(comd,'Y',compar); break; }// switch end }// next i ev.emit('done', str); return ev; function oncomXW(req, com, compar){ req.on('done',function(arg1) { str=str+arg1+"\n"; var fn=arg1.split(" ")[1]; var comX={}; for(var i=0;i<fn;i++){ // * Schedule * * * var comoars=compar[2]; for(j=3;j<compar.length;j++){ comoars = compars+','+compar[j]; } console.log("command:"+com+i+','+compars); comX[i]=command(com+i+','+compars); oncom(comX[i]); } return fn; }); } } function readITPLANTER(){ var ev = new EventEmitter(); var res=""; var comA=command('A'); oncom(comA); var comB=command('B'); 
oncom(comB); var comC=command('C'); oncom(comC); var comH=command('H'); oncom(comH); var comU=command('U'); oncom(comU); function oncomX(req, com){ req.on('done',function(arg1) { str=str+arg1+"\n"; var fn=arg1.split(" ")[1]; // console.log("---------fn="+fn); var comX={}; for(var i=0;i<fn;i++){ comX[i]=command(com+i); oncom(comX[i]); } return fn; }); // reformat str // 1. gather same program // 2. rearrange as number // var data=str; var fs = require('fs'); fs.writeFile('itp_sensor_record.txt', data , function (err) { console.log(err); }); } // Light var comf=command('f'); var fn= oncomX(comf,'W'); // Pump var comg=command('g'); var gn= oncomX(comg,'X'); // Duty var comn=command('n'); var nn= oncomX(comn,'Y'); var comG=command('G'); oncom(comG); ev.emit('done', str); return ev; } var itp=readITPLANTER(); function onRequest(request, response) { // console.log("request.method:"+request.method); switch (request.method){ case 'POST': var data=''; request.setEncoding('utf8'); request.on('data', function(dataChunk) { dataChunk = dataChunk.replace(/undefined/g,''); dataChunk = dataChunk.replace(/&submit=Send/g,''); dataChunk = dataChunk.replace(/\+/g,''); dataChunk = dataChunk.replace(/\=/g,' '); dataChunk = dataChunk.replace(/&/g,'\n'); data += dataChunk; }); request.on('end', function() { // undefinedG=123456&submit=Send // data += '\n'; console.log('getdata:|'+data+'|'); response.end('finished'); // write data writeITPLANTER(data); }); break; case 'GET': console.log("Get Request received.:"); var itplanter=readITPLANTER(); itplanter.on('done',function(arg1) { console.log("ITPLANTER.done:"+arg1); }); console.log("Request received.:"+str+" count:"+(count++)); response.writeHead(200, {"Content-Type": "text/plain"}); response.write(str); response.end(); break; } } /* itp.on('done',function(arg1) { console.log("ITPLANTER.done:"+arg1); }); */ http.createServer(onRequest).listen(portno); console.log("Server has started."); // moment().format(); // 2014-07-16T09:00:00+09:00 
(default) // moment().format("YYYY-MM-DD"), // 2014-07-17 // moment().format("YYYY-MM-DD HH:mm:ssZ"), // 2014-07-17 10:00:00+09:00 var job = new mycron({ cronTime: '*/9 * * * *', onTick: function() { itp.on('done',function(arg1) { console.log("ITPLANTER.done:"+arg1); // var data = moment().format()+"\n"+str; fs.writeFile('writetest.txt', data , function (err) { console.log(data); }); }); console.log("onTick!! write data !!"); console.log(moment().format()); }, start: false // start: false, // timeZone: 'Japan/Tokyo' // }); job.start();
zhangshixu209/qiaoyang
src/main/java/com/chmei/nzbcommon/cmservice/IControlService.java
package com.chmei.nzbcommon.cmservice;

import com.chmei.nzbcommon.cmbean.InputDTO;
import com.chmei.nzbcommon.cmbean.OutputDTO;

/**
 * Unified invocation interface: every service in the system is called
 * through this single entry point.
 */
public interface IControlService {

	/**
	 * Executes the service call.
	 *
	 * @param inputDTO the request payload passed to the service
	 * @return the service's response payload
	 */
	OutputDTO execute(InputDTO inputDTO);
}
razzlefratz/MotleyTools
gcpp/classes/ocprocwords.cpp
/*====================================================================*
 *
 *   ocprocwords.cpp - definition of ocprocwords class.
 *
 *   symbol table of C Language Macro Processor keywords;
 *
 *.  Motley Tools by <NAME>
 *:  Published 1982-2005 by <NAME> for personal use
 *;  Licensed under the Internet Software Consortium License
 *
 *--------------------------------------------------------------------*/

#ifndef oCPROCWORDS_SOURCE
#define oCPROCWORDS_SOURCE

/*====================================================================*
 *   custom header files;
 *--------------------------------------------------------------------*/

#include "../classes/ocprocwords.hpp"

/*====================================================================*
 *
 *   ocprocwords();
 *
 *   Populates the inherited okeywords table with the C preprocessor
 *   directive names. The table is static so the pointer stored in
 *   mtable remains valid after the constructor returns; mcount
 *   excludes the terminating NULL sentinel.
 *
 *.  Motley Tools by <NAME>
 *:  Published 1982-2005 by <NAME> for personal use
 *;  Licensed under the Internet Software Consortium License
 *
 *--------------------------------------------------------------------*/

ocprocwords::ocprocwords ()

{
	// Sorted list of C preprocessor directives, NULL-terminated.
	static char const * table [] =
	{
		"define",
		"elif",
		"else",
		"endif",
		"error",
		"if",
		"ifdef",
		"ifndef",
		"include",
		"line",
		"pragma",
		"undef",
		(char const *) (0)
	};
	okeywords::mtitle = "cprocwords";
	// Subtract 1 so the NULL sentinel is not counted as a keyword.
	okeywords::mcount = sizeof (table) / sizeof (table [0]) -1;
	okeywords::mtable = table;
	// NOTE(review): mcheck() presumably validates table ordering/contents;
	// confirm against okeywords.
	okeywords::mcheck ();
	return;
}

/*====================================================================*
 *
 *   ~ocprocwords();
 *
 *   Nothing to release; the keyword table is static storage.
 *
 *.  Motley Tools by <NAME>
 *:  Published 1982-2005 by <NAME> for personal use
 *;  Licensed under the Internet Software Consortium License
 *
 *--------------------------------------------------------------------*/

ocprocwords::~ ocprocwords ()

{
	return;
}

/*====================================================================*
 *   end definition;
 *--------------------------------------------------------------------*/

#endif
BryceStevenWilley/EfileProxyServer
src/main/java/org/oasis_open/docs/codelist/ns/genericode/_1/Data.java
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.3.0.1 // See <a href="https://javaee.github.io/jaxb-v2/">https://javaee.github.io/jaxb-v2/</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2021.04.16 at 04:29:27 PM UTC // package org.oasis_open.docs.codelist.ns.genericode._1; import java.util.ArrayList; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; import javax.xml.bind.annotation.adapters.CollapsedStringAdapter; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; /** * Data type definition. * * <p>Java class for Data complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;complexType name="Data"&gt; * &lt;complexContent&gt; * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt; * &lt;sequence&gt; * &lt;element name="Annotation" type="{http://docs.oasis-open.org/codelist/ns/genericode/1.0/}Annotation" minOccurs="0"/&gt; * &lt;element name="Parameter" type="{http://docs.oasis-open.org/codelist/ns/genericode/1.0/}DatatypeFacet" maxOccurs="unbounded" minOccurs="0"/&gt; * &lt;/sequence&gt; * &lt;attGroup ref="{http://docs.oasis-open.org/codelist/ns/genericode/1.0/}Language"/&gt; * &lt;attribute name="Type" use="required" type="{http://www.w3.org/2001/XMLSchema}token" /&gt; * &lt;attribute name="DatatypeLibrary" type="{http://www.w3.org/2001/XMLSchema}anyURI" /&gt; * &lt;/restriction&gt; * &lt;/complexContent&gt; * &lt;/complexType&gt; * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "Data", propOrder = { "annotation", "parameter" }) public class Data { @XmlElement(name = "Annotation") protected Annotation annotation; @XmlElement(name = "Parameter") protected List<DatatypeFacet> parameter; @XmlAttribute(name = "Type", required = true) @XmlJavaTypeAdapter(CollapsedStringAdapter.class) @XmlSchemaType(name = "token") protected String type; @XmlAttribute(name = "DatatypeLibrary") @XmlSchemaType(name = "anyURI") protected String datatypeLibrary; @XmlAttribute(name = "Lang") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) @XmlSchemaType(name = "language") protected String lang; /** * Gets the value of the annotation property. * * @return * possible object is * {@link Annotation } * */ public Annotation getAnnotation() { return annotation; } /** * Sets the value of the annotation property. * * @param value * allowed object is * {@link Annotation } * */ public void setAnnotation(Annotation value) { this.annotation = value; } /** * Gets the value of the parameter property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. 
Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the parameter property. * * <p> * For example, to add a new item, do as follows: * <pre> * getParameter().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link DatatypeFacet } * * */ public List<DatatypeFacet> getParameter() { if (parameter == null) { parameter = new ArrayList<DatatypeFacet>(); } return this.parameter; } /** * Gets the value of the type property. * * @return * possible object is * {@link String } * */ public String getType() { return type; } /** * Sets the value of the type property. * * @param value * allowed object is * {@link String } * */ public void setType(String value) { this.type = value; } /** * Gets the value of the datatypeLibrary property. * * @return * possible object is * {@link String } * */ public String getDatatypeLibrary() { return datatypeLibrary; } /** * Sets the value of the datatypeLibrary property. * * @param value * allowed object is * {@link String } * */ public void setDatatypeLibrary(String value) { this.datatypeLibrary = value; } /** * Gets the value of the lang property. * * @return * possible object is * {@link String } * */ public String getLang() { return lang; } /** * Sets the value of the lang property. * * @param value * allowed object is * {@link String } * */ public void setLang(String value) { this.lang = value; } }
sjj3086786/aliyun-openapi-java-sdk
aliyun-java-sdk-cloudphoto/src/main/java/com/aliyuncs/cloudphoto/model/v20170711/CreateTransactionRequest.java
<gh_stars>1-10
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.aliyuncs.cloudphoto.model.v20170711;

import com.aliyuncs.RpcAcsRequest;
import com.aliyuncs.http.ProtocolType;

/**
 * Request object for the CloudPhoto {@code CreateTransaction} RPC API
 * (version 2017-07-11). Each setter stores the value locally and mirrors
 * it into the request's query parameters when non-null.
 *
 * @author auto create
 * @version
 */
public class CreateTransactionRequest extends RpcAcsRequest<CreateTransactionResponse> {

	public CreateTransactionRequest() {
		super("CloudPhoto", "2017-07-11", "CreateTransaction", "cloudphoto");
		setProtocol(ProtocolType.HTTPS);
	}

	private String ext;
	private Long size;
	private String libraryId;
	private String storeName;
	private String force;
	private String md5;

	/**
	 * Mirrors a value into the query parameters when it is non-null.
	 * Factors out the null-check that every setter previously repeated.
	 */
	private void putQueryParameterIfPresent(String name, String value) {
		if (value != null) {
			putQueryParameter(name, value);
		}
	}

	public String getExt() {
		return this.ext;
	}

	public void setExt(String ext) {
		this.ext = ext;
		putQueryParameterIfPresent("Ext", ext);
	}

	public Long getSize() {
		return this.size;
	}

	public void setSize(Long size) {
		this.size = size;
		putQueryParameterIfPresent("Size", size == null ? null : size.toString());
	}

	public String getLibraryId() {
		return this.libraryId;
	}

	public void setLibraryId(String libraryId) {
		this.libraryId = libraryId;
		putQueryParameterIfPresent("LibraryId", libraryId);
	}

	public String getStoreName() {
		return this.storeName;
	}

	public void setStoreName(String storeName) {
		this.storeName = storeName;
		putQueryParameterIfPresent("StoreName", storeName);
	}

	public String getForce() {
		return this.force;
	}

	public void setForce(String force) {
		this.force = force;
		putQueryParameterIfPresent("Force", force);
	}

	public String getMd5() {
		return this.md5;
	}

	public void setMd5(String md5) {
		this.md5 = md5;
		putQueryParameterIfPresent("Md5", md5);
	}

	@Override
	public Class<CreateTransactionResponse> getResponseClass() {
		return CreateTransactionResponse.class;
	}

}
DevJoaoLopes/Skoole
src/pages/Dashboard/pages/Cronograma/index.js
<gh_stars>1-10 import Cronograma from './Cronograma'; export default Cronograma;
emgreen33/geneac
lib/tasks/snapshot_reset.rake
<reponame>emgreen33/geneac
# frozen_string_literal: true

# Maintenance tasks for the Snapshot model.
namespace :snapshot do
  desc 'Drop all snapshots'
  task reset: :environment do
    # Destroy every snapshot (raising on failure), then rewind the
    # primary-key sequence so new snapshots start from id 1 again.
    Snapshot.all.each { |snapshot| snapshot.destroy! }
    ActiveRecord::Base.connection.reset_pk_sequence!(Snapshot.table_name)
    puts 'Dropped all snapshots'
  end
end
shubhamWaghmare-sap/ADT_Frontend-1
org.abapgit.adt.backend/src/org/abapgit/adt/backend/internal/AbapGitPullModifiedObjectsContentHandlerV1.java
<reponame>shubhamWaghmare-sap/ADT_Frontend-1
package org.abapgit.adt.backend.internal;

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;

import org.abapgit.adt.backend.model.agitpullmodifiedobjects.IAbapGitPullModifiedObjects;
import org.abapgit.adt.backend.model.agitpullmodifiedobjects.util.AgitpullmodifiedobjectsResourceFactoryImpl;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.resource.Resource;

import com.sap.adt.communication.content.ContentHandlerException;
import com.sap.adt.communication.content.IContentHandler;
import com.sap.adt.communication.message.IMessageBody;

/**
 * Content handler (v1) that deserializes the abapGit "pull modified objects"
 * XML payload into its EMF model. Serialization is not supported by this
 * handler; {@link #serialize} returns null.
 */
public class AbapGitPullModifiedObjectsContentHandlerV1 implements IContentHandler<IAbapGitPullModifiedObjects> {

	private static final String CONTENT_TYPE = "application/abapgit.adt.repo.pull.modified.objs.v1+xml"; //$NON-NLS-1$
	private static final String RESOURCE_NAME = "resource.agitpullmodifiedobjects"; //$NON-NLS-1$
	// Virtual URI: the EMF resource is loaded from a stream, never from disk.
	private final org.eclipse.emf.common.util.URI virtualResourceUri = org.eclipse.emf.common.util.URI.createURI(RESOURCE_NAME);

	/**
	 * Loads the message body into an EMF resource and extracts the root
	 * model entity.
	 *
	 * @throws ContentHandlerException if the XML payload cannot be read
	 */
	@Override
	public IAbapGitPullModifiedObjects deserialize(IMessageBody body, Class<? extends IAbapGitPullModifiedObjects> dataType) {
		try {
			InputStream content = body.getContent();
			Resource resource = new AgitpullmodifiedobjectsResourceFactoryImpl().createResource(this.virtualResourceUri);
			resource.load(content, null);
			IAbapGitPullModifiedObjects objs = loadEmf(resource);
			return objs;
		} catch (IOException e) {
			throw new ContentHandlerException("Error parsing abapgit pull modified objects", e); //$NON-NLS-1$
		}
	}

	/**
	 * Extracts the {@link IAbapGitPullModifiedObjects} entity from the loaded
	 * resource.
	 *
	 * @throws IllegalArgumentException if the resource holds no matching root
	 */
	public IAbapGitPullModifiedObjects loadEmf(Resource resource) {
		// BUG FIX: guard against an empty resource; previously an empty payload
		// raised IndexOutOfBoundsException from get(0) instead of the intended
		// IllegalArgumentException below.
		if (resource.getContents().isEmpty()) {
			throw new IllegalArgumentException("Invalid XML content - root model entity not found"); //$NON-NLS-1$
		}
		EObject documentRoot = resource.getContents().get(0);
		if (documentRoot != null) {
			for (EObject element : documentRoot.eContents()) {
				if (element instanceof IAbapGitPullModifiedObjects) {
					return (IAbapGitPullModifiedObjects) element;
				}
			}
		}
		throw new IllegalArgumentException("Invalid XML content - root model entity not found"); //$NON-NLS-1$
	}

	/** Serialization is not supported; always returns null. */
	@Override
	public IMessageBody serialize(IAbapGitPullModifiedObjects dataObject, Charset charset) {
		return null;
	}

	@Override
	public String getSupportedContentType() {
		return CONTENT_TYPE;
	}

	@Override
	public Class<IAbapGitPullModifiedObjects> getSupportedDataType() {
		return IAbapGitPullModifiedObjects.class;
	}
}
llllJokerllll/Programacion
Trimestre2/BoletinJDBC/src/com/acarballeira/exercicios/bd/exercicio6/package-info.java
/**
 * Exercise 6 of the JDBC exercise set ("Boletin JDBC").
 */
package com.acarballeira.exercicios.bd.exercicio6;
kevinzhwl/ObjectARXMod
2002/arxlabs/Step07/Project.cpp
// Project.cpp : Initialization functions
//
// ObjectARX application entry point: loads the AsdkEmployee DBX modules this
// app depends on, then registers/unregisters the ASDK command group.
#include "StdAfx.h"
#include "StdArx.h"
#include "resource.h"


HINSTANCE _hdllInstance =NULL ;

// This function registers an ARX command.
// (idLocal defaults to -1, meaning the literal cmdLoc string is used rather
// than a localized string loaded from the resource table.)
void AddCommand(const char* cmdGroup, const char* cmdInt, const char* cmdLoc,
				const int cmdFlags, const AcRxFunctionPtr cmdProc, const int idLocal = -1);

// NOTE: DO NOT edit the following lines.
//{{AFX_ARX_MSG
void InitApplication();
void UnloadApplication();
//}}AFX_ARX_MSG

// NOTE: DO NOT edit the following lines.
//{{AFX_ARX_ADDIN_FUNCS
//}}AFX_ARX_ADDIN_FUNCS


/////////////////////////////////////////////////////////////////////////////
// DLL Entry Point
extern "C" BOOL WINAPI DllMain(HINSTANCE hInstance, DWORD dwReason, LPVOID /*lpReserved*/)
{
	if (dwReason == DLL_PROCESS_ATTACH)
	{
		// Remember the module handle so AddCommand can load resource strings.
		_hdllInstance = hInstance;
	} else if (dwReason == DLL_PROCESS_DETACH)
	{
	}
	return TRUE;	// ok
}

/////////////////////////////////////////////////////////////////////////////
// ObjectARX EntryPoint
extern "C" AcRx::AppRetCode acrxEntryPoint(AcRx::AppMsgCode msg, void* pkt)
{
	switch (msg)
	{
	case AcRx::kInitAppMsg:
		// Comment out the following line if your
		// application should be locked into memory
		acrxDynamicLinker->unlockApplication(pkt);
		acrxDynamicLinker->registerAppMDIAware(pkt);

		// Ensure the custom-object DBX modules are resident before any of our
		// commands can touch their classes; refuse to load otherwise.
		AcRxObject *pSvc ;
		if ( !(pSvc =acrxServiceDictionary->at (ASDKEMPLOYEEDETAILS_DBXSERVICE)) )
		{
			// Try to load the module, if it is not yet present
			if ( !acrxDynamicLinker->loadModule ("AsdkEmployeeDetails.dbx", 0) )
			{
				acutPrintf ("Unable to load AsdkEmployeeDetails.dbx. Unloading this application...\n") ;
				return (AcRx::kRetError) ;
			}
		}
		if ( !(pSvc =acrxServiceDictionary->at (ASDKEMPLOYEE_DBXSERVICE)) )
		{
			// Try to load the module, if it is not yet present
			if ( !acrxDynamicLinker->loadModule ("AsdkEmployee.dbx", 0) )
			{
				acutPrintf ("Unable to load AsdkEmployee.dbx. Unloading this application...\n") ;
				return (AcRx::kRetError) ;
			}
		}

		InitApplication();
		break;
	case AcRx::kUnloadAppMsg:
		UnloadApplication();
		break;
	}
	return AcRx::kRetOK;
}

// Init this application. Register your
// commands, reactors...
void InitApplication()
{
	// NOTE: DO NOT edit the following lines.
	//{{AFX_ARX_INIT
	AddCommand("ASDK", "MYINPUT", "MYINPUT", ACRX_CMD_TRANSPARENT | ACRX_CMD_USEPICKSET, asdkmyinput);
	AddCommand("ASDK", "MYSELECT", "MYSELECT", ACRX_CMD_TRANSPARENT | ACRX_CMD_USEPICKSET, asdkmyselect);
	AddCommand("ASDK", "CREATE", "CREATE", ACRX_CMD_TRANSPARENT | ACRX_CMD_USEPICKSET, asdkcreate);
	AddCommand("ASDK", "SETLAYER", "SETLAYER", ACRX_CMD_TRANSPARENT | ACRX_CMD_USEPICKSET, asdksetlayer);
	AddCommand("ASDK", "ADDENTRY", "ADDENTRY", ACRX_CMD_TRANSPARENT | ACRX_CMD_USEPICKSET, asdkaddentry);
	AddCommand("ASDK", "REMOVEENTRY", "REMOVEENTRY", ACRX_CMD_TRANSPARENT | ACRX_CMD_USEPICKSET, asdkremoveentry);
	AddCommand("ASDK", "LISTENTRIES", "LISTENTRIES", ACRX_CMD_TRANSPARENT | ACRX_CMD_USEPICKSET | ACRX_CMD_NOINTERNALLOCK, asdklistentries);
	AddCommand("ASDK", "ADDDETAIL", "ADDDETAIL", ACRX_CMD_TRANSPARENT | ACRX_CMD_USEPICKSET, asdkadddetail);
	AddCommand("ASDK", "REMOVEDETAIL", "REMOVEDETAIL", ACRX_CMD_TRANSPARENT | ACRX_CMD_USEPICKSET, asdkremovedetail);
	AddCommand("ASDK", "LISTDETAILS", "LISTDETAILS", ACRX_CMD_TRANSPARENT | ACRX_CMD_USEPICKSET, asdklistdetails);
	AddCommand("ASDK", "CREATEEMPLOYEE", "CREATEEMPLOYEE", ACRX_CMD_TRANSPARENT | ACRX_CMD_USEPICKSET, asdkcreateemployee);
	//}}AFX_ARX_INIT

	// TODO: add your initialization functions

}

// Unload this application. Unregister all objects
// registered in InitApplication.
void UnloadApplication()
{
	// NOTE: DO NOT edit the following lines.
	//{{AFX_ARX_EXIT
	acedRegCmds->removeGroup("ASDK");
	//}}AFX_ARX_EXIT

	// TODO: clean up your application

}

// This function registers an ARX command.
// It can be used to read the localized command name
// from a string table stored in the resources.
void AddCommand(const char* cmdGroup, const char* cmdInt, const char* cmdLoc,
				const int cmdFlags, const AcRxFunctionPtr cmdProc, const int idLocal)
{
	// 64 chars + terminator; LoadString null-terminates within this limit.
	char cmdLocRes[65];

	// If idLocal is not -1, it's treated as an ID for
	// a string stored in the resources.
	if (idLocal != -1) {

		// Load strings from the string table and register the command.
		::LoadString(_hdllInstance, idLocal, cmdLocRes, 64);
		acedRegCmds->addCommand(cmdGroup, cmdInt, cmdLocRes, cmdFlags, cmdProc);

	} else
		// idLocal is -1, so the 'hard coded'
		// localized function name is used.
		acedRegCmds->addCommand(cmdGroup, cmdInt, cmdLoc, cmdFlags, cmdProc);
}
samuelko123/book-store-api
utils/validator.js
<reponame>samuelko123/book-store-api<filename>utils/validator.js const Ajv = require("ajv") const schemas = require('../models/schemas') const { ValidationError } = require('./errors') // initialize ajv const ajv = new Ajv({ removeAdditional: false, useDefaults: true, coerceTypes: true, allErrors: true, logger: false, }) // add all schemas for (let key in schemas) { let schema = schemas[key] ajv.addSchema(schema, key) } exports.check_input = function (schema_name, obj) { let validate = ajv.getSchema(schema_name) let is_valid = validate(obj) if (!is_valid) { throw new ValidationError(validate.errors, obj) } } exports.check_output = function (schema_name, obj) { let validate = ajv.getSchema(schema_name) let is_valid = validate(obj) if (!is_valid) { throw new Error(validate.errors) } }
AntoAndGar/PFP
Esercizi/E5.scala
/* Higher-order functions - building functions by cases.
   (Translated from the Italian assignment text:)
   Write a Scala method `concat` that, given three functions
   f1: Double => Double, f2: Double => Double and f3: Double => Double and two
   Double values a and b with a <= b, returns a function that coincides with
   f1 for all arguments before a, with f2 on the interval [a, b], and with f3
   after b.
*/
object E5 extends App {

  /**
   * Returns the piecewise function: f1 for x < a, f2 for x in [a, b],
   * f3 for x > b.
   */
  def concat(f1: Double => Double, f2: Double => Double, f3: Double => Double)(a: Double, b: Double): Double => Double = {
    (x: Double) =>
      if (x < a) f1(x)
      // BUG FIX: the interval [a, b] is closed, so x == b must use f2.
      // The original tested `x < b`, wrongly routing x == b to f3.
      else if (x <= b) f2(x)
      else f3(x)
  }

  println(concat(_*2,_*3,identity)(10,20)(1))
  println(concat(_*2,_*3,identity)(10,20)(15))
  println(concat(_*2,_*3,identity)(10,20)(25))
}
zipated/src
third_party/swiftshader/src/Common/Resource.cpp
// Copyright 2016 The SwiftShader Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "Resource.hpp"

#include "Memory.hpp"

namespace sw
{
	// Allocates the backing buffer and starts in the unlocked state
	// (no accessor, no lock count, no blocked waiters, not orphaned).
	Resource::Resource(size_t bytes) : size(bytes)
	{
		blocked = 0;

		accessor = PUBLIC;
		count = 0;
		orphaned = false;

		buffer = allocate(bytes);
	}

	// Private destructor: a Resource deletes itself (see destruct() and the
	// orphaned paths below) once no accessor holds it.
	Resource::~Resource()
	{
		deallocate(buffer);
	}

	// Acquires the resource for 'claimer' and returns the backing buffer.
	// Multiple concurrent locks by the SAME accessor are allowed (count is a
	// recursion/sharing count); a different accessor blocks on the 'unblock'
	// event until the current holder fully releases.
	void *Resource::lock(Accessor claimer)
	{
		criticalSection.lock();

		while(count > 0 && accessor != claimer)
		{
			// Another accessor holds the resource: register as blocked,
			// drop the mutex, and wait to be signaled by the releasing side.
			blocked++;
			criticalSection.unlock();

			unblock.wait();

			criticalSection.lock();
			blocked--;
		}

		accessor = claimer;
		count++;

		criticalSection.unlock();

		return buffer;
	}

	// Atomically releases any locks held as 'relinquisher', then acquires the
	// resource as 'claimer'. May delete this (and return 0) if releasing the
	// last lock on an orphaned resource with no blocked waiters.
	void *Resource::lock(Accessor relinquisher, Accessor claimer)
	{
		criticalSection.lock();

		// Release: drop every lock currently held by 'relinquisher'.
		while(count > 0 && accessor == relinquisher)
		{
			count--;

			if(count == 0)
			{
				if(blocked)
				{
					// Wake one waiter; it will re-check the state under the mutex.
					unblock.signal();
				}
				else if(orphaned)
				{
					// destruct() was called earlier; this was the last lock,
					// so the resource deletes itself now.
					criticalSection.unlock();
					delete this;
					return 0;
				}
			}
		}

		// Acquire: same wait protocol as lock(Accessor).
		while(count > 0 && accessor != claimer)
		{
			blocked++;
			criticalSection.unlock();

			unblock.wait();

			criticalSection.lock();
			blocked--;
		}

		accessor = claimer;
		count++;

		criticalSection.unlock();

		return buffer;
	}

	// Releases one lock held by the current accessor. On the last release,
	// either wakes a blocked waiter or, if orphaned, deletes this.
	void Resource::unlock()
	{
		criticalSection.lock();

		count--;

		if(count == 0)
		{
			if(blocked)
			{
				unblock.signal();
			}
			else if(orphaned)
			{
				criticalSection.unlock();
				delete this;
				return;
			}
		}

		criticalSection.unlock();
	}

	// Releases ALL locks held by 'relinquisher' (no-op if it is not the
	// current accessor). Same wake/self-delete behavior as unlock().
	void Resource::unlock(Accessor relinquisher)
	{
		criticalSection.lock();

		while(count > 0 && accessor == relinquisher)
		{
			count--;

			if(count == 0)
			{
				if(blocked)
				{
					unblock.signal();
				}
				else if(orphaned)
				{
					criticalSection.unlock();
					delete this;
					return;
				}
			}
		}

		criticalSection.unlock();
	}

	// Requests destruction. Deletes this immediately if the resource is idle
	// (no locks, no waiters); otherwise marks it orphaned so the last
	// unlock()/lock(relinquisher, ...) performs the delete.
	void Resource::destruct()
	{
		criticalSection.lock();

		if(count == 0 && !blocked)
		{
			criticalSection.unlock();
			delete this;
			return;
		}

		orphaned = true;

		criticalSection.unlock();
	}

	// Read-only access to the backing buffer. NOTE(review): callers are
	// presumably expected to hold a lock while dereferencing — confirm.
	const void *Resource::data() const
	{
		return buffer;
	}
}
mechasource/mechcommander2
source/mechcmd2/objtype.h
<reponame>mechasource/mechcommander2 //--------------------------------------------------------------------------- // // ObjType.h -- File contains the Basic Game Object Type definition // // MechCommander 2 // //---------------------------------------------------------------------------// // Copyright (C) Microsoft Corporation. All rights reserved. // //===========================================================================// #pragma once #ifndef OBJTYPE_H #define OBJTYPE_H //--------------------------------------------------------------------------- //#include "mclib.h" //#include "dobjtype.h" //#include "dgameobj.h" //#include "stuff/stuff.h" #define MAX_NAME 25 //--------------------------------------------------------------------------- // Classes class ObjectType { protected: ObjectTypeNumber objTypeNum; // What exactly am I? int32_t numUsers; // How many people love me? int32_t objectTypeClass; // What type am I? ObjectClass objectClass; // What object class am i? ObjectTypeNumber destroyedObject; // What I turn into when I die. ObjectTypeNumber explosionObject; // How I blow up bool potentialContact; // Can I can be a contact? wchar_t* appearName; // Base Name of appearance Files. float extentRadius; // Smallest sphere which will hold me. bool keepMe; // Do not EVER cache this objType out. int32_t iconNumber; // my index into the big strip o' icons int32_t teamId; // DEfault for this type uint8_t subType; // if building, what type of building? etc. public: PVOID operator new(size_t ourSize); void operator delete(PVOID us); void init(void) { objectClass = INVALID; objectTypeClass = -1; // This is an invalid_object destroyedObject = -1; explosionObject = -1; potentialContact = false; extentRadius = 0; // Nothing can hit me if this is zero. 
keepMe = false; iconNumber = -1; // defaults to no icon appearName = nullptr; subType = 0; } ObjectType(void) { init(void); } virtual int32_t init(std::unique_ptr<File> objFile, uint32_t fileSize); int32_t init(FitIniFilePtr objFile); virtual ~ObjectType(void) { destroy(void); } virtual void destroy(void); virtual GameObjectPtr createInstance(void); void addUser(void) { numUsers++; } void removeUser(void) { numUsers--; } void noMoreUsers(void) { numUsers = 0; } bool inUse(void) { return (numUsers > 0); } bool lovable(void) { return keepMe; } void makeLovable(void) { keepMe = true; } ObjectTypeNumber whatAmI(void) { return (objTypeNum); } const std::wstring_view& getAppearanceTypeName(void) { return (appearName); } bool getPotentialContact(void) { return (potentialContact); } int32_t getObjectTypeClass(void) { return (objectTypeClass); } ObjectClass getObjectClass(void) { return (objectClass); } ObjectTypeNumber getDestroyedObject(void) { return (destroyedObject); } ObjectTypeNumber getExplosionObject(void) { return (explosionObject); } float getExtentRadius(void) { return (extentRadius); } void setExtentRadius(float newRadius) { extentRadius = newRadius; } ObjectTypeNumber getObjTypeNum(void) { return (objTypeNum); } void setObjTypeNum(ObjectTypeNumber objTNum) { objTypeNum = objTNum; } void setIconNumber(int32_t newNumber) { iconNumber = newNumber; } int32_t getIconNumber(void) { return iconNumber; } int32_t getTeamId(void) { return teamId; } void setSubType(uint8_t type) { subType = type; } uint8_t getSubType(void) { return (subType); } virtual bool handleCollision(GameObjectPtr collidee, GameObjectPtr collider); virtual bool handleDestruction(GameObjectPtr collidee, GameObjectPtr collider); virtual float getBurnTime(void) { return (0.0); } void createExplosion(Stuff::Vector3D& position, float dmg = 0.0, float rad = 0.0); }; //--------------------------------------------------------------------------- class ObjectTypeManager { public: int32_t numObjectTypes; 
ObjectTypePtr* table; static UserHeapPtr objectTypeCache; static UserHeapPtr objectCache; static PacketFilePtr objectFile; //-------------------------------------------------------- // Following is done to maintain compatibility with MC1... static int32_t bridgeTypeHandle; static int32_t forestTypeHandle; static int32_t wallHeavyTypeHandle; static int32_t wallMediumTypeHandle; static int32_t wallLightTypeHandle; public: void init(void) {} ObjectTypeManager(void) { init(void); } int32_t init(const std::wstring_view& objectFileName, int32_t objectTypeCacheSize, int32_t objectCacheSize, int32_t maxObjectTypes = 1024); void destroy(void); ~ObjectTypeManager(void) { destroy(void); } void remove(int32_t objTypeNum); void remove(ObjectTypePtr ptr); ObjectTypePtr load(ObjectTypeNumber objTypeNum, bool noCacheOut = true, bool forceLoad = false); ObjectTypePtr get(ObjectTypeNumber objTypeNum, bool loadIt = true); GameObjectPtr create(ObjectTypeNumber objTypeNum); }; //--------------------------------------------------------------------------- #endif
anasanzari/algoliasearch-client-go
algolia/opt/retrieve_mappings.go
// Code generated by go generate. DO NOT EDIT. package opt import "encoding/json" // RetrieveMappingsOption is a wrapper for an RetrieveMappings option parameter. It holds // the actual value of the option that can be accessed by calling Get. type RetrieveMappingsOption struct { value bool } // RetrieveMappings wraps the given value into a RetrieveMappingsOption. func RetrieveMappings(v bool) *RetrieveMappingsOption { return &RetrieveMappingsOption{v} } // Get retrieves the actual value of the option parameter. func (o *RetrieveMappingsOption) Get() bool { if o == nil { return false } return o.value } // MarshalJSON implements the json.Marshaler interface for // RetrieveMappingsOption. func (o RetrieveMappingsOption) MarshalJSON() ([]byte, error) { return json.Marshal(o.value) } // UnmarshalJSON implements the json.Unmarshaler interface for // RetrieveMappingsOption. func (o *RetrieveMappingsOption) UnmarshalJSON(data []byte) error { if string(data) == "null" { o.value = false return nil } return json.Unmarshal(data, &o.value) } // Equal returns true if the given option is equal to the instance one. In case // the given option is nil, we checked the instance one is set to the default // value of the option. func (o *RetrieveMappingsOption) Equal(o2 *RetrieveMappingsOption) bool { if o == nil { return o2 == nil || o2.value == false } if o2 == nil { return o == nil || o.value == false } return o.value == o2.value } // RetrieveMappingsEqual returns true if the two options are equal. // In case of one option being nil, the value of the other must be nil as well // or be set to the default value of this option. func RetrieveMappingsEqual(o1, o2 *RetrieveMappingsOption) bool { return o1.Equal(o2) }
gargamol/base-cms
services/graphql-server/build-redirects.js
const { content: canonicalPathFor, requestParser: getCanonicalRules } = require('@base-cms/canonical-path');
const { BaseDB } = require('@base-cms/db');
const { get } = require('@base-cms/object-path');
const createDB = require('./src/basedb');

// Normalizes a redirect path: empty/blank values collapse to '', absolute
// http(s) URLs pass through untouched, everything else becomes a single
// leading-slash relative path.
const cleanPath = (value) => {
  if (!value) return '';
  const v = value.trim();
  if (!v) return v;
  if (/^http/.test(v)) return v;
  return `/${v.replace(/^\/+/, '')}`;
};

const { log } = console;
const { TENANT_KEY } = process.env;
const basedb = createDB(TENANT_KEY);
const canonicalRules = getCanonicalRules({ headers: {} });

// Promisifies cursor.forEach: resolves when the driver signals completion.
// NOTE(review): if `cb` is async, forEach does not await it — the returned
// promise may resolve before all callbacks finish; verify against the
// driver version in use.
const iterateCursor = (cursor, cb) => new Promise((resolve, reject) => {
  cursor.forEach(cb, (err) => {
    if (err) {
      reject(err);
    } else {
      resolve();
    }
  });
});

// Loads the active sections for the given ids and returns a loader function
// (matching the canonical-path `load(type, id)` signature) that resolves a
// section by id from the in-memory map.
const getPrimarySectionLoader = async (ids) => {
  const query = {
    _id: { $in: ids },
    status: 1,
  };
  const options = {
    projection: { alias: 1 },
  };
  const sections = await basedb.find('website.Section', query, options);
  const sectionMap = sections.reduce((map, section) => map.set(`${section._id}`, section), new Map());
  return (_, id) => sectionMap.get(`${id}`);
};

// Builds { from, to } pairs for every legacy redirect stored on content
// documents; `to` is the content item's canonical path.
const buildContentRedirects = async (contentColl) => {
  log('Retrieving content redirects...');
  const sectionIds = await contentColl.distinct('mutations.Website.primarySection.$id', {
    'mutations.Website.redirects.0': { $exists: true },
  });
  log('Getting primarySection references...');
  const load = await getPrimarySectionLoader(sectionIds);
  log('Primary section references loaded.');
  const context = { canonicalRules, load };

  // $unwind fans each document out to one doc per redirect.
  // NOTE(review): the second $project repeats the first and looks redundant;
  // kept as-is since removing it does not change output.
  const cursor = await contentColl.aggregate([
    { $match: { 'mutations.Website.redirects.0': { $exists: true } } },
    {
      $project: {
        type: 1,
        'mutations.Website.redirects': 1,
        'mutations.Website.slug': 1,
        'mutations.Website.primarySection': 1,
      },
    },
    { $unwind: '$mutations.Website.redirects' },
    {
      $project: {
        type: 1,
        'mutations.Website.redirects': 1,
        'mutations.Website.slug': 1,
        'mutations.Website.primarySection': 1,
      },
    },
  ]);

  const results = [];
  await iterateCursor(cursor, async (doc) => {
    if (typeof doc === 'object') {
      const redirect = get(doc, 'mutations.Website.redirects');
      const from = redirect;
      // NOTE(review): uses BaseDB.get while the line above uses the
      // object-path `get` helper — presumably equivalent; confirm.
      const slug = BaseDB.get(doc, 'mutations.Website.slug');
      const to = await canonicalPathFor({ slug, ...doc }, context);
      results.push({ from, to });
    }
  });
  log(`Found ${results.length} content redirects.`);
  return results;
};

// Builds { from, to } pairs for section redirects: each redirect points at
// the section's alias.
const buildSectionRedirects = async (sectionColl) => {
  log('Retrieving section redirects...');
  const cursor = await sectionColl.aggregate([
    { $match: { 'redirects.0': { $exists: true } } },
    { $unwind: '$redirects' },
    { $project: { redirects: 1, alias: 1 } },
  ]);

  const docs = await cursor.toArray();
  log(`Found ${docs.length} section redirects.`);
  return Promise.all(docs.filter(doc => typeof doc === 'object').map(async ({ alias, redirects }) => {
    const from = redirects;
    const to = alias;
    return { from, to };
  }));
};

// Builds { from, to } pairs for magazine issue redirects, pointing at the
// issue's /magazine/{id} page.
const buildIssueRedirects = async (issueColl) => {
  log('Retrieving issue redirects...');
  const cursor = await issueColl.aggregate([
    { $match: { 'redirects.0': { $exists: true } } },
    { $unwind: '$redirects' },
    { $project: { redirects: 1 } },
  ]);

  const docs = await cursor.toArray();
  log(`Found ${docs.length} issue redirects.`);
  return Promise.all(docs.filter(doc => typeof doc === 'object').map(async ({ _id, redirects }) => {
    const from = redirects;
    const to = `/magazine/${_id}`;
    return { from, to };
  }));
};

// Static redirects shared by every tenant, covering both the tenant-prefixed
// legacy CMS paths (/content/{key}/en/...) and the bare legacy paths.
// NOTE(review): '/whitpapers.html' is kept intentionally — it appears to
// handle a historically misspelled URL. A few entries ('/events.html',
// '/index.html') are listed twice; harmless since the bulk upsert is keyed
// by `from`.
const buildGlobalRedirects = () => {
  const code = TENANT_KEY.split('_')[1];
  const key = code === 'pia' ? 'sr' : code;
  return [
    { from: `/content/${key}/en/video.html`, to: '/videos' },
    { from: `/content/${key}/en/video/video-landing-page.html`, to: '/videos' },
    { from: `/content/${key}/en/currentissue`, to: '/magazine' },
    { from: `/content/${key}/en/past-issues.html`, to: '/magazine' },
    { from: `/content/${key}/en/events.html`, to: '/events' },
    { from: `/content/${key}/en/event-listing.html`, to: '/events' },
    { from: `/content/${key}/en/archives.html`, to: '/magazine' },
    { from: `/content/${key}/en/index.html`, to: '/' },
    { from: `/content/${key}/en/whitpapers.html`, to: '/white-papers' },
    { from: `/content/${key}/en/whitepaper-listings.html`, to: '/white-papers' },
    { from: `/content/${key}/en/whitepaper-listing.html`, to: '/white-papers' },
    { from: `/content/${key}/en/search.html`, to: '/search' },
    { from: `/content/${key}/en/advertise.html`, to: '/page/advertise' },
    { from: `/content/${key}/en/webcasts.html`, to: '/webcasts' },
    { from: `/content/${key}/en/webcast-listing.html`, to: '/webcasts' },
    { from: `/content/${key}/en/podcasts.html`, to: '/podcasts' },
    { from: `/content/${key}/en/index.html`, to: '/' },
    { from: `/content/${key}/en/magazine.html`, to: '/magazine' },
    { from: `/content/${key}/en/about-us.html`, to: '/page/about-us' },
    { from: `/content/${key}/en/newsletter.html`, to: '/subscribe/email' },
    { from: '/past-issues.html', to: '/magazine' },
    { from: '/search.html', to: '/search' },
    { from: '/whitepapers.html', to: '/white-papers' },
    { from: '/whitepaper-listings.html', to: '/white-papers' },
    { from: '/whitepaper-listing.html', to: '/white-papers' },
    { from: '/events.html', to: '/events' },
    { from: '/advertise', to: '/page/advertise' },
    { from: '/advertise.html', to: '/advertise' },
    { from: '/webcasts.html', to: '/webcasts' },
    { from: '/webcast-listing.html', to: '/webcasts' },
    { from: '/events.html', to: '/events' },
    { from: '/event-listing.html', to: '/events' },
    { from: '/podcasts.html', to: '/podcasts' },
    { from: '/index/contact-us.html', to: '/contact-us' },
    { from: '/index/about-us.html', to: '/page/about-us' },
    { from: '/subscribe.html', to: '/subscribe' },
    { from: '/magazine.html', to: '/magazine' },
    { from: '/archives.html', to: '/magazine' },
    { from: '/about-us.html', to: '/page/about-us' },
    // bug fix: was '/suscribe/email' — the tenant-prefixed variant above
    // correctly targets '/subscribe/email'.
    { from: '/newsletter.html', to: '/subscribe/email' },
    { from: '/video.html', to: '/videos' },
    { from: '/video/video-landing-page.html', to: '/videos' },
    { from: '/index.html', to: '/' },
  ];
};

// Builds { from, to } pairs from the redirect map stored on Site products.
const buildWebsiteProductRedirects = async (productColl) => {
  const cursor = await productColl.find({ type: 'Site', redirects: { $exists: true } }, { projection: { redirects: 1 } });
  const docs = await cursor.toArray();
  return docs.filter(({ redirects }) => typeof redirects === 'object').reduce((arr, { redirects }) => {
    Object.keys(redirects).forEach((from) => {
      arr.push({ from, to: redirects[from] });
    });
    return arr;
  }, []);
};

// Entry point: gathers every redirect source, normalizes paths, upserts the
// results into website.Redirects (keyed by `from`), and ensures the unique
// index exists.
const run = async () => {
  const client = await basedb.client.connect();
  log(`BaseCMS DB connected to ${client.s.url} for ${basedb.tenant}`);
  const [redirectsColl, contentColl, sectionColl, issueColl, productColl] = await Promise.all([
    basedb.collection('website', 'Redirects'),
    basedb.collection('platform', 'Content'),
    basedb.collection('website', 'Section'),
    basedb.collection('magazine', 'Issue'),
    basedb.collection('platform', 'Product'),
  ]);

  const promised = await Promise.all([
    buildContentRedirects(contentColl),
    buildSectionRedirects(sectionColl),
    buildIssueRedirects(issueColl),
    buildGlobalRedirects(),
    buildWebsiteProductRedirects(productColl),
  ]);

  const redirects = promised
    .reduce((arr, el) => arr.concat(el), [])
    .map(({ from, to }) => ({ from: cleanPath(from), to: cleanPath(to) }))
    .filter(({ from, to }) => from && to);

  log('Beginning bulk write process...');
  const bulkOps = redirects.map(({ from, to }) => ({
    updateOne: {
      filter: { from },
      update: { $set: { from, to } },
      upsert: true,
    },
  }));
  const { matchedCount } = await redirectsColl.bulkWrite(bulkOps);
  log('Bulk write complete.', matchedCount);

  log('Creating index...');
  await redirectsColl.createIndex({ from: 1 }, { unique: true });
  log('Indexing complete.');

  await basedb.close();
};

run().catch(e => setImmediate(() => { throw e; }));
petercipov/mobi
deployer-spotify/src/main/java/com/petercipov/mobi/deployer/spotify/SpotifyOptions.java
package com.petercipov.mobi.deployer.spotify;

import com.google.common.collect.Lists;
import com.petercipov.mobi.Instance;
import com.petercipov.mobi.deployer.RxDeployment;
import com.petercipov.traces.api.Trace;
import com.spotify.docker.client.messages.ContainerConfig;
import com.spotify.docker.client.messages.HostConfig;
import com.spotify.docker.client.messages.PortBinding;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import rx.Observable;

/**
 * {@link RxDeployment} implementation backed by the Spotify docker-client.
 *
 * <p>Each setter/adder delegates to a mutable {@link ContainerConfig.Builder}
 * or {@link HostConfig.Builder}; the accumulated state is materialized by
 * {@link #buildForImage(Instance)}. The "add*" methods merge with any values
 * already present on the builders rather than replacing them.
 *
 * <p>NOTE(review): the merge ORDER is not consistent across methods —
 * {@code addVolumes} puts new bindings before existing ones, while
 * {@code addEnv} appends after them. Presumably only membership matters to
 * Docker; confirm before normalizing.
 *
 * @author <NAME>
 */
public class SpotifyOptions extends RxDeployment {
	private final ContainerConfig.Builder containerConfig;
	private final HostConfig.Builder hostConfig;
	private final SpotifyRxDocker docker;

	public SpotifyOptions(SpotifyRxDocker docker) {
		this.containerConfig = ContainerConfig.builder();
		this.hostConfig = HostConfig.builder();
		this.docker = docker;
	}

	// Convenience overload: binds hostPath to guestPath using Docker's
	// "host:guest" binding syntax.
	@Override
	public RxDeployment addVolume(String hostPath, String guestPath) {
		return addVolumes(Arrays.asList(hostPath+":"+guestPath));
	}

	// Merges the given bindings with any already configured.
	// NOTE(review): new bindings are placed BEFORE existing ones.
	@Override
	public RxDeployment addVolumes(Iterable<String> volumeBindings) {
		List<String> volumes = Lists.newLinkedList(volumeBindings);
		if (this.hostConfig.binds() != null) {
			volumes.addAll(this.hostConfig.binds());
		}
		this.hostConfig.binds(volumes);
		return this;
	}

	// Appends one "NAME=value" environment entry after any existing ones.
	@Override
	public RxDeployment addEnv(String variable) {
		List<String> env = new LinkedList<>();
		if (this.containerConfig.env() == null) {
			env.add(variable);
		} else {
			env.addAll(this.containerConfig.env());
			env.add(variable);
		}
		this.containerConfig.env(env);
		return this;
	}

	@Override
	public RxDeployment setPublishAllPorts(boolean enabled) {
		this.hostConfig.publishAllPorts(enabled);
		return this;
	}

	// Maps a container port spec (e.g. "80/tcp") to a fixed host port.
	// NOTE(review): the else-branch mutates the map returned by
	// portBindings() in place — this assumes the builder exposes a mutable
	// map; confirm against the docker-client version in use.
	@Override
	public RxDeployment addPortMapping(String port, int customPort) {
		if (this.hostConfig.portBindings() == null) {
			HashMap<String, List<PortBinding>> ports = new HashMap<>();
			ports.put(port, Arrays.asList(PortBinding.of(null, customPort)));
			this.hostConfig.portBindings(ports);
		} else {
			this.hostConfig.portBindings()
				.put(port, Arrays.asList(PortBinding.of(null, customPort)));
		}
		return this;
	}

	// Finalizes the accumulated configuration for the given image.
	public ContainerConfig buildForImage(Instance image) {
		return this.containerConfig
			.image(image.toString())
			.hostConfig(this.hostConfig.build())
			.build();
	}

	@Override
	public RxDeployment setWorkDir(String workDir) {
		this.containerConfig.workingDir(workDir);
		return this;
	}

	@Override
	public RxDeployment setUser(String user) {
		this.containerConfig.user(user);
		return this;
	}

	@Override
	public RxDeployment setCmd(String ... cmd) {
		this.containerConfig.cmd(cmd);
		return this;
	}

	@Override
	public RxDeployment setCpuQuota(long quota) {
		this.containerConfig.cpuQuota(quota);
		return this;
	}

	@Override
	public RxDeployment setCpuShares(long shares) {
		this.containerConfig.cpuShares(shares);
		return this;
	}

	@Override
	public RxDeployment setDomainName(String name) {
		this.containerConfig.domainname(name);
		return this;
	}

	@Override
	public RxDeployment setEntryPoint(String... entry) {
		this.containerConfig.entrypoint(entry);
		return this;
	}

	// Merges one exposed port into the (unordered) set of exposed ports.
	@Override
	public RxDeployment addExposedPort(String port) {
		HashSet<String> ports = new HashSet<>();
		ports.add(port);
		if (this.containerConfig.exposedPorts() != null) {
			ports.addAll(this.containerConfig.exposedPorts());
		}
		this.containerConfig.exposedPorts(ports);
		return this;
	}

	@Override
	public RxDeployment setHostName(String hostName) {
		this.containerConfig.hostname(hostName);
		return this;
	}

	// Merges one label; an existing label with the same key wins, since
	// existing labels are copied in after the new one.
	@Override
	public RxDeployment addLabel(String key, String value) {
		HashMap<String, String> labels = new HashMap<>();
		labels.put(key, value);
		if (this.containerConfig.labels() != null) {
			labels.putAll(this.containerConfig.labels());
		}
		this.containerConfig.labels(labels);
		return this;
	}

	// NOTE(review): method name is misspelled ("Adress") but is part of the
	// RxDeployment interface — renaming would break callers.
	@Override
	public RxDeployment setMacAdress(String mac) {
		this.containerConfig.macAddress(mac);
		return this;
	}

	@Override
	public RxDeployment setMemory(long memory) {
		this.containerConfig.memory(memory);
		return this;
	}

	@Override
	public RxDeployment setNetworkDisabled(boolean disabled) {
		this.containerConfig.networkDisabled(disabled);
		return this;
	}

	@Override
	public RxDeployment setOpenStdIn(boolean open) {
		this.containerConfig.openStdin(open);
		return this;
	}

	@Override
	public RxDeployment setStdInOnce(boolean once) {
		this.containerConfig.stdinOnce(once);
		return this;
	}

	@Override
	public RxDeployment setTty(boolean enabled) {
		this.containerConfig.tty(enabled);
		return this;
	}

	// Convenience overload: trims both parts and appends "name=value".
	@Override
	public RxDeployment addEnv(String name, String value) {
		return addEnv(name.trim()+"="+value.trim());
	}

	// Sets the memory limit and total memory+swap. A negative swap value is
	// translated to -1, which presumably means "unlimited swap" to Docker —
	// confirm against the Docker API docs.
	@Override
	public RxDeployment setMemory(long memory, long swap) {
		this.containerConfig.memory(memory);
		if (swap < 0) {
			this.containerConfig.memorySwap(-1l);
		} else {
			this.containerConfig.memorySwap(memory+swap);
		}
		return this;
	}

	@Override
	public RxDeployment setCgroupParent(String parent) {
		this.hostConfig.cgroupParent(parent);
		return this;
	}

	// The add* host-config methods below all merge new values BEFORE any
	// existing ones (new entries first, existing appended after).
	@Override
	public RxDeployment addDns(String... dns) {
		LinkedList<String> list = new LinkedList<>();
		list.addAll(Arrays.asList(dns));
		if (this.hostConfig.dns() != null) {
			list.addAll(this.hostConfig.dns());
		}
		this.hostConfig.dns(list);
		return this;
	}

	@Override
	public RxDeployment addDnsSearch(String... dns) {
		LinkedList<String> list = new LinkedList<>();
		list.addAll(Arrays.asList(dns));
		if (this.hostConfig.dnsSearch() != null) {
			list.addAll(this.hostConfig.dnsSearch());
		}
		this.hostConfig.dnsSearch(list);
		return this;
	}

	@Override
	public RxDeployment addExtraHosts(String... hosts) {
		LinkedList<String> list = new LinkedList<>();
		list.addAll(Arrays.asList(hosts));
		if (this.hostConfig.extraHosts() != null) {
			list.addAll(this.hostConfig.extraHosts());
		}
		this.hostConfig.extraHosts(list);
		return this;
	}

	@Override
	public RxDeployment addLinks(String... links) {
		LinkedList<String> list = new LinkedList<>();
		list.addAll(Arrays.asList(links));
		if (this.hostConfig.links() != null) {
			list.addAll(this.hostConfig.links());
		}
		this.hostConfig.links(list);
		return this;
	}

	@Override
	public RxDeployment addLxcParameter(String key, String value) {
		LinkedList<HostConfig.LxcConfParameter> list = new LinkedList<>();
		list.add(new HostConfig.LxcConfParameter(key, value));
		if (this.hostConfig.lxcConf()!= null) {
			list.addAll(this.hostConfig.lxcConf());
		}
		this.hostConfig.lxcConf(list);
		return this;
	}

	@Override
	public RxDeployment setNetworkMode(String mode) {
		this.hostConfig.networkMode(mode);
		return this;
	}

	@Override
	public RxDeployment setPrivileged(boolean privileged) {
		this.hostConfig.privileged(privileged);
		return this;
	}

	@Override
	public RxDeployment addSecurityOpt(String... opts) {
		LinkedList<String> list = new LinkedList<>();
		list.addAll(Arrays.asList(opts));
		if (this.hostConfig.securityOpt()!= null) {
			list.addAll(this.hostConfig.securityOpt());
		}
		this.hostConfig.securityOpt(list);
		return this;
	}

	@Override
	public RxDeployment addVolumeFrom(String... volumes) {
		LinkedList<String> list = new LinkedList<>();
		list.addAll(Arrays.asList(volumes));
		if (this.hostConfig.volumesFrom()!= null) {
			list.addAll(this.hostConfig.volumesFrom());
		}
		this.hostConfig.volumesFrom(list);
		return this;
	}

	@Override
	public RxDeployment publishAllPorts() {
		return setPublishAllPorts(true);
	}

	// Hands the fully configured deployment to the docker client for
	// container creation; emits the created container id.
	@Override
	protected Observable<String> createContainer(Trace trace, Instance image) {
		return this.docker.createContainer(trace, image, this);
	}
}
Jean-Domingues/Estudos-basicos
javascript/7-Javascript-Assincrono/AJAX-HMLhttpRequest/fetch-axios/assets/js/main.js
// Equivalent implementation using the fetch API:
// fetch('pessoas.json')
//   .then(resposta => resposta.json())
//   .then(json => carregaElementosNaPagina(json));

// axios parses the JSON response automatically; hand the parsed data to
// the function that renders it into the HTML page.
axios('pessoas.json')
  .then(resposta => carregaElementosNaPagina(resposta.data));

// Renders one table row per person (name and age columns) and appends the
// resulting table to the '.resultado' element.
function carregaElementosNaPagina(json) {
  const table = document.createElement('table');

  json.forEach(pessoa => {
    const tr = document.createElement('tr');

    [pessoa.nome, pessoa.idade].forEach(valor => {
      const td = document.createElement('td');
      td.innerHTML = valor;
      tr.appendChild(td);
    });

    table.appendChild(tr);
  });

  document.querySelector('.resultado').appendChild(table);
}