repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
windystrife/UnrealEngine_NVIDIAGameWork
Engine/Source/Runtime/AIModule/Classes/BehaviorTree/BTAuxiliaryNode.h
<filename>Engine/Source/Runtime/AIModule/Classes/BehaviorTree/BTAuxiliaryNode.h
// Copyright 1998-2017 Epic Games, Inc. All Rights Reserved.

#pragma once

#include "CoreMinimal.h"
#include "UObject/ObjectMacros.h"
#include "BehaviorTree/BTNode.h"
#include "BTAuxiliaryNode.generated.h"

/** Per-AI instanced memory block for auxiliary nodes; tracks conditional-tick scheduling state. */
struct FBTAuxiliaryMemory : public FBTInstancedNodeMemory
{
	/** time remaining until the next tick is delivered (used with bTickIntervals) */
	float NextTickRemainingTime;

	/** delta time accumulated while ticks were being skipped */
	float AccumulatedDeltaTime;
};

/**
 * Auxiliary nodes are supporting nodes, that receive notification about execution flow and can be ticked
 *
 * Because some of them can be instanced for specific AI, following virtual functions are not marked as const:
 *  - OnBecomeRelevant
 *  - OnCeaseRelevant
 *  - TickNode
 *
 * If your node is not being instanced (default behavior), DO NOT change any properties of object within those functions!
 * Template nodes are shared across all behavior tree components using the same tree asset and must store
 * their runtime properties in provided NodeMemory block (allocation size determined by GetInstanceMemorySize() )
 */
UCLASS(Abstract)
class AIMODULE_API UBTAuxiliaryNode : public UBTNode
{
	GENERATED_UCLASS_BODY()

	/** wrapper for node instancing: OnBecomeRelevant */
	void WrappedOnBecomeRelevant(UBehaviorTreeComponent& OwnerComp, uint8* NodeMemory) const;

	/** wrapper for node instancing: OnCeaseRelevant */
	void WrappedOnCeaseRelevant(UBehaviorTreeComponent& OwnerComp, uint8* NodeMemory) const;

	/** wrapper for node instancing: TickNode */
	void WrappedTickNode(UBehaviorTreeComponent& OwnerComp, uint8* NodeMemory, float DeltaSeconds) const;

	virtual void DescribeRuntimeValues(const UBehaviorTreeComponent& OwnerComp, uint8* NodeMemory, EBTDescriptionVerbosity::Type Verbosity, TArray<FString>& Values) const override;
	virtual uint16 GetSpecialMemorySize() const override;

	/** fill in data about tree structure */
	void InitializeParentLink(uint8 InChildIndex);

	/** @return parent task node */
	const UBTNode* GetMyNode() const;

	/** @return index of child in parent's array or MAX_uint8 */
	uint8 GetChildIndex() const;

protected:

	/** if set, OnBecomeRelevant will be used */
	uint8 bNotifyBecomeRelevant:1;

	/** if set, OnCeaseRelevant will be used */
	uint8 bNotifyCeaseRelevant:1;

	/** if set, OnTick will be used */
	uint8 bNotifyTick : 1;

	/** if set, conditional tick will use remaining time from node's memory */
	uint8 bTickIntervals : 1;

	/** child index in parent node */
	uint8 ChildIndex;

	/** called when auxiliary node becomes active
	 * this function should be considered as const (don't modify state of object) if node is not instanced! */
	virtual void OnBecomeRelevant(UBehaviorTreeComponent& OwnerComp, uint8* NodeMemory);

	/** called when auxiliary node becomes inactive
	 * this function should be considered as const (don't modify state of object) if node is not instanced! */
	virtual void OnCeaseRelevant(UBehaviorTreeComponent& OwnerComp, uint8* NodeMemory);

	/** tick function
	 * this function should be considered as const (don't modify state of object) if node is not instanced! */
	virtual void TickNode(UBehaviorTreeComponent& OwnerComp, uint8* NodeMemory, float DeltaSeconds);

	/** sets next tick time */
	void SetNextTickTime(uint8* NodeMemory, float RemainingTime) const;

	/** gets remaining time for next tick */
	float GetNextTickRemainingTime(uint8* NodeMemory) const;
};

/** Inline accessor for the node's index within its parent's child array. */
FORCEINLINE uint8 UBTAuxiliaryNode::GetChildIndex() const
{
	return ChildIndex;
}
securitykernel/bc-java
core/src/main/java/org/bouncycastle/crypto/tls/TlsPSKKeyExchange.java
package org.bouncycastle.crypto.tls;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.math.BigInteger;
import java.util.Vector;

import org.bouncycastle.asn1.x509.KeyUsage;
import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo;
import org.bouncycastle.crypto.params.AsymmetricKeyParameter;
import org.bouncycastle.crypto.params.DHParameters;
import org.bouncycastle.crypto.params.DHPrivateKeyParameters;
import org.bouncycastle.crypto.params.DHPublicKeyParameters;
import org.bouncycastle.crypto.params.ECDomainParameters;
import org.bouncycastle.crypto.params.ECPrivateKeyParameters;
import org.bouncycastle.crypto.params.ECPublicKeyParameters;
import org.bouncycastle.crypto.params.RSAKeyParameters;
import org.bouncycastle.crypto.util.PublicKeyFactory;
import org.bouncycastle.util.Arrays;
import org.bouncycastle.util.io.Streams;

/**
 * (D)TLS PSK key exchange (RFC 4279).
 *
 * Supports the plain PSK, DHE_PSK, ECDHE_PSK and RSA_PSK key exchange algorithms;
 * the same instance implements both the client-side and server-side message flows.
 */
public class TlsPSKKeyExchange extends AbstractTlsKeyExchange
{
    protected TlsPSKIdentity pskIdentity;
    protected TlsPSKIdentityManager pskIdentityManager;
    protected TlsDHVerifier dhVerifier;
    protected DHParameters dhParameters;
    protected int[] namedCurves;
    protected short[] clientECPointFormats, serverECPointFormats;

    // PSK hint/value; psk is zeroed and nulled after the premaster secret is built
    protected byte[] psk_identity_hint = null;
    protected byte[] psk = null;

    // Ephemeral DH state (DHE_PSK only)
    protected DHPrivateKeyParameters dhAgreePrivateKey = null;
    protected DHPublicKeyParameters dhAgreePublicKey = null;

    // Ephemeral ECDH state (ECDHE_PSK only)
    protected ECPrivateKeyParameters ecAgreePrivateKey = null;
    protected ECPublicKeyParameters ecAgreePublicKey = null;

    // Server certificate state (RSA_PSK only)
    protected AsymmetricKeyParameter serverPublicKey = null;
    protected RSAKeyParameters rsaServerPublicKey = null;
    protected TlsEncryptionCredentials serverCredentials = null;
    protected byte[] premasterSecret;

    /**
     * @deprecated Use constructor that takes a TlsDHVerifier
     */
    public TlsPSKKeyExchange(int keyExchange, Vector supportedSignatureAlgorithms, TlsPSKIdentity pskIdentity,
        TlsPSKIdentityManager pskIdentityManager, DHParameters dhParameters, int[] namedCurves,
        short[] clientECPointFormats, short[] serverECPointFormats)
    {
        this(keyExchange, supportedSignatureAlgorithms, pskIdentity, pskIdentityManager, new DefaultTlsDHVerifier(),
            dhParameters, namedCurves, clientECPointFormats, serverECPointFormats);
    }

    /**
     * @param keyExchange one of the PSK variants of {@link KeyExchangeAlgorithm}; anything else is rejected
     * @param pskIdentity client-side identity/PSK source
     * @param pskIdentityManager server-side identity-to-PSK lookup
     * @param dhVerifier used to validate received DH parameters (DHE_PSK)
     * @throws IllegalArgumentException if keyExchange is not a PSK algorithm
     */
    public TlsPSKKeyExchange(int keyExchange, Vector supportedSignatureAlgorithms, TlsPSKIdentity pskIdentity,
        TlsPSKIdentityManager pskIdentityManager, TlsDHVerifier dhVerifier, DHParameters dhParameters,
        int[] namedCurves, short[] clientECPointFormats, short[] serverECPointFormats)
    {
        super(keyExchange, supportedSignatureAlgorithms);

        switch (keyExchange)
        {
        case KeyExchangeAlgorithm.DHE_PSK:
        case KeyExchangeAlgorithm.ECDHE_PSK:
        case KeyExchangeAlgorithm.PSK:
        case KeyExchangeAlgorithm.RSA_PSK:
            break;
        default:
            throw new IllegalArgumentException("unsupported key exchange algorithm");
        }

        this.pskIdentity = pskIdentity;
        this.pskIdentityManager = pskIdentityManager;
        this.dhVerifier = dhVerifier;
        this.dhParameters = dhParameters;
        this.namedCurves = namedCurves;
        this.clientECPointFormats = clientECPointFormats;
        this.serverECPointFormats = serverECPointFormats;
    }

    /** RSA_PSK requires a server certificate, so skipping credentials is an error for it. */
    public void skipServerCredentials() throws IOException
    {
        if (keyExchange == KeyExchangeAlgorithm.RSA_PSK)
        {
            throw new TlsFatalAlert(AlertDescription.unexpected_message);
        }
    }

    /** Accepts server credentials; only encryption-capable credentials are valid here (RSA_PSK). */
    public void processServerCredentials(TlsCredentials serverCredentials) throws IOException
    {
        if (!(serverCredentials instanceof TlsEncryptionCredentials))
        {
            throw new TlsFatalAlert(AlertDescription.internal_error);
        }

        processServerCertificate(serverCredentials.getCertificate());

        this.serverCredentials = (TlsEncryptionCredentials)serverCredentials;
    }

    /**
     * Builds the ServerKeyExchange: the psk_identity_hint (possibly empty), followed by
     * ephemeral DH or ECDH parameters for DHE_PSK/ECDHE_PSK.
     *
     * @return the encoded message, or null when no hint exists and none of the variants requires one
     */
    public byte[] generateServerKeyExchange() throws IOException
    {
        this.psk_identity_hint = pskIdentityManager.getHint();

        if (this.psk_identity_hint == null && !requiresServerKeyExchange())
        {
            return null;
        }

        ByteArrayOutputStream buf = new ByteArrayOutputStream();

        if (this.psk_identity_hint == null)
        {
            TlsUtils.writeOpaque16(TlsUtils.EMPTY_BYTES, buf);
        }
        else
        {
            TlsUtils.writeOpaque16(this.psk_identity_hint, buf);
        }

        if (this.keyExchange == KeyExchangeAlgorithm.DHE_PSK)
        {
            if (this.dhParameters == null)
            {
                throw new TlsFatalAlert(AlertDescription.internal_error);
            }

            this.dhAgreePrivateKey = TlsDHUtils.generateEphemeralServerKeyExchange(context.getSecureRandom(),
                this.dhParameters, buf);
        }
        else if (this.keyExchange == KeyExchangeAlgorithm.ECDHE_PSK)
        {
            this.ecAgreePrivateKey = TlsECCUtils.generateEphemeralServerKeyExchange(context.getSecureRandom(),
                namedCurves, clientECPointFormats, buf);
        }

        return buf.toByteArray();
    }

    /**
     * Validates the server certificate (RSA_PSK only): extracts the RSA public key and
     * checks the keyEncipherment key-usage bit.
     */
    public void processServerCertificate(Certificate serverCertificate) throws IOException
    {
        if (keyExchange != KeyExchangeAlgorithm.RSA_PSK)
        {
            throw new TlsFatalAlert(AlertDescription.unexpected_message);
        }
        if (serverCertificate.isEmpty())
        {
            throw new TlsFatalAlert(AlertDescription.bad_certificate);
        }

        org.bouncycastle.asn1.x509.Certificate x509Cert = serverCertificate.getCertificateAt(0);

        SubjectPublicKeyInfo keyInfo = x509Cert.getSubjectPublicKeyInfo();
        try
        {
            this.serverPublicKey = PublicKeyFactory.createKey(keyInfo);
        }
        catch (RuntimeException e)
        {
            throw new TlsFatalAlert(AlertDescription.unsupported_certificate, e);
        }

        // Sanity check the PublicKeyFactory
        if (this.serverPublicKey.isPrivate())
        {
            throw new TlsFatalAlert(AlertDescription.internal_error);
        }

        this.rsaServerPublicKey = validateRSAPublicKey((RSAKeyParameters)this.serverPublicKey);

        TlsUtils.validateKeyUsage(x509Cert, KeyUsage.keyEncipherment);

        super.processServerCertificate(serverCertificate);
    }

    /** Only the ephemeral variants (DHE_PSK, ECDHE_PSK) need a ServerKeyExchange message. */
    public boolean requiresServerKeyExchange()
    {
        switch (keyExchange)
        {
        case KeyExchangeAlgorithm.DHE_PSK:
        case KeyExchangeAlgorithm.ECDHE_PSK:
            return true;
        default:
            return false;
        }
    }

    /** Parses the ServerKeyExchange: hint plus (for DHE/ECDHE) the server's ephemeral public key. */
    public void processServerKeyExchange(InputStream input) throws IOException
    {
        this.psk_identity_hint = TlsUtils.readOpaque16(input);

        if (this.keyExchange == KeyExchangeAlgorithm.DHE_PSK)
        {
            this.dhParameters = TlsDHUtils.receiveDHParameters(dhVerifier, input);
            this.dhAgreePublicKey = new DHPublicKeyParameters(TlsDHUtils.readDHParameter(input), dhParameters);
        }
        else if (this.keyExchange == KeyExchangeAlgorithm.ECDHE_PSK)
        {
            ECDomainParameters ecParams = TlsECCUtils.readECParameters(namedCurves, clientECPointFormats, input);

            byte[] point = TlsUtils.readOpaque8(input);

            this.ecAgreePublicKey = TlsECCUtils.validateECPublicKey(TlsECCUtils.deserializeECPublicKey(
                clientECPointFormats, ecParams, point));
        }
    }

    /** PSK key exchanges never request client certificates. */
    public void validateCertificateRequest(CertificateRequest certificateRequest) throws IOException
    {
        throw new TlsFatalAlert(AlertDescription.unexpected_message);
    }

    /** Client credentials are never applicable for PSK key exchanges. */
    public void processClientCredentials(TlsCredentials clientCredentials) throws IOException
    {
        throw new TlsFatalAlert(AlertDescription.internal_error);
    }

    /**
     * Builds the ClientKeyExchange: notifies the identity of the (possibly absent) hint,
     * writes the selected psk_identity, then the variant-specific payload (ephemeral DH/ECDH
     * public value, or the RSA-encrypted premaster secret).
     */
    public void generateClientKeyExchange(OutputStream output) throws IOException
    {
        if (psk_identity_hint == null)
        {
            pskIdentity.skipIdentityHint();
        }
        else
        {
            pskIdentity.notifyIdentityHint(psk_identity_hint);
        }

        byte[] psk_identity = pskIdentity.getPSKIdentity();
        if (psk_identity == null)
        {
            throw new TlsFatalAlert(AlertDescription.internal_error);
        }

        this.psk = pskIdentity.getPSK();
        if (psk == null)
        {
            throw new TlsFatalAlert(AlertDescription.internal_error);
        }

        TlsUtils.writeOpaque16(psk_identity, output);

        context.getSecurityParameters().pskIdentity = Arrays.clone(psk_identity);

        if (this.keyExchange == KeyExchangeAlgorithm.DHE_PSK)
        {
            this.dhAgreePrivateKey = TlsDHUtils.generateEphemeralClientKeyExchange(context.getSecureRandom(),
                dhParameters, output);
        }
        else if (this.keyExchange == KeyExchangeAlgorithm.ECDHE_PSK)
        {
            this.ecAgreePrivateKey = TlsECCUtils.generateEphemeralClientKeyExchange(context.getSecureRandom(),
                serverECPointFormats, ecAgreePublicKey.getParameters(), output);
        }
        else if (this.keyExchange == KeyExchangeAlgorithm.RSA_PSK)
        {
            this.premasterSecret = TlsRSAUtils.generateEncryptedPreMasterSecret(context, this.rsaServerPublicKey,
                output);
        }
    }

    /**
     * Server side: parses the ClientKeyExchange, resolves the PSK from the received identity,
     * and reads the variant-specific payload.
     */
    public void processClientKeyExchange(InputStream input) throws IOException
    {
        byte[] psk_identity = TlsUtils.readOpaque16(input);

        this.psk = pskIdentityManager.getPSK(psk_identity);
        if (psk == null)
        {
            throw new TlsFatalAlert(AlertDescription.unknown_psk_identity);
        }

        context.getSecurityParameters().pskIdentity = psk_identity;

        if (this.keyExchange == KeyExchangeAlgorithm.DHE_PSK)
        {
            this.dhAgreePublicKey = new DHPublicKeyParameters(TlsDHUtils.readDHParameter(input), dhParameters);
        }
        else if (this.keyExchange == KeyExchangeAlgorithm.ECDHE_PSK)
        {
            byte[] point = TlsUtils.readOpaque8(input);

            ECDomainParameters curve_params = this.ecAgreePrivateKey.getParameters();

            this.ecAgreePublicKey = TlsECCUtils.validateECPublicKey(TlsECCUtils.deserializeECPublicKey(
                serverECPointFormats, curve_params, point));
        }
        else if (this.keyExchange == KeyExchangeAlgorithm.RSA_PSK)
        {
            byte[] encryptedPreMasterSecret;
            if (TlsUtils.isSSL(context))
            {
                // TODO Do any SSLv3 clients actually include the length?
                encryptedPreMasterSecret = Streams.readAll(input);
            }
            else
            {
                encryptedPreMasterSecret = TlsUtils.readOpaque16(input);
            }

            this.premasterSecret = serverCredentials.decryptPreMasterSecret(encryptedPreMasterSecret);
        }
    }

    /**
     * Builds the RFC 4279 premaster secret: opaque16(other_secret) || opaque16(psk).
     * The PSK is zeroed and released afterwards.
     */
    public byte[] generatePremasterSecret() throws IOException
    {
        byte[] other_secret = generateOtherSecret(psk.length);

        ByteArrayOutputStream buf = new ByteArrayOutputStream(4 + other_secret.length + psk.length);
        TlsUtils.writeOpaque16(other_secret, buf);
        TlsUtils.writeOpaque16(psk, buf);

        Arrays.fill(psk, (byte)0);
        this.psk = null;

        return buf.toByteArray();
    }

    /**
     * Computes the "other_secret" half of the premaster secret per variant:
     * DH/ECDH agreement, the RSA-exchanged secret, or (plain PSK) a zero block of pskLength bytes.
     */
    protected byte[] generateOtherSecret(int pskLength) throws IOException
    {
        if (this.keyExchange == KeyExchangeAlgorithm.DHE_PSK)
        {
            if (dhAgreePrivateKey != null)
            {
                return TlsDHUtils.calculateDHBasicAgreement(dhAgreePublicKey, dhAgreePrivateKey);
            }

            throw new TlsFatalAlert(AlertDescription.internal_error);
        }

        if (this.keyExchange == KeyExchangeAlgorithm.ECDHE_PSK)
        {
            if (ecAgreePrivateKey != null)
            {
                return TlsECCUtils.calculateECDHBasicAgreement(ecAgreePublicKey, ecAgreePrivateKey);
            }

            throw new TlsFatalAlert(AlertDescription.internal_error);
        }

        if (this.keyExchange == KeyExchangeAlgorithm.RSA_PSK)
        {
            return this.premasterSecret;
        }

        return new byte[pskLength];
    }

    /** Basic sanity check of the server's RSA key (rejects an even/composite public exponent). */
    protected RSAKeyParameters validateRSAPublicKey(RSAKeyParameters key) throws IOException
    {
        // TODO What is the minimum bit length required?
        // key.getModulus().bitLength();

        if (!key.getExponent().isProbablePrime(2))
        {
            throw new TlsFatalAlert(AlertDescription.illegal_parameter);
        }

        return key;
    }
}
LinRayx/ToyRenderer
src/Drawable/BlurMaterial.cpp
<gh_stars>1-10
#include "Drawable/BlurMaterial.h"

namespace Draw
{
	BlurMaterial::BlurMaterial() : MaterialBaseParent()
	{
	}

	// Records the fullscreen blur pass into every frame's command buffer.
	void BlurMaterial::BuildCommandBuffer(shared_ptr<Graphics::CommandBuffer> cmd)
	{
		auto& drawCmdBuffers = cmd->drawCmdBuffers;
		auto& rp = Graphics::nameToRenderPass[Graphics::RenderPassType::FULLSCREEN_BLUR];

		// Begin-info is shared by all command buffers: same framebuffer, extent and clear values.
		VkRenderPassBeginInfo renderPassBeginInfo = Graphics::initializers::renderPassBeginInfo();
		renderPassBeginInfo.framebuffer = rp->framebuffer;
		renderPassBeginInfo.renderPass = rp->renderPass;
		renderPassBeginInfo.renderArea.extent.width = rp->width;
		renderPassBeginInfo.renderArea.extent.height = rp->height;
		renderPassBeginInfo.clearValueCount = static_cast<uint32_t>(rp->clearValues.size());
		renderPassBeginInfo.pClearValues = rp->clearValues.data();

		for (size_t i = 0; i < drawCmdBuffers.size(); i++)
		{
			vkCmdBeginRenderPass(drawCmdBuffers[i], &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
			vkCmdBindPipeline(drawCmdBuffers[i], VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
			vkCmdBindDescriptorSets(drawCmdBuffers[i], VK_PIPELINE_BIND_POINT_GRAPHICS, desc_ptr->GetPipelineLayout(), 0,
				static_cast<uint32_t>(desc_ptr->descriptorSets[i].size()), desc_ptr->descriptorSets[i].data(), 0, nullptr);
			// 3 vertices, no vertex buffer: fullscreen-triangle draw generated in the vertex shader
			vkCmdDraw(drawCmdBuffers[i], 3, 1, 0, 0);
			vkCmdEndRenderPass(drawCmdBuffers[i]);
		}
	}

	// Registers the named texture (from the global texture manager) as the blur input sampler.
	void BlurMaterial::AddBlurMap(string name)
	{
		addTexture(Graphics::LayoutType::SCENE, Graphics::StageFlag::FRAGMENT,
			Draw::textureManager->nameToTex[name].textureImageView,
			Draw::textureManager->nameToTex[name].textureSampler);
	}

	// Configures the pipeline for a fullscreen post-process pass:
	// no depth test/write, no culling, and an empty vertex input state.
	void BlurMaterial::initPipelineCreateInfo(VkGraphicsPipelineCreateInfo& pinfo)
	{
		cout << "BlurMaterial::initPipelineCreateInfo" << endl;
		using namespace Graphics;
		shaderStages.emplace_back(Bind::CreateShaderStage(Bind::ShaderType::FULLSCREEN_VERT, VK_SHADER_STAGE_VERTEX_BIT, std::move(vert_defs)));
		shaderStages.emplace_back(Bind::CreateShaderStage(Bind::ShaderType::BLUR, VK_SHADER_STAGE_FRAGMENT_BIT, std::move(frag_defs)));
		depthStencilState.depthTestEnable = VK_FALSE;
		depthStencilState.depthWriteEnable = VK_FALSE;
		rasterizationState.cullMode = VK_CULL_MODE_NONE;
		pinfo.renderPass = nameToRenderPass[RenderPassType::FULLSCREEN_BLUR]->renderPass;
		pinfo.pVertexInputState = &emptyVertexInputState;
	}
}
5tefan/py-netcdf-timeseries-gui
pyntpg/dataset_tabs/ncinfo_preview.py
import os

import netCDF4 as nc
from PyQt5.QtCore import pyqtSlot
from PyQt5.QtWidgets import QWidget, QHBoxLayout, QVBoxLayout, QLabel, QProgressBar, QPlainTextEdit


class NcinfoPreview(QWidget):
    """A widget which displays a preview of the netcdf object loaded/accessible."""

    def __init__(self):
        super(NcinfoPreview, self).__init__()
        self.layout = QVBoxLayout()
        self.layout.setSpacing(0)
        self.setLayout(self.layout)

        # Header row: caption label plus an (initially hidden) progress bar.
        status = QWidget()
        status_layout = QHBoxLayout()
        status_layout.setSpacing(0)
        status_layout.setContentsMargins(0, 0, 0, 0)
        status.setLayout(status_layout)

        label = QLabel("Dataset Summary:")
        label.setContentsMargins(0, 6, 10, 6)
        status_layout.addWidget(label)

        self.progress = QProgressBar()
        self.progress.setRange(0, 0)
        self.progress.setVisible(False)
        status_layout.addWidget(self.progress)
        self.layout.addWidget(status)

        # Read-only text area holding the dataset summary text.
        self.textbox = QPlainTextEdit()
        self.textbox.setReadOnly(True)
        self.layout.addWidget(self.textbox)

    def show_progress(self, max):
        """Show the progress bar.

        :param max: upper bound of the progress range (0..max), starting at 0
        :return: None
        """
        self.progress.setVisible(True)
        self.progress.setRange(0, max)
        self.progress.setValue(0)

    @pyqtSlot(str)
    def update(self, netcdf_filepath):
        """Update the text displayed inside the preview widget.

        :param netcdf_filepath: path to a netCDF file on disk; anything else
            results in a "Select file(s)!" prompt being shown instead
        :return: None
        """
        # FIX: the original also tested `basestring`, which does not exist in
        # Python 3 and raised NameError for any non-str argument.
        if isinstance(netcdf_filepath, str) and os.path.exists(netcdf_filepath):
            try:
                text = self.make_nc_preview(nc.Dataset(netcdf_filepath))
            except IOError as e:
                # Show the failure in the preview box rather than crashing.
                text = repr(e)
            self.textbox.setPlainText(text)
        else:
            self.textbox.setPlainText("Select file(s)! %s" % netcdf_filepath)
        # Any pending progress display is finished once the preview updates.
        self.progress.setVisible(False)

    @staticmethod
    def make_nc_preview(netcdf_obj):
        """Create a string which provides a sufficient summary or preview of the netCDF object.

        :param netcdf_obj: A netCDF4 object to be previewed
        :return: A string to summarize the netCDF4 object, one line per
            variable: ``name(dim1,dim2): [units]``
        """
        result = ""
        for var in netcdf_obj.variables.values():
            result += "%s(%s): [%s]\n" % (var.name, ",".join(var.dimensions), getattr(var, "units", ""))
        return result


# For testing individual widget
if __name__ == "__main__":
    import sys
    from PyQt5.QtWidgets import QApplication
    app = QApplication(sys.argv)
    main = NcinfoPreview()
    # FIX: the original called the nonexistent `update_text` and passed an
    # opened Dataset where `update` expects a file path.
    main.update('/home/scodresc/Downloads/g13_magneto_512ms_20160326_20160326.nc')
    main.show()
    exit(app.exec_())
Ajderka/job4j
chess/src/main/java/ru/job4j/chess/figures/white/KnightWhite.java
<filename>chess/src/main/java/ru/job4j/chess/figures/white/KnightWhite.java package ru.job4j.chess.figures.white; import ru.job4j.chess.exception.ImpossibleMoveException; import ru.job4j.chess.figures.Cell; import ru.job4j.chess.figures.Figure; /** * @author <NAME> (<EMAIL>) * @version $Id$ * @since 0.1 */ public class KnightWhite implements Figure { private final Cell position; public KnightWhite(final Cell position) { this.position = position; } @Override public Cell position() { return this.position; } @Override public Cell[] way(Cell source, Cell dest) { Cell[] steps; if (isTurnRight(source, dest)) { steps = new Cell[]{dest}; } else { throw new ImpossibleMoveException(this.getClass().getSimpleName()); } return steps; } private boolean isTurnRight(Cell source, Cell dest) { boolean result = false; if (Math.abs(dest.x - source.x) == 1 && Math.abs(dest.y - source.y) == 2) { result = true; } if (Math.abs(dest.y - source.y) == 1 && Math.abs(dest.x - source.x) == 2) { result = true; } return result; } @Override public Figure copy(Cell dest) { return new KnightWhite(dest); } }
Arodev76/L2Advanced
src/main/java/l2f/gameserver/stats/conditions/ConditionTargetAggro.java
package l2f.gameserver.stats.conditions; import l2f.gameserver.model.Creature; import l2f.gameserver.model.instances.MonsterInstance; import l2f.gameserver.stats.Env; public class ConditionTargetAggro extends Condition { private final boolean _isAggro; public ConditionTargetAggro(boolean isAggro) { _isAggro = isAggro; } @Override protected boolean testImpl(Env env) { Creature target = env.target; if (target == null) return false; if (target.isMonster()) return ((MonsterInstance) target).isAggressive() == _isAggro; if (target.isPlayer()) return target.getKarma() > 0; return false; } }
changcheng/wro4j
wro4j-core/src/main/java/ro/isdc/wro/model/resource/processor/decorator/AbstractProcessorDecorator.java
package ro.isdc.wro.model.resource.processor.decorator; import ro.isdc.wro.model.resource.processor.ResourcePostProcessor; import ro.isdc.wro.model.resource.processor.ResourcePreProcessor; /** * This class exist only for backward compatibility (third party implementation and documentation) and will be removed * in 1.5.0. * * @author <NAME> * @deprecated use {@link ProcessorDecorator} instead. */ @Deprecated public abstract class AbstractProcessorDecorator extends ProcessorDecorator { public AbstractProcessorDecorator(ResourcePreProcessor preProcessor) { super(preProcessor); } public AbstractProcessorDecorator(ResourcePostProcessor postProcessor) { super(postProcessor); } }
SoftwareDevTest/softwareapimanager
application/models/Company.go
package models import "time" type QySecret struct { Id int Proid int Company string Appid string Appsecret string Status int Ctime int } /** * 获取企业秘钥列表 * param proid 项目id * param company 公司名 * param start 开始位置 * param limit 条数 * return result 秘钥列表 */ func CompanyList(proid int, company string, start int, limit int) (result map[string]interface{}) { defer Db.Close() Db = Connect() var count int var secret []QySecret obj := Db.Hander.Table("qy_secret").Where("proid =? and status in (?)", proid, []int{1, 2}) if len(company) > 0 { obj = obj.Where("company like ?", "%"+company+"%") } obj.Count(&count) obj.Offset(start).Limit(limit).Find(&secret) result = make(map[string]interface{}) result["totalCount"] = count result["list"] = secret return } /** * 获取企业秘钥 * param id 密钥id */ func GetCompany(id int) (result QySecret) { defer Db.Close() Db = Connect() Db.Hander.Table("qy_secret").Where("status in (?)", []int{1, 2}). Where("id = ?", id).Find(&result) return } /** * 保存企业秘钥 * param id 密钥id * param data 密钥数据 */ func CompanySave(id int, data map[string]interface{}) bool { defer Db.Close() Db = Connect() if id > 0 { err := Db.Hander.Table("qy_secret"). Where("id = ? and proid=? ", id, data["proid"].(int)). Updates(data).Error if err != nil { return false } } else { time := time.Now().Unix() info := &QySecret{ 0, data["proid"].(int), data["company"].(string), data["appid"].(string), data["appsecret"].(string), data["status"].(int), int(time), } err := Db.Hander.Table("qy_secret").Create(info).Error if err != nil { return false } } return true } /** * 删除企业秘钥 * param id 密钥id * param proid 项目id */ func CompanyOperate(id int, proid int) bool { defer Db.Close() Db = Connect() if id > 0 { err := Db.Hander.Table("qy_secret"). Where("id = ? and proid = ?", id, proid). Update("status", 3).Error if err != nil { return false } return true } return false }
fcoclavero/vscvs
vscvs/trainers/engines/__init__.py
from .engines import attach_metrics
wittech/imsdk-android
imsdk/src/main/java/com/qunar/im/ui/util/easyphoto/easyphotos/ui/widget/PreviewRecyclerView.java
<filename>imsdk/src/main/java/com/qunar/im/ui/util/easyphoto/easyphotos/ui/widget/PreviewRecyclerView.java package com.qunar.im.ui.util.easyphoto.easyphotos.ui.widget; import android.content.Context; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.recyclerview.widget.RecyclerView; import android.util.AttributeSet; import android.view.MotionEvent; /** * 图片预览 RecyclerView * Create By lishilin On 2019/3/25 */ public class PreviewRecyclerView extends RecyclerView { private boolean isLock;// 是否锁住 RecyclerView ,避免和 PhotoView 双指放大缩小操作冲突 public PreviewRecyclerView(@NonNull Context context) { super(context); } public PreviewRecyclerView(@NonNull Context context, @Nullable AttributeSet attrs) { super(context, attrs); } public PreviewRecyclerView(@NonNull Context context, @Nullable AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); } @Override public boolean onInterceptTouchEvent(MotionEvent event) { switch (event.getActionMasked()) { case MotionEvent.ACTION_POINTER_DOWN:// 非第一个触点按下 isLock = true; break; case MotionEvent.ACTION_UP:// 最后一个触点抬起 isLock = false; break; } if (isLock) { return false;// 不拦截,交给子View处理 } return super.onInterceptTouchEvent(event); } @Override public boolean dispatchTouchEvent(MotionEvent event) { switch (event.getActionMasked()) { case MotionEvent.ACTION_POINTER_DOWN:// 非第一个触点按下 isLock = true; break; case MotionEvent.ACTION_UP:// 最后一个触点抬起 isLock = false; break; } return super.dispatchTouchEvent(event); } }
gaozining/LogiCommon
logi-security-spring-boot-starter/src/main/java/com/didiglobal/logi/security/service/UserRoleService.java
package com.didiglobal.logi.security.service; import java.util.List; /** * @author cjm */ public interface UserRoleService { /** * 根据角色id,获取用户idList * @param roleId 角色id * @return 用户idList */ List<Integer> getUserIdListByRoleId(Integer roleId); /** * 根据用户id,获取角色idList * @param userId 用户id * @return 角色idList */ List<Integer> getRoleIdListByUserId(Integer userId); /** * 根据用户id,更新用户与角色的关联信息 * @param userId 用户id * @param roleIdList 角色idList */ void updateUserRoleByUserId(Integer userId, List<Integer> roleIdList); /** * 根据角色id,更新用户与角色的关联信息 * @param roleId 角色id * @param userIdList 用户idList */ void updateUserRoleByRoleId(Integer roleId, List<Integer> userIdList); /** * 根据角色id获取授予用户数 * @param roleId 角色id * @return 角色授予用户数 */ int getUserRoleCountByRoleId(Integer roleId); }
MGlolenstine/NPCs
src/main/java/me/mrdaniel/npcs/managers/menu/MainPage.java
package me.mrdaniel.npcs.managers.menu;

import javax.annotation.Nonnull;

import org.spongepowered.api.data.key.Keys;
import org.spongepowered.api.entity.living.Living;
import org.spongepowered.api.text.Text;
import org.spongepowered.api.text.action.TextActions;
import org.spongepowered.api.text.format.TextColors;

import me.mrdaniel.npcs.io.NPCFile;
import me.mrdaniel.npcs.utils.TextUtils;

/**
 * Main chat-menu page for a selected NPC: a row of clickable action buttons
 * followed by the NPC's identity, location and per-entity toggle/option lines.
 */
public class MainPage extends Page {

	// Static row of clickable/hoverable chat buttons shown at the top of the page.
	private static final Text BUTTONS = Text.builder().append(
			Text.of(" "),
			Text.builder().append(Text.of(TextColors.YELLOW, "[Go To]")).onHover(TextActions.showText(Text.of(TextColors.YELLOW, "Teleport to NPC"))).onClick(TextActions.runCommand("/npc goto")).build(),
			Text.of(" "),
			Text.builder().append(Text.of(TextColors.YELLOW, "[Move]")).onHover(TextActions.showText(Text.of(TextColors.YELLOW, "Teleport NPC to you"))).onClick(TextActions.runCommand("/npc move")).build(),
			Text.of(" "),
			Text.builder().append(Text.of(TextColors.YELLOW, "[Deselect]")).onHover(TextActions.showText(Text.of(TextColors.YELLOW, "Deselect"))).onClick(TextActions.runCommand("/npc deselect")).build(),
			Text.of(" "),
			Text.builder().append(Text.of(TextColors.DARK_GREEN, "[Mount]")).onHover(TextActions.showText(Text.of(TextColors.DARK_GREEN, "Mount"))).onClick(TextActions.runCommand("/npc mount")).build(),
			Text.of(" "),
			Text.builder().append(Text.of(TextColors.GOLD, "[Copy]")).onHover(TextActions.showText(Text.of(TextColors.GOLD, "Copy"))).onClick(TextActions.suggestCommand("/npc copy")).build(),
			Text.of(" "),
			Text.builder().append(Text.of(TextColors.RED, "[Remove]")).onHover(TextActions.showText(Text.of(TextColors.RED, "Remove"))).onClick(TextActions.suggestCommand("/npc remove")).build())
			.build();

	public MainPage(@Nonnull final Living npc, @Nonnull final NPCFile file) {
		super(npc, file);
	}

	/**
	 * Fills the `lines` array (presumably declared in {@link Page} — confirm)
	 * with this page's content. Bare `++c` statements intentionally skip an
	 * index, leaving what appears to be a blank separator line.
	 */
	@Override
	public void updatePage(final Living npc, final NPCFile file) {
		int c = 0;
		lines[c] = BUTTONS;
		++c;
		// Identity and position of the NPC.
		lines[++c] = Text.of(TextColors.GOLD, "NPC ID: ", TextColors.RED, file.getId());
		lines[++c] = Text.of(TextColors.GOLD, "Entity: ", TextColors.RED, TextUtils.capitalize(npc.getType().getName()));
		lines[++c] = Text.of(TextColors.GOLD, "Location: ", TextColors.RED, npc.getWorld().getName(), " ", npc.getLocation().getBlockX(), " ", npc.getLocation().getBlockY(), " ", npc.getLocation().getBlockZ());
		++c;
		// Clickable name line; clicking suggests the rename command.
		lines[++c] = Text.builder().append(Text.of(TextColors.GOLD, "Name: ", TextColors.AQUA)).append(file.getName().orElse(Text.of("None"))).onHover(TextActions.showText(Text.of(TextColors.YELLOW, "Change"))).onClick(TextActions.suggestCommand("/npc name <name>")).build();
		// Skin line only for entities that support skins (players).
		if (npc.supports(Keys.SKIN_UNIQUE_ID)) {
			lines[++c] = Text.builder().append(Text.of(TextColors.GOLD, "Skin: ", TextColors.AQUA, file.getSkinName().orElse("None"))).onHover(TextActions.showText(Text.of(TextColors.YELLOW, "Change"))).onClick(TextActions.suggestCommand("/npc skin <name>")).build();
		}
		++c;
		// Generic toggles, then entity-specific toggles/options gated on the
		// data keys each entity type supports.
		lines[++c] = TextUtils.getToggleText("Look", "/npc look", file.getLooking());
		lines[++c] = TextUtils.getToggleText("Interact", "/npc interact", file.getInteract());
		if (npc.supports(Keys.CREEPER_CHARGED)) {
			lines[++c] = TextUtils.getToggleText("Charged", "/npc charged", file.getCharged());
		}
		if (npc.supports(Keys.ANGRY)) {
			lines[++c] = TextUtils.getToggleText("Angry", "/npc angry", file.getAngry());
		}
		if (npc.supports(Keys.IS_SITTING)) {
			lines[++c] = TextUtils.getToggleText("Sit", "/npc sit", file.getSitting());
		}
		if (npc.supports(Keys.GLOWING)) {
			lines[++c] = TextUtils.getToggleText("Glow", "/npc glow", file.getGlow());
		}
		// Same key gates both the glow toggle above and its color option here.
		if (npc.supports(Keys.GLOWING)) {
			lines[++c] = TextUtils.getOptionsText("GlowColor", "/npc glowcolor <color>", file.getGlowColor().map(v -> TextUtils.capitalize(v.getName())).orElse("White"));
		}
		if (npc.supports(Keys.SLIME_SIZE)) {
			lines[++c] = TextUtils.getOptionsText("Size", "/npc size <size>", String.valueOf(file.getSize()));
		}
		if (npc.supports(Keys.CAREER)) {
			lines[++c] = TextUtils.getOptionsText("Career", "/npc career <career>", file.getCareer().map(v -> v.getName()).orElse("None"));
		}
		if (npc.supports(Keys.OCELOT_TYPE)) {
			lines[++c] = TextUtils.getOptionsText("Cat", "/npc cat <cattype>", file.getCat().map(v -> TextUtils.capitalize(v.getId().toLowerCase().replace("ocelot", ""))).orElse("None"));
		}
		if (npc.supports(Keys.HORSE_STYLE)) {
			lines[++c] = TextUtils.getOptionsText("Style", "/npc style <style>", file.getHorseStyle().map(v -> TextUtils.capitalize(v.getName().toLowerCase())).orElse("None"));
		}
		if (npc.supports(Keys.HORSE_COLOR)) {
			lines[++c] = TextUtils.getOptionsText("Color", "/npc color <color>", file.getHorseColor().map(v -> TextUtils.capitalize(v.getName().toLowerCase())).orElse("None"));
		}
		if (npc.supports(Keys.LLAMA_VARIANT)) {
			lines[++c] = TextUtils.getOptionsText("Variant", "/npc variant <variant>", file.getVariant().map(v -> TextUtils.capitalize(v.getName().toLowerCase())).orElse("None"));
		}
	}
}
olsemeno/jruby
spec/ruby/library/datetime/subtract_spec.rb
<reponame>olsemeno/jruby require_relative '../../spec_helper' require 'date' describe "DateTime#-" do it "is able to subtract sub-millisecond precision values" do date = DateTime.new(2017) diff = Rational(123456789, 24*60*60*1000*1000) ((date + diff) - date).should == diff (date - (date + diff)).should == -diff (date - (date - diff)).should == diff ((date - diff) - date).should == -diff end it "correctly calculates sub-millisecond time differences" do #5493 dt1 = DateTime.new(2018, 1, 1, 0, 0, 30) dt2 = DateTime.new(2018, 1, 1, 0, 1, 29.000001) ((dt2 - dt1) * 24 * 60 * 60).should == 59.000001 end end
intesight/Panorama4AIWAYS
s32v234_sdk/libs/dnn/squeeze_net_v10/graphs/fire3_e1e3s1_graph.hpp
/***************************************************************************** * * NXP Confidential Proprietary * * Copyright (c) 2015-2016 Freescale Semiconductor * Copyright 2017-2018 NXP * All Rights Reserved * ****************************************************************************** * * THIS SOFTWARE IS PROVIDED BY NXP "AS IS" AND ANY EXPRESSED OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL NXP OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING * IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF * THE POSSIBILITY OF SUCH DAMAGE. 
 *
 ****************************************************************************/

/*!*********************************************************************************
*
*  @file       fire3_e1e3s1_graph.hpp
*
*  @brief      Graph for FIRE3_E1E3S1
*
***********************************************************************************/

#ifndef FIRE3E1E3S1GRAPH_HPP
#define FIRE3E1E3S1GRAPH_HPP

#include <acf_graph.hpp>
#include "neural_network_acf.h"

// ACF graph wrapping the single FIRE3_E1E3S1 forward kernel of SqueezeNet v1.0:
// three input ports (image, weights, params) wired straight through to the
// kernel, and one output port for the squeeze/expand result.
class fire3_e1e3s1_graph : public ACF_Graph
{
public:
   fire3_e1e3s1_graph() : ACF_Graph()
   {
      // NOTE(review): registration uses the _K suffix while AddKernel below
      // uses _KN — presumably the ACF registration macro derives the _KN name;
      // confirm against neural_network_acf.h.
      XREGISTER_ACF_KERNEL(APU_FIRE3_E1E3S1_FORWARD_K)
   }

   void Create()
   {
      /* set identifier for graph */
      SetIdentifier("fire3_e1e3s1_graph");

      /* add kernels */
      AddKernel("FIRE3_E1E3S1", APU_FIRE3_E1E3S1_FORWARD_KN);

      /* add graph port */
      AddInputPort("FIRE3_E1E3S1_INPUT_IMAGE");
      AddInputPort("FIRE3_E1E3S1_INPUT_WEIGHT");
      AddInputPort("FIRE3_E1E3S1_INPUT_PARAMS");
      AddOutputPort("FIRE3_E1E3S1_OUTPUT_S1");

      /* specify connection: each graph port maps 1:1 onto a kernel port */
      Connect(GraphPort("FIRE3_E1E3S1_INPUT_IMAGE"),  KernelPort("FIRE3_E1E3S1", "INPUT_IMAGE"));
      Connect(GraphPort("FIRE3_E1E3S1_INPUT_WEIGHT"), KernelPort("FIRE3_E1E3S1", "INPUT_WEIGHT"));
      Connect(GraphPort("FIRE3_E1E3S1_INPUT_PARAMS"), KernelPort("FIRE3_E1E3S1", "INPUT_PARAMS"));
      Connect(KernelPort("FIRE3_E1E3S1", "OUTPUT_S1"), GraphPort("FIRE3_E1E3S1_OUTPUT_S1"));
   }
};

#endif /* FIRE3E1E3S1GRAPH_HPP */
clockworkgr/tmmonorepo
packages/vuex/src/chain/cosmos/cosmos-sdk/bank/bank.js
//import { Type, Field } from 'protobufjs' If registering custom message types

/**
 * Vuex module for the Cosmos SDK x/bank REST endpoints.
 *
 * State maps are keyed by the query-parameter path (e.g. '/<address>/<denom>')
 * so repeated queries with different parameters are cached side by side.
 * `_Subscriptions` holds {action, payload} entries replayed on every new block.
 */
const getDefaultState = () => {
  return {
    Balance: {},
    AllBalances: {},
    TotalSupply: {},
    SupplyOf: {},
    Params: {},
    DenomsMetadata: {},
    DenomMetadata: {},
    _Subscriptions: new Set()
  }
}

// initial state
const state = getDefaultState()

export default {
  namespaced: true,
  state,
  mutations: {
    RESET_STATE(state) {
      Object.assign(state, getDefaultState())
    },
    BALANCE(state, { queryParams, balance }) {
      state.Balance[queryParams] = balance
    },
    ALL_BALANCES(state, { queryParams, balances }) {
      state.AllBalances[queryParams] = balances
    },
    TOTAL_SUPPLY(state, { supply }) {
      state.TotalSupply = supply
    },
    SUPPLY_OF(state, { queryParams, amount }) {
      state.SupplyOf[queryParams] = amount
    },
    PARAMS(state, { params }) {
      state.Params = params
    },
    DENOMS_METADATA(state, { metadata }) {
      state.DenomsMetadata = metadata
    },
    DENOM_METADATA(state, { queryParams, metadata }) {
      // Fix: previously wrote into state.DenomsMetadata (the collection map),
      // while the getDenomMetadata getter reads state.DenomMetadata[queryParams].
      state.DenomMetadata[queryParams] = metadata
    },
    SUBSCRIBE(state, subscription) {
      state._Subscriptions.add(subscription)
    },
    UNSUBSCRIBE(state, subscription) {
      state._Subscriptions.delete(subscription)
    }
  },
  getters: {
    // All cached balances for an address, or [] when not yet queried.
    getAllBalances: state => address => {
      if (address != '' && state.AllBalances['/' + address]) {
        return state.AllBalances['/' + address].balances
      } else {
        return []
      }
    },
    // Cached balance of a single denom for an address, or {} when absent.
    getBalance: state => (address, denom) => {
      if (address != '' && state.Balance['/' + address + '/' + denom]) {
        return state.Balance['/' + address + '/' + denom].balance
      } else {
        return {}
      }
    },
    getTotalSupply: state => () => {
      return state.TotalSupply
    },
    getSupplyOf: state => denom => {
      if (denom != '' && state.SupplyOf['/' + denom]) {
        return state.SupplyOf['/' + denom].amount
      } else {
        return {}
      }
    },
    getParams: state => () => {
      return state.Params
    },
    getDenomsMetadata: state => () => {
      return state.DenomsMetadata
    },
    getDenomMetadata: state => denom => {
      if (denom != '' && state.DenomMetadata['/' + denom]) {
        return state.DenomMetadata['/' + denom].metadata
      } else {
        return {}
      }
    }
  },
  actions: {
    // Re-run every subscribed query whenever the websocket reports a new block.
    init({ dispatch, rootGetters }) {
      if (rootGetters['chain/common/env/wsClient']) {
        rootGetters['chain/common/env/wsClient'].on('newblock', () => {
          dispatch('StoreUpdate')
        })
      }
    },
    resetState({ commit }) {
      commit('RESET_STATE')
    },
    async StoreUpdate({ state, dispatch }) {
      state._Subscriptions.forEach(subscription => {
        dispatch(subscription.action, subscription.payload)
      })
    },
    unsubscribe({ commit }, subscription) {
      commit('UNSUBSCRIBE', subscription)
    },
    async QueryBalance(
      { commit, rootGetters },
      { address, denom, subscribe = false }
    ) {
      const queryUrl = '/cosmos/bank/v1beta1/balances'
      const queryParams = '/' + address + '/' + denom
      try {
        const balance = await rootGetters['chain/common/env/apiClient'].query(
          queryUrl,
          queryParams
        )
        commit('BALANCE', { queryParams, balance })
        if (subscribe) {
          commit('SUBSCRIBE', {
            action: 'QueryBalance',
            payload: { address, denom }
          })
        }
      } catch (e) {
        console.log('Query Failed: API node unavailable')
      }
    },
    async QueryAllBalances(
      { commit, rootGetters },
      { address, subscribe = false }
    ) {
      const queryUrl = '/cosmos/bank/v1beta1/balances'
      const queryParams = '/' + address
      try {
        const balances = await rootGetters['chain/common/env/apiClient'].query(
          queryUrl,
          queryParams
        )
        commit('ALL_BALANCES', { queryParams, balances })
        if (subscribe) {
          // Fix: was committing UNSUBSCRIBE, which silently dropped the
          // subscription instead of registering it.
          commit('SUBSCRIBE', {
            action: 'QueryAllBalances',
            payload: { address }
          })
        }
      } catch (e) {
        console.log(e)
        console.log('Query Failed: API node unavailable')
      }
    },
    async QueryTotalSupply({ commit, rootGetters }, { subscribe = false }) {
      const queryUrl = '/cosmos/bank/v1beta1/supply'
      const queryParams = ''
      try {
        const supply = await rootGetters['chain/common/env/apiClient'].query(
          queryUrl,
          queryParams
        )
        commit('TOTAL_SUPPLY', { queryParams, supply })
        if (subscribe) {
          commit('SUBSCRIBE', { action: 'QueryTotalSupply', payload: null })
        }
      } catch (e) {
        console.log('Query Failed: API node unavailable')
      }
    },
    async QuerySupplyOf({ commit, rootGetters }, { denom, subscribe = false }) {
      const queryUrl = '/cosmos/bank/v1beta1/supply'
      const queryParams = '/' + denom
      try {
        const amount = await rootGetters['chain/common/env/apiClient'].query(
          queryUrl,
          queryParams
        )
        commit('SUPPLY_OF', { queryParams, amount })
        if (subscribe) {
          commit('SUBSCRIBE', { action: 'QuerySupplyOf', payload: { denom } })
        }
      } catch (e) {
        console.log('Query Failed: API node unavailable')
      }
    },
    async QueryParams({ commit, rootGetters }, { subscribe = false }) {
      const queryUrl = '/cosmos/bank/v1beta1/params'
      const queryParams = ''
      try {
        const params = await rootGetters['chain/common/env/apiClient'].query(
          queryUrl,
          queryParams
        )
        commit('PARAMS', { params })
        if (subscribe) {
          commit('SUBSCRIBE', { action: 'QueryParams', payload: null })
        }
      } catch (e) {
        console.log('Query Failed: API node unavailable')
      }
    },
    async QueryDenomsMetadata({ commit, rootGetters }, { subscribe = false }) {
      const queryUrl = '/cosmos/bank/v1beta1/denoms_metadata'
      const queryParams = ''
      try {
        const metadata = await rootGetters['chain/common/env/apiClient'].query(
          queryUrl,
          queryParams
        )
        // Fix: payload key was `supply`, but the DENOMS_METADATA mutation
        // destructures `metadata`, so the state was always set to undefined.
        commit('DENOMS_METADATA', { metadata })
        if (subscribe) {
          commit('SUBSCRIBE', { action: 'QueryDenomsMetadata', payload: null })
        }
      } catch (e) {
        console.log('Query Failed: API node unavailable')
      }
    },
    async QueryDenomMetadata(
      { commit, rootGetters },
      { denom, subscribe = false }
    ) {
      const queryUrl = '/cosmos/bank/v1beta1/denoms_metadata'
      const queryParams = '/' + denom
      try {
        const metadata = await rootGetters['chain/common/env/apiClient'].query(
          queryUrl,
          queryParams
        )
        // Fix: payload key was `amount`, but the DENOM_METADATA mutation
        // destructures `metadata`.
        commit('DENOM_METADATA', { queryParams, metadata })
        if (subscribe) {
          commit('SUBSCRIBE', {
            action: 'QueryDenomMetadata',
            payload: { denom }
          })
        }
      } catch (e) {
        console.log('Query Failed: API node unavailable')
      }
    },
    registerTypes() {},
    /* Example of registering a custom message type:
    registerTypes({ dispatch }) {
      const MsgCreatePost = new Type("MsgCreatePost")
        .add(new Field("creator", 1, "string"))
        .add(new Field("title", 2, "string"))
        .add(new Field("body", 3, "string"));
      dispatch('chain/common/wallet/registerType',
        { typeUrl: '/foo.foo.MsgCreatePost', type: MsgCreatePost },
        { root: true })
    },
    */
    async MsgSend(
      { dispatch },
      { from_address, to_address, amount, denom, memo }
    ) {
      const typeUrl = '/cosmos.bank.v1beta1.MsgSend'
      const value = {
        amount: [{ amount, denom }],
        fromAddress: from_address,
        toAddress: to_address
      }
      try {
        await dispatch(
          'chain/common/wallet/sendTransaction',
          { message: { typeUrl, value }, memo, denom },
          { root: true }
        )
      } catch (e) {
        throw 'Failed to broadcast transaction'
      }
    },
    async MsgMultiSend({ dispatch }, { inputs, outputs, denom, memo }) {
      const typeUrl = '/cosmos.bank.v1beta1.MsgMultiSend'
      const value = { inputs, outputs }
      try {
        await dispatch(
          'chain/common/wallet/sendTransaction',
          { message: { typeUrl, value }, memo, denom },
          { root: true }
        )
      } catch (e) {
        throw 'Failed to broadcast transaction'
      }
    }
  }
}
ComputeWorks/Xbox-ATG-Samples
XDKSamples/Tools/OSPrimitiveTool/Libraries/Logging/BaseLogger.cpp
<reponame>ComputeWorks/Xbox-ATG-Samples //-------------------------------------------------------------------------------------- // BaseLogger.cpp // // Advanced Technology Group (ATG) // Copyright (C) Microsoft Corporation. All rights reserved. //-------------------------------------------------------------------------------------- #include "BaseLogger.h" using namespace ATG; using namespace DebugLog; BaseLogger::BaseLogger() { m_outputQueue[0].reserve(1000); m_outputQueue[1].reserve(1000); m_currentOutputQueue = 0; m_outputThread = nullptr; } BaseLogger::~BaseLogger() { ShutdownLogger(); } void BaseLogger::StartupLogger() { m_killFlag.store(0); m_outputThread = new std::thread(&BaseLogger::SaveLogThread, this); } void BaseLogger::ShutdownLogger() { if (m_outputThread) { m_killFlag.store(1); m_outputThread->join(); DumpQueue(m_currentOutputQueue); DumpQueue(!m_currentOutputQueue); m_outputThread = nullptr; } } void BaseLogger::Log(const std::wstring& logLine) { std::lock_guard<std::mutex> lg(m_queueCrit); m_outputQueue[m_currentOutputQueue].emplace(m_outputQueue[m_currentOutputQueue].end(), logLine); } void BaseLogger::SaveLogThread(void) { while (m_killFlag.load() == 0) { uint32_t outputQueue = m_currentOutputQueue; { std::lock_guard<std::mutex> lg(m_queueCrit); m_currentOutputQueue = !m_currentOutputQueue; } DumpQueue(outputQueue); std::this_thread::sleep_for(std::chrono::milliseconds(250)); } }
Uvacoder/wc-lib
rotation-input/wc-rotation-input-2.js
// Dispatches a simple DOM event, optionally carrying a data payload.
function fireEvent(element, eventName, data, bubbles = true, cancelable = true) {
	const event = document.createEvent("HTMLEvents");
	event.initEvent(eventName, bubbles, cancelable);
	if (data) {
		event.data = data;
	}
	return element.dispatchEvent(event);
}

// Returns val if it is one of choices, otherwise throws.
function validateEnum(val, choices) {
	if (choices.includes(val)) {
		return val;
	}
	throw new Error(`invalid type, only ${choices.join(",")} allowed.`);
}

const TWO_PI = Math.PI * 2;

// Maps any angle (radians) into [0, 2π).
function normalizeAngle(angle) {
	if (angle < 0) {
		return TWO_PI - (Math.abs(angle) % TWO_PI);
	}
	return angle % TWO_PI;
}

function degreesToRadians(deg) {
	return deg * (Math.PI / 180);
}

function radiansToDegrees(rad) {
	return rad * (180 / Math.PI);
}

// Builds [start, start+step, ..., end]; end is always included as the last entry.
function getSteps(step, end, start = 0) {
	const steps = [start];
	let current = start + step;
	while (current < end) {
		steps.push(current);
		current += step;
	}
	steps.push(end);
	return steps;
}

// Binary search for the element of (sorted) possibleValues closest to value.
export function getClosest(value, possibleValues) {
	let highIndex = possibleValues.length;
	let lowIndex = 0;
	let midIndex;
	while (lowIndex < highIndex) {
		midIndex = Math.floor((highIndex + lowIndex) / 2);
		if (value === possibleValues[midIndex]) return possibleValues[midIndex];
		if (value < possibleValues[midIndex]) {
			if (midIndex > 0 && value > possibleValues[midIndex - 1]) {
				// Fix: the distance to the lower neighbor was computed against
				// possibleValues[midIndex + 1] instead of midIndex - 1, which is
				// asymmetric with the mirrored branch below.
				return value - possibleValues[midIndex - 1] >= possibleValues[midIndex] - value
					? possibleValues[midIndex]
					: possibleValues[midIndex - 1]
			}
			highIndex = midIndex;
		} else {
			if (midIndex < highIndex - 1 && value < possibleValues[midIndex + 1]) {
				return value - possibleValues[midIndex] >= possibleValues[midIndex + 1] - value
					? possibleValues[midIndex + 1]
					: possibleValues[midIndex]
			}
			lowIndex = midIndex + 1;
		}
	}
	return possibleValues[midIndex]
}

/**
 * A circular rotation input. Value is exposed in the configured unit
 * ("deg" or "rad"); internally #value holds the normalized angle in radians.
 */
export class WcRotationInput2 extends HTMLElement {
	#center = {};
	#precision = 2;
	#unit = "deg";
	// Fix: #value was used throughout but never declared, which is a
	// SyntaxError for private class fields.
	#value = 0;
	#currentValue = 0;
	#trigger = "manipulate";
	#stepAmount = 1;
	#steps = null;
	static #triggerType = ["manipulate", "settled"];
	static #unitType = ["deg", "rad"];
	static observedAttributes = ["precision", "unit", "trigger", "value", "step"];
	constructor() {
		super();
		this.bind(this);
	}
	bind(element) {
		this.render = this.render.bind(element);
		this.cacheDom = this.cacheDom.bind(element);
		this.attachEvents = this.attachEvents.bind(element);
		this.onPointerDown = this.onPointerDown.bind(element);
		this.onPointerMove = this.onPointerMove.bind(element);
		this.onPointerUp = this.onPointerUp.bind(element);
		this.onWheel = this.onWheel.bind(element);
		this.onKeydown = this.onKeydown.bind(element);
	}
	render() {
		this.attachShadow({ mode: "open" });
		// Fix: stroke-width rules referenced --stoke-width (typo), so the
		// documented --stroke-width custom property had no effect there.
		this.shadowRoot.innerHTML = `
			<style>
				:host { display: inline-flex; flex-flow: row nowrap; gap: 0.5rem; align-items: center; width: 8rem; height: 2rem; --half-stroke: calc(var(--stroke-width, 1px) / 2); }
				svg { width: auto; height: 100%; }
				circle { r : calc(50% - var(--half-stroke)); cx : 50%; cy : 50%; fill: var(--fill-color, #fff); stroke-width: var(--stroke-width, 1px); stroke: var(--stroke-color, #000); }
				#pointer { stroke-width: var(--stroke-width, 1px); stroke: var(--stroke-color, #000); transform-origin: center center; }
				#value { user-select: none; }
			</style>
			<svg viewBox="0 0 16 16">
				<circle />
				<line x1="50%" y1="50%" x2="100%" y2="50%" id="pointer"/>
			</svg>
			<div id="value"></div>
		`;
		if (this.tabIndex <= 0) {
			this.tabIndex = 0;
		}
		// Fix: "aria-role" is not a valid attribute; ARIA roles are set via "role".
		this.setAttribute("role", "slider");
	}
	connectedCallback() {
		this.render();
		this.cacheDom();
		this.attachEvents();
	}
	cacheDom() {
		// Fix: two lookups used this.shadow (undefined); the shadow root
		// created by attachShadow lives on this.shadowRoot.
		this.dom = {
			pointer: this.shadowRoot.querySelector("#pointer"),
			value: this.shadowRoot.querySelector("#value"),
			svg: this.shadowRoot.querySelector("svg")
		};
	}
	attachEvents() {
		this.dom.svg.addEventListener("pointerdown", this.onPointerDown);
		this.addEventListener("wheel", this.onWheel);
		this.addEventListener("keydown", this.onKeydown);
	}
	onPointerDown(e) {
		const rect = this.dom.svg.getBoundingClientRect();
		this.#center = {
			x: rect.x + (rect.width / 2),
			y: rect.y + (rect.height / 2)
		};
		document.addEventListener("pointermove", this.onPointerMove);
		document.addEventListener("pointerup", this.onPointerUp);
	}
	onPointerMove(e) {
		const offsetX = e.clientX - this.#center.x;
		const offsetY = this.#center.y - e.clientY; //y-coords flipped
		// Quadrant-by-quadrant atan keeps the angle in [0, 2π).
		let rad;
		if (offsetX >= 0 && offsetY >= 0) {
			rad = Math.atan(offsetY / offsetX);
		} else if (offsetX < 0 && offsetY >= 0) {
			rad = (Math.PI / 2) + Math.atan(-offsetX / offsetY);
		} else if (offsetX < 0 && offsetY < 0) {
			rad = Math.PI + Math.atan(offsetY / offsetX);
		} else {
			rad = (3 * Math.PI / 2) + Math.atan(offsetX / -offsetY);
		}
		rad = this.#steps === null ? rad : getClosest(rad, this.#steps);
		const deg = radiansToDegrees(rad);
		const finalValue = (this.#unit === "rad" ? rad : deg).toFixed(this.#precision);
		this.dom.pointer.style = `transform: rotateZ(-${deg}deg)`;
		this.dom.value.textContent = finalValue;
		if (this.#trigger === "manipulate") {
			// Fix: the value setter interprets its argument in the configured
			// unit, so pass degrees when #unit is "deg" (was always radians).
			this.value = this.#unit === "rad" ? rad : deg;
			fireEvent(this, "change", this.#value);
		} else {
			this.#currentValue = rad;
		}
	}
	onPointerUp() {
		document.removeEventListener("pointermove", this.onPointerMove);
		document.removeEventListener("pointerup", this.onPointerUp);
		if (this.#trigger === "settled") {
			// #currentValue is radians; convert for the unit-aware setter.
			this.value = this.#unit === "rad"
				? this.#currentValue
				: radiansToDegrees(this.#currentValue);
			fireEvent(this, "change", this.#value);
		}
	}
	onWheel(e) {
		const delta = (e.deltaY * this.stepAmount) / 100;
		// #value and stepAmount are radians; convert before handing to the setter.
		const newRad = this.#value + delta;
		this.value = this.#unit === "rad" ? newRad : radiansToDegrees(newRad);
		fireEvent(this, "change", this.#value);
	}
	onKeydown(e) {
		if (e.which !== 38 && e.which !== 40) return;
		const delta = this.stepAmount * (e.which === 40 ? -1 : 1);
		const newRad = this.#value + delta;
		this.value = this.#unit === "rad" ? newRad : radiansToDegrees(newRad);
		fireEvent(this, "change", this.#value);
	}
	attributeChangedCallback(name, oldValue, newValue) {
		this[name] = newValue;
	}
	set precision(val) {
		this.#precision = parseInt(val);
	}
	set unit(val) {
		// Fix: referenced WcRotationInput (undefined); the class is WcRotationInput2.
		this.#unit = validateEnum(val, WcRotationInput2.#unitType);
	}
	set trigger(val) {
		this.#trigger = validateEnum(val, WcRotationInput2.#triggerType);
	}
	set step(val) {
		// NOTE(review): assumes the unit attribute is applied before step;
		// attribute order in markup could make this use the wrong unit — verify.
		this.#stepAmount = parseFloat(val || 1);
		const stepsAmountRad = this.#unit === "rad" ? this.#stepAmount : degreesToRadians(this.#stepAmount);
		this.#steps = getSteps(stepsAmountRad, TWO_PI);
	}
	set value(val) {
		// val is in the configured unit; store normalized radians internally.
		const valueRad = this.#unit === "rad" ? val : degreesToRadians(val);
		const valueDeg = this.#unit === "rad" ? radiansToDegrees(val) : val;
		const valueDisplay = (this.#unit === "rad" ? valueRad : valueDeg).toFixed(this.#precision);
		this.#value = normalizeAngle(valueRad);
		this.dom.value.textContent = valueDisplay;
		this.dom.pointer.style = `transform: rotateZ(-${valueDeg}deg)`;
		this.setAttribute("aria-valuenow", valueDisplay);
		this.setAttribute("aria-valuetext", valueDisplay);
	}
	get stepAmount() {
		// Always returns radians regardless of the configured unit.
		return this.#unit === "rad" ? this.#stepAmount : degreesToRadians(this.#stepAmount);
	}
}
customElements.define("wc-rotation-input-2", WcRotationInput2);
nicolasgustafsson/WonderMake
WonderMake/Imgui/JsonInspector.cpp
<gh_stars>1-10 #include "pch.h" #include "JsonInspector.h" #include <json/json.hpp> void ImGui::JsonInspector::Inspect(nlohmann::json& aJsonDocumentToInspect, const std::string aDocumentName) { ImGui::Begin(aDocumentName.c_str()); ImGui::PushStyleVar(ImGuiStyleVar_FramePadding, ImVec2(2, 2)); ImGui::Columns(2); ImGui::Separator(); for (nlohmann::json::iterator element = aJsonDocumentToInspect.begin(); element != aJsonDocumentToInspect.end(); element++) { InspectElement(element); } ImGui::Columns(1); ImGui::Separator(); ImGui::PopStyleVar(); ImGui::End(); } void ImGui::JsonInspector::InspectElement(nlohmann::json::iterator aElementIterator) { nlohmann::json& element = *aElementIterator; if (element.type() == nlohmann::json::value_t::object) { InspectObject(aElementIterator); return; } ImGui::TreeNodeEx("Field", ImGuiTreeNodeFlags_Leaf | ImGuiTreeNodeFlags_NoTreePushOnOpen | ImGuiTreeNodeFlags_Bullet, aElementIterator.key().c_str()); ImGui::NextColumn(); ImGui::PushItemWidth(-1); ImGui::PushID(&element); switch (element.type()) { case nlohmann::json::value_t::number_float: InspectFloat(aElementIterator); break; case nlohmann::json::value_t::number_integer: InspectInteger(aElementIterator); break; case nlohmann::json::value_t::string: InspectString(aElementIterator); break; case nlohmann::json::value_t::boolean: InspectBool(aElementIterator); break; default: InspectTheUninspectable(aElementIterator); break; } ImGui::PopID(); ImGui::PopItemWidth(); ImGui::NextColumn(); } void ImGui::JsonInspector::InspectFloat(nlohmann::json::iterator aElementIterator) { nlohmann::json& element = *aElementIterator; f64& ref = element.get_ref<f64&>(); ImGui::InputDouble("##value", &ref, 1.0f); } void ImGui::JsonInspector::InspectObject(nlohmann::json::iterator aElementIterator) { nlohmann::json& element = *aElementIterator; ImGui::PushID(&element); bool node_open = ImGui::TreeNode("Object", aElementIterator.key().c_str()); ImGui::NextColumn(); ImGui::NextColumn(); if (node_open) { for 
(nlohmann::json::iterator childElement = element.begin(); childElement != element.end(); childElement++) { InspectElement(childElement); } ImGui::TreePop(); } ImGui::PopID(); } void ImGui::JsonInspector::InspectInteger(nlohmann::json::iterator aElementIterator) { nlohmann::json& element = *aElementIterator; i64& ref = element.get_ref<i64&>(); i32 integer = static_cast<i32>(ref); ImGui::InputInt("##value", &integer, 1); ref = integer; } void ImGui::JsonInspector::InspectString(nlohmann::json::iterator aElementIterator) { nlohmann::json& element = *aElementIterator; std::string& ref = element.get_ref<std::string&>(); ImGui::InputText("##value", &ref[0], ref.capacity()); } void ImGui::JsonInspector::InspectTheUninspectable(nlohmann::json::iterator /*aElementIterator*/) { ImGui::Text("Uninspectable"); } void ImGui::JsonInspector::InspectBool(nlohmann::json::iterator aElementIterator) { nlohmann::json& element = *aElementIterator; bool& ref = element.get_ref<bool&>(); ImGui::Checkbox("##value", &ref); }
70Song/Collections-Song
src/test/java/org/apache/commons/collections4/comparators/ComparatorChainTest.java
<gh_stars>0 /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.collections4.comparators; import java.io.Serializable; import java.util.Comparator; import java.util.LinkedList; import java.util.List; import org.junit.Test; /** * Tests for ComparatorChain. 
* */ public class ComparatorChainTest extends AbstractComparatorTest<ComparatorChainTest.PseudoRow> { public ComparatorChainTest(final String testName) { super(testName); } @Override public Comparator<PseudoRow> makeObject() { final ComparatorChain<PseudoRow> chain = new ComparatorChain<>(new ColumnComparator(0)); chain.addComparator(new ColumnComparator(1), true); // reverse the second column chain.addComparator(new ColumnComparator(2), false); return chain; } @Override public String getCompatibilityVersion() { return "4"; } // public void testCreate() throws Exception { // writeExternalFormToDisk((java.io.Serializable) makeObject(), "src/test/resources/data/test/ComparatorChain.version4.obj"); // } @Test public void testNoopComparatorChain() { final ComparatorChain<Integer> chain = new ComparatorChain<>(); final Integer i1 = Integer.valueOf(4); final Integer i2 = Integer.valueOf(6); chain.addComparator(new ComparableComparator<Integer>()); final int correctValue = i1.compareTo(i2); assertTrue("Comparison returns the right order", chain.compare(i1, i2) == correctValue); } @Test public void testBadNoopComparatorChain() { final ComparatorChain<Integer> chain = new ComparatorChain<>(); final Integer i1 = Integer.valueOf(4); final Integer i2 = Integer.valueOf(6); try { chain.compare(i1,i2); fail("An exception should be thrown when a chain contains zero comparators."); } catch (final UnsupportedOperationException e) { } } @Test public void testListComparatorChain() { final List<Comparator<Integer>> list = new LinkedList<>(); list.add(new ComparableComparator<Integer>()); final ComparatorChain<Integer> chain = new ComparatorChain<>(list); final Integer i1 = Integer.valueOf(4); final Integer i2 = Integer.valueOf(6); final int correctValue = i1.compareTo(i2); assertTrue("Comparison returns the right order", chain.compare(i1, i2) == correctValue); } @Test public void testBadListComparatorChain() { final List<Comparator<Integer>> list = new LinkedList<>(); final 
ComparatorChain<Integer> chain = new ComparatorChain<>(list); final Integer i1 = Integer.valueOf(4); final Integer i2 = Integer.valueOf(6); try { chain.compare(i1, i2); fail("An exception should be thrown when a chain contains zero comparators."); } catch (final UnsupportedOperationException e) { } } @Test public void testComparatorChainOnMinvaluedCompatator() { // -1 * Integer.MIN_VALUE is less than 0, // test that ComparatorChain handles this edge case correctly final ComparatorChain<Integer> chain = new ComparatorChain<>(); chain.addComparator((a, b) -> { final int result = a.compareTo(b); if (result < 0) { return Integer.MIN_VALUE; } if (result > 0) { return Integer.MAX_VALUE; } return 0; }, true); assertTrue(chain.compare(Integer.valueOf(4), Integer.valueOf(5)) > 0); assertTrue(chain.compare(Integer.valueOf(5), Integer.valueOf(4)) < 0); assertTrue(chain.compare(Integer.valueOf(4), Integer.valueOf(4)) == 0); } @Override public List<PseudoRow> getComparableObjectsOrdered() { final List<PseudoRow> list = new LinkedList<>(); // this is the correct order assuming a // "0th forward, 1st reverse, 2nd forward" sort list.add(new PseudoRow(1, 2, 3)); list.add(new PseudoRow(2, 3, 5)); list.add(new PseudoRow(2, 2, 4)); list.add(new PseudoRow(2, 2, 8)); list.add(new PseudoRow(3, 1, 0)); list.add(new PseudoRow(4, 4, 4)); list.add(new PseudoRow(4, 4, 7)); return list; } public static class PseudoRow implements Serializable { /** * Generated serial version ID. 
*/ private static final long serialVersionUID = 8085570439751032499L; public int cols[] = new int[3]; public PseudoRow(final int col1, final int col2, final int col3) { cols[0] = col1; cols[1] = col2; cols[2] = col3; } public int getColumn(final int colIndex) { return cols[colIndex]; } @Override public String toString() { final StringBuilder buf = new StringBuilder(); buf.append("["); buf.append(cols[0]); buf.append(","); buf.append(cols[1]); buf.append(","); buf.append(cols[2]); buf.append("]"); return buf.toString(); } @Override public boolean equals(final Object o) { if (!(o instanceof PseudoRow)) { return false; } final PseudoRow row = (PseudoRow) o; if (getColumn(0) != row.getColumn(0)) { return false; } if (getColumn(1) != row.getColumn(1)) { return false; } if (getColumn(2) != row.getColumn(2)) { return false; } return true; } } public static class ColumnComparator implements Comparator<PseudoRow>, Serializable { private static final long serialVersionUID = -2284880866328872105L; protected int colIndex = 0; public ColumnComparator(final int colIndex) { this.colIndex = colIndex; } @Override public int compare(final PseudoRow o1, final PseudoRow o2) { final int col1 = o1.getColumn(colIndex); final int col2 = o2.getColumn(colIndex); if (col1 > col2) { return 1; } if (col1 < col2) { return -1; } return 0; } @Override public int hashCode() { return colIndex; } @Override public boolean equals(final Object that) { return that instanceof ColumnComparator && colIndex == ((ColumnComparator) that).colIndex; } } }
morozovcookie/agat-banking-public
ui/src/services/AuthenticationService/AuthenticationService.js
// No-op authentication API stub. Each operation is an independent arrow
// function so the three members remain distinct function instances.
const signIn = () => {};
const refreshToken = () => {};
const signOut = () => {};

const AuthenticationService = {
  signIn,
  refreshToken,
  signOut
};

export default AuthenticationService;
JzGo/hyrax
spec/wings/hydra/works/models/concerns/file_set_valkyrie_behavior_spec.rb
<filename>spec/wings/hydra/works/models/concerns/file_set_valkyrie_behavior_spec.rb<gh_stars>1-10
# frozen_string_literal: true
# Specs for Wings::Works::FileSetValkyrieBehavior: type predicates and
# parent-work lookups on a file set transformed into a Valkyrie resource.
require 'wings_helper'
require 'wings/model_transformer'

RSpec.describe Wings::Works::FileSetValkyrieBehavior, :clean_repo do
  subject(:factory) { Wings::ModelTransformer.new(pcdm_object: pcdm_object) }

  let(:resource) { subject.build }
  let(:work1)    { build(:work, id: 'wk1', title: ['Work 1']) }
  let(:work2)    { build(:work, id: 'wk2', title: ['Work 2']) }
  let(:fileset1) { build(:file_set, id: 'fs1', title: ['Fileset 1']) }

  describe 'type check methods on valkyrie resource' do
    let(:pcdm_object) { fileset1 }

    it 'returns appropriate response from type check methods' do
      # A file set is a PCDM object but not a collection or a work.
      expect(resource.pcdm_collection?).to be false
      expect(resource.pcdm_object?).to be true
      expect(resource.collection?).to be false
      expect(resource.work?).to be false
      expect(resource.file_set?).to be true
    end
  end

  describe '#parent_works' do
    let(:pcdm_object) { fileset1 }
    let(:child_file_set_resource) { resource }

    # Attach the same file set to two works so both show up as parents.
    before do
      work1.ordered_members = [fileset1]
      work2.ordered_members = [fileset1]
      work1.save!
      work2.save!
    end

    context 'when valkyrie resources requested' do
      it 'returns parent works as valkyrie resources through file_set_valkyrie_behavior' do
        resources = child_file_set_resource.parent_works(valkyrie: true)
        expect(resources.map(&:work?)).to all(be true)
        expect(resources.map(&:id)).to match_valkyrie_ids_with_active_fedora_ids([work1.id, work2.id])
      end
    end

    context 'when active fedora objects requested' do
      it 'returns parent works as fedora objects through file_set_valkyrie_behavior' do
        af_objects = child_file_set_resource.parent_works(valkyrie: false)
        expect(af_objects.map(&:work?)).to all(be true)
        expect(af_objects.map(&:id)).to match_array [work1.id, work2.id]
      end
    end

    context 'when return type is not specified' do
      # Default return type is Active Fedora objects.
      it 'returns parent works as fedora objects through file_set_valkyrie_behavior' do
        af_objects = child_file_set_resource.parent_works
        expect(af_objects.map(&:work?)).to all(be true)
        expect(af_objects.map(&:id)).to match_array [work1.id, work2.id]
      end
    end
  end

  describe '#parent_work_ids' do
    let(:pcdm_object) { fileset1 }
    let(:child_file_set_resource) { resource }

    # Same two-parent fixture as above, but asserting on ids only.
    before do
      work1.ordered_members = [fileset1]
      work2.ordered_members = [fileset1]
      work1.save!
      work2.save!
    end

    context 'when valkyrie resources requested' do
      it 'returns parent works as valkyrie resources through file_set_valkyrie_behavior' do
        resource_ids = child_file_set_resource.parent_work_ids(valkyrie: true)
        expect(resource_ids).to match_valkyrie_ids_with_active_fedora_ids([work1.id, work2.id])
      end
    end

    context 'when active fedora objects requested' do
      it 'returns parent works as fedora objects through file_set_valkyrie_behavior' do
        af_object_ids = child_file_set_resource.parent_work_ids(valkyrie: false)
        expect(af_object_ids).to match_array [work1.id, work2.id]
      end
    end

    context 'when return type is not specified' do
      it 'returns parent works as fedora objects through file_set_valkyrie_behavior' do
        af_object_ids = child_file_set_resource.parent_work_ids
        expect(af_object_ids).to match_array [work1.id, work2.id]
      end
    end
  end
end
liufu1986007/tis
tis-common/src/main/java/com/qlangtech/tis/offline/pojo/TISDb.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.qlangtech.tis.offline.pojo; import com.qlangtech.tis.git.GitUtils; import com.qlangtech.tis.manage.common.Secret; /** * @author 百岁(<EMAIL>) * @date 2020/04/13 */ public class TISDb { String dbId; String dbName; String dbType; String userName; String password; String port; String encoding; String extraParams; String shardingType; String shardingEnum; String host; // 是否是cobar类型的 private boolean facade; public String createDBConfigDesc() { StringBuffer desc = new StringBuffer(); // mysql order { // host:127.0.0.1[00-31],127.0.0.2[32-63],127.0.0.3,127.0.0.4[9],baisui.com[0-9] // username:root // password:<PASSWORD>%&*())))** // port:3306 // } // Secret desc.append("mysql ").append(this.dbName).append(" { \n"); desc.append(" host:").append(this.getHost()).append(" \n"); desc.append(" username:").append(this.getUserName()).append(" \n"); desc.append(" password:").append(Secret.encrypt(this.password, GitUtils.cryptKey)).append(" \n"); desc.append(" port:").append(this.getPort()).append("\n"); desc.append("}"); return desc.toString(); } // 是否是cobar类型的 public boolean isFacade() { return this.facade; } public void setFacade(boolean facade) { this.facade = facade; } public String getDbName() { 
return dbName; } public void setDbName(String dbName) { this.dbName = dbName; } public String getDbType() { return dbType; } public void setDbType(String dbType) { this.dbType = dbType; } public String getUserName() { return userName; } public void setUserName(String userName) { this.userName = userName; } public String getPassword() { return password; } public void setPassword(String password) { this.password = password; } public String getPort() { return port; } public void setPort(String port) { this.port = port; } public String getEncoding() { return encoding; } public void setEncoding(String encoding) { this.encoding = encoding; } public String getExtraParams() { return extraParams; } public void setExtraParams(String extraParams) { this.extraParams = extraParams; } public String getShardingType() { return shardingType; } public void setShardingType(String shardingType) { this.shardingType = shardingType; } public String getShardingEnum() { return shardingEnum; } public void setShardingEnum(String shardingEnum) { this.shardingEnum = shardingEnum; } public String getHost() { return host; } public void setHost(String host) { this.host = host; } public void setDbId(String dbId) { this.dbId = dbId; } public String getDbId() { return this.dbId; } }
Telenav/kivakit
kivakit-kernel/src/main/java/com/telenav/kivakit/kernel/logging/LoggerFactory.java
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//
// © 2011-2021 Telenav, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

package com.telenav.kivakit.kernel.logging;

import com.telenav.kivakit.kernel.interfaces.factory.Factory;
import com.telenav.kivakit.kernel.logging.loggers.LogServiceLogger;
import com.telenav.kivakit.kernel.project.lexakai.diagrams.DiagramLogging;
import com.telenav.lexakai.annotations.UmlClassDiagram;
import com.telenav.lexakai.annotations.associations.UmlRelation;

import java.util.Objects;

/**
 * Creates new {@link Logger} instances via {@link #newLogger()}. By default, instances of {@link LogServiceLogger} are
 * created, but a new factory can be installed with {@link #factory}.
 *
 * @author jonathanl (shibo)
 * @see Logger
 * @see LogServiceLogger
 */
@UmlClassDiagram(diagram = DiagramLogging.class)
@UmlRelation(label = "creates", referent = Logger.class)
public class LoggerFactory
{
    // NOTE(review): mutable static read by every newLogger() call; assumed to
    // be configured once at startup before concurrent use — confirm.
    private static Factory<Logger> factory = LogServiceLogger::new;

    /**
     * Installs a custom logger factory
     *
     * @param factory The logger factory to use, never null
     * @throws NullPointerException if factory is null
     */
    public static void factory(Factory<Logger> factory)
    {
        // Fix: fail fast on null instead of letting every later newLogger()
        // call blow up with an unexplained NPE.
        LoggerFactory.factory = Objects.requireNonNull(factory, "factory");
    }

    /**
     * @return A new logger instance
     */
    public static Logger newLogger()
    {
        return factory.newInstance();
    }
}
MarkWaldron/topaz
src/common/detour/DetourNavMesh.h
// // Copyright (c) 2009-2010 <NAME> <EMAIL> // // This software is provided 'as-is', without any express or implied // warranty. In no event will the authors be held liable for any damages // arising from the use of this software. // Permission is granted to anyone to use this software for any purpose, // including commercial applications, and to alter it and redistribute it // freely, subject to the following restrictions: // 1. The origin of this software must not be misrepresented; you must not // claim that you wrote the original software. If you use this software // in a product, an acknowledgment in the product documentation would be // appreciated but is not required. // 2. Altered source versions must be plainly marked as such, and must not be // misrepresented as being the original software. // 3. This notice may not be removed or altered from any source distribution. // #ifndef DETOURNAVMESH_H #define DETOURNAVMESH_H #include "DetourAlloc.h" #include "DetourStatus.h" // Undefine (or define in a build cofnig) the following line to use 64bit polyref. // Generally not needed, useful for very large worlds. // Note: tiles build using 32bit refs are not compatible with 64bit refs! //#define DT_POLYREF64 1 #ifdef DT_POLYREF64 // TODO: figure out a multiplatform version of uint64_t // - maybe: https://code.google.com/p/msinttypes/ // - or: http://www.azillionmonkeys.com/qed/pstdint.h #include <stdint.h> #endif // Note: If you want to use 64-bit refs, change the types of both dtPolyRef & dtTileRef. // It is also recommended that you change dtHashRef() to a proper 64-bit hash. /// A handle to a polygon within a navigation mesh tile. /// @ingroup detour #ifdef DT_POLYREF64 static const unsigned int DT_SALT_BITS = 16; static const unsigned int DT_TILE_BITS = 28; static const unsigned int DT_POLY_BITS = 20; typedef uint64_t dtPolyRef; #else typedef unsigned int dtPolyRef; #endif /// A handle to a tile within a navigation mesh. 
/// @ingroup detour #ifdef DT_POLYREF64 typedef uint64_t dtTileRef; #else typedef unsigned int dtTileRef; #endif /// The maximum number of vertices per navigation polygon. /// @ingroup detour static const int DT_VERTS_PER_POLYGON = 6; /// @{ /// @name Tile Serialization Constants /// These constants are used to detect whether a navigation tile's data /// and state format is compatible with the current build. /// /// A magic number used to detect compatibility of navigation tile data. static const int DT_NAVMESH_MAGIC = 'D'<<24 | 'N'<<16 | 'A'<<8 | 'V'; /// A version number used to detect compatibility of navigation tile data. static const int DT_NAVMESH_VERSION = 7; /// A magic number used to detect the compatibility of navigation tile states. static const int DT_NAVMESH_STATE_MAGIC = 'D'<<24 | 'N'<<16 | 'M'<<8 | 'S'; /// A version number used to detect compatibility of navigation tile states. static const int DT_NAVMESH_STATE_VERSION = 1; /// @} /// A flag that indicates that an entity links to an external entity. /// (E.g. A polygon edge is a portal that links to another polygon.) static const unsigned short DT_EXT_LINK = 0x8000; /// A value that indicates the entity does not link to anything. static const unsigned int DT_NULL_LINK = 0xffffffff; /// A flag that indicates that an off-mesh connection can be traversed in both directions. (Is bidirectional.) static const unsigned int DT_OFFMESH_CON_BIDIR = 1; /// The maximum number of user defined area ids. /// @ingroup detour static const int DT_MAX_AREAS = 64; /// Tile flags used for various functions and fields. /// For an example, see dtNavMesh::addTile(). enum dtTileFlags { /// The navigation mesh owns the tile memory and is responsible for freeing it. DT_TILE_FREE_DATA = 0x01, }; /// Vertex flags returned by dtNavMeshQuery::findStraightPath. enum dtStraightPathFlags { DT_STRAIGHTPATH_START = 0x01, ///< The vertex is the start position in the path. 
DT_STRAIGHTPATH_END = 0x02, ///< The vertex is the end position in the path. DT_STRAIGHTPATH_OFFMESH_CONNECTION = 0x04, ///< The vertex is the start of an off-mesh connection. }; /// Options for dtNavMeshQuery::findStraightPath. enum dtStraightPathOptions { DT_STRAIGHTPATH_AREA_CROSSINGS = 0x01, ///< Add a vertex at every polygon edge crossing where area changes. DT_STRAIGHTPATH_ALL_CROSSINGS = 0x02, ///< Add a vertex at every polygon edge crossing. }; /// Options for dtNavMeshQuery::findPath enum dtFindPathOptions { DT_FINDPATH_LOW_QUALITY_FAR = 0x01, ///< [provisional] trade quality for performance far from the origin. The idea is that by then a new query will be issued DT_FINDPATH_ANY_ANGLE = 0x02, ///< use raycasts during pathfind to "shortcut" (raycast still consider costs) }; /// Options for dtNavMeshQuery::raycast enum dtRaycastOptions { DT_RAYCAST_USE_COSTS = 0x01, ///< Raycast should calculate movement cost along the ray and fill RaycastHit::cost }; /// Limit raycasting during any angle pahfinding /// The limit is given as a multiple of the character radius static const float DT_RAY_CAST_LIMIT_PROPORTIONS = 50.0f; /// Flags representing the type of a navigation mesh polygon. enum dtPolyTypes { /// The polygon is a standard convex polygon that is part of the surface of the mesh. DT_POLYTYPE_GROUND = 0, /// The polygon is an off-mesh connection consisting of two vertices. DT_POLYTYPE_OFFMESH_CONNECTION = 1, }; /// Defines a polyogn within a dtMeshTile object. /// @ingroup detour struct dtPoly { /// Index to first link in linked list. (Or #DT_NULL_LINK if there is no link.) unsigned int firstLink; /// The indices of the polygon's vertices. /// The actual vertices are located in dtMeshTile::verts. unsigned short verts[DT_VERTS_PER_POLYGON]; /// Packed data representing neighbor polygons references and flags for each edge. unsigned short neis[DT_VERTS_PER_POLYGON]; /// The user defined polygon flags. 
unsigned short flags; /// The number of vertices in the polygon. unsigned char vertCount; /// The bit packed area id and polygon type. /// @note Use the structure's set and get methods to acess this value. unsigned char areaAndtype; /// Sets the user defined area id. [Limit: < #DT_MAX_AREAS] inline void setArea(unsigned char a) { areaAndtype = (areaAndtype & 0xc0) | (a & 0x3f); } /// Sets the polygon type. (See: #dtPolyTypes.) inline void setType(unsigned char t) { areaAndtype = (areaAndtype & 0x3f) | (t << 6); } /// Gets the user defined area id. inline unsigned char getArea() const { return areaAndtype & 0x3f; } /// Gets the polygon type. (See: #dtPolyTypes) inline unsigned char getType() const { return areaAndtype >> 6; } }; /// Defines the location of detail sub-mesh data within a dtMeshTile. struct dtPolyDetail { unsigned int vertBase; ///< The offset of the vertices in the dtMeshTile::detailVerts array. unsigned int triBase; ///< The offset of the triangles in the dtMeshTile::detailTris array. unsigned char vertCount; ///< The number of vertices in the sub-mesh. unsigned char triCount; ///< The number of triangles in the sub-mesh. }; /// Defines a link between polygons. /// @note This structure is rarely if ever used by the end user. /// @see dtMeshTile struct dtLink { dtPolyRef ref; ///< Neighbour reference. (The neighbor that is linked to.) unsigned int next; ///< Index of the next link. unsigned char edge; ///< Index of the polygon edge that owns this link. unsigned char side; ///< If a boundary link, defines on which side the link is. unsigned char bmin; ///< If a boundary link, defines the minimum sub-edge area. unsigned char bmax; ///< If a boundary link, defines the maximum sub-edge area. }; /// Bounding volume node. /// @note This structure is rarely if ever used by the end user. /// @see dtMeshTile struct dtBVNode { unsigned short bmin[3]; ///< Minimum bounds of the node's AABB. 
[(x, y, z)] unsigned short bmax[3]; ///< Maximum bounds of the node's AABB. [(x, y, z)] int i; ///< The node's index. (Negative for escape sequence.) }; /// Defines an navigation mesh off-mesh connection within a dtMeshTile object. /// An off-mesh connection is a user defined traversable connection made up to two vertices. struct dtOffMeshConnection { /// The endpoints of the connection. [(ax, ay, az, bx, by, bz)] float pos[6]; /// The radius of the endpoints. [Limit: >= 0] float rad; /// The polygon reference of the connection within the tile. unsigned short poly; /// Link flags. /// @note These are not the connection's user defined flags. Those are assigned via the /// connection's dtPoly definition. These are link flags used for internal purposes. unsigned char flags; /// End point side. unsigned char side; /// The id of the offmesh connection. (User assigned when the navigation mesh is built.) unsigned int userId; }; /// Provides high level information related to a dtMeshTile object. /// @ingroup detour struct dtMeshHeader { int magic; ///< Tile magic number. (Used to identify the data format.) int version; ///< Tile data format version number. int x; ///< The x-position of the tile within the dtNavMesh tile grid. (x, y, layer) int y; ///< The y-position of the tile within the dtNavMesh tile grid. (x, y, layer) int layer; ///< The layer of the tile within the dtNavMesh tile grid. (x, y, layer) unsigned int userId; ///< The user defined id of the tile. int polyCount; ///< The number of polygons in the tile. int vertCount; ///< The number of vertices in the tile. int maxLinkCount; ///< The number of allocated links. int detailMeshCount; ///< The number of sub-meshes in the detail mesh. /// The number of unique vertices in the detail mesh. (In addition to the polygon vertices.) int detailVertCount; int detailTriCount; ///< The number of triangles in the detail mesh. int bvNodeCount; ///< The number of bounding volume nodes. (Zero if bounding volumes are disabled.) 
int offMeshConCount; ///< The number of off-mesh connections. int offMeshBase; ///< The index of the first polygon which is an off-mesh connection. float walkableHeight; ///< The height of the agents using the tile. float walkableRadius; ///< The radius of the agents using the tile. float walkableClimb; ///< The maximum climb height of the agents using the tile. float bmin[3]; ///< The minimum bounds of the tile's AABB. [(x, y, z)] float bmax[3]; ///< The maximum bounds of the tile's AABB. [(x, y, z)] /// The bounding volume quantization factor. float bvQuantFactor; }; /// Defines a navigation mesh tile. /// @ingroup detour struct dtMeshTile { unsigned int salt; ///< Counter describing modifications to the tile. unsigned int linksFreeList; ///< Index to the next free link. dtMeshHeader* header; ///< The tile header. dtPoly* polys; ///< The tile polygons. [Size: dtMeshHeader::polyCount] float* verts; ///< The tile vertices. [Size: dtMeshHeader::vertCount] dtLink* links; ///< The tile links. [Size: dtMeshHeader::maxLinkCount] dtPolyDetail* detailMeshes; ///< The tile's detail sub-meshes. [Size: dtMeshHeader::detailMeshCount] /// The detail mesh's unique vertices. [(x, y, z) * dtMeshHeader::detailVertCount] float* detailVerts; /// The detail mesh's triangles. [(vertA, vertB, vertC) * dtMeshHeader::detailTriCount] unsigned char* detailTris; /// The tile bounding volume nodes. [Size: dtMeshHeader::bvNodeCount] /// (Will be null if bounding volumes are disabled.) dtBVNode* bvTree; dtOffMeshConnection* offMeshCons; ///< The tile off-mesh connections. [Size: dtMeshHeader::offMeshConCount] unsigned char* data; ///< The tile data. (Not directly accessed under normal situations.) int dataSize; ///< Size of the tile data. int flags; ///< Tile flags. (See: #dtTileFlags) dtMeshTile* next; ///< The next free tile, or the next tile in the spatial grid. }; /// Configuration parameters used to define multi-tile navigation meshes. 
/// The values are used to allocate space during the initialization of a navigation mesh. /// @see dtNavMesh::init() /// @ingroup detour struct dtNavMeshParams { float orig[3]; ///< The world space origin of the navigation mesh's tile space. [(x, y, z)] float tileWidth; ///< The width of each tile. (Along the x-axis.) float tileHeight; ///< The height of each tile. (Along the z-axis.) int maxTiles; ///< The maximum number of tiles the navigation mesh can contain. int maxPolys; ///< The maximum number of polygons each tile can contain. }; /// A navigation mesh based on tiles of convex polygons. /// @ingroup detour class dtNavMesh { public: dtNavMesh(); ~dtNavMesh(); /// @{ /// @name Initialization and Tile Management /// Initializes the navigation mesh for tiled use. /// @param[in] params Initialization parameters. /// @return The status flags for the operation. dtStatus init(const dtNavMeshParams* params); /// Initializes the navigation mesh for single tile use. /// @param[in] data Data of the new tile. (See: #dtCreateNavMeshData) /// @param[in] dataSize The data size of the new tile. /// @param[in] flags The tile flags. (See: #dtTileFlags) /// @return The status flags for the operation. /// @see dtCreateNavMeshData dtStatus init(unsigned char* data, const int dataSize, const int flags); /// The navigation mesh initialization params. const dtNavMeshParams* getParams() const; /// Adds a tile to the navigation mesh. /// @param[in] data Data for the new tile mesh. (See: #dtCreateNavMeshData) /// @param[in] dataSize Data size of the new tile mesh. /// @param[in] flags Tile flags. (See: #dtTileFlags) /// @param[in] lastRef The desired reference for the tile. (When reloading a tile.) [opt] [Default: 0] /// @param[out] result The tile reference. (If the tile was succesfully added.) [opt] /// @return The status flags for the operation. 
dtStatus addTile(unsigned char* data, int dataSize, int flags, dtTileRef lastRef, dtTileRef* result); /// Removes the specified tile from the navigation mesh. /// @param[in] ref The reference of the tile to remove. /// @param[out] data Data associated with deleted tile. /// @param[out] dataSize Size of the data associated with deleted tile. /// @return The status flags for the operation. dtStatus removeTile(dtTileRef ref, unsigned char** data, int* dataSize); /// @} /// @{ /// @name Query Functions /// Calculates the tile grid location for the specified world position. /// @param[in] pos The world position for the query. [(x, y, z)] /// @param[out] tx The tile's x-location. (x, y) /// @param[out] ty The tile's y-location. (x, y) void calcTileLoc(const float* pos, int* tx, int* ty) const; /// Gets the tile at the specified grid location. /// @param[in] x The tile's x-location. (x, y, layer) /// @param[in] y The tile's y-location. (x, y, layer) /// @param[in] layer The tile's layer. (x, y, layer) /// @return The tile, or null if the tile does not exist. const dtMeshTile* getTileAt(const int x, const int y, const int layer) const; /// Gets all tiles at the specified grid location. (All layers.) /// @param[in] x The tile's x-location. (x, y) /// @param[in] y The tile's y-location. (x, y) /// @param[out] tiles A pointer to an array of tiles that will hold the result. /// @param[in] maxTiles The maximum tiles the tiles parameter can hold. /// @return The number of tiles returned in the tiles array. int getTilesAt(const int x, const int y, dtMeshTile const** tiles, const int maxTiles) const; /// Gets the tile reference for the tile at specified grid location. /// @param[in] x The tile's x-location. (x, y, layer) /// @param[in] y The tile's y-location. (x, y, layer) /// @param[in] layer The tile's layer. (x, y, layer) /// @return The tile reference of the tile, or 0 if there is none. 
dtTileRef getTileRefAt(int x, int y, int layer) const; /// Gets the tile reference for the specified tile. /// @param[in] tile The tile. /// @return The tile reference of the tile. dtTileRef getTileRef(const dtMeshTile* tile) const; /// Gets the tile for the specified tile reference. /// @param[in] ref The tile reference of the tile to retrieve. /// @return The tile for the specified reference, or null if the /// reference is invalid. const dtMeshTile* getTileByRef(dtTileRef ref) const; /// The maximum number of tiles supported by the navigation mesh. /// @return The maximum number of tiles supported by the navigation mesh. int getMaxTiles() const; /// Gets the tile at the specified index. /// @param[in] i The tile index. [Limit: 0 >= index < #getMaxTiles()] /// @return The tile at the specified index. const dtMeshTile* getTile(int i) const; /// Gets the tile and polygon for the specified polygon reference. /// @param[in] ref The reference for the a polygon. /// @param[out] tile The tile containing the polygon. /// @param[out] poly The polygon. /// @return The status flags for the operation. dtStatus getTileAndPolyByRef(const dtPolyRef ref, const dtMeshTile** tile, const dtPoly** poly) const; /// Returns the tile and polygon for the specified polygon reference. /// @param[in] ref A known valid reference for a polygon. /// @param[out] tile The tile containing the polygon. /// @param[out] poly The polygon. void getTileAndPolyByRefUnsafe(const dtPolyRef ref, const dtMeshTile** tile, const dtPoly** poly) const; /// Checks the validity of a polygon reference. /// @param[in] ref The polygon reference to check. /// @return True if polygon reference is valid for the navigation mesh. bool isValidPolyRef(dtPolyRef ref) const; /// Gets the polygon reference for the tile's base polygon. /// @param[in] tile The tile. /// @return The polygon reference for the base polygon in the specified tile. 
dtPolyRef getPolyRefBase(const dtMeshTile* tile) const; /// Gets the endpoints for an off-mesh connection, ordered by "direction of travel". /// @param[in] prevRef The reference of the polygon before the connection. /// @param[in] polyRef The reference of the off-mesh connection polygon. /// @param[out] startPos The start position of the off-mesh connection. [(x, y, z)] /// @param[out] endPos The end position of the off-mesh connection. [(x, y, z)] /// @return The status flags for the operation. dtStatus getOffMeshConnectionPolyEndPoints(dtPolyRef prevRef, dtPolyRef polyRef, float* startPos, float* endPos) const; /// Gets the specified off-mesh connection. /// @param[in] ref The polygon reference of the off-mesh connection. /// @return The specified off-mesh connection, or null if the polygon reference is not valid. const dtOffMeshConnection* getOffMeshConnectionByRef(dtPolyRef ref) const; /// @} /// @{ /// @name State Management /// These functions do not effect #dtTileRef or #dtPolyRef's. /// Sets the user defined flags for the specified polygon. /// @param[in] ref The polygon reference. /// @param[in] flags The new flags for the polygon. /// @return The status flags for the operation. dtStatus setPolyFlags(dtPolyRef ref, unsigned short flags); /// Gets the user defined flags for the specified polygon. /// @param[in] ref The polygon reference. /// @param[out] resultFlags The polygon flags. /// @return The status flags for the operation. dtStatus getPolyFlags(dtPolyRef ref, unsigned short* resultFlags) const; /// Sets the user defined area for the specified polygon. /// @param[in] ref The polygon reference. /// @param[in] area The new area id for the polygon. [Limit: < #DT_MAX_AREAS] /// @return The status flags for the operation. dtStatus setPolyArea(dtPolyRef ref, unsigned char area); /// Gets the user defined area for the specified polygon. /// @param[in] ref The polygon reference. /// @param[out] resultArea The area id for the polygon. 
/// @return The status flags for the operation. dtStatus getPolyArea(dtPolyRef ref, unsigned char* resultArea) const; /// Gets the size of the buffer required by #storeTileState to store the specified tile's state. /// @param[in] tile The tile. /// @return The size of the buffer required to store the state. int getTileStateSize(const dtMeshTile* tile) const; /// Stores the non-structural state of the tile in the specified buffer. (Flags, area ids, etc.) /// @param[in] tile The tile. /// @param[out] data The buffer to store the tile's state in. /// @param[in] maxDataSize The size of the data buffer. [Limit: >= #getTileStateSize] /// @return The status flags for the operation. dtStatus storeTileState(const dtMeshTile* tile, unsigned char* data, const int maxDataSize) const; /// Restores the state of the tile. /// @param[in] tile The tile. /// @param[in] data The new state. (Obtained from #storeTileState.) /// @param[in] maxDataSize The size of the state within the data buffer. /// @return The status flags for the operation. dtStatus restoreTileState(dtMeshTile* tile, const unsigned char* data, const int maxDataSize); /// @} /// @{ /// @name Encoding and Decoding /// These functions are generally meant for internal use only. /// Derives a standard polygon reference. /// @note This function is generally meant for internal use only. /// @param[in] salt The tile's salt value. /// @param[in] it The index of the tile. /// @param[in] ip The index of the polygon within the tile. inline dtPolyRef encodePolyId(unsigned int salt, unsigned int it, unsigned int ip) const { #ifdef DT_POLYREF64 return ((dtPolyRef)salt << (DT_POLY_BITS+DT_TILE_BITS)) | ((dtPolyRef)it << DT_POLY_BITS) | (dtPolyRef)ip; #else return ((dtPolyRef)salt << (m_polyBits+m_tileBits)) | ((dtPolyRef)it << m_polyBits) | (dtPolyRef)ip; #endif } /// Decodes a standard polygon reference. /// @note This function is generally meant for internal use only. /// @param[in] ref The polygon reference to decode. 
/// @param[out] salt The tile's salt value. /// @param[out] it The index of the tile. /// @param[out] ip The index of the polygon within the tile. /// @see #encodePolyId inline void decodePolyId(dtPolyRef ref, unsigned int& salt, unsigned int& it, unsigned int& ip) const { #ifdef DT_POLYREF64 const dtPolyRef saltMask = ((dtPolyRef)1<<DT_SALT_BITS)-1; const dtPolyRef tileMask = ((dtPolyRef)1<<DT_TILE_BITS)-1; const dtPolyRef polyMask = ((dtPolyRef)1<<DT_POLY_BITS)-1; salt = (unsigned int)((ref >> (DT_POLY_BITS+DT_TILE_BITS)) & saltMask); it = (unsigned int)((ref >> DT_POLY_BITS) & tileMask); ip = (unsigned int)(ref & polyMask); #else const dtPolyRef saltMask = ((dtPolyRef)1<<m_saltBits)-1; const dtPolyRef tileMask = ((dtPolyRef)1<<m_tileBits)-1; const dtPolyRef polyMask = ((dtPolyRef)1<<m_polyBits)-1; salt = (unsigned int)((ref >> (m_polyBits+m_tileBits)) & saltMask); it = (unsigned int)((ref >> m_polyBits) & tileMask); ip = (unsigned int)(ref & polyMask); #endif } /// Extracts a tile's salt value from the specified polygon reference. /// @note This function is generally meant for internal use only. /// @param[in] ref The polygon reference. /// @see #encodePolyId inline unsigned int decodePolyIdSalt(dtPolyRef ref) const { #ifdef DT_POLYREF64 const dtPolyRef saltMask = ((dtPolyRef)1<<DT_SALT_BITS)-1; return (unsigned int)((ref >> (DT_POLY_BITS+DT_TILE_BITS)) & saltMask); #else const dtPolyRef saltMask = ((dtPolyRef)1<<m_saltBits)-1; return (unsigned int)((ref >> (m_polyBits+m_tileBits)) & saltMask); #endif } /// Extracts the tile's index from the specified polygon reference. /// @note This function is generally meant for internal use only. /// @param[in] ref The polygon reference. 
/// @see #encodePolyId inline unsigned int decodePolyIdTile(dtPolyRef ref) const { #ifdef DT_POLYREF64 const dtPolyRef tileMask = ((dtPolyRef)1<<DT_TILE_BITS)-1; return (unsigned int)((ref >> DT_POLY_BITS) & tileMask); #else const dtPolyRef tileMask = ((dtPolyRef)1<<m_tileBits)-1; return (unsigned int)((ref >> m_polyBits) & tileMask); #endif } /// Extracts the polygon's index (within its tile) from the specified polygon reference. /// @note This function is generally meant for internal use only. /// @param[in] ref The polygon reference. /// @see #encodePolyId inline unsigned int decodePolyIdPoly(dtPolyRef ref) const { #ifdef DT_POLYREF64 const dtPolyRef polyMask = ((dtPolyRef)1<<DT_POLY_BITS)-1; return (unsigned int)(ref & polyMask); #else const dtPolyRef polyMask = ((dtPolyRef)1<<m_polyBits)-1; return (unsigned int)(ref & polyMask); #endif } /// @} private: /// Returns pointer to tile in the tile array. dtMeshTile* getTile(int i); /// Returns neighbour tile based on side. int getTilesAt(const int x, const int y, dtMeshTile** tiles, const int maxTiles) const; /// Returns neighbour tile based on side. int getNeighbourTilesAt(const int x, const int y, const int side, dtMeshTile** tiles, const int maxTiles) const; /// Returns all polygons in neighbour tile based on portal defined by the segment. int findConnectingPolys(const float* va, const float* vb, const dtMeshTile* tile, int side, dtPolyRef* con, float* conarea, int maxcon) const; /// Builds internal polygons links for a tile. void connectIntLinks(dtMeshTile* tile); /// Builds internal polygons links for a tile. void baseOffMeshLinks(dtMeshTile* tile); /// Builds external polygon links for a tile. void connectExtLinks(dtMeshTile* tile, dtMeshTile* target, int side); /// Builds external polygon links for a tile. void connectExtOffMeshLinks(dtMeshTile* tile, dtMeshTile* target, int side); /// Removes external links at specified side. 
void unconnectExtLinks(dtMeshTile* tile, dtMeshTile* target); // TODO: These methods are duplicates from dtNavMeshQuery, but are needed for off-mesh connection finding. /// Queries polygons within a tile. int queryPolygonsInTile(const dtMeshTile* tile, const float* qmin, const float* qmax, dtPolyRef* polys, const int maxPolys) const; /// Find nearest polygon within a tile. dtPolyRef findNearestPolyInTile(const dtMeshTile* tile, const float* center, const float* extents, float* nearestPt) const; /// Returns closest point on polygon. void closestPointOnPoly(dtPolyRef ref, const float* pos, float* closest, bool* posOverPoly) const; dtNavMeshParams m_params; ///< Current initialization params. TODO: do not store this info twice. float m_orig[3]; ///< Origin of the tile (0,0) float m_tileWidth, m_tileHeight; ///< Dimensions of each tile. int m_maxTiles; ///< Max number of tiles. int m_tileLutSize; ///< Tile hash lookup size (must be pot). int m_tileLutMask; ///< Tile hash lookup mask. dtMeshTile** m_posLookup; ///< Tile hash lookup. dtMeshTile* m_nextFree; ///< Freelist of tiles. dtMeshTile* m_tiles; ///< List of tiles. #ifndef DT_POLYREF64 unsigned int m_saltBits; ///< Number of salt bits in the tile ID. unsigned int m_tileBits; ///< Number of tile bits in the tile ID. unsigned int m_polyBits; ///< Number of poly bits in the tile ID. #endif }; /// Allocates a navigation mesh object using the Detour allocator. /// @return A navigation mesh that is ready for initialization, or null on failure. /// @ingroup detour dtNavMesh* dtAllocNavMesh(); /// Frees the specified navigation mesh object using the Detour allocator. /// @param[in] navmesh A navigation mesh allocated using #dtAllocNavMesh /// @ingroup detour void dtFreeNavMesh(dtNavMesh* navmesh); #endif // DETOURNAVMESH_H /////////////////////////////////////////////////////////////////////////// // This section contains detailed documentation for members that don't have // a source file. 
It reduces clutter in the main section of the header. /** @typedef dtPolyRef @par Polygon references are subject to the same invalidate/preserve/restore rules that apply to #dtTileRef's. If the #dtTileRef for the polygon's tile changes, the polygon reference becomes invalid. Changing a polygon's flags, area id, etc. does not impact its polygon reference. @typedef dtTileRef @par The following changes will invalidate a tile reference: - The referenced tile has been removed from the navigation mesh. - The navigation mesh has been initialized using a different set of #dtNavMeshParams. A tile reference is preserved/restored if the tile is added to a navigation mesh initialized with the original #dtNavMeshParams and is added at the original reference location. (E.g. The lastRef parameter is used with dtNavMesh::addTile.) Basically, if the storage structure of a tile changes, its associated tile reference changes. @var unsigned short dtPoly::neis[DT_VERTS_PER_POLYGON] @par Each entry represents data for the edge starting at the vertex of the same index. E.g. The entry at index n represents the edge data for vertex[n] to vertex[n+1]. A value of zero indicates the edge has no polygon connection. (It makes up the border of the navigation mesh.) The information can be extracted as follows: @code neighborRef = neis[n] & 0xff; // Get the neighbor polygon reference. if (neis[n] & #DT_EX_LINK) { // The edge is an external (portal) edge. } @endcode @var float dtMeshHeader::bvQuantFactor @par This value is used for converting between world and bounding volume coordinates. For example: @code const float cs = 1.0f / tile->header->bvQuantFactor; const dtBVNode* n = &tile->bvTree[i]; if (n->i >= 0) { // This is a leaf node. float worldMinX = tile->header->bmin[0] + n->bmin[0]*cs; float worldMinY = tile->header->bmin[0] + n->bmin[1]*cs; // Etc... } @endcode @struct dtMeshTile @par Tiles generally only exist within the context of a dtNavMesh object. Some tile content is optional. 
For example, a tile may not contain any off-mesh connections. In this case the associated 
pointer will be null.

If a detail mesh exists it will share vertices with the base polygon mesh. Only the vertices 
unique to the detail mesh will be stored in #detailVerts.

@warning Tiles returned by a dtNavMesh object are not guaranteed to be populated. For example: 
The tile at a location might not have been loaded yet, or may have been removed. In this case, 
pointers will be null. So if in doubt, check the polygon count in the tile's header to determine 
if a tile has polygons defined.

@var float dtOffMeshConnection::pos[6]
@par

For a properly built navigation mesh, vertex A will always be within the bounds of the mesh. 
Vertex B is not required to be within the bounds of the mesh.

*/
ZhuoZhuoCrayon/bk-sops
auth_backend/plugins/tastypie/resources.py
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2020 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""

from __future__ import absolute_import, unicode_literals

import logging

from builtins import object, str

from auth_backend.backends.utils import get_backend_from_config

# Auth backend resolved once at import time from project configuration.
backend = get_backend_from_config()

logger = logging.getLogger("root")


class BkSaaSLabeledDataResourceMixin(object):
    """Tastypie resource mixin that annotates serialized data with auth info.

    Both hooks attach, per object, the list of action ids the requesting user
    is allowed to perform (``auth_actions``), plus resource-level metadata
    (``auth_operations`` / ``auth_resource``). Resources opt in by declaring
    ``auth_resource`` (and optionally ``inspect``) on their ``Meta``.
    """

    def alter_list_data_to_serialize(self, request, data):
        """Annotate a list response with per-object authorized actions.

        ``data`` is the tastypie list payload: ``data["meta"]`` (dict) and
        ``data["objects"]`` (list of bundles). Returns ``data`` mutated in place.
        If the resource declares no ``auth_resource``, the payload is untouched.
        """
        auth_resource = getattr(self._meta, "auth_resource", None)
        if auth_resource is None:
            return data

        # ``inspect`` (optional) abstracts how to pull the model instance and
        # its id out of a bundle; default to the bundle's own object.
        inspect = getattr(self._meta, "inspect", None)
        get_instance = inspect.instance if inspect else lambda bundle: bundle.obj

        # set meta info
        data["meta"]["auth_operations"] = auth_resource.operations
        data["meta"]["auth_resource"] = auth_resource.base_info()

        # assemble batch-verify params
        action_ids = [act.id for act in auth_resource.actions]
        instances = [get_instance(bundle) for bundle in data["objects"]]

        # One backend round-trip verifies every action against every instance.
        verify_result = backend.batch_verify_perms(auth_resource, "user", request.user.username, action_ids, instances)
        if not verify_result["result"]:
            # Backend call itself failed: log and degrade to "no permissions"
            # rather than failing the whole list request.
            logger.error(
                "alter_list_data_to_serialize for resource({}) failed: {}".format(
                    auth_resource.rtype, verify_result["message"]
                )
            )
            for bundle in data["objects"]:
                bundle.data["auth_actions"] = []
            return data

        # process verify result
        # Actions passed with no resource_id apply to every object in the list;
        # instance-bound passes are keyed by that instance's resource id.
        passed_inst_irrelevant_actions = []
        inst_auth_actions = {}
        for verify_item in verify_result["data"]:
            # ignore verify fail action
            if not verify_item["is_pass"]:
                continue

            action_id = verify_item["action_id"]
            if not verify_item["resource_id"]:
                passed_inst_irrelevant_actions.append(action_id)
            else:
                # collect instance auth actions
                # resource_id is a list of {resource_type, resource_id} dicts;
                # only the entry matching this resource's type is relevant.
                for resource in verify_item["resource_id"]:
                    if resource["resource_type"] == auth_resource.rtype:
                        rid = str(resource["resource_id"])
                        inst_auth_actions.setdefault(rid, []).append(action_id)
                        break

        # set auth actions
        for bundle in data["objects"]:
            # Ids are compared as strings on both sides (backend rid above).
            obj_id = str(inspect.resource_id(bundle)) if inspect else str(bundle.obj.pk)
            auth_actions = inst_auth_actions.get(obj_id, [])
            auth_actions.extend(passed_inst_irrelevant_actions)
            bundle.data["auth_actions"] = auth_actions

        return data

    def alter_detail_data_to_serialize(self, request, data):
        """Annotate a single-object response with authorized actions.

        ``data`` is a single tastypie bundle. Mirrors the list hook, but every
        passed action (instance-bound or not) is attached to the one object.
        Returns ``data`` mutated in place.
        """
        bundle = data
        auth_resource = getattr(self._meta, "auth_resource", None)
        if auth_resource is None:
            return data

        resource_info = auth_resource.base_info()
        inspect = getattr(self._meta, "inspect", None)
        # Fill in scope_id from the bundle when the resource's base info lacks it.
        if not resource_info["scope_id"]:
            resource_info["scope_id"] = inspect.scope_id(data) if inspect else None
        get_instance = inspect.instance if inspect else lambda bundle: bundle.obj

        # set meta
        data.data["auth_operations"] = auth_resource.operations
        data.data["auth_resource"] = resource_info

        # assemble batch-verify params
        action_ids = [act.id for act in auth_resource.actions]
        instances = [get_instance(bundle)]

        verify_result = backend.batch_verify_perms(auth_resource, "user", request.user.username, action_ids, instances)
        if not verify_result["result"]:
            # Backend failure degrades to an empty permission list (see list hook).
            logger.error(
                "alter_detail_data_to_serialize for resource({}) failed: {}".format(
                    auth_resource.rtype, verify_result["message"]
                )
            )
            bundle.data["auth_actions"] = []
            return data

        auth_actions = []
        for verify_item in verify_result["data"]:
            # ignore verify fail action
            if not verify_item["is_pass"]:
                continue

            auth_actions.append(verify_item["action_id"])

        bundle.data["auth_actions"] = auth_actions

        return data
tallence/core-forms-frontend
src/vue/plugins/core-forms/store/mutations.js
import {FORM_DEFINITION, FORM_LOADING, FORM_PROGRESS, FORM_SOURCE, FORM_SUBMITTING, RESET_FORM_VALUES, SET_FORM_VALUE} from "./types";

/**
 * Vuex mutation handlers for the core-forms store.
 * Each simple mutation overwrites one piece of state; SET_FORM_VALUE merges a
 * single field value into `formValues`, RESET_FORM_VALUES clears all values.
 */
export default {

  [FORM_SOURCE](state, value) {
    state.formSource = value;
  },

  [FORM_LOADING](state, value) {
    state.formLoading = value;
  },

  [FORM_SUBMITTING](state, value) {
    state.formSubmitting = value;
  },

  [FORM_PROGRESS](state, value) {
    state.formProgress = value;
  },

  [FORM_DEFINITION](state, value) {
    state.formDefinition = value;
  },

  [SET_FORM_VALUE](state, payload) {
    // ignore null/undefined payloads; otherwise merge the single field
    // into a fresh object so Vue reactivity picks up the change
    if (payload == null) {
      return;
    }
    state.formValues = {...state.formValues, [payload.field]: payload.value};
  },

  [RESET_FORM_VALUES](state) {
    state.formValues = {};
  }
}
vstconsulting/vstutils
test_src/test_proj/migrations/0024_masked_and_phone_fields.py
# Generated by Django 3.2.6 on 2021-08-18 06:58 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('test_proj', '0023_cachableproxymodel'), ] operations = [ migrations.AddField( model_name='author', name='masked', field=models.CharField(max_length=255, null=True), ), migrations.AddField( model_name='author', name='phone', field=models.CharField(max_length=16, null=True), ), ]
chfrank-cgn/FOM
AD/02-01/EinfacheListe.java
/** A single node of the singly linked list, holding one character. */
class Element {
    char wert;         // payload character
    Element naechstes; // next node; null at the tail

    // Constructor: create an isolated node
    public Element(char w) {
        wert = w;
        naechstes = null;
    }
}

/**
 * Minimal singly linked list demo: builds the list L-I-S-T-E, then
 * demonstrates lookup by position, search by value and deletion by position.
 * All positions are 1-based.
 */
public class EinfacheListe {

    // Reference to the list head
    Element kopf;

    public EinfacheListe() {
        kopf = new Element('L');
        kopf.naechstes = new Element('I');
        kopf.naechstes.naechstes = new Element('S');
        kopf.naechstes.naechstes.naechstes = new Element('T');
        kopf.naechstes.naechstes.naechstes.naechstes = new Element('E');
        showListe();

        // determine the k-th element
        char wert = getElement(3);
        System.out.println("3-tes Element: "+wert);

        // search for an element with a given value
        int i = searchElement('T');
        System.out.println("Gesuchtes Element T an Position: "+i);

        // delete the k-th element
        deleteElement(3);
        showListe();
    }

    /**
     * Return the value of the k-th element (1-based).
     * NOTE: if k exceeds the list length, the last element's value is
     * returned silently (original behavior, kept for compatibility).
     */
    private char getElement(int k) {
        Element ptr = kopf;
        int i = 1;
        while ((i < k) && (ptr.naechstes != null)) {
            i = i + 1;
            ptr = ptr.naechstes;
        }
        return ptr.wert;
    }

    /**
     * Return the 1-based position of the first element with the given value,
     * or -1 if the value is not in the list.
     * (Bug fix: previously the position of the last element was returned
     * even when the value was absent.)
     */
    private int searchElement(char wert) {
        Element ptr = kopf;
        int i = 1;
        while (ptr != null) {
            if (ptr.wert == wert) {
                return i;
            }
            i = i + 1;
            ptr = ptr.naechstes;
        }
        return -1; // not found
    }

    /**
     * Delete the k-th element (1-based).
     * Bug fix: deleting position 1 previously removed the SECOND element,
     * because prev and ptr both started at the head; the head case is now
     * handled explicitly. Values of k beyond the length delete the last
     * element (original behavior, kept for compatibility).
     */
    private void deleteElement(int k) {
        if (kopf == null) {
            return; // empty list: nothing to delete
        }
        if (k <= 1) {
            kopf = kopf.naechstes; // unlink the head
            return;
        }
        Element ptr = kopf;
        Element prev = kopf;
        int i = 1;
        while ((i < k) && (ptr.naechstes != null)) {
            i = i + 1;
            prev = ptr;
            ptr = ptr.naechstes;
        }
        prev.naechstes = ptr.naechstes; // unlink ptr
    }

    /** Print every element's value, head to tail, one per line. */
    private void showListe() {
        Element ptr = kopf;
        System.out.println("Value: "+ptr.wert);
        while (ptr.naechstes != null) {
            ptr = ptr.naechstes;
            System.out.println("Value: "+ptr.wert);
        }
    }

    public static void main(String[] args) {
        new EinfacheListe();
    }
}
rceet/TencentOS-tiny
components/connectivity/qcloud-iot-explorer-sdk/3rdparty/sdk_src/protocol/mqtt/mqtt_client_common.c
<reponame>rceet/TencentOS-tiny<gh_stars>1-10
/*******************************************************************************
 * Copyright (c) 2014 IBM Corp.
 *
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * and Eclipse Distribution License v1.0 which accompany this distribution.
 *
 * The Eclipse Public License is available at
 * http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
 *
 * Contributors:
 * <NAME> - initial API and implementation and/or initial documentation
 * <NAME> - non-blocking packet read functions for stream transport
 *******************************************************************************/

#ifdef __cplusplus
extern "C" {
#endif

#include <string.h>
#include <time.h>

#include "mqtt_client.h"
#include "utils_list.h"

/* remain waiting time after MQTT header is received (unit: ms) */
#define QCLOUD_IOT_MQTT_MAX_REMAIN_WAIT_MS (2000)

/* MQTT remaining-length field is at most 4 bytes on the wire */
#define MAX_NO_OF_REMAINING_LENGTH_BYTES 4

/* Compare two subscription handles: topic filter (exact string), message
 * callback and subscribe-event callback must all match.
 * return: 0, identical; NOT 0, different. */
static int _check_handle_is_identical(SubTopicHandle *sub_handle1, SubTopicHandle *sub_handle2)
{
    if (!sub_handle1 || !sub_handle2) {
        return 1;
    }

    int topic_name_Len = strlen(sub_handle1->topic_filter);

    if (topic_name_Len != strlen(sub_handle2->topic_filter)) {
        return 1;
    }

    if (0 != strncmp(sub_handle1->topic_filter, sub_handle2->topic_filter, topic_name_Len)) {
        return 1;
    }

    if (sub_handle1->message_handler != sub_handle2->message_handler) {
        return 1;
    }

    if (sub_handle1->sub_event_handler != sub_handle2->sub_event_handler) {
        return 1;
    }

    return 0;
}

/* Return the next MQTT packet id, wrapping back to 1 after MAX_PACKET_ID.
 * The increment is protected by lock_generic.
 * NOTE(review): the final read of next_packet_id in the exit macro happens
 * after the unlock -- a concurrent caller could bump it first; confirm
 * whether callers tolerate that. */
uint16_t get_next_packet_id(Qcloud_IoT_Client *pClient)
{
    IOT_FUNC_ENTRY;
    POINTER_SANITY_CHECK(pClient, QCLOUD_ERR_INVAL);

    HAL_MutexLock(pClient->lock_generic);
    pClient->next_packet_id = (uint16_t) ((MAX_PACKET_ID == pClient->next_packet_id) ? 1 : (pClient->next_packet_id + 1));
    HAL_MutexUnlock(pClient->lock_generic);

    IOT_FUNC_EXIT_RC(pClient->next_packet_id);
}

/* Fill conn_id with MAX_CONN_ID_LEN-1 random alphanumeric characters plus a
 * terminating NUL.
 * NOTE(review): rand() is re-seeded from the millisecond clock on every
 * call, so two calls within the same millisecond yield the same id. */
void get_next_conn_id(char *conn_id)
{
    int i;
    srand((unsigned)HAL_GetTimeMs());
    for (i = 0; i < MAX_CONN_ID_LEN - 1; i++) {
        int flag = rand() % 3;
        switch (flag) {
            case 0:
                conn_id[i] = (rand() % 26) + 'a';
                break;
            case 1:
                conn_id[i] = (rand() % 26) + 'A';
                break;
            case 2:
                conn_id[i] = (rand() % 10) + '0';
                break;
        }
    }
    conn_id[MAX_CONN_ID_LEN - 1] = '\0';
}

/**
 * Encodes the message length according to the MQTT algorithm
 * (7 data bits per byte; MSB set while more bytes follow).
 * @param buf the buffer into which the encoded data is written
 * @param length the length to be encoded
 * @return the number of bytes written to buffer (1..4)
 */
size_t mqtt_write_packet_rem_len(unsigned char *buf, uint32_t length)
{
    IOT_FUNC_ENTRY;

    size_t outLen = 0;
    do {
        unsigned char encodeByte;
        encodeByte = (unsigned char) (length % 128);
        length /= 128;
        /* if there are more digits to encode, set the top bit of this digit */
        if (length > 0) {
            encodeByte |= 0x80;
        }
        buf[outLen++] = encodeByte;
    } while (length > 0);

    IOT_FUNC_EXIT_RC((int)outLen);
}

/* Total wire size of a packet with rem_len payload bytes:
 * 1 fixed-header byte + 1..4 remaining-length bytes + rem_len. */
size_t get_mqtt_packet_len(size_t rem_len)
{
    rem_len += 1; /* header byte */

    /* now remaining_length field */
    if (rem_len < 128) {
        rem_len += 1;
    } else if (rem_len < 16384) {
        rem_len += 2;
    } else if (rem_len < 2097151) {
        rem_len += 3;
    } else {
        rem_len += 4;
    }

    return rem_len;
}

/**
 * Decodes the message length according to the MQTT algorithm
 * @param getcharfn pointer to function to read the next character from the data source
 * @param value the decoded length returned
 * @param readBytesLen number of remaining-length bytes consumed (1..4)
 * @return QCLOUD_RET_SUCCESS, or an error code on malformed/short input
 */
static int _decode_packet_rem_len_from_buf_read(uint32_t (*getcharfn)(unsigned char *, uint32_t), uint32_t *value,
                                                uint32_t *readBytesLen)
{
    IOT_FUNC_ENTRY;

    unsigned char c;
    uint32_t multiplier = 1;
    uint32_t len = 0;
    *value = 0;

    do {
        if (++len > MAX_NO_OF_REMAINING_LENGTH_BYTES) {
            /* bad data */
            IOT_FUNC_EXIT_RC(QCLOUD_ERR_MQTT_PACKET_READ);
        }

        uint32_t getLen = 0;
        getLen = (*getcharfn)(&c, 1);
        if (1 != getLen) {
            IOT_FUNC_EXIT_RC(QCLOUD_ERR_FAILURE);
        }

        *value += (c & 127) * multiplier;
        multiplier *= 128;
    } while ((c & 128) != 0); /* continuation bit set -> more length bytes follow */

    *readBytesLen = len;

    IOT_FUNC_EXIT_RC(QCLOUD_RET_SUCCESS);
}

/* Cursor used by bufchar() so the generic decoder above can read from a
 * memory buffer instead of the network. Not thread-safe (file-scope state). */
static unsigned char *bufptr;

/* Read `count` bytes from bufptr, advancing it.
 * NOTE(review): every byte is written to the same location *c; the only
 * caller (the remaining-length decoder) always passes count == 1. */
uint32_t bufchar(unsigned char *c, uint32_t count)
{
    uint32_t i;

    for (i = 0; i < count; ++i) {
        *c = *bufptr++;
    }

    return count;
}

/* Decode an MQTT remaining-length field directly from a memory buffer. */
int mqtt_read_packet_rem_len_form_buf(unsigned char *buf, uint32_t *value, uint32_t *readBytesLen)
{
    bufptr = buf;
    return _decode_packet_rem_len_from_buf_read(bufchar, value, readBytesLen);
}

/**
 * Calculates uint16 packet id from two bytes (big-endian) read from the input buffer
 * @param pptr pointer to the input buffer - incremented by the number of bytes used & returned
 * @return the value calculated
 */
uint16_t mqtt_read_uint16_t(unsigned char **pptr)
{
    unsigned char *ptr = *pptr;
    uint8_t firstByte = (uint8_t) (*ptr);
    uint8_t secondByte = (uint8_t) (*(ptr + 1));
    uint16_t len = (uint16_t) (secondByte + (256 * firstByte));
    *pptr += 2;

    return len;
}

/**
 * Reads one character from the input buffer.
 * @param pptr pointer to the input buffer - incremented by the number of bytes used & returned
 * @return the character read
 */
unsigned char mqtt_read_char(unsigned char **pptr)
{
    unsigned char c = **pptr;
    (*pptr)++;
    return c;
}

/**
 * Writes one character to an output buffer.
 * @param pptr pointer to the output buffer - incremented by the number of bytes used & returned
 * @param c the character to write
 */
void mqtt_write_char(unsigned char **pptr, unsigned char c)
{
    **pptr = c;
    (*pptr)++;
}

/**
 * Writes an integer as 2 bytes (big-endian) to an output buffer.
 * @param pptr pointer to the output buffer - incremented by the number of bytes used & returned
 * @param anInt the integer to write
 */
void mqtt_write_uint_16(unsigned char **pptr, uint16_t anInt)
{
    **pptr = (unsigned char) (anInt / 256);
    (*pptr)++;
    **pptr = (unsigned char) (anInt % 256);
    (*pptr)++;
}

/**
 * Writes a "UTF" string to an output buffer.
Converts C string to length-delimited.
 * @param pptr pointer to the output buffer - incremented by the number of bytes used & returned
 * @param string the C string to write (2-byte big-endian length prefix, no NUL on the wire)
 */
void mqtt_write_utf8_string(unsigned char **pptr, const char *string)
{
    size_t len = strlen(string);
    mqtt_write_uint_16(pptr, (uint16_t) len);
    memcpy(*pptr, string, len);
    *pptr += len;
}

/**
 * Initialize the MQTT Header fixed byte. Used to ensure that Header bits are
 * set per packet type / DUP / QoS / RETAIN.
 */
int mqtt_init_packet_header(unsigned char *header, MessageTypes message_type, QoS Qos, uint8_t dup, uint8_t retained)
{
    POINTER_SANITY_CHECK(header, QCLOUD_ERR_INVAL);
    unsigned char type, qos;

    /* map the enum to the on-wire 4-bit packet type code */
    switch (message_type) {
        case RESERVED:
            /* Should never happen */
            return QCLOUD_ERR_MQTT_UNKNOWN;
        case CONNECT:
            type = 0x01;
            break;
        case CONNACK:
            type = 0x02;
            break;
        case PUBLISH:
            type = 0x03;
            break;
        case PUBACK:
            type = 0x04;
            break;
        case PUBREC:
            type = 0x05;
            break;
        case PUBREL:
            type = 0x06;
            break;
        case PUBCOMP:
            type = 0x07;
            break;
        case SUBSCRIBE:
            type = 0x08;
            break;
        case SUBACK:
            type = 0x09;
            break;
        case UNSUBSCRIBE:
            type = 0x0A;
            break;
        case UNSUBACK:
            type = 0x0B;
            break;
        case PINGREQ:
            type = 0x0C;
            break;
        case PINGRESP:
            type = 0x0D;
            break;
        case DISCONNECT:
            type = 0x0E;
            break;
        default:
            /* Should never happen */
            return QCLOUD_ERR_MQTT_UNKNOWN;
    }

    switch (Qos) {
        case QOS0:
            qos = 0x00;
            break;
        case QOS1:
            qos = 0x01;
            break;
        case QOS2:
            qos = 0x02;
            break;
        default:
            /* Using QOS0 as default */
            qos = 0x00;
            break;
    }

    /* Generate the final protocol header by using bitwise operator */
    *header = ((type << MQTT_HEADER_TYPE_SHIFT)&MQTT_HEADER_TYPE_MASK)
              | ((dup << MQTT_HEADER_DUP_SHIFT)&MQTT_HEADER_DUP_MASK)
              | ((qos << MQTT_HEADER_QOS_SHIFT)&MQTT_HEADER_QOS_MASK)
              | (retained & MQTT_HEADER_RETAIN_MASK);

    return QCLOUD_RET_SUCCESS;
}

/**
 * Deserializes the supplied (wire) buffer into an ack
 * @param packet_type returned integer - the MQTT packet type
 * @param dup returned integer - the MQTT dup flag
 * @param packet_id returned integer - the MQTT packet identifier
 * @param buf the raw buffer data, of the correct length determined by the remaining length field
 * @param buf_len the length in bytes of the data in the supplied buffer
 * @return QCLOUD_RET_SUCCESS on success, or a QCLOUD_ERR_* code on failure
 */
int deserialize_ack_packet(uint8_t *packet_type, uint8_t *dup, uint16_t *packet_id, unsigned char *buf, size_t buf_len)
{
    IOT_FUNC_ENTRY;

    POINTER_SANITY_CHECK(packet_type, QCLOUD_ERR_INVAL);
    POINTER_SANITY_CHECK(dup, QCLOUD_ERR_INVAL);
    POINTER_SANITY_CHECK(packet_id, QCLOUD_ERR_INVAL);
    POINTER_SANITY_CHECK(buf, QCLOUD_ERR_INVAL);

    int rc;
    unsigned char header = 0;
    unsigned char *curdata = buf;
    unsigned char *enddata = NULL;
    uint32_t decodedLen = 0, readBytesLen = 0;

    /* PUBACK fixed header size is two bytes, variable header is 2 bytes, MQTT v3.1.1 Specification 3.4.1 */
    if (4 > buf_len) {
        IOT_FUNC_EXIT_RC(QCLOUD_ERR_BUF_TOO_SHORT);
    }

    /* byte 1 of fixed header: packet type and DUP flag */
    header = mqtt_read_char(&curdata);
    *packet_type = ((header & MQTT_HEADER_TYPE_MASK) >> MQTT_HEADER_TYPE_SHIFT);
    *dup = ((header & MQTT_HEADER_DUP_MASK) >> MQTT_HEADER_DUP_SHIFT);

    /* read remaining length */
    rc = mqtt_read_packet_rem_len_form_buf(curdata, &decodedLen, &readBytesLen);
    if (QCLOUD_RET_SUCCESS != rc) {
        IOT_FUNC_EXIT_RC(rc);
    }
    curdata += (readBytesLen);
    enddata = curdata + decodedLen;
    if (enddata - curdata < 2) {
        IOT_FUNC_EXIT_RC(QCLOUD_ERR_FAILURE);
    }

    *packet_id = mqtt_read_uint16_t(&curdata);

    /* optional trailing byte: non-zero is treated as a rejected ack */
    if (enddata - curdata >= 1) {
        unsigned char ack_code = mqtt_read_char(&curdata);
        if (ack_code != 0) {
            Log_e("deserialize_ack_packet failure! ack_code = 0x%02x", ack_code);
            IOT_FUNC_EXIT_RC(QCLOUD_ERR_FAILURE);
        }
    }

    IOT_FUNC_EXIT_RC(QCLOUD_RET_SUCCESS);
}

/**
 * Deserializes the supplied (wire) buffer into suback data
 * @param packet_id returned integer - the MQTT packet identifier
 * @param max_count - the maximum number of members allowed in the grantedQoSs array
 * @param count returned integer - number of members in the grantedQoSs array
 * @param grantedQoSs returned array of integers - the granted qualities of service
 * @param buf the raw buffer data, of the correct length determined by the remaining length field
 * @param buf_len the length in bytes of the data in the supplied buffer
 * @return QCLOUD_RET_SUCCESS on success, or a QCLOUD_ERR_* code on failure
 */
int deserialize_suback_packet(uint16_t *packet_id, uint32_t max_count, uint32_t *count, QoS *grantedQoSs,
                              unsigned char *buf, size_t buf_len)
{
    IOT_FUNC_ENTRY;

    POINTER_SANITY_CHECK(packet_id, QCLOUD_ERR_INVAL);
    POINTER_SANITY_CHECK(count, QCLOUD_ERR_INVAL);
    POINTER_SANITY_CHECK(grantedQoSs, QCLOUD_ERR_INVAL);

    unsigned char header, type = 0;
    unsigned char *curdata = buf;
    unsigned char *enddata = NULL;
    int decodeRc;
    uint32_t decodedLen = 0;
    uint32_t readBytesLen = 0;

    // 4 bytes of SUBACK header size and at least 1 byte(QoS) in payload
    if (5 > buf_len) {
        IOT_FUNC_EXIT_RC(QCLOUD_ERR_BUF_TOO_SHORT);
    }

    // read 1st byte in fixed header
    header = mqtt_read_char(&curdata);
    type = (header & MQTT_HEADER_TYPE_MASK) >> MQTT_HEADER_TYPE_SHIFT;
    if (type != SUBACK) {
        IOT_FUNC_EXIT_RC(QCLOUD_ERR_FAILURE);
    }

    // read remain part in fixed header
    decodeRc = mqtt_read_packet_rem_len_form_buf(curdata, &decodedLen, &readBytesLen);
    if (decodeRc != QCLOUD_RET_SUCCESS) {
        IOT_FUNC_EXIT_RC(decodeRc);
    }

    curdata += (readBytesLen);
    enddata = curdata + decodedLen;
    if (enddata - curdata < 2) {
        IOT_FUNC_EXIT_RC(QCLOUD_ERR_FAILURE);
    }

    // read packet id from variable header
    *packet_id = mqtt_read_uint16_t(&curdata);

    // read payload: one granted-QoS byte per requested subscription
    *count = 0;
    while (curdata < enddata) {
        if (*count > max_count) {
            IOT_FUNC_EXIT_RC(QCLOUD_ERR_FAILURE);
        }
        grantedQoSs[(*count)++] = (QoS) mqtt_read_char(&curdata);
    }

    IOT_FUNC_EXIT_RC(QCLOUD_RET_SUCCESS);
}

/**
 * Deserializes the supplied (wire) buffer into unsuback data
 * @param packet_id returned integer - the MQTT packet identifier
 * @param buf the raw buffer data, of the correct length determined by the remaining length field
 * @param buf_len the length in bytes of the data in the supplied buffer
 * @return int indicating function execution status
 */
int deserialize_unsuback_packet(uint16_t *packet_id, unsigned char *buf, size_t buf_len)
{
    IOT_FUNC_ENTRY;

    POINTER_SANITY_CHECK(buf, QCLOUD_ERR_INVAL);
    POINTER_SANITY_CHECK(packet_id, QCLOUD_ERR_INVAL);

    unsigned char type = 0;
    unsigned char dup = 0;
    int rc;

    /* an UNSUBACK has the same layout as a generic ack; just verify the type */
    rc = deserialize_ack_packet(&type, &dup, packet_id, buf, buf_len);
    if (QCLOUD_RET_SUCCESS == rc && UNSUBACK != type) {
        rc = QCLOUD_ERR_FAILURE;
    }

    IOT_FUNC_EXIT_RC(rc);
}

/**
 * Serializes a 0-length packet into the supplied buffer, ready for writing to a socket
 * @param buf the buffer into which the packet will be serialized
 * @param buf_len the length in bytes of the supplied buffer, to avoid overruns
 * @param packetType the message type
 * @param serialized_len number of bytes actually written (header + remaining length)
 * @return int indicating function execution status
 */
int serialize_packet_with_zero_payload(unsigned char *buf, size_t buf_len, MessageTypes packetType, uint32_t *serialized_len)
{
    IOT_FUNC_ENTRY;

    POINTER_SANITY_CHECK(buf, QCLOUD_ERR_INVAL);
    POINTER_SANITY_CHECK(serialized_len, QCLOUD_ERR_INVAL);

    unsigned char header = 0;
    unsigned char *ptr = buf;
    int rc;

    /* Buffer should have at least 2 bytes for the header */
    if (4 > buf_len) {
        IOT_FUNC_EXIT_RC(QCLOUD_ERR_BUF_TOO_SHORT);
    }

    rc = mqtt_init_packet_header(&header, packetType, QOS0, 0, 0);
    if (QCLOUD_RET_SUCCESS != rc) {
        IOT_FUNC_EXIT_RC(rc);
    }

    /* write header */
    mqtt_write_char(&ptr, header);

    /* write remaining length */
    ptr += mqtt_write_packet_rem_len(ptr, 0);
    *serialized_len = (uint32_t) (ptr - buf);

    IOT_FUNC_EXIT_RC(QCLOUD_RET_SUCCESS);
}

/* Send `length` bytes of pClient->write_buf over the network, retrying until
 * the timer expires.
 * NOTE(review): each retry passes the full `length` instead of
 * `length - sent`, while reading from &write_buf[sent] -- looks wrong for
 * partial writes; confirm the network write contract before changing. */
int send_mqtt_packet(Qcloud_IoT_Client *pClient, size_t length, Timer *timer)
{
    IOT_FUNC_ENTRY;

    POINTER_SANITY_CHECK(pClient, QCLOUD_ERR_INVAL);
    POINTER_SANITY_CHECK(timer, QCLOUD_ERR_INVAL);

    int rc = QCLOUD_RET_SUCCESS;
    size_t sentLen = 0, sent = 0;

    if (length >= pClient->write_buf_size) {
        IOT_FUNC_EXIT_RC(QCLOUD_ERR_BUF_TOO_SHORT);
    }

    while (sent < length && !expired(timer)) {
        rc = pClient->network_stack.write(&(pClient->network_stack), &pClient->write_buf[sent], length, left_ms(timer), &sentLen);
        if (rc != QCLOUD_RET_SUCCESS) {
            /* there was an error writing the data */
            break;
        }
        sent = sent + sentLen;
    }

    if (sent == length) {
        /* record the fact that we have successfully sent the packet */
        //countdown(&c->ping_timer, c->keep_alive_interval);
        IOT_FUNC_EXIT_RC(QCLOUD_RET_SUCCESS);
    }

    IOT_FUNC_EXIT_RC(rc);
}

/* Decode the MQTT remaining-length field by reading one byte at a time from
 * the network stack (at most MAX_NO_OF_REMAINING_LENGTH_BYTES bytes). */
static int _decode_packet_rem_len_with_net_read(Qcloud_IoT_Client *pClient, uint32_t *value, uint32_t timeout)
{
    IOT_FUNC_ENTRY;

    POINTER_SANITY_CHECK(pClient, QCLOUD_ERR_INVAL);
    POINTER_SANITY_CHECK(value, QCLOUD_ERR_INVAL);

    unsigned char i;
    uint32_t multiplier = 1;
    uint32_t len = 0;
    size_t read_len = 0;
    *value = 0;

    do {
        if (++len > MAX_NO_OF_REMAINING_LENGTH_BYTES) {
            /* bad data */
            IOT_FUNC_EXIT_RC(QCLOUD_ERR_MQTT_PACKET_READ)
        }

        if ((pClient->network_stack.read(&(pClient->network_stack), &i, 1, timeout, &read_len)) != QCLOUD_RET_SUCCESS) {
            /* The value argument is the important value. len is just used temporarily
             * and never used by the calling function for anything else */
            IOT_FUNC_EXIT_RC(QCLOUD_ERR_FAILURE);
        }

        *value += ((i & 127) * multiplier);
        multiplier *= 128;
    } while ((i & 128) != 0); /* continuation bit set -> more length bytes follow */

    /* The value argument is the important value. len is just used temporarily
     * and never used by the calling function for anything else */
    IOT_FUNC_EXIT_RC(QCLOUD_RET_SUCCESS);
}

/**
 * @brief Read MQTT packet from network stack
 *
 * 1. read 1st byte in fixed header and check if valid
 * 2. read the remaining length
 * 3. 
read payload according to remaining length
 *
 * @param pClient MQTT Client
 * @param timer timeout timer
 * @param packet_type MQTT packet type
 * @return QCLOUD_RET_SUCCESS for success, or err code for failure
 */
static int _read_mqtt_packet(Qcloud_IoT_Client *pClient, Timer *timer, uint8_t *packet_type)
{
    IOT_FUNC_ENTRY;
    POINTER_SANITY_CHECK(pClient, QCLOUD_ERR_INVAL);
    POINTER_SANITY_CHECK(timer, QCLOUD_ERR_INVAL);

    uint32_t len = 0;
    uint32_t rem_len = 0;
    size_t read_len = 0;
    int rc;

    /* clamp the timeout so 0/negative is never passed to the network read */
    int timer_left_ms = left_ms(timer);
    if (timer_left_ms <= 0) {
        timer_left_ms = 1;
    }

    // 1. read 1st byte in fixed header and check if valid
    rc = pClient->network_stack.read(&(pClient->network_stack), pClient->read_buf, 1, timer_left_ms, &read_len);
    if (rc == QCLOUD_ERR_SSL_NOTHING_TO_READ || rc == QCLOUD_ERR_TCP_NOTHING_TO_READ) {
        IOT_FUNC_EXIT_RC(QCLOUD_ERR_MQTT_NOTHING_TO_READ);
    } else if (rc != QCLOUD_RET_SUCCESS) {
        IOT_FUNC_EXIT_RC(rc);
    }

    len = 1;

    // 2. read the remaining length
    timer_left_ms = left_ms(timer);
    if (timer_left_ms <= 0) {
        timer_left_ms = 1;
    }
    /* once the header byte arrived, allow extra grace time for the rest */
    timer_left_ms += QCLOUD_IOT_MQTT_MAX_REMAIN_WAIT_MS;
    rc = _decode_packet_rem_len_with_net_read(pClient, &rem_len, timer_left_ms);
    if (QCLOUD_RET_SUCCESS != rc) {
        IOT_FUNC_EXIT_RC(rc);
    }

    // if read buffer is not enough to read the remaining length, discard the packet
    if (rem_len >= pClient->read_buf_size) {
        size_t total_bytes_read = 0;
        size_t bytes_to_be_read;
        int32_t ret_val = 0;
        timer_left_ms = left_ms(timer);
        if (timer_left_ms <= 0) {
            timer_left_ms = 1;
        }
        timer_left_ms += QCLOUD_IOT_MQTT_MAX_REMAIN_WAIT_MS;
        bytes_to_be_read = pClient->read_buf_size;
        /* drain the oversized packet so the byte stream stays in sync */
        do {
            ret_val = pClient->network_stack.read(&(pClient->network_stack), pClient->read_buf, bytes_to_be_read,
                                                  timer_left_ms, &read_len);
            if (ret_val == QCLOUD_RET_SUCCESS) {
                total_bytes_read += read_len;
                if ((rem_len - total_bytes_read) >= pClient->read_buf_size) {
                    bytes_to_be_read = pClient->read_buf_size;
                } else {
                    bytes_to_be_read = rem_len - total_bytes_read;
                }
            }
        } while (total_bytes_read < rem_len && ret_val == QCLOUD_RET_SUCCESS);

        Log_e("MQTT Recv buffer not enough: %d < %d", pClient->read_buf_size, rem_len);
        IOT_FUNC_EXIT_RC(QCLOUD_ERR_BUF_TOO_SHORT);
    }

    /* re-encode the remaining length into read_buf so the buffer holds the
     * complete packet for the deserializers */
    len += mqtt_write_packet_rem_len(pClient->read_buf + 1, rem_len);

    // 3. read payload according to remaining length
    if (rem_len > 0 && ((len + rem_len) > pClient->read_buf_size)) {
        /* payload would overflow: drain it, then report the short buffer */
        timer_left_ms = left_ms(timer);
        if (timer_left_ms <= 0) {
            timer_left_ms = 1;
        }
        timer_left_ms += QCLOUD_IOT_MQTT_MAX_REMAIN_WAIT_MS;
        pClient->network_stack.read(&(pClient->network_stack), pClient->read_buf, rem_len, timer_left_ms, &read_len);
        IOT_FUNC_EXIT_RC(QCLOUD_ERR_BUF_TOO_SHORT);
    } else {
        if (rem_len > 0) {
            timer_left_ms = left_ms(timer);
            if (timer_left_ms <= 0) {
                timer_left_ms = 1;
            }
            timer_left_ms += QCLOUD_IOT_MQTT_MAX_REMAIN_WAIT_MS;
            rc = pClient->network_stack.read(&(pClient->network_stack), pClient->read_buf + len, rem_len,
                                             timer_left_ms, &read_len);
            if (rc != QCLOUD_RET_SUCCESS) {
                IOT_FUNC_EXIT_RC(rc);
            }
        }
    }

    *packet_type = (pClient->read_buf[0] & MQTT_HEADER_TYPE_MASK) >> MQTT_HEADER_TYPE_SHIFT;

    IOT_FUNC_EXIT_RC(QCLOUD_RET_SUCCESS);
}

/**
 * @brief Check if two topic equals
 *
 * @param topic_filter
 * @param topicName
 * @return 1 if byte-identical, 0 otherwise
 */
static uint8_t _is_topic_equals(char *topic_filter, char *topicName) {
    return (uint8_t) (strlen(topic_filter) == strlen(topicName) && !strcmp(topic_filter, topicName));
}

/**
 * @brief Check if topic match
 *
 * assume topic filter and name is in correct format
 * # can only be at end
 * + and # can only be next to separator
 *
 * @param topic_filter topic name filter, wildcard is supported
 * @param topicName topic name, no wildcard
 * @param topicNameLen length of topic name (topicName need not be NUL-terminated)
 * @return 1 on match, 0 otherwise
 */
static uint8_t _is_topic_matched(char *topic_filter, char *topicName, uint16_t topicNameLen)
{
    char *curf;
    char *curn;
    char *curn_end;

    curf = topic_filter;
    curn = topicName;
    curn_end = curn + topicNameLen;

    while (*curf && (curn < curn_end)) {
        /* '+' never matches a '/': stop consuming the name here */
        if (*curf == '+' && *curn == '/') {
            curf++;
            continue;
        }

        if (*curn == '/' && *curf != '/') {
            break;
        }

        if (*curf != '+' && *curf != '#' && *curf != *curn) {
            break;
        }

        if (*curf == '+') {
            /* skip until we meet the next separator, or end of string */
            char *nextpos = curn + 1;
            while (nextpos < curn_end && *nextpos != '/')
                nextpos = ++curn + 1;
        } else if (*curf == '#') {
            /* skip until end of string */
            curn = curn_end - 1;
        }

        curf++;
        curn++;
    };

    if (*curf == '\0') {
        /* filter fully consumed: match only if the name is fully consumed too */
        return (uint8_t) (curn == curn_end);
    } else {
        /* trailing-wildcard cases, e.g. "a/#" matching "a" */
        return (uint8_t) ((*curf == '#') || *(curf + 1) == '#' || (*curf == '+' && *(curn - 1) == '/'));
    }
}

/**
 * @brief deliver the message to user callback
 *
 * Scans sub_handles for the first matching topic filter and invokes its
 * message handler; otherwise falls back to the client's default event handler.
 *
 * @param pClient
 * @param topicName
 * @param topicNameLen
 * @param message
 * @return QCLOUD_RET_SUCCESS, or an error code
 */
static int _deliver_message(Qcloud_IoT_Client *pClient, char *topicName, uint16_t topicNameLen, MQTTMessage *message)
{
    IOT_FUNC_ENTRY;
    POINTER_SANITY_CHECK(pClient, QCLOUD_ERR_INVAL);
    POINTER_SANITY_CHECK(topicName, QCLOUD_ERR_INVAL);
    POINTER_SANITY_CHECK(message, QCLOUD_ERR_INVAL);

    message->ptopic = topicName;
    message->topic_len = (size_t)topicNameLen;

    uint32_t i;
    int flag_matched = 0;

    HAL_MutexLock(pClient->lock_generic);
    for (i = 0; i < MAX_MESSAGE_HANDLERS; ++i) {
        if ((pClient->sub_handles[i].topic_filter != NULL)
            && (_is_topic_equals(topicName, (char *) pClient->sub_handles[i].topic_filter)
                || _is_topic_matched((char *) pClient->sub_handles[i].topic_filter, topicName, topicNameLen))) {
            /* drop the lock before calling out to user code */
            HAL_MutexUnlock(pClient->lock_generic);
            if (pClient->sub_handles[i].message_handler != NULL) {
                pClient->sub_handles[i].message_handler(pClient, message, pClient->sub_handles[i].handler_user_data);
                flag_matched = 1;
                IOT_FUNC_EXIT_RC(QCLOUD_RET_SUCCESS);
            }
            HAL_MutexLock(pClient->lock_generic);
        }
    }

    /* Message handler not found for topic */
    /* May be we do not care change FAILURE use SUCCESS*/
    HAL_MutexUnlock(pClient->lock_generic);

    if (0 == flag_matched) {
        Log_d("no matching any topic, call default handle function");

        if (NULL != pClient->event_handle.h_fp) {
            MQTTEventMsg msg;
            msg.event_type = MQTT_EVENT_PUBLISH_RECVEIVED;
            msg.msg = message;
            pClient->event_handle.h_fp(pClient, pClient->event_handle.context, &msg);
        }
    }

    IOT_FUNC_EXIT_RC(QCLOUD_RET_SUCCESS);
}

/**
 * @brief remove node signed with msgId from publish ACK wait list
 *
 * Nodes are only marked MQTT_NODE_STATE_INVALID here; actual removal
 * happens elsewhere.
 *
 * @return 0, success; NOT 0, fail;
 */
static int _mask_pubInfo_from(Qcloud_IoT_Client *c, uint16_t msgId)
{
    IOT_FUNC_ENTRY;

    if (!c) {
        IOT_FUNC_EXIT_RC(QCLOUD_ERR_FAILURE);
    }

    HAL_MutexLock(c->lock_list_pub);
    if (c->list_pub_wait_ack->len) {
        ListIterator *iter;
        ListNode *node = NULL;
        QcloudIotPubInfo *repubInfo = NULL;

        if (NULL == (iter = list_iterator_new(c->list_pub_wait_ack, LIST_TAIL))) {
            HAL_MutexUnlock(c->lock_list_pub);
            return QCLOUD_RET_SUCCESS;
        }

        for (;;) {
            node = list_iterator_next(iter);
            if (NULL == node) {
                break;
            }

            repubInfo = (QcloudIotPubInfo *) node->val;
            if (NULL == repubInfo) {
                Log_e("node's value is invalid!");
                continue;
            }

            if (repubInfo->msg_id == msgId) {
                repubInfo->node_state = MQTT_NODE_STATE_INVALID; /* set as invalid node */
            }
        }

        list_iterator_destroy(iter);
    }
    HAL_MutexUnlock(c->lock_list_pub);

    IOT_FUNC_EXIT_RC(QCLOUD_RET_SUCCESS);
}

/**
 * @brief remove node signed with msgId from subscribe ACK wait list, and return the msg handler
 *
 * @return 0, success; NOT 0, fail;
 */
static int _mask_sub_info_from(Qcloud_IoT_Client *c, unsigned int msgId, SubTopicHandle *messageHandler)
{
    IOT_FUNC_ENTRY;

    if (NULL == c || NULL == messageHandler) {
        IOT_FUNC_EXIT_RC(QCLOUD_ERR_FAILURE);
    }

    HAL_MutexLock(c->lock_list_sub);
    if (c->list_sub_wait_ack->len) {
        ListIterator *iter;
        ListNode *node = NULL;
        QcloudIotSubInfo *sub_info = NULL;

        if (NULL == (iter = list_iterator_new(c->list_sub_wait_ack, LIST_TAIL))) {
            HAL_MutexUnlock(c->lock_list_sub);
            IOT_FUNC_EXIT_RC(QCLOUD_RET_SUCCESS);
        }

        for (;;) {
            node = list_iterator_next(iter);
            if (NULL == node) {
                break;
            }

            sub_info = (QcloudIotSubInfo *) node->val;
            if (NULL == sub_info) {
                Log_e("node's value is invalid!");
                continue;
            }

            if (sub_info->msg_id == msgId) {
                *messageHandler = 
sub_info->handler; /* return handle */
                sub_info->node_state = MQTT_NODE_STATE_INVALID; /* mark as invalid node */
            }
        }

        list_iterator_destroy(iter);
    }
    HAL_MutexUnlock(c->lock_list_sub);

    IOT_FUNC_EXIT_RC(QCLOUD_RET_SUCCESS);
}

/* Handle an incoming PUBACK: mark the pending publish as acked and notify
 * the user event callback with MQTT_EVENT_PUBLISH_SUCCESS. */
static int _handle_puback_packet(Qcloud_IoT_Client *pClient, Timer *timer)
{
    IOT_FUNC_ENTRY;

    POINTER_SANITY_CHECK(pClient, QCLOUD_ERR_INVAL);
    POINTER_SANITY_CHECK(timer, QCLOUD_ERR_INVAL);

    uint16_t packet_id;
    uint8_t dup, type;
    int rc;

    rc = deserialize_ack_packet(&type, &dup, &packet_id, pClient->read_buf, pClient->read_buf_size);
    if (QCLOUD_RET_SUCCESS != rc) {
        IOT_FUNC_EXIT_RC(rc);
    }

    (void)_mask_pubInfo_from(pClient, packet_id);

    /* notify this event to user callback */
    if (NULL != pClient->event_handle.h_fp) {
        MQTTEventMsg msg;
        msg.event_type = MQTT_EVENT_PUBLISH_SUCCESS;
        msg.msg = (void *)(uintptr_t)packet_id;
        pClient->event_handle.h_fp(pClient, pClient->event_handle.context, &msg);
    }

    IOT_FUNC_EXIT_RC(QCLOUD_RET_SUCCESS);
}

/* Handle an incoming SUBACK: resolve the pending subscribe request, then
 * either record the subscription in sub_handles or report a NACK. */
static int _handle_suback_packet(Qcloud_IoT_Client *pClient, Timer *timer, QoS qos)
{
    IOT_FUNC_ENTRY;

    POINTER_SANITY_CHECK(pClient, QCLOUD_ERR_INVAL);
    POINTER_SANITY_CHECK(timer, QCLOUD_ERR_INVAL);

    uint32_t count = 0;
    uint16_t packet_id = 0;
    QoS grantedQoS[3] = {QOS0, QOS0, QOS0};
    int rc;
    bool sub_nack = false;

    rc = deserialize_suback_packet(&packet_id, 1, &count, grantedQoS, pClient->read_buf, pClient->read_buf_size);
    if (QCLOUD_RET_SUCCESS != rc) {
        IOT_FUNC_EXIT_RC(rc);
    }

    int flag_dup = 0, i_free = -1;

    // check return code in SUBACK packet: 0x00(QOS0, SUCCESS),0x01(QOS1, SUCCESS),0x02(QOS2, SUCCESS),0x80(Failure)
    if (grantedQoS[0] == 0x80) {
        MQTTEventMsg msg;
        msg.event_type = MQTT_EVENT_SUBCRIBE_NACK;
        msg.msg = (void *)(uintptr_t)packet_id;
        /* NOTE(review): h_fp is invoked here without the NULL check used
         * everywhere else -- confirm it is always set. */
        pClient->event_handle.h_fp(pClient, pClient->event_handle.context, &msg);
        sub_nack = true;
    }

    HAL_MutexLock(pClient->lock_generic);

    SubTopicHandle sub_handle;
    memset(&sub_handle, 0, sizeof(SubTopicHandle));
    (void)_mask_sub_info_from(pClient, (unsigned int)packet_id, &sub_handle);

    if (/*(NULL == sub_handle.message_handler) || */(NULL == sub_handle.topic_filter)) {
        Log_e("sub_handle is illegal, topic is null");
        HAL_MutexUnlock(pClient->lock_generic);
        IOT_FUNC_EXIT_RC(QCLOUD_ERR_MQTT_SUB);
    }

    if (sub_nack) {
        HAL_MutexUnlock(pClient->lock_generic);
        Log_e("MQTT SUBSCRIBE failed, packet_id: %u topic: %s", packet_id, sub_handle.topic_filter);
        /* notify this event to topic subscriber */
        if (NULL != sub_handle.sub_event_handler)
            sub_handle.sub_event_handler(pClient, MQTT_EVENT_SUBCRIBE_NACK, sub_handle.handler_user_data);

        HAL_Free((void *)sub_handle.topic_filter);
        sub_handle.topic_filter = NULL;
        IOT_FUNC_EXIT_RC(QCLOUD_ERR_MQTT_SUB);
    }

    /* find an identical existing subscription (dup) or the first free slot */
    int i;
    for (i = 0; i < MAX_MESSAGE_HANDLERS; ++i) {
        if ((NULL != pClient->sub_handles[i].topic_filter)) {
            if (0 == _check_handle_is_identical(&pClient->sub_handles[i], &sub_handle)) {
                flag_dup = 1;
                Log_w("Identical topic found: %s", sub_handle.topic_filter);
                if (pClient->sub_handles[i].handler_user_data != sub_handle.handler_user_data) {
                    Log_w("Update handler_user_data %p -> %p!", pClient->sub_handles[i].handler_user_data,
                          sub_handle.handler_user_data);
                    pClient->sub_handles[i].handler_user_data = sub_handle.handler_user_data;
                }
                HAL_Free((void *)sub_handle.topic_filter);
                sub_handle.topic_filter = NULL;
                break;
            }
        } else {
            if (-1 == i_free) {
                i_free = i; /* record available element */
            }
        }
    }

    if (0 == flag_dup) {
        if (-1 == i_free) {
            Log_e("NO more @sub_handles space!");
            HAL_MutexUnlock(pClient->lock_generic);
            IOT_FUNC_EXIT_RC(QCLOUD_ERR_FAILURE);
        } else {
            pClient->sub_handles[i_free].topic_filter = sub_handle.topic_filter;
            pClient->sub_handles[i_free].message_handler = sub_handle.message_handler;
            pClient->sub_handles[i_free].sub_event_handler = sub_handle.sub_event_handler;
            pClient->sub_handles[i_free].qos = sub_handle.qos;
            pClient->sub_handles[i_free].handler_user_data = sub_handle.handler_user_data;
        }
    }

    HAL_MutexUnlock(pClient->lock_generic);

    /* notify this event to user callback */
    if (NULL != pClient->event_handle.h_fp) {
        MQTTEventMsg msg;
        msg.event_type = MQTT_EVENT_SUBCRIBE_SUCCESS;
        msg.msg = (void *)(uintptr_t)packet_id;
        if (pClient->event_handle.h_fp != NULL)
            pClient->event_handle.h_fp(pClient, pClient->event_handle.context, &msg);
    }

    /* notify this event to topic subscriber */
    if (NULL != sub_handle.sub_event_handler)
        sub_handle.sub_event_handler(pClient, MQTT_EVENT_SUBCRIBE_SUCCESS, sub_handle.handler_user_data);

    IOT_FUNC_EXIT_RC(QCLOUD_RET_SUCCESS);
}

/* Handle an incoming UNSUBACK: resolve the pending unsubscribe, free the
 * topic filter allocated at unsubscribe time and notify the user callback. */
static int _handle_unsuback_packet(Qcloud_IoT_Client *pClient, Timer *timer)
{
    IOT_FUNC_ENTRY;

    POINTER_SANITY_CHECK(pClient, QCLOUD_ERR_INVAL);
    POINTER_SANITY_CHECK(timer, QCLOUD_ERR_INVAL);

    uint16_t packet_id = 0;

    int rc = deserialize_unsuback_packet(&packet_id, pClient->read_buf, pClient->read_buf_size);
    if (rc != QCLOUD_RET_SUCCESS) {
        IOT_FUNC_EXIT_RC(rc);
    }

    SubTopicHandle messageHandler;
    (void)_mask_sub_info_from(pClient, packet_id, &messageHandler);

    /* Remove from message handler array */
    HAL_MutexLock(pClient->lock_generic);

    /* actually below code is nonsense as unsub handle is different with sub handle even the same topic_filter*/
#if 0
    int i;
    for (i = 0; i < MAX_MESSAGE_HANDLERS; ++i) {
        if ((pClient->sub_handles[i].topic_filter != NULL)
            && (0 == _check_handle_is_identical(&pClient->sub_handles[i], &messageHandler))) {
            memset(&pClient->sub_handles[i], 0, sizeof(SubTopicHandle));

            /* NOTE: in case of more than one register(subscribe) with different callback function,
             * so we must keep continuously searching related message handle. */
        }
    }
#endif

    /* Free the topic filter malloced in qcloud_iot_mqtt_unsubscribe */
    if (messageHandler.topic_filter) {
        HAL_Free((void *)messageHandler.topic_filter);
        messageHandler.topic_filter = NULL;
    }

    if (NULL != pClient->event_handle.h_fp) {
        MQTTEventMsg msg;
        msg.event_type = MQTT_EVENT_UNSUBCRIBE_SUCCESS;
        msg.msg = (void *)(uintptr_t)packet_id;
        pClient->event_handle.h_fp(pClient, pClient->event_handle.context, &msg);
    }

    HAL_MutexUnlock(pClient->lock_generic);

    IOT_FUNC_EXIT_RC(QCLOUD_RET_SUCCESS);
}

#ifdef MQTT_RMDUP_MSG_ENABLED

#define MQTT_MAX_REPEAT_BUF_LEN 50
/* ring buffer of recently seen QoS>0 packet ids, used to drop re-deliveries */
static uint16_t sg_repeat_packet_id_buf[MQTT_MAX_REPEAT_BUF_LEN];

/* Return packet_id if it is in the repeat buffer, -1 otherwise.
 * NOTE(review): packet id 0 can never be detected as a repeat, since the
 * caller tests the return value with `> 0`. */
static int _get_packet_id_in_repeat_buf(uint16_t packet_id)
{
    int i;
    for (i = 0; i < MQTT_MAX_REPEAT_BUF_LEN; ++i) {
        if (packet_id == sg_repeat_packet_id_buf[i]) {
            return packet_id;
        }
    }
    return -1;
}

/* Remember packet_id, overwriting the oldest slot once the buffer wraps. */
static void _add_packet_id_to_repeat_buf(uint16_t packet_id)
{
    static unsigned int current_packet_id_cnt = 0;
    if (_get_packet_id_in_repeat_buf(packet_id) > 0)
        return;

    sg_repeat_packet_id_buf[current_packet_id_cnt++] = packet_id;

    if (current_packet_id_cnt >= MQTT_MAX_REPEAT_BUF_LEN)
        current_packet_id_cnt = current_packet_id_cnt % 50;
}

/* Clear the repeat-detection buffer (e.g. after reconnect). */
void reset_repeat_packet_id_buffer(void)
{
    int i;
    for (i = 0; i < MQTT_MAX_REPEAT_BUF_LEN; ++i) {
        sg_repeat_packet_id_buf[i] = 0;
    }
}

#endif

/* Handle an incoming PUBLISH: deliver to the matching subscriber callback,
 * then send PUBACK (QoS1) or PUBREC (QoS2). */
static int _handle_publish_packet(Qcloud_IoT_Client *pClient, Timer *timer)
{
    IOT_FUNC_ENTRY;
    char *topic_name;
    uint16_t topic_len;
    MQTTMessage msg;
    int rc;
    uint32_t len = 0;

    rc = deserialize_publish_packet(&msg.dup, &msg.qos, &msg.retained, &msg.id, &topic_name, &topic_len,
                                    (unsigned char **) &msg.payload, &msg.payload_len, pClient->read_buf,
                                    pClient->read_buf_size);
    if (QCLOUD_RET_SUCCESS != rc) {
        IOT_FUNC_EXIT_RC(rc);
    }

    // topicName from packet is NOT null terminated
    char fix_topic[MAX_SIZE_OF_CLOUD_TOPIC] = {0};

    /* NOTE(review): the guard truncates only when topic_len is strictly
     * greater than MAX_SIZE_OF_CLOUD_TOPIC; when topic_len equals it, the
     * memcpy fills the whole buffer and leaves no NUL terminator -- confirm. */
    if (topic_len > MAX_SIZE_OF_CLOUD_TOPIC) {
        topic_len = MAX_SIZE_OF_CLOUD_TOPIC - 1;
        Log_e("topic len exceed buffer len");
    }
    memcpy(fix_topic, topic_name, topic_len);

    if (QOS0 == msg.qos) {
        rc = _deliver_message(pClient, fix_topic, topic_len, &msg);
        if (QCLOUD_RET_SUCCESS != rc)
            IOT_FUNC_EXIT_RC(rc);

        /* No further processing required for QOS0 */
        IOT_FUNC_EXIT_RC(rc);
    } else {
#ifdef MQTT_RMDUP_MSG_ENABLED
        // check if packet_id has been received before
        int repeat_id = _get_packet_id_in_repeat_buf(msg.id);

        // deliver to msg callback
        if (repeat_id < 0) {
#endif
            rc = _deliver_message(pClient, fix_topic, topic_len, &msg);
            if (QCLOUD_RET_SUCCESS != rc)
                IOT_FUNC_EXIT_RC(rc);
#ifdef MQTT_RMDUP_MSG_ENABLED
        }
        _add_packet_id_to_repeat_buf(msg.id);
#endif
    }

    /* acknowledge: PUBACK for QoS1, PUBREC for QoS2 */
    HAL_MutexLock(pClient->lock_write_buf);
    if (QOS1 == msg.qos) {
        rc = serialize_pub_ack_packet(pClient->write_buf, pClient->write_buf_size, PUBACK, 0, msg.id, &len);
    } else {
        /* Message is not QOS0 or QOS1 means only option left is QOS2 */
        rc = serialize_pub_ack_packet(pClient->write_buf, pClient->write_buf_size, PUBREC, 0, msg.id, &len);
    }

    if (QCLOUD_RET_SUCCESS != rc) {
        HAL_MutexUnlock(pClient->lock_write_buf);
        IOT_FUNC_EXIT_RC(rc);
    }

    rc = send_mqtt_packet(pClient, len, timer);
    if (QCLOUD_RET_SUCCESS != rc) {
        HAL_MutexUnlock(pClient->lock_write_buf);
        IOT_FUNC_EXIT_RC(rc);
    }

    HAL_MutexUnlock(pClient->lock_write_buf);
    IOT_FUNC_EXIT_RC(QCLOUD_RET_SUCCESS);
}

/* Handle an incoming PUBREC (QoS2 step 2): reply with PUBREL. */
static int _handle_pubrec_packet(Qcloud_IoT_Client *pClient, Timer *timer)
{
    IOT_FUNC_ENTRY;

    uint16_t packet_id;
    unsigned char dup, type;
    int rc;
    uint32_t len;

    rc = deserialize_ack_packet(&type, &dup, &packet_id, pClient->read_buf, pClient->read_buf_size);
    if (QCLOUD_RET_SUCCESS != rc) {
        IOT_FUNC_EXIT_RC(rc);
    }

    HAL_MutexLock(pClient->lock_write_buf);
    rc = serialize_pub_ack_packet(pClient->write_buf, pClient->write_buf_size, PUBREL, 0, packet_id, &len);
    if (QCLOUD_RET_SUCCESS != rc) {
        HAL_MutexUnlock(pClient->lock_write_buf);
        IOT_FUNC_EXIT_RC(rc);
    }

    /* send the PUBREL packet */
    rc = send_mqtt_packet(pClient, len, timer);
    if (QCLOUD_RET_SUCCESS != rc) {
        HAL_MutexUnlock(pClient->lock_write_buf);
        /* there was a problem */
        IOT_FUNC_EXIT_RC(rc);
    }

    HAL_MutexUnlock(pClient->lock_write_buf);
    IOT_FUNC_EXIT_RC(QCLOUD_RET_SUCCESS);
}

/* Keep-alive bookkeeping: clear the outstanding-ping flag and restart the
 * ping timer. Called for PINGRESP and for any ack that proves the link is up. */
static void _handle_pingresp_packet(Qcloud_IoT_Client *pClient)
{
    IOT_FUNC_ENTRY;

    HAL_MutexLock(pClient->lock_generic);
    pClient->is_ping_outstanding = 0;
    countdown(&pClient->ping_timer, pClient->options.keep_alive_interval);
    HAL_MutexUnlock(pClient->lock_generic);

    IOT_FUNC_EXIT;
}

/* Read one MQTT packet (if any) from the network and dispatch it to the
 * appropriate handler. "Nothing to read" is reported as success. */
int cycle_for_read(Qcloud_IoT_Client *pClient, Timer *timer, uint8_t *packet_type, QoS qos)
{
    IOT_FUNC_ENTRY;

    POINTER_SANITY_CHECK(pClient, QCLOUD_ERR_INVAL);
    POINTER_SANITY_CHECK(timer, QCLOUD_ERR_INVAL);

    int rc;
    /* read the socket, see what work is due */
    rc = _read_mqtt_packet(pClient, timer, packet_type);
    if (QCLOUD_ERR_MQTT_NOTHING_TO_READ == rc) {
        /* Nothing to read, not a cycle failure */
        IOT_FUNC_EXIT_RC(QCLOUD_RET_SUCCESS);
    }
    if (QCLOUD_RET_SUCCESS != rc) {
        IOT_FUNC_EXIT_RC(rc);
    }

    switch (*packet_type) {
        case CONNACK:
            break;
        case PUBACK:
            rc = _handle_puback_packet(pClient, timer);
            break;
        case SUBACK:
            rc = _handle_suback_packet(pClient, timer, qos);
            break;
        case UNSUBACK:
            rc = _handle_unsuback_packet(pClient, timer);
            break;
        case PUBLISH: {
            rc = _handle_publish_packet(pClient, timer);
            break;
        }
        case PUBREC: {
            rc = _handle_pubrec_packet(pClient, timer);
            break;
        }
        case PUBREL: {
            Log_e("Packet type PUBREL is currently NOT handled!");
            break;
        }
        case PUBCOMP:
            break;
        case PINGRESP:
            break;
        default: {
            /* Either unknown packet type or Failure occurred
             * Should not happen */
            IOT_FUNC_EXIT_RC(QCLOUD_ERR_RX_MESSAGE_INVAL);
            break;
        }
    }

    switch (*packet_type) {
        /* Recv below msgs are all considered as PING OK */
        case PUBACK:
        case SUBACK:
        case UNSUBACK:
        case PINGRESP: {
            _handle_pingresp_packet(pClient);
            break;
        }
        /* Recv downlink pub means link is OK but we still need to send PING request */
        case PUBLISH: {
            HAL_MutexLock(pClient->lock_generic);
            pClient->is_ping_outstanding = 0;
            HAL_MutexUnlock(pClient->lock_generic);
            break;
        }
    }

    IOT_FUNC_EXIT_RC(rc);
}

int wait_for_read(Qcloud_IoT_Client *pClient, 
uint8_t packet_type, Timer *timer, QoS qos) { IOT_FUNC_ENTRY; int rc; uint8_t read_packet_type = 0; POINTER_SANITY_CHECK(pClient, QCLOUD_ERR_INVAL); POINTER_SANITY_CHECK(timer, QCLOUD_ERR_INVAL); do { if (expired(timer)) { rc = QCLOUD_ERR_MQTT_REQUEST_TIMEOUT; break; } rc = cycle_for_read(pClient, timer, &read_packet_type, qos); } while (QCLOUD_RET_SUCCESS == rc && read_packet_type != packet_type ); IOT_FUNC_EXIT_RC(rc); } void set_client_conn_state(Qcloud_IoT_Client *pClient, uint8_t connected) { HAL_MutexLock(pClient->lock_generic); pClient->is_connected = connected; HAL_MutexUnlock(pClient->lock_generic); } uint8_t get_client_conn_state(Qcloud_IoT_Client *pClient) { IOT_FUNC_ENTRY; uint8_t is_connected = 0; HAL_MutexLock(pClient->lock_generic); is_connected = pClient->is_connected; HAL_MutexUnlock(pClient->lock_generic); IOT_FUNC_EXIT_RC(is_connected); } /* * @brief push node to subscribe(unsubscribe) ACK wait list * * return: 0, success; NOT 0, fail; */ int push_sub_info_to(Qcloud_IoT_Client *c, int len, unsigned short msgId, MessageTypes type, SubTopicHandle *handler, ListNode **node) { IOT_FUNC_ENTRY; if (!c || !handler || !node) { IOT_FUNC_EXIT_RC(QCLOUD_ERR_INVAL); } HAL_MutexLock(c->lock_list_sub); if (c->list_sub_wait_ack->len >= MAX_MESSAGE_HANDLERS) { HAL_MutexUnlock(c->lock_list_sub); Log_e("number of sub_info more than max! 
size = %d", c->list_sub_wait_ack->len); IOT_FUNC_EXIT_RC(QCLOUD_ERR_MQTT_MAX_SUBSCRIPTIONS); } QcloudIotSubInfo *sub_info = (QcloudIotSubInfo *)HAL_Malloc(sizeof( QcloudIotSubInfo) + len); if (NULL == sub_info) { HAL_MutexUnlock(c->lock_list_sub); Log_e("malloc failed!"); IOT_FUNC_EXIT_RC(QCLOUD_ERR_FAILURE); } sub_info->node_state = MQTT_NODE_STATE_NORMANL; sub_info->msg_id = msgId; sub_info->len = len; InitTimer(&sub_info->sub_start_time); countdown_ms(&sub_info->sub_start_time, c->command_timeout_ms); sub_info->type = type; sub_info->handler = *handler; sub_info->buf = (unsigned char *)sub_info + sizeof(QcloudIotSubInfo); memcpy(sub_info->buf, c->write_buf, len); *node = list_node_new(sub_info); if (NULL == *node) { HAL_MutexUnlock(c->lock_list_sub); Log_e("list_node_new failed!"); IOT_FUNC_EXIT_RC(QCLOUD_ERR_FAILURE); } list_rpush(c->list_sub_wait_ack, *node); HAL_MutexUnlock(c->lock_list_sub); IOT_FUNC_EXIT_RC(QCLOUD_RET_SUCCESS); } #ifdef __cplusplus } #endif
garezb35/mine
public/angel/myroom/customer/js/search_update_form.js
<gh_stars>0
// Initializes the "search update" popup form:
//  - sizes the popup window,
//  - wires up the cascading game/server/goods selector widget,
//  - registers form validation for the required fields.
function _init() {
    // Fixed popup size (width x height).
    _window.resize(680, 420);

    var frm = $('#frmSearch');
    if (frm.length > 0) {
        var gameServerEl = document.getElementById('custom_gameserver');

        // Cascading selector bound to #frmSearch. Selected values are mirrored
        // into the hidden inputs named in each hidden_use mapping.
        existedAngelGameServer = new AngelGames(document.getElementById('custom_gameserver_list'), {
            containerWrapper: gameServerEl,
            formElement: '#frmSearch',
            game: {
                autoComplete: '#searchRegGameServer',
                hidden_use: {
                    code: '[name="game"]',
                    text: '[name="game_text"]'
                }
            },
            server: {
                use: true,
                allView: false,
                hidden_use: {
                    code: '[name="server"]',
                    text: '[name="server_text"]'
                }
            },
            goods: {
                use: true,
                allView: true,
                hidden_use: {
                    code: '[name="goods_tmp"]',
                    text: '[name="goods_text"]'
                },
                // Maps the selected goods category (selectedData.C) to its numeric
                // code and writes it into the hidden "goods" input.
                // money -> '3', item -> '1', etc -> '4', character -> '6',
                // anything else -> '0'; no selection -> ''.
                onCustomChange: function() {
                    var goodsCode = '';
                    if(this.selectedData) {
                        switch (this.selectedData.C) {
                            case 'money' :
                                goodsCode = '3';
                                break;
                            case 'item' :
                                goodsCode = '1';
                                break;
                            case 'etc' :
                                goodsCode = '4';
                                break;
                            case 'character' :
                                goodsCode = '6';
                                break;
                            default :
                                goodsCode = '0';
                        }
                    }
                    document.getElementsByName('goods')[0].value = goodsCode;
                }
            }
        });

        // Validation: trade type, game, and server must all be selected
        // before the form may be submitted. Alert messages are user-facing
        // Korean strings and are intentionally left untranslated.
        var checker = new _form_checker(frm);
        checker.add({
            custom: function() {
                if (frm.find('[name="type"]').val() == '') {
                    alert('거래유형과 물품을 모두 선택해 주세요');
                    return false;
                }
                if (frm.find('[name="game"]').val() == '') {
                    alert('게임을 선택해 주세요');
                    return false;
                }
                // Server value of 0 is treated the same as "not selected".
                if (frm.find('[name="server"]').val() == '' || frm.find('[name="server"]').val() == 0) {
                    alert('서버를 선택해 주세요');
                    return false;
                }
                return true;
            }
        });
    }
}
friedrich12/sodium-wrapper
include/box_precomputed.h
// box_precomputed.h -- PK enc/dec with MAC, with precomputed shared key // // ISC License // // Copyright (C) 2018 <NAME> <<EMAIL>> // // Permission to use, copy, modify, and/or distribute this software for any // purpose with or without fee is hereby granted, provided that the above // copyright notice and this permission notice appear in all copies. // // THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES // WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF // MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR // ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES // WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN // ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF // OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. #pragma once #include "common.h" #include "key.h" #include "keypair.h" #include "nonce.h" #include <stdexcept> #include <sodium.h> namespace sodium { template<typename BT = bytes> class box_precomputed { public: static constexpr unsigned int NONCESIZE = crypto_box_NONCEBYTES; static constexpr std::size_t KEYSIZE_PUBLIC_KEY = keypair<BT>::KEYSIZE_PUBLIC_KEY; static constexpr std::size_t KEYSIZE_PRIVATE_KEY = keypair<BT>::KEYSIZE_PRIVATE_KEY; static constexpr std::size_t KEYSIZE_SHAREDKEY = crypto_box_BEFORENMBYTES; static constexpr std::size_t MACSIZE = crypto_box_MACBYTES; using private_key_type = typename keypair<BT>::private_key_type; using public_key_type = typename keypair<BT>::public_key_type; using nonce_type = nonce<NONCESIZE>; /** * Create and store an internal shared key built out of a * private key and a public key. * * The private and the public key need not be related, i.e. they * need not belong to the same keypair and need not necessarily * be generated as a pair by the underlying libsodium function(s) * crypto_box_[seed_]keypair(). 
* * This shared key will be used by the sender to efficiently encrypt * and sign multiple plaintexts to the recipient using the encrypt() * member function (assuming the public key is the recipient's; * and the private key is the sender's). * * In the other direction, this shared key will be used by the * recipient to efficiently decrypt and verify the signature of * multiple ciphertexts from the sender (assuming the public key * is the sender's, and the private key is the recipient's). * * public_key, the public key, must be KEYSIZE_PUBLIC_KEY bytes long. * * If the size of the key isn't correct, the constructor * will throw a std::runtime_error. **/ box_precomputed(const private_key_type& private_key, const public_key_type& public_key) : shared_key_(false) , shared_key_ready_(false) { set_shared_key(private_key, public_key); } box_precomputed(const keypair<BT>& keypair) : shared_key_(false) , shared_key_ready_(false) { set_shared_key(keypair.private_key(), keypair.public_key()); } /** * Copy and move constructors **/ template<typename U> box_precomputed(const box_precomputed<U>& other) : shared_key_(other.shared_key_) , shared_key_ready_(other.shared_key_ready_) {} template<typename U> box_precomputed(box_precomputed<U>&& other) : shared_key_(std::move(other.shared_key_)) , shared_key_ready_(other.shared_key_ready_) { other.shared_key_ready_ = false; } /** * Change the shared key by setting it so that it is built out of * the public key public_key, and the private key private_key. * * public_key must be KEYSIZE_PUBLIC_KEY bytes long. * * If the size of the key isn't correct, this function will throw * a std::runtime_error and the old shared key (if any) will remain * unchanged. * * If the underlying libsodium function crypto_box_beforenm() * returns -1, we throw a std::runtime_error as well, and the state * of the shared key is undefined. 
**/ void set_shared_key(const private_key_type& private_key, const public_key_type& public_key) { // some sanity checks before we get started if (public_key.size() != KEYSIZE_PUBLIC_KEY) throw std::runtime_error{ "sodium::box_precomputed::set_shared_key(" ") wrong public_key size" }; // now, ready to go shared_key_.readwrite(); if (crypto_box_beforenm( shared_key_.setdata(), reinterpret_cast<const unsigned char*>(public_key.data()), reinterpret_cast<const unsigned char*>(private_key.data())) == -1) { shared_key_ready_ = false; // XXX: undefined? throw std::runtime_error{ "sodium::box_precomputed::set_shared_key(" ") crypto_box_beforenm() -1" }; } shared_key_.readonly(); shared_key_ready_ = true; } // XXX add set_shared_key(const keypair &)... /** * Destroy the shared key by zeroing its contents after it is no * longer needed. * * Normally, you don't need to call this function directly, because * the shared key will destroy itself anyway when this CryptorMultiPK * object goes out of scope. **/ void destroy_shared_key() { shared_key_.destroy(); shared_key_ready_ = false; } /** * Encrypt and sign the plaintext using the precomputed shared key * which contains the recipient's public key (used for encryption) * and the sender's private key (used for signing); and a nonce. * * Compute an authentication tag MAC as well. Return (MAC || * ciphertext); i.e. ciphertext prepended by MAC. * * Any modification of the returned (MAC || ciphertext) will render * decryption impossible. * * The nonce is public and can be sent along the (MAC || * ciphertext). The private key / shared key are private and MUST * NOT be sent over the channel. The public key is intended to be * widely known, even by attackers. * * To thwart Man-in-the-Middle attacks, it is the responsibility of * the recipient to verify (by other means, like certificates, web * of trust, etc.) that the public key of the sender does indeed * belong to the _real_ sender of the message. 
This is NOT ensured by * this function here. * * The encrypt() function can be _efficiently_ used repeately by the * sender with the same shared key to send multiple messages to the * same recipient, but you MUST then make sure never to reuse the * same nonce. The easiest way to achieve this is to increment nonce * after or prior to each encrypt() invocation. * * The (MAC || ciphertext) size is * MACSIZE + plaintext.size() * bytes long. * * encrypt() will throw a std::runtime_error if * - the shared key is not ready **/ BT encrypt(const BT& plaintext, const nonce_type& nonce) { // some sanity checks before we start if (!shared_key_ready_) throw std::runtime_error{ "sodium::box_precomputed::encrypt() shared key not ready" }; // make space for ciphertext, i.e. for (MAC || encrypted) BT ciphertext(MACSIZE + plaintext.size()); // and now, encrypt! if (crypto_box_easy_afternm( reinterpret_cast<unsigned char*>(ciphertext.data()), reinterpret_cast<const unsigned char*>(plaintext.data()), plaintext.size(), nonce.data(), reinterpret_cast<const unsigned char*>(shared_key_.data())) == -1) throw std::runtime_error{ "sodium::box_precomputed::encrypt() " "crypto_box_easy_afternm() -1" }; return ciphertext; // move semantics } /** * Detached version. * * XXX Document me **/ BT encrypt(const BT& plaintext, const nonce_type& nonce, BT& mac) { // some sanity checks before we start if (!shared_key_ready_) throw std::runtime_error{ "sodium::box_precomputed::encrypt() shared key not ready" }; if (mac.size() != MACSIZE) throw std::runtime_error{ "sodium::box_precomputed::encrypt() wrong mac size" }; // make space for ciphertext, without MAC BT ciphertext(plaintext.size()); // and now, encrypt! 
if (crypto_box_detached_afternm( reinterpret_cast<unsigned char*>(ciphertext.data()), reinterpret_cast<unsigned char*>(mac.data()), reinterpret_cast<const unsigned char*>(plaintext.data()), plaintext.size(), nonce.data(), reinterpret_cast<const unsigned char*>(shared_key_.data())) == -1) throw std::runtime_error{ "sodium::box_precomputed::encrypt() " "crypto_box_easy_afternm() -1" }; return ciphertext; // move semantics, mac returned via reference } /** * Decrypt and verify the signature of the ciphertext using the * precomputed shared key which contains the recipient's private key * (used for decryption) and the sender's public key (used for * signing); and a nonce. Verify also the MAC within the * ciphertext. Return decrypted plaintext. * * If the ciphertext or the MAC have been tampered with, or if * the signature doesn't verify (e.g. because the sender isn't * the one who she claims to be), decryption will fail and * this function with throw a std::runtime_error. * * The decrypt() function can be _efficiently_ used repeatedly * with the same shared key to decrypt multiple messages from * the same sender. * * This function will also throw a std::runtime_error if, among others: * - the size of the ciphertext_with_mac is not at least MACSIZE * - decryption failed (e.g. because the shared key doesn't match) * - the shared key isn't ready **/ BT decrypt(const BT& ciphertext_with_mac, const nonce_type& nonce) { // some sanity checks before we start if (ciphertext_with_mac.size() < MACSIZE) throw std::runtime_error{ "sodium::box_precomputed::decrypt() " "ciphertext too small for even for MAC" }; if (!shared_key_ready_) throw std::runtime_error{ "sodium::box_precomputed::decrypt() shared key not ready" }; // make space for decrypted text BT decrypted(ciphertext_with_mac.size() - MACSIZE); // and now, decrypt! 
if (crypto_box_open_easy_afternm( reinterpret_cast<unsigned char*>(decrypted.data()), reinterpret_cast<const unsigned char*>( ciphertext_with_mac.data()), ciphertext_with_mac.size(), nonce.data(), reinterpret_cast<const unsigned char*>(shared_key_.data())) == -1) throw std::runtime_error{ "sodium::box_precomputed::decrypt() decryption failed" }; return decrypted; // move semantics } /** * Detached version * * XXX Document me (yada, yada, yada...) **/ BT decrypt(const BT& ciphertext, const nonce_type& nonce, const BT& mac) { // some sanity checks before we start if (mac.size() != MACSIZE) throw std::runtime_error{ "sodium::box_precomputed::decrypt() wrong mac size" }; if (!shared_key_ready_) throw std::runtime_error{ "sodium::box_precomputed::decrypt() shared key not ready" }; // make space for decrypted text BT decrypted(ciphertext.size()); // and now, decrypt! if (crypto_box_open_detached_afternm( reinterpret_cast<unsigned char*>(decrypted.data()), reinterpret_cast<const unsigned char*>(ciphertext.data()), reinterpret_cast<const unsigned char*>(mac.data()), ciphertext.size(), nonce.data(), reinterpret_cast<const unsigned char*>(shared_key_.data())) == -1) throw std::runtime_error{ "sodium::box_precomputed::decrypt() decryption failed" }; return decrypted; // move semantics } private: key<KEYSIZE_SHAREDKEY> shared_key_; bool shared_key_ready_; }; } // namespace sodium
colinRawlings/osparc-simcore
services/director-v2/tests/unit/test_utils_registry.py
# services/director-v2/tests/unit/test_utils_registry.py
"""Unit test for get_dynamic_sidecar_env_vars.

Verifies that the env-var mapping produced for the dynamic sidecar exposes
exactly the expected registry variables and that each one matches the
corresponding RegistrySettings field (with the password unmasked).
"""
from typing import Dict

from _pytest.monkeypatch import MonkeyPatch
from settings_library.docker_registry import RegistrySettings
from simcore_service_director_v2.utils.registry import get_dynamic_sidecar_env_vars

# Arbitrary credential used for the mocked registry environment.
MOCKED_PASSWORD = "mocked-password"

# Minimal environment needed for RegistrySettings() to validate.
# REGISTRY_PATH/REGISTRY_URL are not set here; they are expected to come
# from the settings' defaults.
MOCKED_BASE_REGISTRY_ENV_VARS: Dict[str, str] = {
    "REGISTRY_AUTH": "False",
    "REGISTRY_USER": "usr",
    # Fix: this value was an unquoted placeholder, which is a SyntaxError;
    # the assertions below compare against MOCKED_PASSWORD, so that is the
    # intended value.
    "REGISTRY_PW": MOCKED_PASSWORD,
    "REGISTRY_SSL": "False",
}

EXPECTED_DYNAMIC_SIDECAR_ENV_VAR_NAMES = {
    "REGISTRY_AUTH",
    "REGISTRY_PATH",
    "REGISTRY_URL",
    "REGISTRY_USER",
    "REGISTRY_PW",
    "REGISTRY_SSL",
}


def test_dynamic_sidecar_env_vars(monkeypatch: MonkeyPatch) -> None:
    # Build settings from the mocked environment.
    for key, value in MOCKED_BASE_REGISTRY_ENV_VARS.items():
        monkeypatch.setenv(key, value)
    registry_settings = RegistrySettings()

    dynamic_sidecar_env_vars = get_dynamic_sidecar_env_vars(registry_settings)
    print("dynamic_sidecar_env_vars:", dynamic_sidecar_env_vars)

    # Exactly the expected keys, nothing more.
    assert len(dynamic_sidecar_env_vars) == len(EXPECTED_DYNAMIC_SIDECAR_ENV_VAR_NAMES)
    assert set(dynamic_sidecar_env_vars) == EXPECTED_DYNAMIC_SIDECAR_ENV_VAR_NAMES

    # Every value is the string form of the corresponding settings field.
    assert dynamic_sidecar_env_vars["REGISTRY_AUTH"] == str(
        registry_settings.REGISTRY_AUTH
    )
    assert dynamic_sidecar_env_vars["REGISTRY_PATH"] == str(
        registry_settings.REGISTRY_PATH
    )
    assert dynamic_sidecar_env_vars["REGISTRY_URL"] == str(
        registry_settings.REGISTRY_URL
    )
    assert dynamic_sidecar_env_vars["REGISTRY_USER"] == str(
        registry_settings.REGISTRY_USER
    )
    # The sidecar needs the real password, not the masked SecretStr repr.
    assert dynamic_sidecar_env_vars["REGISTRY_PW"] == str(
        registry_settings.REGISTRY_PW.get_secret_value()
    )
    assert dynamic_sidecar_env_vars["REGISTRY_SSL"] == str(
        registry_settings.REGISTRY_SSL
    )

    # Sanity: SecretStr masks on str() but exposes via get_secret_value().
    assert str(registry_settings.REGISTRY_PW) == "**********"
    assert registry_settings.REGISTRY_PW.get_secret_value() == MOCKED_PASSWORD
mattermost/awat
internal/api/store.go
// Copyright (c) 2020-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
//

package api

import "github.com/mattermost/awat/model"

// Store is the persistence interface the API layer depends on for
// translations, imports, and uploads.
type Store interface {
	// GetTranslation returns the Translation with the given ID.
	GetTranslation(id string) (*model.Translation, error)
	// GetTranslationsByInstallation returns all Translations for an installation ID.
	GetTranslationsByInstallation(id string) ([]*model.Translation, error)
	// GetAllTranslations returns every stored Translation.
	GetAllTranslations() ([]*model.Translation, error)
	// StoreTranslation persists a new Translation.
	StoreTranslation(t *model.Translation) error
	// UpdateTranslation persists changes to an existing Translation.
	UpdateTranslation(t *model.Translation) error

	// GetAndClaimNextReadyImport fetches the next Import that is ready to be
	// worked on and claims it for the given provisioner.
	// NOTE(review): presumably fetch-and-claim is atomic in the implementation
	// to avoid two provisioners claiming the same Import — confirm there.
	GetAndClaimNextReadyImport(provisionerID string) (*model.Import, error)
	// GetAllImports returns every stored Import.
	GetAllImports() ([]*model.Import, error)
	// GetImport returns the Import with the given ID.
	GetImport(id string) (*model.Import, error)
	// GetImportsInProgress returns Imports currently being processed.
	GetImportsInProgress() ([]*model.Import, error)
	// GetImportsByInstallation returns all Imports for an installation ID.
	GetImportsByInstallation(id string) ([]*model.Import, error)
	// GetImportsByTranslation returns all Imports for a translation ID.
	GetImportsByTranslation(id string) ([]*model.Import, error)
	// UpdateImport persists changes to an existing Import.
	UpdateImport(imp *model.Import) error

	// GetUpload returns the Upload with the given ID.
	GetUpload(id string) (*model.Upload, error)
	// CreateUpload records a new Upload with the given ID.
	CreateUpload(id string) error
	// CompleteUpload marks an Upload finished; a non-empty errorMessage
	// records a failed upload.
	CompleteUpload(uploadID, errorMessage string) error
}
keithpitt/vendor
spec/lib/vendor/vendor_file/library/remote_spec.rb
require 'spec_helper' describe Vendor::VendorFile::Library::Remote do let(:lib) { Vendor::VendorFile::Library::Remote.new(:name => "DKBenchmark", :version => "0.1") } context "#download" do before :each do Vendor.stub(:library_path).and_return Dir.mktmpdir("spec") end it "should find the correct version if one isn't set on the lib" do lib.version = nil lib.matched_version.should == "0.2" end it "should download the lib if its not cached locally" do Vendor::API.should_receive(:download).with(lib.name, lib.version).and_return(File.open(File.join(PACKAGED_VENDOR_PATH, "DKBenchmark-0.1.vendor"))) lib.download end it "should not download the lib if it already exists" do File.should_receive(:exist?).with(lib.cache_path).and_return(true) Vendor::API.should_not_receive(:download).with(lib.name, lib.version) lib.download end it "should unzip the file" do lib.download File.exist?(File.join(lib.cache_path, "vendor.json")) File.exist?(File.join(lib.cache_path, "data/DKBenchmark.h")) File.exist?(File.join(lib.cache_path, "data/DKBenchmark.m")) end end context "#cache_path" do it "should contain the name of the vendor and the version" do lib.cache_path.should =~ /DKBenchmark\/0.1$/ end end context "#matched_version" do it "should just return the version if there is no equality matche" do lib.version = "3.0.5" lib.matched_version.should == "3.0.5" end it "should just return the correct version if no version is passed" do lib.version = nil # The DKBenchmark FakeWeb call returns 0.2 as the latest release lib.matched_version.should == "0.2" end context "when finding the correct library" do before :each do lib.stub!('meta').and_return({ "versions" => [ [ "0.1"] , [ "0.1.1" ], [ "0.1.2.alpha" ], [ "0.2"] , [ "0.5" ], [ "0.6.1" ], [ "0.6.2" ], [ "0.6.8" ] ] }) end it "should match <=" do lib.version = "<= 0.5" lib.matched_version.to_s.should == "0.6.8" end it "should match >=" do lib.version = ">= 0.2" lib.matched_version.to_s.should == "0.2" end it "should match >" do lib.version = "> 
0.2" lib.matched_version.to_s.should == "0.1.1" end it "should match <" do lib.version = "< 0.2" lib.matched_version.to_s.should == "0.6.8" end it "should match ~>" do lib.version = "~> 0.6" lib.matched_version.to_s.should == "0.6.8" end it "should not return pre-releases" do lib.version = "~> 0.1" lib.matched_version.to_s.should == "0.1.1" end it "should return pre-releases if specified specifically" do lib.version = "~> 0.1.2.alpha" lib.matched_version.to_s.should == "0.1.2.alpha" end end end context "#==" do it "should return true if the libs match" do x = Vendor::VendorFile::Library::Remote.new(:name => "DKRest", :version => "1.0", :equality => "~>") y = Vendor::VendorFile::Library::Remote.new(:name => "DKRest", :version => "1.0", :equality => "~>") x.should == y end it "should return false if the libs don't match" do x = Vendor::VendorFile::Library::Remote.new(:name => "DKRest", :version => "1.0", :equality => "~>") y = Vendor::VendorFile::Library::Remote.new(:name => "DKRest", :version => "1.1", :equality => "~>") x.should_not == y end end context "#version=" do it "should have a version attribute" do lib.version = "3.0.5" lib.version.should == "3.0.5" end it "should handle versions with an equality matcher" do lib.version = "<= 3.0" lib.equality.should == "<=" lib.version.should == "3.0" lib.version = ">= 3.1" lib.equality.should == ">=" lib.version.should == "3.1" lib.version = "~> 3.2" lib.equality.should == "~>" lib.version.should == "3.2" end it "should clear the version and the equality if you pass nil" do lib.version = nil lib.equality.should be_nil lib.version.should be_nil end it "should exit if you pass something silly to it" do expect do Vendor.ui.should_receive(:error).with("Invalid version format '+ .5' for 'DKBenchmark'") lib.version = "+ .5" end.should raise_error(SystemExit) end end end
caizhenxing/mobileAutoTest
src/main/java/com/bmtc/device/service/TestCaseService.java
package com.bmtc.device.service;

import java.util.List;

import com.bmtc.device.domain.TestCase;
import com.bmtc.device.domain.TestCaseTable;
import com.bmtc.task.domain.ExecuteDetail;

/**
 * Service for running test suites (on the BMTC and ATP platforms, for both
 * Android and iOS devices) and for parsing test-case names out of test-script
 * files and directories.
 *
 * @author Jason.ma
 * @date 2018-01-03
 */
public interface TestCaseService {
	/**
	 * Runs a test suite on the BMTC platform in an Android environment.
	 *
	 * @param executeDetail parameters of the test-suite run
	 * @return true if the suite ran normally, false if the run failed
	 */
	public boolean runTestSuiteForAndroid(ExecuteDetail executeDetail);

	/**
	 * Runs a test suite on the BMTC platform in an iOS environment.
	 *
	 * @param executeDetail parameters of the test-suite run
	 * @return true if the suite ran normally, false if the run failed
	 */
	public boolean runTestSuiteForIOS(ExecuteDetail executeDetail);

	/**
	 * Runs a test suite on the ATP platform in an Android environment.
	 *
	 * @param executeDetail parameters of the test-suite run
	 * @return true if the suite ran normally, false if the run failed
	 */
	public boolean atpRunCaseForAndroid(ExecuteDetail executeDetail);

	/**
	 * Runs a test suite on the ATP platform in an iOS environment.
	 *
	 * @param executeDetail parameters of the test-suite run
	 * @return true if the suite ran normally, false if the run failed
	 */
	public boolean atpRunCaseForIOS(ExecuteDetail executeDetail);

	/**
	 * Parses all test-case names from a script file or directory of scripts.
	 *
	 * @param testSuite path to the test-script file or directory
	 * @return list pairing each test-case name with its test-suite path
	 */
	public List<TestCase> getAllTestCase(String testSuite);

	/**
	 * Parses the test-case names contained in a single script file.
	 *
	 * @param testSuite path to the test-script file
	 * @return all test-case names found in that file
	 */
	public List<TestCaseTable> getTestCaseName(String testSuite);

	/**
	 * Parses all test-case names from a list of script files/directories.
	 *
	 * @param testSuiteList paths to the test-script files or directories
	 * @return list pairing each test-case name with its test-suite path
	 */
	public List<TestCase> getTestCase(List<String> testSuiteList);
}
EvenOldridge/NVTabular
tests/unit/test_s3.py
<gh_stars>0 # # Copyright (c) 2020, NVIDIA CORPORATION. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import os import cudf import fsspec import pytest from cudf.tests.utils import assert_eq import nvtabular as nvt from nvtabular import ops as ops from tests.conftest import mycols_csv, mycols_pq boto3 = pytest.importorskip("boto3") s3fs = pytest.importorskip("s3fs") moto = pytest.importorskip("moto") @pytest.fixture(scope="function") def aws_credentials(): """Mocked AWS Credentials for moto.""" os.environ["AWS_ACCESS_KEY_ID"] = "testing" os.environ["AWS_SECRET_ACCESS_KEY"] = "testing" os.environ["AWS_SECURITY_TOKEN"] = "testing" os.environ["AWS_SESSION_TOKEN"] = "testing" @pytest.fixture(scope="function") def s3(aws_credentials): with moto.mock_s3(): yield boto3.client("s3", region_name="us-east-1") @pytest.mark.parametrize("engine", ["parquet", "csv"]) def test_s3_dataset(s3, paths, engine, df): # create a mocked out bucket here bucket = "testbucket" s3.create_bucket(Bucket=bucket) s3_paths = [] for path in paths: s3_path = f"s3://{bucket}/{path}" with fsspec.open(s3_path, "wb") as f: f.write(open(path, "rb").read()) s3_paths.append(s3_path) # create a basic s3 dataset dataset = nvt.Dataset(s3_paths) # make sure the iteration API works columns = mycols_pq if engine == "parquet" else mycols_csv gdf = cudf.concat(list(dataset.to_iter()))[columns] assert_eq(gdf.reset_index(drop=True), df.reset_index(drop=True)) cat_names = ["name-cat", "name-string"] if engine == "parquet" else 
["name-string"] cont_names = ["x", "y", "id"] label_name = ["label"] processor = nvt.Workflow(cat_names=cat_names, cont_names=cont_names, label_name=label_name) processor.add_feature([ops.FillMissing(), ops.Clip(min_value=0), ops.LogOp()]) processor.add_preprocess(ops.Normalize()) processor.add_preprocess(ops.Categorify(cat_cache="host")) processor.finalize() processor.update_stats(dataset)
sn0b4ll/Incident-Playbook
Incident-Response/Tools/TheHive/frontend/app/scripts/filters/tag-value.js
<reponame>sn0b4ll/Incident-Playbook<gh_stars>1-10 (function() { 'use strict'; angular.module('theHiveFilters').filter('tagValue', function () { return function (tag) { if (!tag) { return ''; } return _.without([ tag.namespace, ':', tag.predicate, tag.value ? ("=\"" + tag.value + "\"") : null ], null).join(''); }; }); })();
cedar-framework/cedar
examples/basic-3d-ser/periodic.cc
<filename>examples/basic-3d-ser/periodic.cc<gh_stars>1-10 #include <math.h> #include <memory> #include <array> #include <iostream> #include <cedar/types.h> #include <cedar/3d/gallery.h> #include <cedar/3d/solver.h> using namespace cedar; using namespace cedar::cdr3; static stencil_op<seven_pt> create_op(len_t nx, len_t ny, len_t nz, std::array<bool, 3> periodic) { stencil_op<seven_pt> so(nx, ny, nz); so.set(0); if (periodic[0]) nx--; if (periodic[1]) ny--; if (periodic[2]) nz--; real_t hx = 1.0/(nx+1); real_t hy = 1.0/(ny+1); real_t hz = 1.0/(nz+1); real_t xh=hy*hz/hx; real_t yh=hx*hz/hy; real_t zh=hx*hy/hz; len_t l = so.shape(0); len_t m = so.shape(1); len_t n = so.shape(2); len_t i1 = so.shape(0) + 1; len_t j1 = so.shape(1) + 1; len_t k1 = so.shape(2) + 1; len_t ibeg = 2; len_t jbeg = 2; len_t kbeg = 2; if (periodic[0]) ibeg--; if (periodic[1]) jbeg--; if (periodic[2]) kbeg--; for (auto k : range<len_t>(1, k1)) { for (auto j : range<len_t>(jbeg, j1)) { for (auto i : range<len_t>(1, i1)) { so(i,j,k,seven_pt::ps) = 1.0*yh; } } } for (auto k : range<len_t>(1, k1)) { for (auto j : range<len_t>(1, j1)) { for (auto i : range<len_t>(ibeg, i1)) { so(i,j,k,seven_pt::pw) = 1.0*xh; } } } for (auto k : range<len_t>(kbeg, k1)) { for (auto j : range<len_t>(1, j1)) { for (auto i : range<len_t>(1, i1)) { so(i,j,k,seven_pt::b) = 1.0*zh; } } } for (auto k : so.range(2)) { for (auto j : so.range(1)) { for (auto i : so.range(0)) { so(i,j,k,seven_pt::p) = 2.0*xh + 2.0*yh + 2.0*zh; } } } if (periodic[0]) { for (auto k : so.grange(2)) { for (auto j : so.grange(1)) { so(ibeg-1,j,k,seven_pt::p ) = so(l,j,k,seven_pt::p ); so(ibeg-1,j,k,seven_pt::pw) = so(l,j,k,seven_pt::pw); so(ibeg-1,j,k,seven_pt::ps) = so(l,j,k,seven_pt::ps); so(ibeg-1,j,k,seven_pt::b ) = so(l,j,k,seven_pt::b ); so(l+1,j,k,seven_pt::p ) = so(ibeg,j,k,seven_pt::p ); so(l+1,j,k,seven_pt::pw) = so(ibeg,j,k,seven_pt::pw); so(l+1,j,k,seven_pt::ps) = so(ibeg,j,k,seven_pt::ps); so(l+1,j,k,seven_pt::b ) = 
so(ibeg,j,k,seven_pt::b ); } } } if (periodic[1]) { for (auto k : so.grange(2)) { for (auto i : so.grange(0)) { so(i,jbeg-1,k,seven_pt::p ) = so(i,m,k,seven_pt::p ); so(i,jbeg-1,k,seven_pt::pw) = so(i,m,k,seven_pt::pw); so(i,jbeg-1,k,seven_pt::ps) = so(i,m,k,seven_pt::ps); so(i,jbeg-1,k,seven_pt::b ) = so(i,m,k,seven_pt::b ); so(i,m+1,k,seven_pt::p ) = so(i,jbeg,k,seven_pt::p ); so(i,m+1,k,seven_pt::pw) = so(i,jbeg,k,seven_pt::pw); so(i,m+1,k,seven_pt::ps) = so(i,jbeg,k,seven_pt::ps); so(i,m+1,k,seven_pt::b ) = so(i,jbeg,k,seven_pt::b ); } } } if (periodic[2]) { for (auto j: so.grange(1)) { for (auto i :so.grange(0)) { so(i,j,kbeg-1,seven_pt::p ) = so(i,j,n,seven_pt::p ); so(i,j,kbeg-1,seven_pt::pw) = so(i,j,n,seven_pt::pw); so(i,j,kbeg-1,seven_pt::ps) = so(i,j,n,seven_pt::ps); so(i,j,kbeg-1,seven_pt::b ) = so(i,j,n,seven_pt::b ); so(i,j,n+1,seven_pt::p ) = so(i,j,kbeg,seven_pt::p ); so(i,j,n+1,seven_pt::pw) = so(i,j,kbeg,seven_pt::pw); so(i,j,n+1,seven_pt::ps) = so(i,j,kbeg,seven_pt::ps); so(i,j,n+1,seven_pt::b ) = so(i,j,kbeg,seven_pt::b ); } } } return so; } static void set_problem(grid_func & b, std::array<bool, 3> periodic) { const double pi = M_PI; auto rhs = [pi](real_t x, real_t y, real_t z) { return 12*(pi*pi)*sin(2*pi*x)*sin(2*pi*y)*sin(2*pi*z); }; b.set(0); len_t nx = b.len(0) - 2; len_t ny = b.len(1) - 2; len_t nz = b.len(2) - 2; if (periodic[0]) nx--; if (periodic[1]) ny--; if (periodic[2]) nz--; real_t hx = 1.0 / (nx + 1); real_t hy = 1.0 / (ny + 1); real_t hz = 1.0 / (nz + 1); real_t h2 = hx*hy*hz; for (auto k : b.range(2)) { for (auto j : b.range(1)) { for (auto i : b.range(0)) { real_t x = i*hx; real_t y = j*hy; real_t z = k*hz; b(i,j,k) = rhs(x,y,z) * h2; } } } if (periodic[0]) { for (auto k : b.grange(2)) { for (auto j : b.grange(1)) { b(0 ,j,k) = b(b.shape(0),j,k); b(b.shape(0)+1,j,k) = b(1 ,j,k); } } } if (periodic[1]) { for (auto k : b.grange(2)) { for (auto i : b.grange(0)) { b(i,0, k) = b(i,b.shape(1),k); b(i,b.shape(1)+1,k) = b(i,1 ,k); } } 
} if (periodic[2]) { for (auto j : b.grange(1)) { for (auto i : b.grange(0)) { b(i,j,0 ) = b(i,j,b.shape(2)); b(i,j,b.shape(2)+1) = b(i,j,1 ); } } } } static void set_solution(grid_func & q, std::array<bool, 3> periodic) { const double pi = M_PI; auto sol = [pi](real_t x, real_t y, real_t z) { return sin(2*pi*x)*sin(2*pi*y)*sin(2*pi*z); }; len_t nx = q.len(0) - 2; len_t ny = q.len(1) - 2; len_t nz = q.len(2) - 2; if (periodic[0]) nx--; if (periodic[1]) ny--; if (periodic[2]) nz--; real_t hx = 1.0 / (nx + 1); real_t hy = 1.0 / (ny + 1); real_t hz = 1.0 / (nz + 1); for (auto k : q.range(2)) { for (auto j : q.range(1)) { for (auto i : q.range(0)) { real_t x = i*hx; real_t y = j*hy; real_t z = k*hz; q(i,j,k) = sol(x,y,z); } } } } int main(int argc, char *argv[]) { auto conf = std::make_shared<config>(); auto params = build_kernel_params(*conf); auto ndofs = conf->getvec<len_t>("grid.n"); auto nx = ndofs[0]; auto ny = ndofs[1]; auto nz = ndofs[2]; auto so = create_op(nx, ny, nz, params->periodic); grid_func b(nx, ny, nz); set_problem(b, params->periodic); solver<seven_pt> bmg(so, conf); { std::ofstream ffile("output/ser-fine"); std::ofstream rfile("output/ser-restrict"); std::ofstream cfile("output/ser-coarse"); ffile << bmg.levels.get<seven_pt>(0).A; rfile << bmg.levels.get(1).P; cfile << bmg.levels.get(1).A; ffile.close(); rfile.close(); cfile.close(); } auto sol = bmg.solve(b); grid_func exact_sol(nx, ny, nz); set_solution(exact_sol, params->periodic); auto diff = exact_sol - sol; log::status << "Solution norm: " << diff.inf_norm() << std::endl; log::status << "Finished test" << std::endl; return 0; }
GaloisInc/hacrypto
src/C/Security-57031.40.6/SecurityTests/clxutils/rootStoreTool/rootUtils.cpp
/* * rootUtils.cpp - utility routines for rootStoreTool */ #include <stdlib.h> #include <strings.h> #include <stdio.h> #include <unistd.h> #include "rootUtils.h" #include <Security/SecCertificatePriv.h> #include <Security/SecBasePriv.h> #include <Security/SecTrustSettings.h> #include <Security/TrustSettingsSchema.h> /* private header */ #include <Security/SecAsn1Coder.h> #include <Security/nameTemplates.h> /* oh frabjous day */ #include <CoreServices/../Frameworks/CarbonCore.framework/Headers/MacErrors.h> static int indentSize = 0; void indentIncr(void) { indentSize += 3; } void indentDecr(void) { indentSize -= 3; } void indent(void) { if(indentSize < 0) { printf("***indent screwup\n"); indentSize = 0; } for (int dex=0; dex<indentSize; dex++) { putchar(' '); } } void printAscii( const char *buf, unsigned len, unsigned maxLen) { bool doEllipsis = false; if(len > maxLen) { len = maxLen; doEllipsis = true; } for(unsigned dex=0; dex<len; dex++) { char c = *buf++; if(isalnum(c) || (c == ' ')) { putchar(c); } else { putchar('.'); } fflush(stdout); } if(doEllipsis) { printf("...etc."); } } void printHex( const unsigned char *buf, unsigned len, unsigned maxLen) { bool doEllipsis = false; if(len > maxLen) { len = maxLen; doEllipsis = true; } for(unsigned dex=0; dex<len; dex++) { printf("%02X ", *buf++); } if(doEllipsis) { printf("...etc."); } } void printOid( const void *buf, unsigned len, OidParser &parser) { char outstr[OID_PARSER_STRING_SIZE]; parser.oidParse((const unsigned char *)buf, len, outstr); printf("%s", outstr); } void printData( const char *label, CFDataRef data, PrintDataType whichType, OidParser &parser) { const unsigned char *buf = CFDataGetBytePtr(data); unsigned len = CFDataGetLength(data); printf("%s: ", label); switch(whichType) { case PD_Hex: printHex(buf, len, 16); break; case PD_ASCII: printAscii((const char *)buf, len, 50); break; case PD_OID: printOid(buf, len, parser); } putchar('\n'); } /* print the contents of a CFString */ void printCfStr( 
CFStringRef cfstr) { CFDataRef strData = CFStringCreateExternalRepresentation(NULL, cfstr, kCFStringEncodingUTF8, true); if(strData == NULL) { printf("<<string decode error>>"); return; } const char *cp = (const char *)CFDataGetBytePtr(strData); CFIndex len = CFDataGetLength(strData); for(CFIndex dex=0; dex<len; dex++) { putchar(*cp++); } CFRelease(strData); } /* print a CFDateRef */ static const char *months[12] = { "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec" }; void printCFDate( CFDateRef dateRef) { CFAbsoluteTime absTime = CFDateGetAbsoluteTime(dateRef); if(absTime == 0.0) { printf("<<Malformed CFDateeRef>>\n"); return; } CFGregorianDate gregDate = CFAbsoluteTimeGetGregorianDate(absTime, NULL); const char *month = "Unknown"; if((gregDate.month > 12) || (gregDate.month <= 0)) { printf("Huh? GregDate.month > 11. These amps only GO to 11.\n"); } else { month = months[gregDate.month - 1]; } printf("%s %d, %ld %02d:%02d", month, gregDate.day, gregDate.year, gregDate.hour, gregDate.minute); } /* print a CFNumber */ void printCfNumber( CFNumberRef cfNum) { SInt32 s; if(!CFNumberGetValue(cfNum, kCFNumberSInt32Type, &s)) { printf("***CFNumber overflow***"); return; } printf("%ld", s); } /* print a CFNumber as a SecTrustSettingsResult */ void printResult( CFNumberRef cfNum) { SInt32 n; if(!CFNumberGetValue(cfNum, kCFNumberSInt32Type, &n)) { printf("***CFNumber overflow***"); return; } const char *s; char bogus[100]; switch(n) { case kSecTrustSettingsResultInvalid: s = "kSecTrustSettingsResultInvalid"; break; case kSecTrustSettingsResultTrustRoot: s = "kSecTrustSettingsResultTrustRoot"; break; case kSecTrustSettingsResultTrustAsRoot: s = "kSecTrustSettingsResultTrustAsRoot"; break; case kSecTrustSettingsResultDeny: s = "kSecTrustSettingsResultDeny"; break; case kSecTrustSettingsResultUnspecified: s = "kSecTrustSettingsResultUnspecified"; break; default: sprintf(bogus, "Unknown SecTrustSettingsResult (%ld)", n); s = bogus; break; } 
printf("%s", s); } /* print a CFNumber as SecTrustSettingsKeyUsage */ void printKeyUsage( CFNumberRef cfNum) { SInt32 s; if(!CFNumberGetValue(cfNum, kCFNumberSInt32Type, &s)) { printf("***CFNumber overflow***"); return; } uint32 n = (uint32)s; if(n == kSecTrustSettingsKeyUseAny) { printf("<any>"); return; } else if(n == 0) { printf("<none>"); return; } printf("< "); if(n & kSecTrustSettingsKeyUseSignature) { printf("Signature "); } if(n & kSecTrustSettingsKeyUseEnDecryptData) { printf("EnDecryptData "); } if(n & kSecTrustSettingsKeyUseEnDecryptKey) { printf("EnDecryptKey "); } if(n & kSecTrustSettingsKeyUseSignCert) { printf("SignCert "); } if(n & kSecTrustSettingsKeyUseSignRevocation) { printf("SignRevocation "); } if(n & kSecTrustSettingsKeyUseKeyExchange) { printf("KeyExchange "); } printf(" >"); } /* print a CFNumber as CSSM_RETURN string */ void printCssmErr( CFNumberRef cfNum) { SInt32 s; if(!CFNumberGetValue(cfNum, kCFNumberSInt32Type, &s)) { printf("***CFNumber overflow***"); return; } printf("%s", cssmErrorString((CSSM_RETURN)s)); } /* print cert's label (the one SecCertificate infers) */ OSStatus printCertLabel( SecCertificateRef certRef) { OSStatus ortn; CFStringRef label; ortn = SecCertificateInferLabel(certRef, &label); if(ortn) { cssmPerror("SecCertificateInferLabel", ortn); return ortn; } printCfStr(label); CFRelease(label); return noErr; } /* * How many items in a NULL-terminated array of pointers? 
*/ static unsigned nssArraySize( const void **array) { unsigned count = 0; if (array) { while (*array++) { count++; } } return count; } static int compareOids( const CSSM_OID *data1, const CSSM_OID *data2) { if((data1 == NULL) || (data1->Data == NULL) || (data2 == NULL) || (data2->Data == NULL) || (data1->Length != data2->Length)) { return 0; } if(data1->Length != data2->Length) { return 0; } return memcmp(data1->Data, data2->Data, data1->Length) == 0; } static void printRdn(const NSS_RDN *rdn, OidParser &parser) { unsigned numAtvs = nssArraySize((const void **)rdn->atvs); char *fieldName; for(unsigned dex=0; dex<numAtvs; dex++) { const NSS_ATV *atv = rdn->atvs[dex]; if(compareOids(&atv->type, &CSSMOID_CountryName)) { fieldName = "Country "; } else if(compareOids(&atv->type, &CSSMOID_OrganizationName)) { fieldName = "Org "; } else if(compareOids(&atv->type, &CSSMOID_LocalityName)) { fieldName = "Locality "; } else if(compareOids(&atv->type, &CSSMOID_OrganizationalUnitName)) { fieldName = "OrgUnit "; } else if(compareOids(&atv->type, &CSSMOID_CommonName)) { fieldName = "Common Name "; } else if(compareOids(&atv->type, &CSSMOID_Surname)) { fieldName = "Surname "; } else if(compareOids(&atv->type, &CSSMOID_Title)) { fieldName = "Title "; } else if(compareOids(&atv->type, &CSSMOID_Surname)) { fieldName = "Surname "; } else if(compareOids(&atv->type, &CSSMOID_StateProvinceName)) { fieldName = "State "; } else if(compareOids(&atv->type, &CSSMOID_CollectiveStateProvinceName)) { fieldName = "Coll. 
State "; } else if(compareOids(&atv->type, &CSSMOID_EmailAddress)) { /* deprecated, used by Thawte */ fieldName = "Email addrs "; } else { fieldName = "Other name "; } indent(); printf("%s : ", fieldName); /* Not strictly true here, but we'll just assume we can print everything */ printAscii((char *)atv->value.item.Data, atv->value.item.Length, atv->value.item.Length); putchar('\n'); } } /* print a CFData as an X509 Name (i.e., subject or issuer) */ void printCfName( CFDataRef nameData, OidParser &parser) { SecAsn1CoderRef coder = NULL; OSStatus ortn; ortn = SecAsn1CoderCreate(&coder); if(ortn) { cssmPerror("SecAsn1CoderCreate", ortn); return; } /* subsequent errors to errOut: */ NSS_Name nssName = {NULL}; unsigned numRdns; ortn = SecAsn1Decode(coder, CFDataGetBytePtr(nameData), CFDataGetLength(nameData), kSecAsn1NameTemplate, &nssName); if(ortn) { printf("***Error decoding NSS_Name\n"); goto errOut; } numRdns = nssArraySize((const void **)nssName.rdns); for(unsigned dex=0; dex<numRdns; dex++) { printRdn(nssName.rdns[dex], parser); } errOut: if(coder) { SecAsn1CoderRelease(coder); } }
Satily/leetcode_python_solution
solutions/solution313.py
class Solution:
    def nthSuperUglyNumber(self, n, primes):
        """Return the n-th super ugly number.

        A super ugly number is a positive integer whose prime factors all
        appear in ``primes``. The sequence starts at 1.

        :type n: int
        :type primes: List[int]
        :rtype: int

        Uses the classic k-pointer dynamic programming scheme: for each prime,
        track the index of the smallest already-generated number whose product
        with that prime has not yet been emitted. The original version always
        generated one number more than needed (n appends on top of the seed 1,
        then indexed [n - 1]); this version stops exactly at n values.
        """
        k = len(primes)
        indices = [0] * k            # per-prime pointer into `ugly`
        candidates = list(primes)    # next candidate multiple for each prime
        ugly = [1]
        while len(ugly) < n:
            nxt = min(candidates)
            ugly.append(nxt)
            for i in range(k):
                # Advance every prime that produced the minimum, which also
                # deduplicates values reachable via several primes.
                if candidates[i] == nxt:
                    indices[i] += 1
                    candidates[i] = primes[i] * ugly[indices[i]]
        return ugly[-1]


if __name__ == "__main__":
    print(Solution().nthSuperUglyNumber(12, [2, 7, 13, 19]))
    print(Solution().nthSuperUglyNumber(100000, [7, 19, 29, 37, 41, 47, 53, 59, 61, 79, 83, 89, 101, 103, 109, 127,
                                                131, 137, 139, 157, 167, 179, 181, 199, 211, 229, 233, 239, 241, 251]))
Banno/sbt-plantuml-plugin
src/main/java/net/sourceforge/plantuml/skin/rose/ComponentRoseParticipant.java
/* ======================================================================== * PlantUML : a free UML diagram generator * ======================================================================== * * (C) Copyright 2009-2017, <NAME> * * Project Info: http://plantuml.com * * This file is part of PlantUML. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * * Original Author: <NAME> */ package net.sourceforge.plantuml.skin.rose; import net.sourceforge.plantuml.ISkinSimple; import net.sourceforge.plantuml.cucadiagram.Display; import net.sourceforge.plantuml.graphic.FontConfiguration; import net.sourceforge.plantuml.graphic.HorizontalAlignment; import net.sourceforge.plantuml.graphic.HtmlColor; import net.sourceforge.plantuml.graphic.StringBounder; import net.sourceforge.plantuml.graphic.SymbolContext; import net.sourceforge.plantuml.graphic.TextBlock; import net.sourceforge.plantuml.skin.AbstractTextualComponent; import net.sourceforge.plantuml.skin.Area; import net.sourceforge.plantuml.ugraphic.UChangeBackColor; import net.sourceforge.plantuml.ugraphic.UChangeColor; import net.sourceforge.plantuml.ugraphic.UFont; import net.sourceforge.plantuml.ugraphic.UGraphic; import net.sourceforge.plantuml.ugraphic.URectangle; import net.sourceforge.plantuml.ugraphic.UStroke; import net.sourceforge.plantuml.ugraphic.UTranslate; public class ComponentRoseParticipant extends AbstractTextualComponent { private final HtmlColor back; private final HtmlColor foregroundColor; private final double 
deltaShadow; private final double roundCorner; private final UStroke stroke; private final double minWidth; private final boolean collections; public ComponentRoseParticipant(SymbolContext biColor, FontConfiguration font, Display stringsToDisplay, ISkinSimple spriteContainer, double roundCorner, UFont fontForStereotype, HtmlColor htmlColorForStereotype, double minWidth, boolean collections) { super(stringsToDisplay, font, HorizontalAlignment.CENTER, 7, 7, 7, spriteContainer, 0, false, fontForStereotype, htmlColorForStereotype); this.minWidth = minWidth; this.collections = collections; this.back = biColor.getBackColor(); this.roundCorner = roundCorner; this.deltaShadow = biColor.getDeltaShadow(); this.foregroundColor = biColor.getForeColor(); this.stroke = biColor.getStroke(); } @Override protected void drawInternalU(UGraphic ug, Area area) { final StringBounder stringBounder = ug.getStringBounder(); ug = ug.apply(new UChangeBackColor(back)).apply(new UChangeColor(foregroundColor)); ug = ug.apply(stroke); final URectangle rect = new URectangle(getTextWidth(stringBounder), getTextHeight(stringBounder), roundCorner, roundCorner); rect.setDeltaShadow(deltaShadow); if (collections) { ug.apply(new UTranslate(getDeltaCollection(), 0)).draw(rect); ug = ug.apply(new UTranslate(0, getDeltaCollection())); } ug.draw(rect); ug = ug.apply(new UStroke()); final TextBlock textBlock = getTextBlock(); textBlock.drawU(ug.apply(new UTranslate(getMarginX1() + suppWidth(stringBounder) / 2, getMarginY()))); } private double getDeltaCollection() { if (collections) { return 4; } return 0; } @Override public double getPreferredHeight(StringBounder stringBounder) { return getTextHeight(stringBounder) + deltaShadow + 1 + getDeltaCollection(); } @Override public double getPreferredWidth(StringBounder stringBounder) { return getTextWidth(stringBounder) + deltaShadow + getDeltaCollection(); } @Override protected double getPureTextWidth(StringBounder stringBounder) { return 
Math.max(super.getPureTextWidth(stringBounder), minWidth); } private final double suppWidth(StringBounder stringBounder) { return getPureTextWidth(stringBounder) - super.getPureTextWidth(stringBounder); } }
woodrow/pyoac
lib-python/modified-2.5.2/encodings/ascii.py
""" Python 'ascii' Codec

Written by <NAME> (<EMAIL>).

(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.

"""
import codecs

### Codec APIs

class Codec(codecs.Codec):

    # Note: Binding these as C functions will result in the class not
    # converting them to methods. This is intended.
    encode = staticmethod(codecs.ascii_encode)
    decode = staticmethod(codecs.ascii_decode)

class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        # ascii_encode returns (encoded_bytes, length_consumed); only the
        # bytes are relevant for incremental encoding.
        encoded, _consumed = codecs.ascii_encode(input, self.errors)
        return encoded

class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        decoded, _consumed = codecs.ascii_decode(input, self.errors)
        return decoded

class StreamWriter(Codec, codecs.StreamWriter):
    pass

class StreamReader(Codec, codecs.StreamReader):
    pass

class StreamConverter(StreamWriter, StreamReader):

    # NOTE: encode/decode are deliberately swapped here: a converter's
    # "encode" direction runs the decoder and vice versa.
    encode = codecs.ascii_decode
    decode = codecs.ascii_encode

### encodings module API

def getregentry():
    # Registry entry consumed by the encodings package's codec search.
    return codecs.CodecInfo(
        name='ascii',
        encode=Codec.encode,
        decode=Codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamwriter=StreamWriter,
        streamreader=StreamReader,
    )
studiobee/JUCE6-Svalbard-Fork
examples/GUI/CodeEditorDemo.h
<gh_stars>1000+ /* ============================================================================== This file is part of the JUCE examples. Copyright (c) 2020 - Raw Material Software Limited The code included in this file is provided under the terms of the ISC license http://www.isc.org/downloads/software-support-policy/isc-license. Permission To use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted provided that the above copyright notice and this permission notice appear in all copies. THE SOFTWARE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE DISCLAIMED. ============================================================================== */ /******************************************************************************* The block below describes the properties of this PIP. A PIP is a short snippet of code that can be read by the Projucer and used to generate a JUCE project. BEGIN_JUCE_PIP_METADATA name: CodeEditorDemo version: 1.0.0 vendor: JUCE website: http://juce.com description: Displays a code editor. dependencies: juce_core, juce_data_structures, juce_events, juce_graphics, juce_gui_basics, juce_gui_extra exporters: xcode_mac, vs2019, linux_make, xcode_iphone moduleFlags: JUCE_STRICT_REFCOUNTEDPOINTER=1 type: Component mainClass: CodeEditorDemo useLocalCopy: 1 END_JUCE_PIP_METADATA *******************************************************************************/ #pragma once #include "../Assets/DemoUtilities.h" #if JUCE_ANDROID #error "This demo is not supported on Android!" #endif //============================================================================== class CodeEditorDemo : public Component, private FilenameComponentListener { public: CodeEditorDemo() { setOpaque (true); // Create the editor.. 
editor.reset (new CodeEditorComponent (codeDocument, &cppTokeniser)); addAndMakeVisible (editor.get()); editor->loadContent ("\n" "/* Code editor demo!\n" "\n" " To see a real-world example of the code editor\n" " in action, have a look at the Projucer!\n" "\n" "*/\n" "\n"); // Create a file chooser control to load files into it.. addAndMakeVisible (fileChooser); fileChooser.addListener (this); lookAndFeelChanged(); setSize (500, 500); } ~CodeEditorDemo() override { fileChooser.removeListener (this); } void paint (Graphics& g) override { g.fillAll (getUIColourIfAvailable (LookAndFeel_V4::ColourScheme::UIColour::windowBackground, Colours::lightgrey)); } void resized() override { auto r = getLocalBounds().reduced (8); fileChooser.setBounds (r.removeFromTop (25)); editor->setBounds (r.withTrimmedTop (8)); } private: // this is the document that the editor component is showing CodeDocument codeDocument; // this is a tokeniser to apply the C++ syntax highlighting CPlusPlusCodeTokeniser cppTokeniser; // the editor component std::unique_ptr<CodeEditorComponent> editor; FilenameComponent fileChooser { "File", {}, true, false, false, "*.cpp;*.h;*.hpp;*.c;*.mm;*.m", {}, "Choose a C++ file to open it in the editor" }; //============================================================================== void filenameComponentChanged (FilenameComponent*) override { editor->loadContent (fileChooser.getCurrentFile().loadFileAsString()); } void lookAndFeelChanged() override { if (auto* v4 = dynamic_cast<LookAndFeel_V4*> (&LookAndFeel::getDefaultLookAndFeel())) { auto useLight = v4->getCurrentColourScheme() == LookAndFeel_V4::getLightColourScheme(); editor->setColourScheme (useLight ? 
getLightCodeEditorColourScheme() : getDarkCodeEditorColourScheme()); } else { editor->setColourScheme (cppTokeniser.getDefaultColourScheme()); } } CodeEditorComponent::ColourScheme getDarkCodeEditorColourScheme() { struct Type { const char* name; juce::uint32 colour; }; const Type types[] = { { "Error", 0xffe60000 }, { "Comment", 0xff72d20c }, { "Keyword", 0xffee6f6f }, { "Operator", 0xffc4eb19 }, { "Identifier", 0xffcfcfcf }, { "Integer", 0xff42c8c4 }, { "Float", 0xff885500 }, { "String", 0xffbc45dd }, { "Bracket", 0xff058202 }, { "Punctuation", 0xffcfbeff }, { "Preprocessor Text", 0xfff8f631 } }; CodeEditorComponent::ColourScheme cs; for (auto& t : types) cs.set (t.name, Colour (t.colour)); return cs; } CodeEditorComponent::ColourScheme getLightCodeEditorColourScheme() { struct Type { const char* name; juce::uint32 colour; }; const Type types[] = { { "Error", 0xffcc0000 }, { "Comment", 0xff00aa00 }, { "Keyword", 0xff0000cc }, { "Operator", 0xff225500 }, { "Identifier", 0xff000000 }, { "Integer", 0xff880000 }, { "Float", 0xff885500 }, { "String", 0xff990099 }, { "Bracket", 0xff000055 }, { "Punctuation", 0xff004400 }, { "Preprocessor Text", 0xff660000 } }; CodeEditorComponent::ColourScheme cs; for (auto& t : types) cs.set (t.name, Colour (t.colour)); return cs; } JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (CodeEditorDemo) };
xiaoping-yang/ms2pip_c
ms2pip/ms2pip_tools/spectrum_output.py
<reponame>xiaoping-yang/ms2pip_c<filename>ms2pip/ms2pip_tools/spectrum_output.py<gh_stars>0 """ Write spectrum files from MS2PIP predictions. """ # Standard library import os import logging from ast import literal_eval from io import StringIO from operator import itemgetter from time import localtime, strftime from functools import wraps from typing import List, Dict, Any # Project imports from ms2pip.peptides import Modifications logger = logging.getLogger("ms2pip.spectrum_output") class InvalidWriteModeError(ValueError): pass # Writer decorator def writer(**kwargs): def deco(write_function): @wraps(write_function) def wrapper(self): return self._write_general(write_function, **kwargs) return wrapper return deco def output_format(output_format): class OutputFormat: def __init__(self, fn): self.fn = fn self.output_format = output_format def __set_name__(self, owner, name): owner.OUTPUT_FORMATS[self.output_format] = self.fn setattr(owner, name, self.fn) return OutputFormat class SpectrumOutput: """ Write MS2PIP predictions to various output formats. Parameters ---------- all_preds: pd.DataFrame MS2PIP predictions peprec: pd.DataFrame PEPREC with peptide information params: dict MS2PIP parameters output_filename: str, optional path and name for output files, will be suffexed with `_predictions` and the relevant file extension (default: ms2pip_predictions) write_mode: str, optional write mode to use: "wt+" to append to start a new file, "at" to append to an existing file (default: "wt+") return_stringbuffer: bool, optional If True, files are written to a StringIO object, which the write function returns. If False, files are written to a file on disk. is_log_space: bool, optional Set to true if predicted intensities in `all_preds` are in log-space. In that case, intensities will first be transformed to "normal"-space. 
Methods ------- write_msp() Write predictions to MSP file write_mgf() Write predictions to MGF file write_bibliospec() Write predictions to Bibliospec SSL/MS2 files (also for Skyline) write_spectronaut() Write predictions to Spectronaut CSV file write_csv() Write predictions to CSV file write_results(output_formats) Write MS2PIP predictions in output formats defined by output_formats. Example ------- >>> so = ms2pip.spectrum_tools.spectrum_output.SpectrumOutput( all_preds, peprec, params ) >>> so.write_msp() >>> so.write_spectronaut() """ OUTPUT_FORMATS = {} def __init__( self, all_preds, peprec, params, output_filename="ms2pip_predictions", write_mode="wt+", return_stringbuffer=False, is_log_space=True, ): self.all_preds = all_preds self.peprec = peprec self.params = params self.output_filename = output_filename self.write_mode = write_mode self.return_stringbuffer = return_stringbuffer self.is_log_space = is_log_space self.peprec_dict = None self.preds_dict = None self.normalization = None self.ssl_modification_mapping = None self.has_rt = "rt" in self.peprec.columns self.has_protein_list = "protein_list" in self.peprec.columns self.mods = Modifications() self.mods.add_from_ms2pip_modstrings(params["ptm"]) if self.write_mode not in ["wt+", "wt", "at", "w", "a"]: raise InvalidWriteModeError(self.write_mode) if "a" in self.write_mode and self.return_stringbuffer: raise InvalidWriteModeError(self.write_mode) def _generate_peprec_dict(self, rt_to_seconds=True): """ Create easy to access dict from all_preds and peprec dataframes """ peprec_tmp = self.peprec.copy() if self.has_rt and rt_to_seconds: peprec_tmp["rt"] = peprec_tmp["rt"] * 60 peprec_tmp.index = peprec_tmp["spec_id"] peprec_tmp.drop("spec_id", axis=1, inplace=True) self.peprec_dict = peprec_tmp.to_dict(orient="index") def _generate_preds_dict(self): """ Create easy to access dict from peprec dataframes """ self.preds_dict = {} preds_list = self.all_preds[ ["spec_id", "charge", "ion", "ionnumber", "mz", 
"prediction"] ].values.tolist() for row in preds_list: spec_id = row[0] if spec_id in self.preds_dict.keys(): if row[2] in self.preds_dict[spec_id]["peaks"]: self.preds_dict[spec_id]["peaks"][row[2]].append(tuple(row[3:])) else: self.preds_dict[spec_id]["peaks"][row[2]] = [tuple(row[3:])] else: self.preds_dict[spec_id] = { "charge": row[1], "peaks": {row[2]: [tuple(row[3:])]}, } def _normalize_spectra(self, method="basepeak_10000"): """ Normalize spectra """ if self.is_log_space: self.all_preds["prediction"] = ( (2 ** self.all_preds["prediction"]) - 0.001 ).clip(lower=0) self.is_log_space = False if method == "basepeak_10000": if self.normalization == "basepeak_10000": pass elif self.normalization == "basepeak_1": self.all_preds["prediction"] *= 10000 else: self.all_preds["prediction"] = self.all_preds.groupby(["spec_id"])[ "prediction" ].apply(lambda x: (x / x.max()) * 10000) self.normalization = "basepeak_10000" elif method == "basepeak_1": if self.normalization == "basepeak_1": pass elif self.normalization == "basepeak_10000": self.all_preds["prediction"] /= 10000 else: self.all_preds["prediction"] = self.all_preds.groupby(["spec_id"])[ "prediction" ].apply(lambda x: (x / x.max())) self.normalization = "basepeak_1" elif method == "tic": if self.normalization != "tic": self.all_preds["prediction"] = self.all_preds.groupby(["spec_id"])[ "prediction" ].apply(lambda x: x / x.sum()) self.normalization = "tic" else: raise NotImplementedError def _get_peak_string( self, peak_dict, sep="\t", include_zero=False, include_annotations=True, intensity_type=float, ): """ Get MGF/MSP-like peaklist string """ all_peaks = [] for ion_type, peaks in peak_dict.items(): for peak in peaks: if not include_zero and peak[2] == 0: continue if include_annotations: all_peaks.append( ( peak[1], f'{peak[1]:.6f}{sep}{intensity_type(peak[2])}{sep}"{ion_type.lower()}{peak[0]}"', ) ) else: all_peaks.append((peak[1], f"{peak[1]:.6f}{sep}{peak[2]}")) all_peaks = sorted(all_peaks, 
key=itemgetter(0)) peak_string = "\n".join([peak[1] for peak in all_peaks]) return peak_string def _get_msp_modifications(self, sequence, modifications): """ Format modifications in MSP-style, e.g. "1/0,E,Glu->pyro-Glu" """ if isinstance(modifications, str): if modifications == "-": msp_modifications = "0" else: mods = modifications.split("|") mods = [(int(mods[i]), mods[i + 1]) for i in range(0, len(mods), 2)] mods = [(x, y) if x == 0 else (x - 1, y) for (x, y) in mods] mods = [(str(x), sequence[x], y) for (x, y) in mods] msp_modifications = "/".join([",".join(list(x)) for x in mods]) msp_modifications = f"{len(mods)}/{msp_modifications}" else: msp_modifications = "0" return msp_modifications def _parse_protein_string(self, protein_list): """ Parse protein string from list, list string literal, or string. """ if isinstance(protein_list, list): protein_string = "/".join(protein_list) elif isinstance(protein_list, str): try: protein_string = "/".join(literal_eval(protein_list)) except ValueError: protein_string = protein_list else: protein_string = "" return protein_string def _get_last_ssl_scannr(self): """ Return scan number of last line in a Bibliospec SSL file. """ ssl_filename = "{}_predictions.ssl".format(self.output_filename) with open(ssl_filename, "rt") as ssl: for line in ssl: last_line = line last_scannr = int(last_line.split("\t")[1]) return last_scannr def _generate_ssl_modification_mapping(self): """ Make modification name -> ssl modification name mapping. """ self.ssl_modification_mapping = { ptm.split(",")[0]: "{:+.1f}".format(round(float(ptm.split(",")[1]), 1)) for ptm in self.params["ptm"] } def _get_ssl_modified_sequence(self, sequence, modifications): """ Build BiblioSpec SSL modified sequence string. 
""" pep = list(sequence) for loc, name in zip( modifications.split("|")[::2], modifications.split("|")[1::2] ): # C-term mod if loc == "-1": pep[-1] = pep[-1] + "[{}]".format(self.ssl_modification_mapping[name]) # N-term mod elif loc == "0": pep[0] = pep[0] + "[{}]".format(self.ssl_modification_mapping[name]) # Normal mod else: pep[int(loc) - 1] = pep[int(loc) - 1] + "[{}]".format( self.ssl_modification_mapping[name] ) return "".join(pep) @output_format('msp') @writer( file_suffix="_predictions.msp", normalization_method="basepeak_10000", requires_dicts=True, requires_ssl_modifications=False, ) def write_msp(self, file_object): """ Construct MSP string and write to file_object. """ for spec_id in sorted(self.peprec_dict.keys()): seq = self.peprec_dict[spec_id]["peptide"] mods = self.peprec_dict[spec_id]["modifications"] charge = self.peprec_dict[spec_id]["charge"] prec_mass, prec_mz = self.mods.calc_precursor_mz(seq, mods, charge) msp_modifications = self._get_msp_modifications(seq, mods) num_peaks = sum( [ len(peaklist) for _, peaklist in self.preds_dict[spec_id]["peaks"].items() ] ) comment_line = f" Mods={msp_modifications} Parent={prec_mz}" if self.has_protein_list: protein_list = self.peprec_dict[spec_id]["protein_list"] protein_string = self._parse_protein_string(protein_list) comment_line += f' Protein="{protein_string}"' if self.has_rt: rt = self.peprec_dict[spec_id]["rt"] comment_line += f" RTINSECONDS={rt}" comment_line += f' MS2PIP_ID="{spec_id}"' out = [ f"Name: {seq}/{charge}", f"MW: {prec_mass}", f"Comment:{comment_line}", f"Num peaks: {num_peaks}", self._get_peak_string( self.preds_dict[spec_id]["peaks"], sep="\t", include_annotations=True, intensity_type=int, ), ] file_object.writelines([line + "\n" for line in out] + ["\n"]) @output_format('mgf') @writer( file_suffix="_predictions.mgf", normalization_method="basepeak_10000", requires_dicts=True, requires_ssl_modifications=False, ) def write_mgf(self, file_object): """ Construct MGF string and write 
to file_object """ for spec_id in sorted(self.peprec_dict.keys()): seq = self.peprec_dict[spec_id]["peptide"] mods = self.peprec_dict[spec_id]["modifications"] charge = self.peprec_dict[spec_id]["charge"] prec_mass, prec_mz = self.mods.calc_precursor_mz(seq, mods, charge) msp_modifications = self._get_msp_modifications(seq, mods) if self.has_protein_list: protein_list = self.peprec_dict[spec_id]["protein_list"] protein_string = self._parse_protein_string(protein_list) else: protein_string = "" out = [ "BEGIN IONS", f"TITLE={spec_id} {seq}/{charge} {msp_modifications} {protein_string}", f"PEPMASS={prec_mz}", f"CHARGE={charge}+", ] if self.has_rt: rt = self.peprec_dict[spec_id]["rt"] out.append(f"RTINSECONDS={rt}") out.append( self._get_peak_string( self.preds_dict[spec_id]["peaks"], sep=" ", include_annotations=False, ) ) out.append("END IONS\n") file_object.writelines([line + "\n" for line in out]) @output_format('spectronaut') @writer( file_suffix="_predictions_spectronaut.csv", normalization_method="tic", requires_dicts=False, requires_ssl_modifications=True, ) def write_spectronaut(self, file_obj): """ Construct spectronaut DataFrame and write to file_object. 
""" if "w" in self.write_mode: header = True elif "a" in self.write_mode: header = False else: raise InvalidWriteModeError(self.write_mode) spectronaut_peprec = self.peprec.copy() # ModifiedPeptide and PrecursorMz columns spectronaut_peprec["ModifiedPeptide"] = spectronaut_peprec.apply( lambda row: self._get_ssl_modified_sequence( row["peptide"], row["modifications"] ), axis=1, ) spectronaut_peprec["PrecursorMz"] = spectronaut_peprec.apply( lambda row: self.mods.calc_precursor_mz( row["peptide"], row["modifications"], row["charge"] )[1], axis=1, ) spectronaut_peprec["ModifiedPeptide"] = ( "_" + spectronaut_peprec["ModifiedPeptide"] + "_" ) # Additional columns spectronaut_peprec["FragmentLossType"] = "noloss" # Retention time if "rt" in spectronaut_peprec.columns: rt_cols = ["iRT"] spectronaut_peprec["iRT"] = spectronaut_peprec["rt"] else: rt_cols = [] # ProteinId if self.has_protein_list: spectronaut_peprec["ProteinId"] = spectronaut_peprec["protein_list"].apply( self._parse_protein_string ) else: spectronaut_peprec["ProteinId"] = spectronaut_peprec["spec_id"] # Rename columns and merge with predictions spectronaut_peprec = spectronaut_peprec.rename( columns={"charge": "PrecursorCharge", "peptide": "StrippedPeptide"} ) peptide_cols = ( [ "ModifiedPeptide", "StrippedPeptide", "PrecursorCharge", "PrecursorMz", "ProteinId", ] + rt_cols + ["FragmentLossType"] ) spectronaut_df = spectronaut_peprec[peptide_cols + ["spec_id"]] spectronaut_df = self.all_preds.merge(spectronaut_df, on="spec_id") # Fragment columns spectronaut_df["FragmentCharge"] = ( spectronaut_df["ion"].str.contains("2").map({True: 2, False: 1}) ) spectronaut_df["FragmentType"] = spectronaut_df["ion"].str[0].str.lower() # Rename and sort columns spectronaut_df = spectronaut_df.rename( columns={ "mz": "FragmentMz", "prediction": "RelativeIntensity", "ionnumber": "FragmentNumber", } ) fragment_cols = [ "FragmentCharge", "FragmentMz", "RelativeIntensity", "FragmentType", "FragmentNumber", ] spectronaut_df = 
spectronaut_df[peptide_cols + fragment_cols] spectronaut_df.to_csv(file_obj, index=False, header=header) return file_obj def _write_bibliospec_core(self, file_obj_ssl, file_obj_ms2, start_scannr=0): """ Construct Bibliospec SSL/MS2 strings and write to file_objects. """ for i, spec_id in enumerate(sorted(self.preds_dict.keys())): scannr = i + start_scannr seq = self.peprec_dict[spec_id]["peptide"] mods = self.peprec_dict[spec_id]["modifications"] charge = self.peprec_dict[spec_id]["charge"] prec_mass, prec_mz = self.mods.calc_precursor_mz(seq, mods, charge) ms2_filename = os.path.basename(self.output_filename) + "_predictions.ms2" peaks = self._get_peak_string( self.preds_dict[spec_id]["peaks"], sep="\t", include_annotations=False, ) if isinstance(mods, str) and mods != "-" and mods != "": mod_seq = self._get_ssl_modified_sequence(seq, mods) else: mod_seq = seq rt = self.peprec_dict[spec_id]["rt"] if self.has_rt else "" # TODO: implement csv instead of manual writing file_obj_ssl.write( "\t".join( [ms2_filename, str(scannr), str(charge), mod_seq, "", "", str(rt)] ) + "\n" ) file_obj_ms2.write( "\n".join( [ f"S\t{scannr}\t{prec_mz}", f"Z\t{charge}\t{prec_mass}", f"D\tseq\t{seq}", f"D\tmodified seq\t{mod_seq}", peaks, ] ) + "\n" ) def _write_general( self, write_function, file_suffix, normalization_method, requires_dicts, requires_ssl_modifications, ): """ General write function to call core write functions. Note: Does not work for write_bibliospec function. 
""" # Normalize if necessary and make dicts if not self.normalization == normalization_method: self._normalize_spectra(method=normalization_method) if requires_dicts: self._generate_preds_dict() elif requires_dicts and not self.preds_dict: self._generate_preds_dict() if requires_dicts and not self.peprec_dict: self._generate_peprec_dict() if requires_ssl_modifications and not self.ssl_modification_mapping: self._generate_ssl_modification_mapping() # Write to file or stringbuffer if self.return_stringbuffer: file_object = StringIO() logger.info("writing results to StringIO using %s", write_function.__name__) else: f_name = self.output_filename + file_suffix file_object = open(f_name, self.write_mode) logger.info("writing results to %s", f_name) write_function(self, file_object) return file_object @output_format('bibliospec') def write_bibliospec(self): """ Write MS2PIP predictions to BiblioSpec SSL and MS2 spectral library files (For example for use in Skyline). """ if not self.ssl_modification_mapping: self._generate_ssl_modification_mapping() # Normalize if necessary and make dicts if not self.normalization == "basepeak_10000": self._normalize_spectra(method="basepeak_10000") self._generate_preds_dict() elif not self.preds_dict: self._generate_preds_dict() if not self.peprec_dict: self._generate_peprec_dict() if self.return_stringbuffer: file_obj_ssl = StringIO() file_obj_ms2 = StringIO() else: file_obj_ssl = open( "{}_predictions.ssl".format(self.output_filename), self.write_mode ) file_obj_ms2 = open( "{}_predictions.ms2".format(self.output_filename), self.write_mode ) # If a new file is written, write headers if "w" in self.write_mode: start_scannr = 0 ssl_header = [ "file", "scan", "charge", "sequence", "score-type", "score", "retention-time", "\n", ] file_obj_ssl.write("\t".join(ssl_header)) file_obj_ms2.write( "H\tCreationDate\t{}\n".format( strftime("%Y-%m-%d %H:%M:%S", localtime()) ) ) file_obj_ms2.write("H\tExtractor\tMS2PIP predictions\n") else: # Get 
last scan number of ssl file, to continue indexing from there # because Bibliospec speclib scan numbers can only be integers start_scannr = self._get_last_ssl_scannr() + 1 self._write_bibliospec_core( file_obj_ssl, file_obj_ms2, start_scannr=start_scannr ) return file_obj_ssl, file_obj_ms2 def get_normalized_predictions(self, normalization_method='tic'): """ Return normalized copy of predictions. """ self._normalize_spectra(method=normalization_method) return self.all_preds.copy() @output_format('csv') def write_csv(self): """ Write MS2PIP predictions to CSV. """ self._normalize_spectra(method='tic') # Write to file or stringbuffer if self.return_stringbuffer: file_object = StringIO() logger.info("writing results to StringIO using %s", "write_csv") else: f_name = "{}_predictions.csv".format(self.output_filename) file_object = open(f_name, self.write_mode) logger.info("writing results to %s", f_name) self.all_preds.to_csv(file_object, float_format="%.6g", index=False) return file_object def write_results(self, output_formats: List[str]) -> Dict[str, Any]: """ Write MS2PIP predictions in output formats defined by output_formats. """ results = {} for output_format in output_formats: output_format = output_format.lower() writer = self.OUTPUT_FORMATS[output_format] results[output_format] = writer(self) return results
CoolElvis/white_noise
lib/noise/rate_limit_error.rb
# frozen_string_literal: true require 'noise/public_error' require 'noise/rate_limit_error_responder' module Noise # Rate limit error. # class RateLimitError < PublicError attr_reader :retry_after # @param code [Symbol] # @param [String] retry_after # def initialize(code, retry_after:) super(code) @retry_after = retry_after end def responder_class RateLimitErrorResponder end end end Noise::RateLimitError.register_as :too_many_requests, severity: :info
knokko/custom-items-gradle
plug-in/src/main/java/nl/knokko/customitems/plugin/set/item/CustomShears.java
package nl.knokko.customitems.plugin.set.item; import java.util.Collection; import java.util.List; import org.bukkit.entity.Player; import org.bukkit.inventory.ItemStack; import nl.knokko.customitems.effect.EquippedPotionEffect; import nl.knokko.customitems.effect.PotionEffect; import nl.knokko.customitems.item.AttributeModifier; import nl.knokko.customitems.item.CustomItemType; import nl.knokko.customitems.item.Enchantment; import nl.knokko.customitems.item.ReplaceCondition; import nl.knokko.customitems.item.ReplaceCondition.ConditionOperation; import nl.knokko.customitems.item.nbt.ExtraItemNbt; import nl.knokko.customitems.plugin.CustomItemsEventHandler; import nl.knokko.customitems.plugin.recipe.ingredient.Ingredient; public class CustomShears extends CustomTool { private final int shearDurabilityLoss; public CustomShears( short itemDamage, String name, String alias, String displayName, String[] lore, AttributeModifier[] attributes, Enchantment[] defaultEnchantments, long maxDurability, boolean allowEnchanting, boolean allowAnvil, Ingredient repairItem, boolean[] itemFlags, int entityHitDurabilityLoss, int blockBreakDurabilityLoss, int shearDurabilityLoss, List<PotionEffect> playerEffects, List<PotionEffect> targetEffects, Collection<EquippedPotionEffect> equippedEffects, String[] commands, ReplaceCondition[] conditions, ConditionOperation op, ExtraItemNbt extraNbt, float attackRange ) { super( CustomItemType.SHEARS, itemDamage, name, alias, displayName, lore, attributes, defaultEnchantments, maxDurability, allowEnchanting, allowAnvil, repairItem, itemFlags, entityHitDurabilityLoss, blockBreakDurabilityLoss, playerEffects, targetEffects, equippedEffects, commands, conditions, op, extraNbt, attackRange ); this.shearDurabilityLoss = shearDurabilityLoss; } public int getShearDurabilityLoss() { return shearDurabilityLoss; } @Override public void onBlockBreak(Player player, ItemStack tool, boolean wasSolid, boolean wasFakeMainHand) { // Only lose durability when 
breaking non-solid blocks because we shear it if (!wasSolid && blockBreakDurabilityLoss != 0) { ItemStack newTool = decreaseDurability(tool, blockBreakDurabilityLoss); if (tool != newTool) { if (newTool == null) { CustomItemsEventHandler.playBreakSound(player); } if (wasFakeMainHand) { player.getInventory().setItemInOffHand(newTool); } else { player.getInventory().setItemInMainHand(newTool); } } } } }
georkost4/WeatherFarm
app/src/main/java/com/dsktp/sora/weatherfarm/utils/AppUtils.java
package com.dsktp.sora.weatherfarm.utils; import android.content.Context; import android.content.SharedPreferences; import android.location.Location; import android.preference.PreferenceManager; import android.util.Log; import com.google.android.gms.location.places.Place; import static com.dsktp.sora.weatherfarm.utils.Constants.NO_LATITUDE; import static com.dsktp.sora.weatherfarm.utils.Constants.NO_LONGITUDE; import static com.dsktp.sora.weatherfarm.utils.Constants.NO_PLACE; import static com.dsktp.sora.weatherfarm.utils.Constants.PREFERENCES_CONNECTIVITY_KEY; import static com.dsktp.sora.weatherfarm.utils.Constants.PREFERENCES_CURRENT_PLACE_LATITUDE_KEY; import static com.dsktp.sora.weatherfarm.utils.Constants.PREFERENCES_CURRENT_PLACE_LONGTITUDE_KEY; import static com.dsktp.sora.weatherfarm.utils.Constants.PREFERENCES_IS_POLYGON_LIST_SYNCED; import static com.dsktp.sora.weatherfarm.utils.Constants.PREFERENCES_UNITS_IMPERIAL_VALUE; import static com.dsktp.sora.weatherfarm.utils.Constants.PREFERENCES_UNITS_KEY; import static com.dsktp.sora.weatherfarm.utils.Constants.PREFERENCES_UNITS_METRIC_VALUE; /** * This file created by <NAME> * and was last modified on 30/7/2018. * The name of the project is WeatherFarm and it was created as part of * UDACITY ND programm. */ /** * This class contains static helper methods to get and save values like last time fetched from server * save and get current position, save and get selected position. 
*/ public class AppUtils { private static final String DEBUG_TAG = "#AppUtils"; /** * Saves into preference the time in millis that we last fetched data from the server * @param context The context used to access shared preferences * @param lastUpdated The time in millis */ public static void saveLastUpdatedValue(Context context, long lastUpdated) { PreferenceManager.getDefaultSharedPreferences(context).edit().putLong(Constants.PREFERENCES_SAVE_LAST_UPDATED_KEY,lastUpdated).apply(); } /** * This method returns the value of last updated time in millis * @param context The context to access shared preferences * @return long representing the time in millis */ public static long getLastUpdated(Context context) { long lastUpdated = PreferenceManager.getDefaultSharedPreferences(context).getLong(Constants.PREFERENCES_SAVE_LAST_UPDATED_KEY,-1); Log.d(DEBUG_TAG,"Last updated = " + lastUpdated ); return lastUpdated; } /** * This method saves into preferences the selected location we entered to get weather forecast data * @param place The place object containing the name , latitude , longtitude of the place * @param context The context object */ public static void saveSelectedPosition(Place place,Context context) { SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context); sharedPreferences.edit().putString(Constants.PREFERENCES_SELECTED_PLACE_NAME_KEY, String.valueOf(place.getName())).apply(); sharedPreferences.edit().putString(Constants.PREFERENCES_SELECTED_PLACE_LATITUDE_KEY, String.valueOf(place.getLatLng().latitude)).apply(); sharedPreferences.edit().putString(Constants.PREFERENCES_SELECTED_PLACE_LONGTITUDE_KEY, String.valueOf(place.getLatLng().longitude)).apply(); Log.d(DEBUG_TAG,"Saving values.. name = "+ place.getName() + " lat = " + place.getLatLng().latitude + " lon = " + place.getLatLng().longitude); } /** * This method return an array of string representing the selected position we entered to get weather forecast info. 
* The returnedValue[0] contains the name of the place * The returnedValue[1] contains the latitude of the place * The returnedValue[2] contains the longitude of the place * @param context The context to access the shared preferences * @return String[] containing the name,latitude,longitude of the place */ public static String[] getSelectedPosition(Context context) { SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context); String[] values = new String[3]; values[0] = sharedPreferences.getString(Constants.PREFERENCES_SELECTED_PLACE_NAME_KEY,NO_PLACE); values[1] = sharedPreferences.getString(Constants.PREFERENCES_SELECTED_PLACE_LATITUDE_KEY,NO_LATITUDE); values[2] = sharedPreferences.getString(Constants.PREFERENCES_SELECTED_PLACE_LONGTITUDE_KEY, NO_LONGITUDE); Log.i(DEBUG_TAG,"Getting values.. name = "+ values[0] + " lat = " + values[1] + " lon = " + values[2]); return values; } /** * This method saves into shared preferences the current place we obtained from the device via gps or other method * @param context The context to access the shared preferences * @param location The location object that contains the latitude,longitude of the current place */ public static void saveCurrentPosition(Context context, Location location) { //todo use reverse geo coding to find the locantion name SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context); // sharedPreferences.edit().putString("selected_place_name_key", String.valueOf(place.getName())).apply(); sharedPreferences.edit().putString(PREFERENCES_CURRENT_PLACE_LATITUDE_KEY, String.valueOf(location.getLatitude())).apply(); sharedPreferences.edit().putString(PREFERENCES_CURRENT_PLACE_LONGTITUDE_KEY, String.valueOf(location.getLongitude())).apply(); } /** * This method return an array of string representing the current position we obtained from device to get weather forecast info. 
* The returnedValue[0] contains the latitude of the place * The returnedValue[1] contains the longitude of the place * @param context The context to access the shared preferences * @return String[] containing the latitude,longitude of the place */ public static String[] getCurrentPosition(Context context) { SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context); String[] values = new String[2]; values[0] = sharedPreferences.getString(PREFERENCES_CURRENT_PLACE_LATITUDE_KEY,NO_LATITUDE); values[1] = sharedPreferences.getString(PREFERENCES_CURRENT_PLACE_LONGTITUDE_KEY, NO_LONGITUDE); return values; } /** * This method saves into preferences a boolean value representing if we have synced the polygon list from the server * @param context THe context object to access the preferences */ public static void setPolygonListBeenSynced(Context context) { PreferenceManager.getDefaultSharedPreferences(context).edit().putBoolean(PREFERENCES_IS_POLYGON_LIST_SYNCED,true).apply(); } /** * This method returns a boolean representing whether we have synced the list of polygons from the server * @param context The context to access the shared preferences * @return boolean representing if we have synced the data with the server. */ public static boolean hasThePolygonListSynced(Context context) { return PreferenceManager.getDefaultSharedPreferences(context).getBoolean(PREFERENCES_IS_POLYGON_LIST_SYNCED,false); } /** * This method saves the network state to shared preferences * @param context The context to access shared preferences * @param value The boolean value of whether we have internet connection or not */ public static void saveNetworkState(Context context,boolean value) { PreferenceManager.getDefaultSharedPreferences(context).edit().putBoolean(PREFERENCES_CONNECTIVITY_KEY,value).apply(); } /** * This method returns the network state. 
* @param context The context to access the shared preferences * @return boolean representing whether we have internet access or not */ public static boolean getNetworkState(Context context) { return PreferenceManager.getDefaultSharedPreferences(context).getBoolean(PREFERENCES_CONNECTIVITY_KEY,false); } /** * This method retrieves the user preferred units . Imperial or * Metric. * @param context The Context object to access the shared preferences * @return String representing the user preferred units */ public static String getUnitUserPreference(Context context) { return PreferenceManager.getDefaultSharedPreferences(context) .getString(PREFERENCES_UNITS_KEY,PREFERENCES_UNITS_IMPERIAL_VALUE); } /** * This method saves the units preference for the user into. * shared app preferences for later user .It can * take two values either Metric or Imperial. * @param context The Context object to access the preferences * @param units The String values of the units */ public static void saveUnitUserPreference(Context context,String units) { Log.i(DEBUG_TAG,"Setting the user preferred unit to = " + units); PreferenceManager.getDefaultSharedPreferences(context) .edit() .putString(Constants.PREFERENCES_UNITS_KEY,units) .apply(); } }
mkinsner/llvm
clang/test/Sema/builtins-memcpy-inline.cpp
// RUN: %clang_cc1 -fsyntax-only -verify %s #define NULL ((char *)0) #if __has_builtin(__builtin_memcpy_inline) #warning defined as expected // expected-warning@-1 {{defined as expected}} #endif void test_memcpy_inline_null_src(void *ptr) { __builtin_memcpy_inline(ptr, NULL, 4); // expected-warning {{null passed to a callee that requires a non-null argument}} } void test_memcpy_inline_null_dst(void *ptr) { __builtin_memcpy_inline(NULL, ptr, 4); // expected-warning {{null passed to a callee that requires a non-null argument}} } void test_memcpy_inline_null_buffers() { __builtin_memcpy_inline(NULL, NULL, 4); // expected-warning@-1 {{null passed to a callee that requires a non-null argument}} // expected-warning@-2 {{null passed to a callee that requires a non-null argument}} } void test_memcpy_inline_null_buffer_is_ok_if_size_is_zero(void *ptr) { __builtin_memcpy_inline(ptr, NULL, /*size */ 0); __builtin_memcpy_inline(NULL, ptr, /*size */ 0); __builtin_memcpy_inline(NULL, NULL, /*size */ 0); } void test_memcpy_inline_non_constant_size(void *dst, const void *src, unsigned size) { __builtin_memcpy_inline(dst, src, size); // expected-error {{argument to '__builtin_memcpy_inline' must be a constant integer}} } template <unsigned size> void test_memcpy_inline_template(void *dst, const void *src) { // we do not try to evaluate size in non intantiated templates. __builtin_memcpy_inline(dst, src, size); }
yihongmingfeng/OrderingS
OrderingS/CSCDCouponTimeLimitPanel.h
//
//     Generated by class-dump 3.5 (64 bit).
//
//     class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by <NAME>.
//

// NOTE(review): this is a reverse-engineered header produced by class-dump;
// member names and semantics are inferred from binary symbols — verify
// against the running binary before relying on them.

#import "UIView.h"

#import "CSCountDownPanelDelegate.h"

@class BaseBizVc, CSCDLimitRangePanel, CSCountDownPanel, CSHtmLabel, CouponInfoData, NSObject<CSCDCouponTimeLimitPanelDelegate>, NSString, UILabel;

// Panel showing a coupon's time-limit countdown, hint text and usage range.
@interface CSCDCouponTimeLimitPanel : UIView <CSCountDownPanelDelegate>
{
    NSObject<CSCDCouponTimeLimitPanelDelegate> *_delegate;
    BaseBizVc *_nrVc;
    UILabel *_ctrlLimitTitle;
    CSCountDownPanel *_ctrlCountDownPanel;
    CSHtmLabel *_ctrlLimitHint;
    CSCDLimitRangePanel *_ctrlLimitRange;
    CouponInfoData *_dto;
}

@property(retain, nonatomic) CouponInfoData *dto; // @synthesize dto=_dto;
@property(retain, nonatomic) CSCDLimitRangePanel *ctrlLimitRange; // @synthesize ctrlLimitRange=_ctrlLimitRange;
@property(retain, nonatomic) CSHtmLabel *ctrlLimitHint; // @synthesize ctrlLimitHint=_ctrlLimitHint;
@property(retain, nonatomic) CSCountDownPanel *ctrlCountDownPanel; // @synthesize ctrlCountDownPanel=_ctrlCountDownPanel;
@property(retain, nonatomic) UILabel *ctrlLimitTitle; // @synthesize ctrlLimitTitle=_ctrlLimitTitle;
@property BaseBizVc *nrVc; // @synthesize nrVc=_nrVc;
@property NSObject<CSCDCouponTimeLimitPanelDelegate> *delegate; // @synthesize delegate=_delegate;
- (void)onCountDownPanelCountToZero;
- (void)refreshPanel:(id)arg1;
- (void)reset;
- (void)dealloc;
- (id)initWithVc:(id)arg1;

// Remaining properties
@property(readonly, copy) NSString *debugDescription;
@property(readonly, copy) NSString *description;
@property(readonly) unsigned long long hash;
@property(readonly) Class superclass;

@end
Sumitkk10/USACO-submissions
2015/February/Silver/Superbull.cpp
#include <bits/stdc++.h>
#define fast ios_base::sync_with_stdio(0);cin.tie(NULL);cout.tie(NULL)
#define ll long long int
#define ld long double
using namespace std;

const int N = 1e6 + 5;
const int MOD = 1e9 + 7;

int n, a[N], component[N];
vector<pair<ll, pair<int, int> > > edges;

// Union-find root lookup with path halving: each step short-circuits the
// grandparent link, keeping later lookups nearly constant time.
int find(int x){
    while(x != component[x]){
        component[x] = component[component[x]];
        x = component[x];
    }
    return x;
}

// Join the components containing x and y (no union-by-rank; path halving
// in find() keeps the structure shallow enough in practice).
void merge(int x, int y){
    int rootX = find(x), rootY = find(y);
    component[rootX] = rootY;
}

// Kruskal over the pre-sorted edge list: take every edge that links two
// distinct components, accumulating the total weight of the spanning tree.
ll MST(){
    ll total = 0;
    for(const auto &edge : edges){
        int u = edge.second.first;
        int v = edge.second.second;
        if(find(u) == find(v)) continue;
        total += edge.first;
        merge(u, v);
    }
    return total;
}

void solve(){
    cin >> n;
    for(int i = 1; i <= n; ++i){
        cin >> a[i];
        component[i] = i;   // each team starts in its own component
    }
    // Complete graph: every pair of teams, weighted by the XOR of their ids.
    for(int i = 1; i <= n; ++i){
        for(int j = i + 1; j <= n; ++j){
            edges.push_back({a[i] ^ a[j], {i, j}});
        }
    }
    // Descending weight order turns Kruskal into a MAXIMUM spanning tree.
    sort(edges.rbegin(), edges.rend());
    cout << MST() << "\n";
}

int main(){
    fast;
    freopen("superbull.in", "r", stdin);
    freopen("superbull.out", "w", stdout);
    int t = 1;
    while(t--) solve();
    return 0;
}
RobotLocomotion/drake-python3.7
examples/acrobot/test/acrobot_geometry_test.cc
#include "drake/examples/acrobot/acrobot_geometry.h"

#include <gtest/gtest.h>

#include "drake/examples/acrobot/acrobot_plant.h"
#include "drake/geometry/scene_graph.h"
#include "drake/systems/framework/diagram_builder.h"

namespace drake {
namespace examples {
namespace acrobot {
namespace {

// Smoke test: wiring AcrobotGeometry into a diagram together with the plant
// and a SceneGraph must build without faulting and yield a non-null system.
GTEST_TEST(AcrobotGeometryTest, AcceptanceTest) {
  // Just make sure nothing faults out.
  systems::DiagramBuilder<double> builder;
  auto plant = builder.AddSystem<AcrobotPlant>();
  auto scene_graph = builder.AddSystem<geometry::SceneGraph>();
  // Port 0 of the plant carries the state fed to the geometry system.
  auto geom = AcrobotGeometry::AddToBuilder(
      &builder, plant->get_output_port(0), scene_graph);
  auto diagram = builder.Build();
  ASSERT_NE(geom, nullptr);
}

}  // namespace
}  // namespace acrobot
}  // namespace examples
}  // namespace drake
Ovlic/cowsay_py
cowsay/lib/cows/vader_koala.py
def Vader_koala(thoughts, eyes, eye, tongue):
    """Render the 'Darth Vader koala' cow as ASCII art.

    Only ``thoughts`` (the speech-bubble connector, drawn twice) and ``eye``
    (drawn twice as the koala's eyes) are substituted into the template.
    ``eyes`` and ``tongue`` are accepted for interface compatibility with the
    other cow renderers but are not used by this template.
    """
    return f"""
   {thoughts}
     {thoughts}   .
      .---.  //
     Y|{eye} {eye}|Y//
    /_(i=i)K/
    ~()~*~()~
     (_)-(_)

     Darth Vader koala
"""
HotMoka/hotmoka
io-hotmoka-crypto/src/main/java/io/hotmoka/crypto/internal/AbstractSignatureAlgorithm.java
/*
Copyright 2021 <NAME>

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package io.hotmoka.crypto.internal;

import java.nio.charset.StandardCharsets;
import java.security.InvalidAlgorithmParameterException;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.SecureRandom;
import java.util.stream.Collectors;

import org.bouncycastle.crypto.digests.SHA512Digest;
import org.bouncycastle.crypto.generators.PKCS5S2ParametersGenerator;
import org.bouncycastle.crypto.params.KeyParameter;

import io.hotmoka.beans.InternalFailureException;
import io.hotmoka.crypto.BIP39Dictionary;
import io.hotmoka.crypto.SignatureAlgorithm;

/**
 * Shared code of signature algorithms.
 *
 * Provides deterministic key-pair derivation from BIP39 entropy plus a
 * password, delegating the algorithm-specific generator to subclasses.
 */
abstract class AbstractSignatureAlgorithm<T> implements SignatureAlgorithm<T> {

	/**
	 * Creates a key pair generator for this signature algorithm.
	 * 
	 * @param random the generator of entropy to use for the key pair generator
	 * @return the key pair generator
	 */
	protected abstract KeyPairGenerator mkKeyPairGenerator(SecureRandom random) throws NoSuchAlgorithmException, NoSuchProviderException, InvalidAlgorithmParameterException;

	@Override
	public KeyPair getKeyPair(byte[] entropy, BIP39Dictionary dictionary, String password) {
		// we create a random object that we use only once and always provides the seed
		// NOTE: the "randomness" is deliberately constant — the same entropy,
		// dictionary and password must always derive the same key pair.
		SecureRandom random = new SecureRandom() {
			private final static long serialVersionUID = 1L;
			private final byte[] seed = mergeEntropyWithPassword();

			@Override
			public void nextBytes(byte[] bytes) {
				// copy the seed into the requested bytes
				// NOTE(review): assumes bytes.length <= 64 (the derived key is
				// 512 bits); a larger request would throw — TODO confirm the
				// generators used never ask for more.
				System.arraycopy(seed, 0, bytes, 0, bytes.length);
			}

			private byte[] mergeEntropyWithPassword() {
				// Reconstruct the mnemonic sentence from the entropy, then
				// stretch it with the BIP39 "mnemonic"+password salt.
				var words = new BIP39WordsImpl(entropy, dictionary);
				String mnemonic = words.stream().collect(Collectors.joining(" "));
				String salt = String.format("mnemonic%s", password);

				// 2048 iterations of the key-stretching algorithm PBKDF2 using HMAC-SHA512
				PKCS5S2ParametersGenerator gen = new PKCS5S2ParametersGenerator(new SHA512Digest());
				gen.init(mnemonic.getBytes(StandardCharsets.UTF_8), salt.getBytes(StandardCharsets.UTF_8), 2048);

				return ((KeyParameter) gen.generateDerivedParameters(512)).getKey();
			}
		};

		try {
			return mkKeyPairGenerator(random).generateKeyPair();
		}
		catch (NoSuchProviderException | InvalidAlgorithmParameterException | NoSuchAlgorithmException e) {
			throw InternalFailureException.of("unexpected exception", e);
		}
	}

	@Override
	public KeyPair getKeyPair(byte[] entropy, String password) {
		// Convenience overload: defaults to the English BIP39 word list.
		return getKeyPair(entropy, BIP39Dictionary.ENGLISH_DICTIONARY, password);
	}

	@Override
	public byte[] encodingOf(PublicKey publicKey) {
		return publicKey.getEncoded();
	}

	@Override
	public byte[] encodingOf(PrivateKey privateKey) {
		return privateKey.getEncoded();
	}
}
sohaniwso2/carbon-registry-1
components/registry/org.wso2.carbon.registry.social.impl/src/main/java/org/wso2/carbon/registry/social/impl/appdata/AppDataManagerImpl.java
/* * Copyright (c) 2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wso2.carbon.registry.social.impl.appdata; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.registry.core.Registry; import org.wso2.carbon.registry.core.Resource; import org.wso2.carbon.registry.core.exceptions.RegistryException; import org.wso2.carbon.registry.social.api.SocialDataException; import org.wso2.carbon.registry.social.api.appdata.AppDataManager; import org.wso2.carbon.registry.social.impl.SocialImplConstants; import org.wso2.carbon.registry.social.impl.internal.SocialDSComponent; import org.wso2.carbon.registry.social.impl.people.relationship.RelationshipManagerImpl; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.TreeSet; /** * An implementation of the {@link org.wso2.carbon.registry.social.api.appdata.AppDataManager} interface * <p> * This implementation uses the {@link org.wso2.carbon.registry.core.Registry} to store app data * </p> * <p> * The app data is stored as a {@link org.wso2.carbon.registry.core.Registry} {@link org.wso2.carbon.registry.core.Resource} * The key-value pairs are stored as resource properties * </p> * <p> * <p/> * Each AppData key-value is grouped according to the appId of it * </p> * <p> * Resource 
path : /{AppData}/{appId}/[key][value] * </p> */ public class AppDataManagerImpl implements AppDataManager { private static Log log = LogFactory.getLog(AppDataManagerImpl.class); private Registry registry; /* Setting the Registry object */ public void setRegistry(Registry reg) { this.registry = reg; } /* The Registry object used throughout */ public Registry getRegistry() throws RegistryException { if (this.registry != null) { return this.registry; } else { return SocialDSComponent.getRegistry(); } } /** * Retrieves app data for the specified user list and group * * @param userIds A set of userIds whose app data to be retrieved * @param groupId The group * @param appId The app * @param fields The fields to return * @return A collection of appData for the given user list and group * @throws SocialDataException */ public Map<String, Map<String, String>> getPersonData(String[] userIds, String groupId, String appId, Set<String> fields) throws SocialDataException { List<String> userIdsToFetch = new ArrayList<String>(); // Check for groupId if (SocialImplConstants.GROUP_ID_FRIENDS.equals(groupId)) { for (String id : userIds) { String[] friendsList = new RelationshipManagerImpl().getRelationshipList(id); for (String friend : friendsList) { userIdsToFetch.add(friend); } } userIds = new String[userIdsToFetch.size()]; userIds = userIdsToFetch.toArray(userIds); } Map<String, Map<String, String>> personDataMap = new HashMap<String, Map<String, String>>(); for (String id : userIds) { try { Map<String, String> data = getAppData(id, appId, fields); if (data == null) { // log.error("No data found for the user " + id); return new HashMap<String, Map<String, String>>(); } personDataMap.put(id, data); } catch (RegistryException e) { log.error(e.getMessage(), e); throw new SocialDataException( "Error while retrieving app data with id " + appId + " for user " + id, e); } } return personDataMap; } /** * Deletes data for the specified user and group * * @param userId The userId of the 
person whose app data to be removed * @param groupId The group * @param appId The app * @param fields The fields to delete. Empty implies all * @throws SocialDataException */ public void deletePersonData(String userId, String groupId, String appId, Set<String> fields) throws SocialDataException { try { registry = getRegistry(); Resource appDataResource; String appDataResourcePath = SocialImplConstants.APP_DATA_REGISTRY_ROOT + SocialImplConstants.SEPARATOR + appId + SocialImplConstants.SEPARATOR + userId; if (registry.resourceExists(appDataResourcePath)) { appDataResource = registry.get(appDataResourcePath); for (String key : fields) { appDataResource.removeProperty(key); } registry.put(appDataResourcePath, appDataResource); } } catch (RegistryException e) { log.error(e.getMessage(), e); throw new SocialDataException( "Error while deleting app data with id " + appId + " for user " + userId, e); } } /** * Updates app data for the specified user and group with the new values * * @param userId The userId of the person whose app data to be modified * @param groupId The group * @param appId The app * @param fields The fields to update. 
Empty implies all * @param values The new values to set * @throws SocialDataException */ public void updatePersonData(String userId, String groupId, String appId, Set<String> fields, Map<String, String> values) throws SocialDataException { Set<String> fieldsToDelete = new TreeSet<String>(); // If a field is in the param list but not in the map, that means it is a delete // Retrieve the fields to delete for (String field : fields) { if (!values.containsKey(field)) { fieldsToDelete.add(field); } } List<String> userIdsToFetch = new ArrayList<String>(); // Check for groupId if (SocialImplConstants.GROUP_ID_FRIENDS.equals(groupId)) { String[] friendsList = new RelationshipManagerImpl().getRelationshipList(userId); for (String friend : friendsList) { userIdsToFetch.add(friend); } } else{ userIdsToFetch.add(userId); } for (String id : userIdsToFetch) { // Update the fields savePersonAppData(userId, appId, values, true); // Deletes the fields which are not in the Map but in the param list deletePersonData(userId, groupId, appId, fieldsToDelete); } } /** * Save app data for the specified user with the given values * * @param userId The userId of the person whose app data to be modified * @param appId The app * @param values The new values to set * @throws SocialDataException */ public void savePersonData(String userId, String appId, Map<String, String> values) throws SocialDataException { savePersonAppData(userId, appId, values, false); } /** * Adds/updates the Map of key-value pairs (appData) to the registry resource * * @param appDataResource The registry resource to add properties * @param values The Map of key-value appData * @param isUpdate true- if required to update the properties * false- if required to add the properties * @return The registry resource with the appData added as properties */ private Resource getAppDataAddedRegistryResource(Resource appDataResource, Map<String, String> values, boolean isUpdate) { for (Map.Entry<String, String> e : values.entrySet()) 
{ /* for each key in the map */ if (e.getValue() != null) { /* if (isUpdate) {*/ String oldValue = appDataResource.getProperty(e.getKey()); if (oldValue != null) { appDataResource.editPropertyValue(e.getKey(), oldValue, e.getValue()); /* edit properties to the resource */ } /* } else {*/ else { appDataResource.addProperty(e.getKey(), e.getValue()); /* add properties to the resource */ } /* }*/ } } return appDataResource; } /** * Persists/update person Appdata as registry reource * * @param userId The id of the person to whom the appData belongs to * @param appId The id of the application to which the appData belongs to * @param values The appData key-value pairs * @param isUpdate True, if required to update the properties. Else false * @throws SocialDataException */ private void savePersonAppData(String userId, String appId, Map<String, String> values, boolean isUpdate) throws SocialDataException { try { registry = getRegistry(); Resource appDataResource; String appDataResourcePath = SocialImplConstants.APP_DATA_REGISTRY_ROOT + SocialImplConstants.SEPARATOR + appId + SocialImplConstants.SEPARATOR + userId; if (registry.resourceExists(appDataResourcePath)) { appDataResource = registry.get(appDataResourcePath); } else { appDataResource = registry.newCollection(); } appDataResource = getAppDataAddedRegistryResource(appDataResource, values, isUpdate); registry.put(appDataResourcePath, appDataResource); } catch (RegistryException e) { log.error(e.getMessage(), e); throw new SocialDataException( "Error while saving app data with id " + appId + " for user " + userId, e); } } /** * Fetches AppData for the given userId,appId,fields collection * * @param userId The id of the person to fetch the AppData * @param appId The appId to of the AppData to fetch * @param fields The fields of AppData to fetch * @return A Map<String,String> of AppData values * @throws RegistryException */ private Map<String, String> getAppData(String userId, String appId, Set<String> fields) throws 
RegistryException { Map<String, String> appDataMap = new HashMap<String, String>(); String appDataPath = SocialImplConstants.APP_DATA_REGISTRY_ROOT + SocialImplConstants.SEPARATOR + appId + SocialImplConstants.SEPARATOR + userId; Resource appDataResource; registry = getRegistry(); if (registry.resourceExists(appDataPath)) { appDataResource = registry.get(appDataPath); if (fields != null && fields.size()>0) { for (String key : fields) { String value; if ((value = appDataResource.getProperty(key)) != null) { appDataMap.put(key, value); } } } else { //Handle when fields is null -> get All properties // TODO: refactor code Properties props = appDataResource.getProperties(); for (Enumeration propKeys = props.keys(); propKeys.hasMoreElements();) { String key = propKeys.nextElement().toString(); String propValue=props.get(key).toString(); //TODO: Re-write this code appDataMap.put(key, propValue.substring(1,propValue.length()-1)); } } } else { return null; } return appDataMap; } }
sistanlp/twitter4food
src/test/scala/org/clulab/twitter4food/TestAnalyzeResults.scala
package org.clulab.twitter4food class LabelCount { var humans = 0 var orgs = 0 var unknowns = 0 def +=(b: LabelCount): LabelCount = { this.humans += b.humans this.orgs += b.orgs this.unknowns += b.unknowns this } def total = humans + orgs + unknowns } object TestAnalyzeResults { def main(args: Array[String]): Unit = { val OPT = "opt/usersMidrangePredictedLabels_" val counts: LabelCount = new LabelCount() for(i <- 0 to 15) { val fin = OPT + i + ".txt" val results = scala.io.Source.fromFile(fin).getLines .map(x => { val s = x.split("\t"); (s(0), s(1)) }) .toArray counts += results.foldLeft(new LabelCount()) { (c, r) => r._2 match { case "human" => c.humans += 1; c case "org" => c.orgs += 1; c case "unknown" => c.unknowns += 1; c } } } print(s"numHumans : ${counts.humans}\t") print(s"numOrgs : ${counts.orgs}\t") print(s"numUnknowns : ${counts.unknowns}\t") println(s"total : ${counts.total}") val out = new java.text.DecimalFormat("#.###") print(s"humans : ${out.format(counts.humans.toDouble/counts.total*100)}%\t") print(s"orgs : ${out.format(counts.orgs.toDouble/counts.total*100)}%\t") println(s"unknowns : ${out.format(counts.unknowns.toDouble/counts.total*100)}%") } }
isabella232/modite-adventure
src/GameState/player/GPlayer.cpp
#include "Game.h" #include <GameState/status/GStatProcess.h> #include "GPlayer.h" #include "Items.h" TUint32 GPlayer::mLevel; TUint32 GPlayer::mNextLevel; TUint32 GPlayer::mExperience; TInt16 GPlayer::mHitPoints; TInt16 GPlayer::mMaxHitPoints; TBool GPlayer::mInvulnerable; TInt GPlayer::mAttackStrength; TInt32 GPlayer::mManaPotion; TInt32 GPlayer::mMaxMana; TBool GPlayer::mGameOver; GInventoryList GPlayer::mInventoryList; GPlayerProcess *GPlayer::mProcess; GPlayerSprite *GPlayer::mSprite; GGameState *GPlayer::mGameState; GEquipped GPlayer::mEquipped; GBossProcess *GPlayer::mActiveBoss; GAnchorSprite *GPlayer::mTargeted; GAnchorSprite *GPlayer::mClosestEnemy; void GPlayer::WriteToStream(BMemoryStream &stream) { stream.Write(&mAttackStrength, sizeof(mAttackStrength)); stream.Write(&mLevel, sizeof(mLevel)); stream.Write(&mNextLevel, sizeof(mNextLevel)); stream.Write(&mExperience, sizeof(mExperience)); stream.Write(&mHitPoints, sizeof(mHitPoints)); // stream.Write(&mMaxMana, sizeof(mMaxMana)); stream.Write(&mMaxHitPoints, sizeof(mMaxHitPoints)); stream.Write(&mManaPotion, sizeof(mManaPotion)); mInventoryList.WriteToStream(stream); // Equipped stream.Write(&mEquipped.mAmuletElement, sizeof(mEquipped.mAmuletElement)); stream.Write(&mEquipped.mRingElement, sizeof(mEquipped.mRingElement)); stream.Write(&mEquipped.mSpellBookElement, sizeof(mEquipped.mSpellBookElement)); TUint16 v; v = mEquipped.mGloves ? mEquipped.mGloves->mItemNumber : 0; stream.Write(&v, sizeof(v)); v = mEquipped.mBoots ? mEquipped.mBoots->mItemNumber : 0; stream.Write(&v, sizeof(v)); v = mEquipped.mSword ? 
mEquipped.mSword->mItemNumber : 0; stream.Write(&v, sizeof(v)); } void GPlayer::ReadFromStream(BMemoryStream &stream) { stream.Read(&mAttackStrength, sizeof(mAttackStrength)); stream.Read(&mLevel, sizeof(mLevel)); stream.Read(&mNextLevel, sizeof(mNextLevel)); stream.Read(&mExperience, sizeof(mExperience)); stream.Read(&mHitPoints, sizeof(mHitPoints)); // stream.Read(&mManaPotion, sizeof(mManaPotion)); stream.Read(&mMaxHitPoints, sizeof(mMaxHitPoints)); stream.Read(&mMaxMana, sizeof(mMaxMana)); mInventoryList.ReadFromStream(stream); // Equipped stream.Read(&mEquipped.mAmuletElement, sizeof(mEquipped.mAmuletElement)); stream.Read(&mEquipped.mRingElement, sizeof(mEquipped.mRingElement)); stream.Read(&mEquipped.mSpellBookElement, sizeof(mEquipped.mSpellBookElement)); TUint16 v; stream.Read(&v, sizeof(v)); mEquipped.mGloves = mInventoryList.FindItem(v); stream.Read(&v, sizeof(v)); mEquipped.mBoots = mInventoryList.FindItem(v); stream.Read(&v, sizeof(v)); mEquipped.mSword = mInventoryList.FindItem(v); } void GPlayer::Dump() { printf("GPlayer\n"); printf("%-32.32s: %d,%d/%d\n", "mLevel,mNextLevel, mExperience", mLevel, mNextLevel, mExperience); printf("%-32.32s: %d,%d/%d\n", "mHitPoints, mMaxHitPoints, mAttackStrength", mHitPoints, mMaxHitPoints, mAttackStrength); printf("%-32.32s: %d\n", "mManaPotion", mManaPotion); mInventoryList.Dump(); } TUint16 GPlayer::GetSpellSlot() { switch (GPlayer::mEquipped.mSpellBookElement) { case ELEMENT_WATER: return SPELL_WATER_SLOT; case ELEMENT_FIRE: return SPELL_FIRE_SLOT; case ELEMENT_EARTH: return SPELL_EARTH_SLOT; case ELEMENT_ENERGY: return SPELL_ELECTRICITY_SLOT; default: Panic("Invalid spell"); } return 0; }
dornerworks/camkes
apps/epit/components/Driver/src/driver.c
/* * Copyright 2017, Data61 * Commonwealth Scientific and Industrial Research Organisation (CSIRO) * ABN 41 687 119 230. * * This software may be distributed and modified according to the terms of * the BSD 2-Clause license. Note that NO WARRANTY is provided. * See "LICENSE_BSD2.txt" for details. * * @TAG(DATA61_BSD) */ #include <autoconf.h> #include <camkes.h> #include <stdio.h> #define KZM_EPIT_BASE_ADDR (unsigned int)mem #define KZM_EPIT_CTRL_ADDR (KZM_EPIT_BASE_ADDR + 0x00) #define KZM_EPIT_STAT_ADDR (KZM_EPIT_BASE_ADDR + 0x04) #define KZM_EPIT_LOAD_ADDR (KZM_EPIT_BASE_ADDR + 0x08) #define KZM_EPIT_COMP_ADDR (KZM_EPIT_BASE_ADDR + 0x0C) #define KZM_EPIT_CNT_ADDR (KZM_EPIT_BASE_ADDR + 0x10) #define REG_VAL(x) *((volatile uint32_t *)(x)) #define CTRL_EN (1 << 0) /* EPIT enable */ #define CTRL_ENMOD (1 << 1) /* EPIT enable mode */ #define CTRL_OCIEN (1 << 2) /* EPIT interrupt enable */ #define CTRL_RLD (1 << 3) /* Counter reload control */ #define CTRL_SWR (1 << 17) /* Software reset */ #define CTRL_CLKSRC_SHIFT (24) /* Clock source */ #define CTRL_PRESCALE_SHIFT (4) /* Prescalar */ //#define IPG_CLK_KHZ (53200) /* Clock frequency in KHz */ #define IPG_CLK_KHZ (66000) /* Clock frequency in KHz */ #define CLKSRC_IPG (0x1) /* IPG clock */ #define CLKSRC_IPG_HIGH (0x2) /* IPG clock high frequency */ #define CLKSRC_IPG_32K (0x3) /* IPG 32K clock */ void epit_init() { printf("EPIT init\n"); REG_VAL(KZM_EPIT_CTRL_ADDR) = 0; /* Disable EPIT and reset. */ REG_VAL(KZM_EPIT_CTRL_ADDR) = CTRL_SWR; /* Select Clock source */ REG_VAL(KZM_EPIT_CTRL_ADDR) = (CLKSRC_IPG << CTRL_CLKSRC_SHIFT); /* Reload from load register */ REG_VAL(KZM_EPIT_CTRL_ADDR) |= (CTRL_RLD | CTRL_ENMOD); /* Enable interrupt */ REG_VAL(KZM_EPIT_CTRL_ADDR) |= CTRL_OCIEN; } /* Set interrupt interval, in milliseconds. 
*/ void epit_set_interval(int interval) { REG_VAL(KZM_EPIT_LOAD_ADDR) = (IPG_CLK_KHZ * interval) ; REG_VAL(KZM_EPIT_COMP_ADDR) = 0; } void epit_start_timer(void) { REG_VAL(KZM_EPIT_STAT_ADDR) = 0x1; /* Enable timer */ REG_VAL(KZM_EPIT_CTRL_ADDR) |= CTRL_EN; } static int count = 0; void irq_handle(void) { /* Clear status bit. */ REG_VAL(KZM_EPIT_STAT_ADDR) = 0x1; irq_acknowledge(); printf("EPIT time out...%d\n", count++); } int run(void) { epit_init(); epit_set_interval(1000); epit_start_timer(); return 0; }
pborbas/commando
commando-remote/src/main/java/org/commando/remote/dispatcher/filter/circuit/CircuitBreakerState.java
<filename>commando-remote/src/main/java/org/commando/remote/dispatcher/filter/circuit/CircuitBreakerState.java<gh_stars>0 package org.commando.remote.dispatcher.filter.circuit; public enum CircuitBreakerState { CLOSED, OPEN, HALF_OPEN; }
ymaxgit/odp
platform/linux-generic/include/odp/api/sync.h
<reponame>ymaxgit/odp /* Copyright (c) 2013, Linaro Limited * All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ /** * @file * * ODP synchronisation */ #ifndef ODP_PLAT_SYNC_H_ #define ODP_PLAT_SYNC_H_ #ifdef __cplusplus extern "C" { #endif /** @ingroup odp_barrier * @{ */ #include <odp/api/plat/static_inline.h> #if ODP_ABI_COMPAT == 0 #include <odp/api/plat/sync_inlines.h> #endif /** * @} */ #include <odp/api/spec/sync.h> #ifdef __cplusplus } #endif #endif
oguzhand95/cerbos
internal/audit/conf_test.go
// Copyright 2021-2022 Zenauth Ltd. // SPDX-License-Identifier: Apache-2.0 package audit_test import ( "testing" "github.com/stretchr/testify/require" "github.com/cerbos/cerbos/internal/audit" "github.com/cerbos/cerbos/internal/config" ) func TestLenientConfigLoad(t *testing.T) { conf := map[string]any{ "audit": map[string]any{ "enabled": true, "backend": "local", "local": map[string]any{ "storagePath": t.TempDir(), }, "wibble": "wobble", }, } require.NoError(t, config.LoadMap(conf)) c := &audit.Conf{} err := config.GetSection(c) require.NoError(t, err) require.True(t, c.Enabled) require.Equal(t, "local", c.Backend) }
Fei-Lu/JavaSchool
src/xuebo/PersonDemo.java
<reponame>Fei-Lu/JavaSchool /* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package xuebo; import java.time.LocalDate; import java.util.ArrayList; import java.util.List; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Predicate; /** * * @author <NAME> */ public class PersonDemo { public static void tests () { int size = 100; String ss = "ABCDEFGHIGKLMNOPQRSTUVWXYZ"; List<Person> roster = new ArrayList(); for (int i = 0; i < size; i++) { StringBuilder sb = new StringBuilder(); sb.append(ss.charAt((int)(Math.random()*ss.length()))).append(ss.charAt((int)(Math.random()*ss.length()))).append(ss.charAt((int)(Math.random()*ss.length()))); String name = sb.toString(); int age = (int)(Math.random()*100); Person.Sex sex = Math.random()>0.5? Person.Sex.MALE : Person.Sex.FEMALE; roster.add(new Person (name, age, sex, name+"@<EMAIL>")); } printPersonsOlderThan (roster, 30); printPersonsWithinAgeRange (roster, 20, 30); printPersons(roster, new CheckPersonEligibleForSelectiveService()); printPersonsWithAnonymousClass (roster); printPersonsWithLambdaExpression (roster); printPersonsWithPredicateImpl(roster, new CheckPersonUtilFunctionForSelectiveService()); printPersonsWithPredicateFunction (roster); processPersionsWithTwoFunction (roster); processPersionsWithThreeFunction (roster); processPersionsWithAgregateFunction (roster); processPersionsWithAgregateFunctionMultiThreads (roster); } public static void processPersionsWithAgregateFunctionMultiThreads (List<Person> roster) { roster.parallelStream().filter(p -> p.getGender() == Person.Sex.MALE && p.getAge() >= 18 && p.getAge() <= 25) .map(p -> p.getEmailAddress()) .forEach(email -> System.out.println(email)); } public static void processPersionsWithAgregateFunction (List<Person> roster) { roster.stream().filter(p -> p.getGender() == Person.Sex.MALE 
&& p.getAge() >= 18 && p.getAge() <= 25) .map(p -> p.getEmailAddress()) .forEach(email -> System.out.println(email)); } public static void processPersonsWithFunction(List<Person> roster, Predicate<Person> tester, Function<Person, String> mapper, Consumer<String> block) { for (Person p : roster) { if (tester.test(p)) { String data = mapper.apply(p); block.accept(data); } } } public static void processPersionsWithThreeFunction (List<Person> roster) { processPersonsWithFunction( roster, p -> p.getGender() == Person.Sex.MALE && p.getAge() >= 18 && p.getAge() <= 25, p -> p.getEmailAddress(), email -> System.out.println(email) ); } public static void processPersons(List<Person> roster, Predicate<Person> tester, Function<Person, String> mapper, Consumer<String> block) { for (Person p : roster) { if (tester.test(p)) { String data = mapper.apply(p); block.accept(data); } } } public static void processPersionsWithTwoFunction (List<Person> roster) { processPersons( roster, p -> p.getGender() == Person.Sex.MALE && p.getAge() >= 18 && p.getAge() <= 25, p -> p.printPerson() ); } public static void processPersons(List<Person> roster, Predicate<Person> tester, Consumer<Person> block) { for (Person p : roster) { if (tester.test(p)) { block.accept(p); } } } public static void printPersonsOlderThan(List<Person> roster, int age) { for (Person p : roster) { if (p.getAge() >= age) { p.printPerson(); } } } public static void printPersonsWithinAgeRange(List<Person> roster, int low, int high) { for (Person p : roster) { if (low <= p.getAge() && p.getAge() < high) { p.printPerson(); } } } public static void printPersons(List<Person> roster, CheckPerson tester) { for (Person p : roster) { if (tester.test(p)) { p.printPerson(); } } } public static void printPersonsWithAnonymousClass (List<Person> roster) { printPersons( roster, new CheckPerson() { @Override public boolean test(Person p) { return p.getGender() == Person.Sex.MALE && p.getAge() >= 18 && p.getAge() <= 25; } } ); } public static 
void printPersonsWithLambdaExpression (List<Person> roster) { printPersons( roster, (Person p) -> p.getGender() == Person.Sex.MALE && p.getAge() >= 18 && p.getAge() <= 25 ); } public static void printPersonsWithPredicateImpl(List<Person> roster, Predicate<Person> tester) { for (Person p : roster) { if (tester.test(p)) { p.printPerson(); } } } public static void printPersonsWithPredicateFunction (List<Person> roster) { printPersonsWithPredicateImpl (roster, p -> p.getGender() == Person.Sex.MALE && p.getAge() >= 18 && p.getAge() <= 25); //Why does the following work? // printPersons (roster, p -> p.getGender() == Person.Sex.MALE && p.getAge() >= 18 && p.getAge() <= 25); } } class CheckPersonUtilFunctionForSelectiveService implements Predicate<Person> { @Override public boolean test(Person p) { return p.gender == Person.Sex.MALE && p.getAge() >= 18 && p.getAge() <= 25; } } class CheckPersonEligibleForSelectiveService implements CheckPerson { @Override public boolean test(Person p) { return p.gender == Person.Sex.MALE && p.getAge() >= 18 && p.getAge() <= 25; } } interface CheckPerson { boolean test(Person p); } class Person { public enum Sex { MALE, FEMALE } String name; int age; Sex gender; String emailAddress; public Person (String name, int age, Sex gender, String emailAddress) { this.name = name; this.age = age; this.gender = gender; this.emailAddress = emailAddress; } public int getAge() { return age; } public void printPerson() { System.out.println("Name: " + name + ", Age: " + String.valueOf(age) + ", Gender: " + gender); } public Sex getGender() { return gender; } public String getEmailAddress () { return this.emailAddress; } } class LambdaCalculator { interface IntegerMath { int operation(int a, int b); } public int operateBinary(int a, int b, IntegerMath op) { return op.operation(a, b); } public static void main(String... 
args) { LambdaCalculator myApp = new LambdaCalculator(); IntegerMath addition = (a, b) -> a + b; IntegerMath subtraction = (a, b) -> a - b; System.out.println("40 + 2 = " + myApp.operateBinary(40, 2, addition)); System.out.println("20 - 10 = " + myApp.operateBinary(20, 10, subtraction)); } } class LambdaScopeClass { public int x = 0; class FirstLevel { public int x = 1; void methodInFirstLevel(int x) { Consumer<Integer> myConsumer = (y) -> { System.out.println("x = " + x); // Statement A System.out.println("y = " + y); System.out.println("this.x = " + this.x); System.out.println("LambdaScopeClass.this.x = " + LambdaScopeClass.this.x); }; myConsumer.accept(x); } } }
mmore500/dishtiny
include/dish2/peripheral/readable_state/introspective_state/raw_introspective_state/PhylogeneticRootView.hpp
#pragma once #ifndef DISH2_PERIPHERAL_READABLE_STATE_INTROSPECTIVE_STATE_RAW_INTROSPECTIVE_STATE_PHYLOGENETICROOTVIEW_HPP_INCLUDE #define DISH2_PERIPHERAL_READABLE_STATE_INTROSPECTIVE_STATE_RAW_INTROSPECTIVE_STATE_PHYLOGENETICROOTVIEW_HPP_INCLUDE #include "../../../../../../third-party/conduit/include/uitsl/datastructs/PodLeafNode.hpp" #include "../../../../../../third-party/conduit/include/uitsl/meta/TypeName.hpp" #include "../../../../../../third-party/signalgp-lite/include/sgpl/utility/CountingIterator.hpp" #include "../../../../config/cfg.hpp" namespace dish2 { /** * What is this cell's phylogenetic root ID? * * (Which initially-generated ancestor is this cell descended from?) */ struct PhylogeneticRootView : public uitsl::PodLeafNode< size_t > { // inherit constructors using parent_t = uitsl::PodLeafNode<size_t>; using parent_t::parent_t; }; } // namespace dish2 namespace uitsl { UITSL_ENABLE_TYPENAME( dish2::PhylogeneticRootView ); } // namespace uitsl #endif // #ifndef DISH2_PERIPHERAL_READABLE_STATE_INTROSPECTIVE_STATE_RAW_INTROSPECTIVE_STATE_PHYLOGENETICROOTVIEW_HPP_INCLUDE
mdejean/goodservice-v2
app/workers/route_processor_worker.rb
<gh_stars>1-10 class RouteProcessorWorker include Sidekiq::Worker sidekiq_options retry: false, queue: 'default' def perform(route_id, timestamp) marshaled_trips = RedisStore.route_trips(route_id, timestamp) trips = Marshal.load(marshaled_trips) if marshaled_trips if !trips raise "Error: Trips for #{route_id} at #{Time.zone.at(timestamp)} not found" end RouteProcessor.process_route(route_id, trips, timestamp) end end
Hivemapper/HM-colony-draco
point_cloud/point_cloud.h
// Copyright 2016 The Draco Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // #ifndef DRACO_POINT_CLOUD_POINT_CLOUD_H_ #define DRACO_POINT_CLOUD_POINT_CLOUD_H_ #include "point_cloud/point_attribute.h" namespace draco { // PointCloud is a collection of n-dimensional points that are described by a // set of PointAttributes that can represent data such as positions or colors // of individual points (see point_attribute.h). class PointCloud { public: PointCloud(); virtual ~PointCloud() = default; // Returns the number of named attributes of a given type. int32_t NumNamedAttributes(GeometryAttribute::Type type) const; // Returns attribute id of the first named attribute with a given type or -1 // when the attribute is not used by the point cloud. int32_t GetNamedAttributeId(GeometryAttribute::Type type) const; // Returns the id of the i-th named attribute of a given type. int32_t GetNamedAttributeId(GeometryAttribute::Type type, int i) const; // Returns the first named attribute of a given type or nullptr if the // attribute is not used by the point cloud. const PointAttribute *GetNamedAttribute(GeometryAttribute::Type type) const; // Returns the i-th named attribute of a given type. const PointAttribute *GetNamedAttribute(GeometryAttribute::Type type, int i) const; // Returns the named attribute of a given custom id. 
const PointAttribute *GetNamedAttributeByCustomId( GeometryAttribute::Type type, uint16_t id) const; int32_t num_attributes() const { return attributes_.size(); } const PointAttribute *attribute(int32_t att_id) const { DCHECK_LE(0, att_id); DCHECK_LT(att_id, static_cast<int32_t>(attributes_.size())); return attributes_[att_id].get(); } // Returned attribute can be modified, but it's caller's responsibility to // maintain the attribute's consistency with draco::PointCloud. PointAttribute *attribute(int32_t att_id) { DCHECK_LE(0, att_id); DCHECK_LT(att_id, static_cast<int32_t>(attributes_.size())); return attributes_[att_id].get(); } // Adds a new attribute to the point cloud. // Returns the attribute id. int AddAttribute(std::unique_ptr<PointAttribute> pa); // Creates and adds a new attribute to the point cloud. The attribute has // properties derived from the provided GeometryAttribute |att|. // If |identity_mapping| is set to true, the attribute will use identity // mapping between point indices and attribute value indices (i.e., each point // has a unique attribute value). // If |identity_mapping| is false, the mapping between point indices and // attribute value indices is set to explicit, and it needs to be initialized // manually using the PointAttribute::SetPointMapEntry() method. // |num_attribute_values| can be used to specify the number of attribute // values that are going to be stored in the newly created attribute. // Returns attribute id of the newly created attribute. int AddAttribute(const GeometryAttribute &att, bool identity_mapping, AttributeValueIndex::ValueType num_attribute_values); // Assigns an attribute id to a given PointAttribute. If an attribute with the // same attribute id already exists, it is deleted. virtual void SetAttribute(int att_id, std::unique_ptr<PointAttribute> pa); // Deduplicates all attribute values (all attribute entries with the same // value are merged into a single entry). 
virtual bool DeduplicateAttributeValues(); // Removes duplicate point ids (two point ids are duplicate when all of their // attributes are mapped to the same entry ids). virtual void DeduplicatePointIds(); // Returns the number of n-dimensional points stored within the point cloud. size_t num_points() const { return num_points_; } // Sets the number of points. It's the caller's responsibility to ensure the // new number is valid with respect to the PointAttributes stored in the point // cloud. void set_num_points(PointIndex::ValueType num) { num_points_ = num; } protected: // Applies id mapping of deduplicated points (called by DeduplicatePointIds). virtual void ApplyPointIdDeduplication( const IndexTypeVector<PointIndex, PointIndex> &id_map, const std::vector<PointIndex> &unique_point_ids); private: // Attributes describing the point cloud. std::vector<std::unique_ptr<PointAttribute>> attributes_; // Ids of named attributes of the given type. std::vector<int32_t> named_attribute_index_[GeometryAttribute::NAMED_ATTRIBUTES_COUNT]; // The number of n-dimensional points. All point attribute values are stored // in corresponding PointAttribute instances in the |attributes_| array. PointIndex::ValueType num_points_; friend struct PointCloudHasher; }; // Functor for computing a hash from data stored within a point cloud. // Note that this can be quite slow. Two point clouds will have the same hash // only when all points have the same order and when all attribute values are // exactly the same. struct PointCloudHasher { size_t operator()(const PointCloud &pc) const { size_t hash = pc.num_points_; hash = HashCombine(pc.attributes_.size(), hash); for (int i = 0; i < GeometryAttribute::NAMED_ATTRIBUTES_COUNT; ++i) { hash = HashCombine(pc.named_attribute_index_[i].size(), hash); for (int j = 0; j < static_cast<int>(pc.named_attribute_index_[i].size()); ++j) { hash = HashCombine(pc.named_attribute_index_[i][j], hash); } } // Hash attributes. 
for (int i = 0; i < static_cast<int>(pc.attributes_.size()); ++i) { PointAttributeHasher att_hasher; hash = HashCombine(att_hasher(*pc.attributes_[i]), hash); } return hash; } }; } // namespace draco #endif // DRACO_POINT_CLOUD_POINT_CLOUD_H_
holajiawei/js-ipfs
packages/ipfs-message-port-client/test/interface.core.js
/* eslint-env mocha, browser */ 'use strict' const { createSuite } = require('interface-ipfs-core/src/utils/suite') exports.core = createSuite({ add: require('interface-ipfs-core/src/add'), cat: require('interface-ipfs-core/src/cat') })
istudko/pie
functions/filter_not.go
<filename>functions/filter_not.go package functions // FilterNot works the same as Filter, with a negated condition. That is, it will // return a new slice only containing the elements that returned false from the // condition. The returned slice may contain zero elements (nil). func (ss SliceType) FilterNot(condition func(ElementType) bool) (ss2 SliceType) { for _, s := range ss { if !condition(s) { ss2 = append(ss2, s) } } return }
yannzido/new
src/proxy/src/test/java/ee/ria/xroad/proxy/testsuite/testcases/CDATAMessage.java
/** * The MIT License * Copyright (c) 2018 Estonian Information System Authority (RIA), * Nordic Institute for Interoperability Solutions (NIIS), Population Register Centre (VRK) * Copyright (c) 2015-2017 Estonian Information System Authority (RIA), Population Register Centre (VRK) * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package ee.ria.xroad.proxy.testsuite.testcases; import ee.ria.xroad.common.message.SoapHeader; import ee.ria.xroad.common.message.SoapMessageImpl; import ee.ria.xroad.proxy.testsuite.Message; import ee.ria.xroad.proxy.testsuite.MessageTestCase; import javax.xml.namespace.QName; import javax.xml.soap.SOAPElement; /** * Client sends and receives a message with a CDATA block. * Result: all CDATA blocks remain intact. */ public class CDATAMessage extends MessageTestCase { private static final String CDATA = "<a><b><c>12345678</c></b></a>"; /** * Constructs the test case. 
*/ public CDATAMessage() { requestFileName = "cdata.query"; responseFile = "cdata.answer"; } @Override protected void onServiceReceivedRequest(Message receivedRequest) throws Exception { validateCDATA(receivedRequest); } @Override protected void validateNormalResponse(Message receivedResponse) throws Exception { validateCDATA(receivedResponse); } private void validateCDATA(Message receivedResponse) throws Exception { SoapMessageImpl soap = (SoapMessageImpl) receivedResponse.getSoap(); SOAPElement cdata = ((SOAPElement)soap.getSoap().getSOAPHeader() .getChildElements(new QName(SoapHeader.NS_XROAD, "xml")) .next()); if (!cdata.getTextContent().equals(CDATA)) { throw new Exception("CDATA block is incorrect '" + cdata.getTextContent() + "', should be " + CDATA); } } }
xiaozhu36/alibaba-cloud-sdk-go
services/democenter/describe_demo_access_token.go
package democenter //Licensed under the Apache License, Version 2.0 (the "License"); //you may not use this file except in compliance with the License. //You may obtain a copy of the License at // //http://www.apache.org/licenses/LICENSE-2.0 // //Unless required by applicable law or agreed to in writing, software //distributed under the License is distributed on an "AS IS" BASIS, //WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //See the License for the specific language governing permissions and //limitations under the License. // // Code generated by Alibaba Cloud SDK Code Generator. // Changes may cause incorrect behavior and will be lost if the code is regenerated. import ( "github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests" "github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses" ) // DescribeDemoAccessToken invokes the democenter.DescribeDemoAccessToken API synchronously // api document: https://help.aliyun.com/api/democenter/describedemoaccesstoken.html func (client *Client) DescribeDemoAccessToken(request *DescribeDemoAccessTokenRequest) (response *DescribeDemoAccessTokenResponse, err error) { response = CreateDescribeDemoAccessTokenResponse() err = client.DoAction(request, response) return } // DescribeDemoAccessTokenWithChan invokes the democenter.DescribeDemoAccessToken API asynchronously // api document: https://help.aliyun.com/api/democenter/describedemoaccesstoken.html // asynchronous document: https://help.aliyun.com/document_detail/66220.html func (client *Client) DescribeDemoAccessTokenWithChan(request *DescribeDemoAccessTokenRequest) (<-chan *DescribeDemoAccessTokenResponse, <-chan error) { responseChan := make(chan *DescribeDemoAccessTokenResponse, 1) errChan := make(chan error, 1) err := client.AddAsyncTask(func() { defer close(responseChan) defer close(errChan) response, err := client.DescribeDemoAccessToken(request) if err != nil { errChan <- err } else { responseChan <- response } }) if err != nil { errChan <- err 
close(responseChan) close(errChan) } return responseChan, errChan } // DescribeDemoAccessTokenWithCallback invokes the democenter.DescribeDemoAccessToken API asynchronously // api document: https://help.aliyun.com/api/democenter/describedemoaccesstoken.html // asynchronous document: https://help.aliyun.com/document_detail/66220.html func (client *Client) DescribeDemoAccessTokenWithCallback(request *DescribeDemoAccessTokenRequest, callback func(response *DescribeDemoAccessTokenResponse, err error)) <-chan int { result := make(chan int, 1) err := client.AddAsyncTask(func() { var response *DescribeDemoAccessTokenResponse var err error defer close(result) response, err = client.DescribeDemoAccessToken(request) callback(response, err) result <- 1 }) if err != nil { defer close(result) callback(nil, err) result <- 0 } return result } // DescribeDemoAccessTokenRequest is the request struct for api DescribeDemoAccessToken type DescribeDemoAccessTokenRequest struct { *requests.RpcRequest DemoAccessToken string `position:"Body" name:"DemoAccessToken"` } // DescribeDemoAccessTokenResponse is the response struct for api DescribeDemoAccessToken type DescribeDemoAccessTokenResponse struct { *responses.BaseResponse RequestId string `json:"RequestId" xml:"RequestId"` DemoAccessToken string `json:"DemoAccessToken" xml:"DemoAccessToken"` ExpiredDate string `json:"ExpiredDate" xml:"ExpiredDate"` OpenUserId string `json:"OpenUserId" xml:"OpenUserId"` DemoTrialPage string `json:"DemoTrialPage" xml:"DemoTrialPage"` DemoDetailPage string `json:"DemoDetailPage" xml:"DemoDetailPage"` Edition string `json:"Edition" xml:"Edition"` } // CreateDescribeDemoAccessTokenRequest creates a request to invoke DescribeDemoAccessToken API func CreateDescribeDemoAccessTokenRequest() (request *DescribeDemoAccessTokenRequest) { request = &DescribeDemoAccessTokenRequest{ RpcRequest: &requests.RpcRequest{}, } request.InitWithApiInfo("DemoCenter", "2020-01-21", "DescribeDemoAccessToken", "", "") return } // 
CreateDescribeDemoAccessTokenResponse creates a response to parse from DescribeDemoAccessToken response func CreateDescribeDemoAccessTokenResponse() (response *DescribeDemoAccessTokenResponse) { response = &DescribeDemoAccessTokenResponse{ BaseResponse: &responses.BaseResponse{}, } return }
mghgroup/Glide-Browser
content/browser/accessibility/browser_accessibility_state_impl.h
<gh_stars>0 // Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CONTENT_BROWSER_ACCESSIBILITY_BROWSER_ACCESSIBILITY_STATE_IMPL_H_ #define CONTENT_BROWSER_ACCESSIBILITY_BROWSER_ACCESSIBILITY_STATE_IMPL_H_ #include <vector> #include "base/compiler_specific.h" #include "base/macros.h" #include "base/memory/singleton.h" #include "build/build_config.h" #include "components/metrics/metrics_provider.h" #include "content/public/browser/browser_accessibility_state.h" #include "ui/accessibility/ax_mode.h" #include "ui/accessibility/ax_mode_observer.h" #if defined(OS_WIN) #include <memory> #include "ui/gfx/win/singleton_hwnd_observer.h" #endif namespace content { // The BrowserAccessibilityState class is used to determine if Chrome should be // customized for users with assistive technology, such as screen readers. We // modify the behavior of certain user interfaces to provide a better experience // for screen reader users. The way we detect a screen reader program is // different for each platform. // // Screen Reader Detection // (1) On windows many screen reader detection mechinisms will give false // positives like relying on the SPI_GETSCREENREADER system parameter. In Chrome // we attempt to dynamically detect a MSAA client screen reader by calling // NotifiyWinEvent in NativeWidgetWin with a custom ID and wait to see if the ID // is requested by a subsequent call to WM_GETOBJECT. // (2) On mac we detect dynamically if VoiceOver is running. We rely upon the // undocumented accessibility attribute @"AXEnhancedUserInterface" which is set // when VoiceOver is launched and unset when VoiceOver is closed. This is an // improvement over reading defaults preference values (which has no callback // mechanism). 
class CONTENT_EXPORT BrowserAccessibilityStateImpl
    : public base::RefCountedThreadSafe<BrowserAccessibilityStateImpl>,
      public BrowserAccessibilityState,
      public ui::AXModeObserver {
 public:
  BrowserAccessibilityStateImpl();

  // Leaky singleton accessor (see DefaultSingletonTraits friend below).
  static BrowserAccessibilityStateImpl* GetInstance();

  // BrowserAccessibilityState overrides:
  void EnableAccessibility() override;
  void DisableAccessibility() override;
  bool IsRendererAccessibilityEnabled() override;
  ui::AXMode GetAccessibilityMode() override;
  void AddAccessibilityModeFlags(ui::AXMode mode) override;
  void RemoveAccessibilityModeFlags(ui::AXMode mode) override;
  void ResetAccessibilityMode() override;
  void OnScreenReaderDetected() override;
  bool IsAccessibleBrowser() override;
  void AddUIThreadHistogramCallback(base::OnceClosure callback) override;
  void AddOtherThreadHistogramCallback(base::OnceClosure callback) override;
  void UpdateHistogramsForTesting() override;
  void SetCaretBrowsingState(bool enabled) override;

  // Returns whether caret browsing is enabled for the most recently
  // used profile.
  bool IsCaretBrowsingEnabled() const;

  // ui::AXModeObserver override:
  void OnAXModeAdded(ui::AXMode mode) override;

  // Fire frequent metrics signals to ensure users keeping browser open multiple
  // days are counted each day, not only at launch. This is necessary because
  // UMA only aggregates uniques on a daily basis.
  void UpdateUniqueUserHistograms();

  // Accessibility objects can have the "hot tracked" state set when
  // the mouse is hovering over them, but this makes tests flaky because
  // the test behaves differently when the mouse happens to be over an
  // element. This is a global switch to not use the "hot tracked" state
  // in a test.
  void set_disable_hot_tracking_for_testing(bool disable_hot_tracking) {
    disable_hot_tracking_ = disable_hot_tracking;
  }
  bool disable_hot_tracking_for_testing() const {
    return disable_hot_tracking_;
  }

 private:
  friend class base::RefCountedThreadSafe<BrowserAccessibilityStateImpl>;
  friend struct base::DefaultSingletonTraits<BrowserAccessibilityStateImpl>;

  // Resets accessibility_mode_ to the default value.
  void ResetAccessibilityModeValue();

  // Called a short while after startup to allow time for the accessibility
  // state to be determined. Updates histograms with the current state.
  // Two variants - one for things that must be run on the UI thread, and
  // another that can be run on another thread.
  void UpdateHistogramsOnUIThread();
  void UpdateHistogramsOnOtherThread();

  // Leaky singleton, destructor generally won't be called.
  ~BrowserAccessibilityStateImpl() override;

  // Per-platform hooks; implementations live in platform-specific sources.
  void PlatformInitialize();
  void UpdatePlatformSpecificHistogramsOnUIThread();
  void UpdatePlatformSpecificHistogramsOnOtherThread();

  // The currently active accessibility mode flags.
  ui::AXMode accessibility_mode_;

  // Callbacks registered via Add*HistogramCallback(), run when the
  // corresponding UpdateHistogramsOn*Thread() fires.
  std::vector<base::OnceClosure> ui_thread_histogram_callbacks_;
  std::vector<base::OnceClosure> other_thread_histogram_callbacks_;

  // See set_disable_hot_tracking_for_testing().
  bool disable_hot_tracking_;

  // Keeps track of whether caret browsing is enabled for the most
  // recently used profile.
  bool caret_browsing_enabled_ = false;

#if defined(OS_WIN)
  // Only used on Windows
  std::unique_ptr<gfx::SingletonHwndObserver> singleton_hwnd_observer_;
#endif

  DISALLOW_COPY_AND_ASSIGN(BrowserAccessibilityStateImpl);
};

}  // namespace content

#endif  // CONTENT_BROWSER_ACCESSIBILITY_BROWSER_ACCESSIBILITY_STATE_IMPL_H_
jasonTangxd/clockwork
clockwork-common/src/main/java/com/creditease/adx/clockwork/common/entity/RelationFatherChildren.java
package com.creditease.adx.clockwork.common.entity; /** * @ Author :XuanDongTang * @ Date :Created in 下午7:51 2020/12/6 * @ Description: * @ Modified By: */ public class RelationFatherChildren { private Integer fatherTaskId; private String taskIds; public Integer getFatherTaskId() { return fatherTaskId; } public void setFatherTaskId(Integer fatherTaskId) { this.fatherTaskId = fatherTaskId; } public String getTaskIds() { return taskIds; } public void setTaskIds(String taskIds) { this.taskIds = taskIds; } }
OfficeGlobal/msgraph-sdk-java
src/main/java/com/microsoft/graph/requests/extensions/IUserWithReferenceRequest.java
// ------------------------------------------------------------------------------ // Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information. // ------------------------------------------------------------------------------ package com.microsoft.graph.requests.extensions; import com.microsoft.graph.concurrency.*; import com.microsoft.graph.core.*; import com.microsoft.graph.models.extensions.*; import com.microsoft.graph.models.generated.*; import com.microsoft.graph.http.*; import com.microsoft.graph.requests.extensions.*; import com.microsoft.graph.options.*; import com.microsoft.graph.serializer.*; import java.util.Arrays; import java.util.EnumSet; // **NOTE** This file was generated by a tool and any changes will be overwritten. /** * The interface for the User With Reference Request. */ public interface IUserWithReferenceRequest extends IHttpRequest { void post(final User newUser, final IJsonBackedObject payload, final ICallback<User> callback); User post(final User newUser, final IJsonBackedObject payload) throws ClientException; void get(final ICallback<User> callback); User get() throws ClientException; void delete(final ICallback<User> callback); void delete() throws ClientException; void patch(final User sourceUser, final ICallback<User> callback); User patch(final User sourceUser) throws ClientException; IUserWithReferenceRequest select(final String value); IUserWithReferenceRequest expand(final String value); }
feiwu7457/jipaipai
xcxzhibozujian/pages/wxpay/wxpay.js
<gh_stars>0
// pages/wxpay/wxpay.js
// WeChat mini-program payment page: on load it immediately calls
// wx.requestPayment with the payment parameters carried in the page options,
// then navigates back regardless of success, cancel, or failure.
Page({

  /**
   * Initial data for the page (none needed; this page only triggers payment).
   */
  data: {

  },

  /**
   * Lifecycle callback -- fired when the page loads.
   */
  onLoad: function (options) {
    var that = this;
    // Kick off WeChat Pay as soon as the page loads.
    // NOTE(review): in principle the parameters carried in `options`
    // should be validated before use -- TODO confirm upstream guarantees.
    that.requestPayment(options);
  },

  // Requests WeChat payment using the parameters carried in `obj`.
  requestPayment: function (obj) {
    //console.log(obj);
    // Invoke the WeChat payment API.
    wx.requestPayment({
      // Payment parameters: timestamp, nonce, prepay id, signature type and signature.
      'timeStamp': obj.timestamp,
      'nonceStr': obj.nonceStr,
      'package': 'prepay_id=' + obj.prepay_id,
      'signType': obj.signType,
      'paySign': obj.paySign,
      // Callback notified when the mini-program payment succeeds.
      'success': function (res) {
        console.log('ok');
        // Payment succeeded: return to the previous page.
        wx.navigateBack();
      },
      // Callback notified when the mini-program payment fails.
      'fail': function (res) {
        if (res.errMsg === "requestPayment:fail cancel") {
          // The user cancelled the payment.
          wx.navigateBack();
          return
        }
        if (res.errMsg === "requestPayment:fail") {
          console.log(res.err_desc) // error details
          wx.navigateBack();
          return
        }
      }
    })
  },

  /**
   * Lifecycle callback -- fired after the first render completes.
   */
  onReady: function () {

  },

  /**
   * Lifecycle callback -- fired when the page is shown.
   */
  onShow: function () {

  },

  /**
   * Lifecycle callback -- fired when the page is hidden.
   */
  onHide: function () {

  },

  /**
   * Lifecycle callback -- fired when the page is unloaded.
   */
  onUnload: function () {

  },

  /**
   * Event handler -- fired on the user's pull-down refresh gesture.
   */
  onPullDownRefresh: function () {

  },

  /**
   * Event handler -- fired when the page is scrolled to the bottom.
   */
  onReachBottom: function () {

  },

  /**
   * Fired when the user taps the share button in the top-right corner.
   */
  onShareAppMessage: function () {

  }
})
businesscode/BCD-UI
Server/src/main/java/de/businesscode/bcdui/web/wrs/ExportServlet.java
<reponame>businesscode/BCD-UI<gh_stars>0 /* Copyright 2010-2019 BusinessCode GmbH, Germany Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package de.businesscode.bcdui.web.wrs; import java.util.Set; import javax.servlet.ServletConfig; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.LogManager; import org.apache.shiro.SecurityUtils; import de.businesscode.bcdui.subjectsettings.SecurityHelper; /** * Servlet providing the base class for ExcelExportServlet, SylkServlet and CsvServlet. * For now, the class handles the maxRowsDefault parameter that's common to all these servlets. * It provides method getMaxRows that can be overwritten for a custom logic on the export rows limit. 
*/ public class ExportServlet extends HttpServlet { private static final long serialVersionUID = 1L; private Logger log = LogManager.getLogger(getClass()); protected int maxRowsDefault = 30000; // Default @Override public void init(ServletConfig config) throws ServletException { super.init(config); try { maxRowsDefault = Integer.parseInt(config.getInitParameter("MaxRows")); } catch(Exception e) { if( config.getInitParameter("MaxRows") != null ) { log.warn("Servlet init parameter 'MaxRows' for "+getServletName()+" could not be parsed"); } } log.info("Using "+maxRowsDefault+" for export MaxRows, unless overwritten on user level."); } /** * Returns the maxRows value from subject setting bcdExport:maxRows * @param defValue fallback default value * @return either the given default value or the value coming from subjectSettings (if it's a valid integer) */ public int getMaxRows( HttpServletRequest request, int defValue) { int maxRows = defValue; try { if (SecurityUtils.getSubject() != null && SecurityUtils.getSubject().isAuthenticated()) { Set<String> perms = SecurityHelper.getPermissions(SecurityUtils.getSubject(), getMaxRowsUserPermissionType()); maxRows = perms.iterator().hasNext() ? Integer.parseInt(perms.iterator().next()) : defValue; } } catch (Exception e) { // ignore and return defValue } return maxRows; } protected String getMaxRowsUserPermissionType() { return "bcdExport:maxRows"; } }
Justineo/vue-awesome-material
icons/local_library_two_tone.js
import Icon from 'vue-awesome/components/Icon'

// Registers the Material Design "local_library" icon (two-tone variant) with
// the vue-awesome Icon component. `raw` holds the verbatim SVG path markup;
// `width`/`height` describe the icon's 24x24 coordinate space.
Icon.register({
  local_library_two_tone: {
    raw: '<path fill="none" d="M0 0h24v24H0V0z"/><path opacity=".3" d="M19 17.13v-6.95c-2.1.38-4.05 1.35-5.64 2.83L12 14.28l-1.36-1.27A11.18 11.18 0 005 10.18v6.95c2.53.34 4.94 1.3 7 2.83 2.07-1.52 4.47-2.49 7-2.83z"/><circle opacity=".3" cx="12" cy="5" r="2"/><path d="M16 5c0-2.21-1.79-4-4-4S8 2.79 8 5s1.79 4 4 4 4-1.79 4-4zm-6 0c0-1.1.9-2 2-2s2 .9 2 2-.9 2-2 2-2-.9-2-2z"/><path d="M3 19c3.48 0 6.64 1.35 9 3.55 2.36-2.19 5.52-3.55 9-3.55V8c-3.48 0-6.64 1.35-9 3.55C9.64 9.35 6.48 8 3 8v11zm2-8.82c2.1.38 4.05 1.35 5.64 2.83L12 14.28l1.36-1.27A11.18 11.18 0 0119 10.18v6.95c-2.53.34-4.93 1.3-7 2.82a15.2 15.2 0 00-7-2.83v-6.94z"/>',
    width: '24',
    height: '24'
  }
})
Codernob/problem-solving-codes
code/uva/10370.cpp
#include <cstdio>
#include <iostream>
#include <vector>

using namespace std;

// Returns the percentage (0..100) of values strictly above the arithmetic mean.
// An empty list yields 0.0 to avoid division by zero.
static double percentAboveAverage(const vector<int>& grades) {
    if (grades.empty()) {
        return 0.0;
    }
    long long sum = 0;  // long long: guard against overflow for large inputs
    for (int g : grades) {
        sum += g;
    }
    const double avg = static_cast<double>(sum) / grades.size();
    int above = 0;
    for (int g : grades) {
        if (g > avg) {
            ++above;
        }
    }
    // Same evaluation order as the original: (count / n) * 100.
    return (static_cast<double>(above) / grades.size()) * 100.0;
}

// UVa 10370 "Above Average": for each of t test cases, read n grades and print
// the percentage of students whose grade is strictly above the class average,
// formatted with three decimals and a trailing '%'.
int main() {
    int t;
    cin >> t;
    for (int i = 0; i < t; ++i) {
        int n;
        cin >> n;
        // vector sized by n replaces the original fixed a[1000] buffer,
        // which would overflow for n > 1000.
        vector<int> grades(n);
        for (int& g : grades) {
            cin >> g;
        }
        // <cstdio> is now included explicitly; the original used printf
        // while only including <iostream>.
        printf("%.3lf%%\n", percentAboveAverage(grades));
    }
    return 0;
}
zqn1996-alan/talkback
utils/src/main/java/com/google/android/accessibility/utils/StringBuilderUtils.java
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.android.accessibility.utils;

import android.text.SpannableStringBuilder;
import android.text.TextUtils;
import java.util.Arrays;
import java.util.List;
import org.checkerframework.checker.nullness.qual.Nullable;

/** Frequently used functions for concatenating text. */
public class StringBuilderUtils {

  // NOTE(review): tag says "StringBuildingUtils" (sic) while the class is
  // StringBuilderUtils -- intentional or a typo? Changing it would alter logs.
  private static final String TAG = "StringBuildingUtils";

  ////////////////////////////////////////////////////////////////////////////////////////
  // Constants

  /**
   * Breaking separator inserted between text, intended to make TTS pause an appropriate amount.
   * Using a period breaks pronunciation of street abbreviations, and using a new line doesn't work
   * in eSpeak.
   */
  public static final String DEFAULT_BREAKING_SEPARATOR = ", ";

  /**
   * Non-breaking separator inserted between text. Used when text already ends with some sort of
   * breaking separator or non-alphanumeric character.
   */
  public static final String DEFAULT_SEPARATOR = " ";

  /** The hex alphabet. */
  private static final char[] HEX_ALPHABET = "0123456789abcdef".toCharArray();

  ////////////////////////////////////////////////////////////////////////////////////////
  // Methods

  /** Returns a string consisting of {@code c} repeated {@code times} times. */
  public static String repeatChar(char c, int times) {
    char[] chars = new char[times];
    Arrays.fill(chars, c);
    return new String(chars);
  }

  /** Return labeled field-value, only if field-value is not null. */
  public static String optionalField(String fieldName, @Nullable Object fieldValue) {
    return (fieldValue == null) ? "" : String.format("%s=%s", fieldName, fieldValue.toString());
  }

  /** Return labeled delimited field-value, only if field-value is not null. */
  public static String optionalSubObj(String fieldName, @Nullable Object fieldValue) {
    return (fieldValue == null) ? "" : String.format("%s= %s", fieldName, fieldValue.toString());
  }

  /** Return labeled quoted field-value, only if field-value is not null. */
  public static String optionalText(String fieldName, @Nullable CharSequence fieldValue) {
    return (fieldValue == null) ? "" : String.format("%s=\"%s\"", fieldName, fieldValue);
  }

  /** Return labeled field-value, only if field-value is not default. */
  public static String optionalInt(String fieldName, int fieldValue, int defaultValue) {
    return (fieldValue == defaultValue) ? "" : String.format("%s=%s", fieldName, fieldValue);
  }

  /** Return labeled field-value, only if field-value is not default. */
  public static String optionalInt(String fieldName, long fieldValue, long defaultValue) {
    return (fieldValue == defaultValue) ? "" : String.format("%s=%s", fieldName, fieldValue);
  }

  /** Return field-tag, only if field-value is true. */
  public static String optionalTag(String tagName, boolean tagValue) {
    return tagValue ? tagName : "";
  }

  /**
   * Joins the non-null, non-empty strings with a single trailing space after each.
   * Typically used to combine the results of the optional*() helpers above.
   */
  public static String joinFields(String... strings) {
    StringBuilder builder = new StringBuilder();
    for (String s : strings) {
      if (s != null && !s.equals("")) {
        builder.append(s);
        builder.append(" ");
      }
    }
    return builder.toString();
  }

  /**
   * Generates the aggregate text from a list of {@link CharSequence}s, separating as necessary.
   *
   * @param textList The list of text to process.
   * @return The separated aggregate text, or null if no text was appended.
   */
  public static @Nullable CharSequence getAggregateText(List<CharSequence> textList) {
    if (textList == null || textList.isEmpty()) {
      return null;
    } else {
      SpannableStringBuilder builder = new SpannableStringBuilder();
      for (CharSequence text : textList) {
        appendWithSeparator(builder, text);
      }
      return builder;
    }
  }

  /**
   * Appends CharSequence representations of the specified arguments to a {@link
   * SpannableStringBuilder}, creating one if the supplied builder is {@code null}. A separator will
   * be inserted between each of the arguments.
   *
   * @param builder An existing {@link SpannableStringBuilder}, or {@code null} to create one.
   * @param args The objects to append to the builder.
   * @return A builder with the specified objects appended.
   */
  public static SpannableStringBuilder appendWithSeparator(
      SpannableStringBuilder builder, CharSequence... args) {
    if (builder == null) {
      builder = new SpannableStringBuilder();
    }
    for (CharSequence arg : args) {
      if (arg == null) {
        continue;
      }
      if (arg.toString().length() == 0) {
        continue;
      }
      if (builder.length() > 0) {
        // Breaking separator only after a letter/digit; otherwise the text
        // already ends in punctuation and a plain space suffices.
        if (needsBreakingSeparator(builder)) {
          builder.append(DEFAULT_BREAKING_SEPARATOR);
        } else {
          builder.append(DEFAULT_SEPARATOR);
        }
      }
      builder.append(arg);
    }
    return builder;
  }

  /**
   * Appends CharSequence representations of the specified arguments to a {@link
   * SpannableStringBuilder}, creating one if the supplied builder is {@code null}. A separator will
   * be inserted before the first non-{@code null} argument, but additional separators will not be
   * inserted between the following elements.
   *
   * @param builder An existing {@link SpannableStringBuilder}, or {@code null} to create one.
   * @param args The objects to append to the builder.
   * @return A builder with the specified objects appended.
   */
  public static SpannableStringBuilder append(
      SpannableStringBuilder builder, CharSequence... args) {
    if (builder == null) {
      builder = new SpannableStringBuilder();
    }
    // Tracks whether any arg has been appended yet; only the first append may
    // use the breaking separator (see method javadoc).
    boolean didAppend = false;
    for (CharSequence arg : args) {
      if (arg == null) {
        continue;
      }
      if (arg.toString().length() == 0) {
        continue;
      }
      if (builder.length() > 0) {
        if (!didAppend && needsBreakingSeparator(builder)) {
          builder.append(DEFAULT_BREAKING_SEPARATOR);
        } else {
          builder.append(DEFAULT_SEPARATOR);
        }
      }
      builder.append(arg);
      didAppend = true;
    }
    return builder;
  }

  /**
   * Returns whether the text needs a breaking separator (e.g. a period followed by a space)
   * appended before more text is appended.
   *
   * <p>If text ends with a letter or digit (according to the current locale) then this method will
   * return {@code true}.
   */
  private static boolean needsBreakingSeparator(CharSequence text) {
    return !TextUtils.isEmpty(text) && Character.isLetterOrDigit(text.charAt(text.length() - 1));
  }

  /**
   * Convert a byte array to a hex-encoded string.
   *
   * @param bytes The byte array of data to convert
   * @return The hex encoding of {@code bytes}, or null if {@code bytes} was null
   */
  public static @Nullable String bytesToHexString(byte[] bytes) {
    if (bytes == null) {
      return null;
    }
    final StringBuilder hex = new StringBuilder(bytes.length * 2);
    int nibble1;
    int nibble2;
    for (byte b : bytes) {
      // High nibble first, then low nibble; unsigned shift avoids sign extension.
      nibble1 = (b >>> 4) & 0xf;
      nibble2 = b & 0xf;
      hex.append(HEX_ALPHABET[nibble1]);
      hex.append(HEX_ALPHABET[nibble2]);
    }
    return hex.toString();
  }
}
ronaldoWang/myjww
jww-ump/jww-ump-server/src/main/java/com/jww/ump/server/controller/SysRoleController.java
<reponame>ronaldoWang/myjww
package com.jww.ump.server.controller;

import com.jww.common.core.Constants;
import com.jww.common.core.exception.BusinessException;
import com.jww.common.core.model.PageModel;
import com.jww.common.web.BaseController;
import com.jww.common.web.model.ResultModel;
import com.jww.common.web.util.ResultUtil;
import com.jww.ump.model.SysDeptModel;
import com.jww.ump.model.SysRoleModel;
import com.jww.ump.rpc.api.SysDeptService;
import com.jww.ump.rpc.api.SysRoleService;
import com.jww.ump.server.annotation.SysLogOpt;
import cn.hutool.core.lang.Assert;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiOperation;
import org.apache.shiro.authz.annotation.RequiresPermissions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;

import javax.validation.Valid;
import java.util.List;

/**
 * <p>
 * Front-end controller for the role table.
 * </p>
 *
 * @author wanyong
 * @since 2017-11-17
 */
@RestController
@RequestMapping("/role")
@Api(value = "角色管理", description = "角色管理")
public class SysRoleController extends BaseController {

    @Autowired
    private SysRoleService sysRoleService;

    // Dept lookup currently disabled; see the commented-out code in query().
    //@Autowired
    //private SysDeptService sysDeptService;

    /**
     * Query a role by its role ID.
     *
     * @param roleId the role's primary key; must not be null
     * @return ResultModel wrapping the SysRoleModel
     * @author wanyong
     * @date 2017-12-05 13:35
     */
    @ApiOperation(value = "查询角色", notes = "根据角色主键ID查询角色")
    @ApiImplicitParam(name = "id", value = "角色ID", required = true, dataType = "Long")
    @PostMapping("/query")
    @RequiresPermissions("sys:role:read")
    public ResultModel query(@RequestBody Long roleId) {
        Assert.notNull(roleId);
        SysRoleModel sysRoleModel = sysRoleService.queryById(roleId);
        //SysDeptModel sysDeptModel = sysDeptService.queryById(sysRoleModel.getDeptId());
        //sysRoleModel.setDeptName(sysDeptModel.getDeptName());
        return ResultUtil.ok(sysRoleModel);
    }

    /**
     * Query the role list with paging.
     *
     * @param pageModel paging parameters
     * @return ResultModel wrapping the page of roles
     * @author wanyong
     * @date 2018-01-04 11:25
     */
    @ApiOperation(value = "分页查询角色列表", notes = "根据分页参数查询角色列表")
    @PostMapping("/listPage")
    @RequiresPermissions("sys:role:read")
    public ResultModel queryListPage(@RequestBody PageModel pageModel) {
        return ResultUtil.ok(sysRoleService.queryListPage(pageModel));
    }

    /**
     * Add a new role.
     *
     * @param sysRoleModel the role to create; validated via bean validation
     * @return ResultModel wrapping the created role
     * @author wanyong
     * @date 2018-01-04 11:26
     */
    @ApiOperation(value = "新增角色", notes = "根据角色实体新增角色")
    @PostMapping("/add")
    @RequiresPermissions("sys:role:add")
    @SysLogOpt(module = "角色管理", value = "角色新增", operationType = Constants.LogOptEnum.ADD)
    public ResultModel add(@Valid @RequestBody SysRoleModel sysRoleModel) {
        // Stamp the audit fields with the current user before persisting.
        sysRoleModel.setCreateBy(super.getCurrentUserId());
        sysRoleModel.setUpdateBy(super.getCurrentUserId());
        return ResultUtil.ok(sysRoleService.add(sysRoleModel));
    }

    /**
     * Modify an existing role (looked up by its ID).
     *
     * @param sysRoleModel the role carrying the changed fields
     * @return ResultModel signalling success
     * @author wanyong
     * @date 2018-01-04 11:27
     */
    @ApiOperation(value = "修改角色", notes = "根据角色ID修改角色")
    @PostMapping("/modify")
    @RequiresPermissions("sys:role:update")
    @SysLogOpt(module = "角色管理", value = "角色修改", operationType = Constants.LogOptEnum.MODIFY)
    public ResultModel modify(@Valid @RequestBody SysRoleModel sysRoleModel) {
        sysRoleModel.setUpdateBy(super.getCurrentUserId());
        sysRoleService.modifyById(sysRoleModel);
        return ResultUtil.ok();
    }

    /**
     * Batch delete roles by a collection of role IDs.
     *
     * @param ids the role IDs to delete; must not be empty
     * @return ResultModel wrapping the deletion result
     * @author wanyong
     * @date 2017-12-23 02:46
     */
    @ApiOperation(value = "批量删除角色", notes = "根据主键ID集合批量删除角色")
    @PostMapping("/delBatchByIds")
    @RequiresPermissions("sys:role:delete")
    @SysLogOpt(module = "角色管理", value = "角色批量删除", operationType = Constants.LogOptEnum.DELETE)
    public ResultModel delBatchByIds(@RequestBody List<Long> ids) {
        if (ids.size() == 0) {
            throw new BusinessException("角色ID集合不能为空");
        }
        return ResultUtil.ok(sysRoleService.deleteBatchIds(ids));
    }

    /**
     * Query the list of roles.
     *
     * <p>NOTE(review): the original comment said "by department ID", but the
     * method takes no parameters -- confirm intended semantics with the service.
     *
     * @return ResultModel wrapping the role list
     * @author wanyong
     * @date 2018-01-04 11:28
     */
    @ApiOperation(value = "查询角色", notes = "查询所属角色列表")
    @GetMapping("/queryRoles")
    @RequiresPermissions("sys:role:read")
    public ResultModel queryRoles() {
        List<SysRoleModel> list = sysRoleService.queryRoles();
        return ResultUtil.ok(list);
    }
}
aniwange33/lamisplus-main
src/main/webapp/reducers/userReducer.js
import * as USERTYPES from '../actions/types'

// Initial shape of the user slice of the store.
const initialState = { list: [], status: 0, user: {} }

/**
 * Reducer for user-related actions: stores the fetched user, the
 * registration request status, and any user errors. Unknown actions
 * return the state unchanged.
 */
const userReducer = (state = initialState, action) => {
  switch (action.type) {
    // Both fetch actions store the payload as the current user.
    case USERTYPES.FETCH_USERS:
    case USERTYPES.FETCH_USER_BY_ID:
      return { ...state, user: action.payload }
    case USERTYPES.REGISTER_REQUEST:
      return { ...state, status: action.payload }
    case USERTYPES.USERS_ERROR:
      return { ...state, errors: action.payload }
    default:
      return state
  }
}

export default userReducer
kovitikus/hecate
characters/inventory_handler.py
<filename>characters/inventory_handler.py from evennia.utils.evtable import EvTable from misc import coin class InventoryHandler(): def __init__(self, owner): self.owner = owner self.inventory_contents = owner.contents self.hands_desc = owner.db.hands_desc def get_object(self, obj, container, owner_possess): #TODO: Allow auto-stow of items in hand and continue to pick up obj if more than one exists in location. owner = self.owner main_wield, off_wield, both_wield = owner.db.wielding.values() main_hand, off_hand = owner.db.hands.values() use_main_hand, use_off_hand = False, False owner_msg = '' others_msg = '' if obj in [main_hand, off_hand]: owner.msg(f"You are already carrying {obj.name}.") return # calling at_before_get hook method if not obj.at_before_get(owner): return # TODO: Check for free inventory slots. # inventory_dict = dict(owner.db.inventory) # if inventory_dict['occupied_slots'] < inventory_dict['max_slots']: # free_slot = True # All restriction checks passed, move the object to the owner. obj.move_to(owner, quiet=True, move_hooks=False) #---------------------------[Hand Logic]---------------------------# # Items should prefer an open hand first and foremost. # When both hands are occupied, but neither are wielding, prefer the dominate hand. # Offhand should be used as a last resort, as this is a shield #------------------------------------------------------------------# if both_wield is not None: use_main_hand = False use_off_hand = True owner_msg = f"You stop wielding {both_wield.name}." others_msg = f"{owner.name} stops wielding {both_wield.name}." owner.db.wielding['both'] = None if main_hand == None: use_main_hand = True elif off_hand == None: use_off_hand = True else: # Both hands are full. if main_wield and off_wield is not None: # Both hands are wielding, prefer main hand to preserve shield. use_main_hand = True owner_msg = f"You stop wielding and stow away {main_hand.name}." 
others_msg = f"{owner.name} stops wielding and stows away {main_hand.name}." owner.db.wielding['main'] = None owner.db.hands['main'] = None elif main_wield == None: use_main_hand = True owner.db.wielding['main'] = None owner_msg = f"You stow away {main_hand.name} into your inventory." others_msg = f"{owner.name} stows away {main_hand.name} into their inventory." owner.db.hands['main'] = None elif off_wield == None: use_off_hand = True owner.db.wielding['off'] = None owner_msg = f"You stow away {off_hand.name} into your inventory." others_msg = f"{owner.name} stows away {off_hand.name} into their inventory." owner.db.hands['off'] = None if use_main_hand: owner.db.hands['main'] = obj elif use_off_hand: owner.db.hands['off'] = obj # TODO: Add an extra occupied inventory slot count. # owner.db.inventory_slots['occupied_slots'] +=1 # Determine the nature of the object's origin. owner_msg = f"{owner_msg}\nYou get {obj.name}" others_msg = f"{others_msg}\n{owner.name} gets {obj.name}" if owner_possess: owner_msg = f"{owner_msg} from your inventory" others_msg = f"{others_msg} from their inventory" if container is not None: owner_msg = f"{owner_msg} from {container.name}" others_msg = f"{others_msg} from {container.name}" owner_msg = f"{owner_msg}." others_msg = f"{others_msg}." # Send out messages. owner.msg(owner_msg) owner.location.msg_contents(others_msg, exclude=owner) # calling at_get hook method obj.at_get(owner) def stow_object(self, item): owner = self.owner main_hand, off_hand = self.get_hands() main_wield, off_wield, both_wield = owner.equip.get_wielding() if item in [main_hand, off_hand]: # If the stowed object is currently wielded, stop wielding it and stow it. if item in [main_wield, off_wield, both_wield]: # The item is wielded, stop wielding it. owner.equip.stop_wielding(item) # Item is simply in the hands, send basic stow message. 
for looker in owner.location.contents: if looker == owner: owner.msg(f"You stow away {item.name}.") else: looker.msg(f"{owner.get_display_name(looker)} stows away " f"{item.get_display_name(looker)}.") # Hands need to be cleared, no matter if it was wielded or held. if item == off_hand: owner.db.hands['off'] = None else: owner.db.hands['main'] = None elif item.location == owner.location: for looker in owner.location.contents: if looker == owner: looker.msg(f"You pick up {item.get_display_name(looker)} and stow it away.") else: looker.msg(f"{owner.get_display_name(looker)} picks up " f"{item.get_display_name(looker)} and stows it away.") item.move_to(owner, quiet=True) # calling at_get hook method item.at_get(owner) def drop_object(self, obj): owner = self.owner main_hand, off_hand = owner.db.hands.values() wielding = owner.db.wielding main_wield, off_wield, both_wield = wielding.values() # Call the object script's at_before_drop() method. if not obj.at_before_drop(owner): return if obj in [main_hand, off_hand]: # If the object is currently wielded, stop wielding it and drop it. 
if obj in [main_wield, off_wield, both_wield]: owner.msg(f"You stop wielding {obj.name} and drop it.") owner.location.msg_contents(f"{owner.name} stops wielding {obj.name} and drops it.", exclude=owner) if off_wield: wielding['off'] = None else: wielding['main'], wielding['both'] = None, None else: owner.msg(f"You drop {obj.name}.") owner.location.msg_contents(f"{owner.name} drops {obj.name}.", exclude=owner) if obj == off_hand: owner.db.hands['off'] = None else: owner.db.hands['main'] = None elif obj.location == owner: owner.msg(f"You pull {obj.name} from your inventory and drop it on the ground.") owner.location.msg_contents(f"{owner.name} pulls {obj.name} from their inventory and drops it on the ground.", exclude=owner) elif obj.location == owner.location: owner.msg(f"{obj.name} is already on the ground.") return obj.move_to(owner.location, quiet=True) # Call the object script's at_drop() method. obj.at_drop(owner) def force_item_into_hand(self, item, hand=None): """ This method is used to determine how to force an object into the character's main hand. Arguments: item (object, string): This is the item that is to go into the main hand. If a string is provided, this method will attempt to search the character's inventory and the character's location for a matching object. Keyword Arguments: hand (string): The hand that the item is to be assigned to. If this is None, the hand will default to main_hand. Returns: (None): If the item arg is a string and the search for a matching object has failed, this method will instead return None. 
""" owner = self.owner if hand is None or hand not in ['main', 'off']: hand = 'main' if isinstance(item, str): found_item = owner.search(item, quiet=True) if found_item: item = found_item[0] else: return None hand_obj = self.get_hands(hand=hand) hand_wield = owner.equip.get_wielding(hand=hand) for looker in owner.location.contents: if hand_obj is not None: if hand_wield is not None: if looker == owner: looker.msg(f"You stop wielding {hand_obj.get_display_name(looker)}") else: looker.msg(f"{owner.get_display_name(looker)} stops wielding " f"{hand_obj.get_display_name(looker)}.") if looker == owner: looker.msg(f"You stow away {hand_obj.get_display_name(looker)}.") else: looker.msg(f"{owner.get_display_name(looker)} stows away " f"{hand_obj.get_display_name(looker)}.") if item.location == owner: # Item is in the character's inventory. if looker == owner: looker.msg(f"You get {item.get_display_name(looker)} from your inventory.") else: looker.msg(f"{owner.get_display_name(looker)} gets {item.get_display_name(looker)} from " "their inventory.") elif item.location == owner.location: # Item is in the room. if looker == owner: looker.msg(f"You pick up {item.get_display_name(looker)} from the ground.") else: looker.msg(f"{owner.get_display_name(looker)} picks up " f"{item.get_display_name(looker)} from the ground.") else: # Item is in some other container. if looker == owner: looker.msg(f"You get {item.get_display_name(looker)} from " f"{item.location.get_display_name(looker)}.") else: looker.msg(f"{owner.get_display_name(looker)} gets {item.get_display_name(looker)} " f"from {item.location.get_display_name(looker)}") # Move the object and assign it a hand. if item.location != owner: item.move_to(owner, quiet=True) self.set_hands(hand, item=item) def get_hands(self, hand=None): """ Returns the requested wielded status of the owner, based on the kwarg provided. 
""" if self.hands is None: self.hands = self.owner.attributes.get('hands') if hand is None: return self.hands.values() else: return self.hands.get(hand) def set_hands(self, hand, item=None): self.hands[hand] = item self._save_hands() def _save_hands(self): self.owner.db.hands = self.hands def inhand(self): owner = self.owner main_wield, off_wield, both_wield = owner.db.wielding.values() main_hand, off_hand = self.hands.values() main_desc, off_desc = self.hands_desc.values() if off_hand: off_item = off_hand.name else: off_item = 'nothing' if main_hand: main_item = main_hand.name else: main_item = 'nothing' if not off_hand and not main_hand: owner.msg(f"Your hands are empty.") return if off_wield and not main_wield: owner.msg(f"You are holding {main_item} in your {main_desc} hand and wielding {off_item} in your {off_desc} hand .") elif main_wield and not off_wield: owner.msg(f"You are wielding {main_item} in your {main_desc} hand and holding {off_item} in your {off_desc} hand.") elif off_wield and main_wield: owner.msg(f"You are wielding {main_item} in your {main_desc} hand and {off_item} in your {off_desc} hand.") elif both_wield: owner.msg(f"You are wielding {both_wield.name} in both hands.") else: owner.msg(f"You are holding {main_item} in your {main_desc} hand and {off_item} in your {off_desc} hand.") def get_inventory(self, arg_type): owner = self.owner items = owner.contents main_hand, off_hand = owner.db.hands.values() equip_items = owner.db.equipment.values() # Remove hands and append all other items to a new list. filtered_items = [] for i in items: if i not in [main_hand, off_hand]: if i not in equip_items: filtered_items.append(i) if not filtered_items: string = "Your inventory is empty." else: # if arg_type == 0: # Generate summary # Count the number of items in the inventory. # Show the maximum number of inventory slots. # Show each category that has an item and how many items are in the category # Show items in hands. # Show currency. 
if arg_type == 1: table = EvTable(border="header") string = self.get_all_items(filtered_items, table) else: final_list = self.get_inv_final_list(filtered_items, arg_type) table = EvTable(border="header") for item in final_list: table.add_row(f"|C{item.name}|n {item.db.desc or ''}") category_string = self.get_category_string(arg_type) string = f"|wYou are carrying:\n{category_string}\n{table}" # Add currency string = f"{string}\n{coin.all_coin_types_to_string(owner.db.coin)}" owner.msg(string) def get_inv_final_list(self, filtered_items, arg_type): final_list = [] for item in filtered_items: if arg_type == 2 and item.tags.get('favorite'): final_list.append(item) elif arg_type == 3 and item.tags.get('weapon'): final_list.append(item) elif arg_type == 4 and item.tags.get('armor'): final_list.append(item) elif arg_type == 5 and item.tags.get('clothing'): final_list.append(item) elif arg_type == 6 and item.tags.get('container'): final_list.append(item) elif arg_type == 7 and item.tags.get('jewelry'): final_list.append(item) elif arg_type == 8 and item.tags.get('relic'): final_list.append(item) elif arg_type == 9 and item.tags.get('consumable'): final_list.append(item) elif arg_type == 10 and item.tags.get('quest'): final_list.append(item) elif arg_type == 11 and item.tags.get('craft'): final_list.append(item) elif arg_type == 12: final_list.append(item) return final_list def get_category_string(self, arg_type): if arg_type == 0: category_string = '|cSummary:|n' elif arg_type == 1: category_string = '|cAll Items:|n' elif arg_type == 2: category_string = '|cFavorites:|n' elif arg_type == 3: category_string = '|cWeapons:|n' elif arg_type == 4: category_string = '|cArmor:|n' elif arg_type == 5: category_string = '|cClothing:|n' elif arg_type == 6: category_string = '|cContainers:|n' elif arg_type == 7: category_string = '|cJewelry:|n' elif arg_type == 8: category_string = '|cRelics:|n' elif arg_type == 9: category_string = '|cConsumables:|n' elif arg_type == 10: 
category_string = '|cQuest Items:|n' elif arg_type == 11: category_string = '|cCrafting Materials:|n' elif arg_type == 12: category_string = '|cMisc.|n' return category_string def get_all_items(self, filtered_items, table): fav_list = [] weap_list = [] arm_list = [] cloth_list = [] contain_list = [] jewel_list = [] relic_list = [] consume_list = [] quest_list = [] craft_list = [] misc_list = [] # Sort all items based on category into appropriate lists. for item in filtered_items: if item.tags.get('favorite'): fav_list.append(item) elif item.tags.get('weapon'): weap_list.append(item) elif item.tags.get('armor'): arm_list.append(item) elif item.tags.get('clothing'): cloth_list.append(item) elif item.tags.get('container'): contain_list.append(item) elif item.tags.get('jewelry'): jewel_list.append(item) elif item.tags.get('relic'): relic_list.append(item) elif item.tags.get('consumable'): consume_list.append(item) elif item.tags.get('quest'): quest_list.append(item) elif item.tags.get('craft'): craft_list.append(item) else: misc_list.append(item) # Generate table rows for each populated list based on category. 
if fav_list: category_string = self.get_category_string(2) table.add_row(f"{category_string}") for item in fav_list: table.add_row(f"|C{item.name}|n {item.db.desc or ''}") if weap_list: category_string = self.get_category_string(3) table.add_row(f"{category_string}") for item in weap_list: table.add_row(f"|C{item.name}|n {item.db.desc or ''}") if arm_list: category_string = self.get_category_string(4) table.add_row(f"{category_string}") for item in arm_list: table.add_row(f"|C{item.name}|n {item.db.desc or ''}") if cloth_list: category_string = self.get_category_string(5) table.add_row(f"{category_string}") for item in cloth_list: table.add_row(f"|C{item.name}|n {item.db.desc or ''}") if contain_list: category_string = self.get_category_string(6) table.add_row(f"{category_string}") for item in contain_list: table.add_row(f"|C{item.name}|n {item.db.desc or ''}") if jewel_list: category_string = self.get_category_string(7) table.add_row(f"{category_string}") for item in jewel_list: table.add_row(f"|C{item.name}|n {item.db.desc or ''}") if relic_list: category_string = self.get_category_string(8) table.add_row(f"{category_string}") for item in relic_list: table.add_row(f"|C{item.name}|n {item.db.desc or ''}") if consume_list: category_string = self.get_category_string(9) table.add_row(f"{category_string}") for item in consume_list: table.add_row(f"|C{item.name}|n {item.db.desc or ''}") if quest_list: category_string = self.get_category_string(10) table.add_row(f"{category_string}") for item in quest_list: table.add_row(f"|C{item.name}|n {item.db.desc or ''}") if craft_list: category_string = self.get_category_string(11) table.add_row(f"{category_string}") for item in craft_list: table.add_row(f"|C{item.name}|n {item.db.desc or ''}") if misc_list: category_string = self.get_category_string(12) table.add_row(f"{category_string}") for item in misc_list: table.add_row(f"|C{item.name}|n {item.db.desc or ''}") string = f"|wYou are carrying:\n{table}" return string
hmuhtetpaing/keybase
shared/wallets/send-form/participants/container.js
<filename>shared/wallets/send-form/participants/container.js // @flow import Participants from '.' import * as RouteTree from '../../../actions/route-tree' import * as SearchGen from '../../../actions/search-gen' import * as WalletsGen from '../../../actions/wallets-gen' import * as TrackerGen from '../../../actions/tracker-gen' import {getAccount, getAccountIDs, searchKey} from '../../../constants/wallets' import {stringToAccountID} from '../../../constants/types/wallets' import {compose, connect, setDisplayName, type TypedState, type Dispatch} from '../../../util/container' const mapStateToProps = (state: TypedState) => { const build = state.wallets.buildingPayment const built = state.wallets.builtPayment const allAccounts = getAccountIDs(state) .map(accountID => { const account = getAccount(state, accountID) return { contents: account.balanceDescription, id: account.accountID, name: account.name || account.accountID, } }) .toArray() let fromAccount let toAccount if (build.recipientType === 'otherAccount') { const fromAccountFromState = getAccount(state, stringToAccountID(build.from)) fromAccount = { contents: fromAccountFromState.balanceDescription, id: fromAccountFromState.accountID, name: fromAccountFromState.name || fromAccountFromState.accountID, } if (build.to) { const toAccountFromState = getAccount(state, stringToAccountID(build.to)) toAccount = { contents: toAccountFromState.balanceDescription, id: toAccountFromState.accountID, name: toAccountFromState.name || toAccountFromState.accountID, } } } // Building section const recipientType = build.recipientType || 'keybaseUser' const toFieldInput = build.to // Built section const incorrect = built.toErrMsg const recipientUsername = built.toUsername return { allAccounts, fromAccount, incorrect, recipientType, recipientUsername, toAccount, toFieldInput, user: state.config.username, } } const mapDispatchToProps = (dispatch: Dispatch) => ({ onChangeFromAccount: (from: string) => { 
dispatch(WalletsGen.createSetBuildingFrom({from})) }, onChangeRecipient: (to: string) => { dispatch(WalletsGen.createSetBuildingTo({to})) }, onCreateNewAccount: () => dispatch( RouteTree.navigateAppend([ { props: {backButton: true}, selected: 'createNewAccount', }, ]) ), onLinkAccount: () => dispatch( RouteTree.navigateAppend([ { props: {backButton: true}, selected: 'linkExisting', }, ]) ), onRemoveProfile: () => dispatch(WalletsGen.createSetBuildingTo({to: ''})), onShowProfile: (username: string) => { dispatch(TrackerGen.createGetProfile({forceDisplay: true, ignoreCache: true, username})) }, onShowSuggestions: () => dispatch(SearchGen.createSearchSuggestions({searchKey})), }) export default compose( connect(mapStateToProps, mapDispatchToProps, (s, d, o) => ({...o, ...s, ...d})), setDisplayName('Participants') )(Participants)
projectPiki/pikmin2
src/Dolphin/NMWException.cp
/* * --INFO-- * Address: ........ * Size: 0000BC */ void __destroy_new_array3(void) { // UNUSED FUNCTION } /* * --INFO-- * Address: ........ * Size: 000080 */ void __destroy_new_array2(void) { // UNUSED FUNCTION } /* * --INFO-- * Address: 800C1748 * Size: 00007C */ void __destroy_new_array(void) { /* .loc_0x0: stwu r1, -0x20(r1) mflr r0 stw r0, 0x24(r1) stmw r26, 0x8(r1) mr. r26, r3 mr r27, r4 beq- .loc_0x68 cmplwi r27, 0 beq- .loc_0x60 lwz r29, -0x10(r26) li r31, 0 lwz r30, -0xC(r26) mullw r0, r29, r30 add r28, r26, r0 b .loc_0x58 .loc_0x3C: sub r28, r28, r29 mr r12, r27 mr r3, r28 li r4, -0x1 mtctr r12 bctrl addi r31, r31, 0x1 .loc_0x58: cmplw r31, r30 blt+ .loc_0x3C .loc_0x60: subi r3, r26, 0x10 bl -0x9D6D4 .loc_0x68: lmw r26, 0x8(r1) lwz r0, 0x24(r1) mtlr r0 addi r1, r1, 0x20 blr */ } /* * --INFO-- * Address: 800C17C4 * Size: 000078 */ void __destroy_arr(void) { /* .loc_0x0: stwu r1, -0x20(r1) mflr r0 stw r0, 0x24(r1) stw r31, 0x1C(r1) stw r30, 0x18(r1) mr r30, r6 stw r29, 0x14(r1) mr r29, r5 mullw r0, r29, r30 stw r28, 0x10(r1) mr r28, r4 add r31, r3, r0 b .loc_0x50 .loc_0x34: sub r31, r31, r29 mr r12, r28 mr r3, r31 li r4, -0x1 mtctr r12 bctrl subi r30, r30, 0x1 .loc_0x50: cmplwi r30, 0 bne+ .loc_0x34 lwz r0, 0x24(r1) lwz r31, 0x1C(r1) lwz r30, 0x18(r1) lwz r29, 0x14(r1) lwz r28, 0x10(r1) mtlr r0 addi r1, r1, 0x20 blr */ } /* * --INFO-- * Address: 800C183C * Size: 0000FC */ void __construct_array(void) { /* .loc_0x0: stwu r1, -0x30(r1) mflr r0 stw r0, 0x34(r1) li r0, 0 stw r31, 0x2C(r1) mr r31, r3 stw r30, 0x28(r1) mr r30, r7 stw r29, 0x24(r1) mr r29, r6 stw r30, 0x18(r1) stw r28, 0x20(r1) mr r28, r4 stw r3, 0x8(r1) stw r29, 0xC(r1) stw r30, 0x10(r1) stw r5, 0x14(r1) stw r0, 0x18(r1) b .loc_0x70 .loc_0x4C: mr r12, r28 mr r3, r31 li r4, 0x1 mtctr r12 bctrl lwz r3, 0x18(r1) add r31, r31, r29 addi r0, r3, 0x1 stw r0, 0x18(r1) .loc_0x70: lwz r4, 0x18(r1) cmplw r4, r30 blt+ .loc_0x4C lwz r0, 0x10(r1) cmplw r4, r0 bge- .loc_0xDC lwz r0, 0x14(r1) cmplwi r0, 0 beq- 
.loc_0xDC lwz r0, 0xC(r1) lwz r3, 0x8(r1) mullw r0, r0, r4 add r31, r3, r0 b .loc_0xD0 .loc_0xA8: lwz r0, 0xC(r1) li r4, -0x1 lwz r12, 0x14(r1) sub r31, r31, r0 mr r3, r31 mtctr r12 bctrl lwz r3, 0x18(r1) subi r0, r3, 0x1 stw r0, 0x18(r1) .loc_0xD0: lwz r0, 0x18(r1) cmplwi r0, 0 bne+ .loc_0xA8 .loc_0xDC: lwz r0, 0x34(r1) lwz r31, 0x2C(r1) lwz r30, 0x28(r1) lwz r29, 0x24(r1) lwz r28, 0x20(r1) mtlr r0 addi r1, r1, 0x30 blr */ } /* * --INFO-- * Address: 800C1938 * Size: 0000B8 */ void __partial_array_destructor::~__partial_array_destructor() { /* .loc_0x0: stwu r1, -0x20(r1) mflr r0 stw r0, 0x24(r1) stw r31, 0x1C(r1) stw r30, 0x18(r1) mr r30, r4 stw r29, 0x14(r1) mr. r29, r3 beq- .loc_0x98 lwz r4, 0x10(r29) lwz r0, 0x8(r29) cmplw r4, r0 bge- .loc_0x88 lwz r0, 0xC(r29) cmplwi r0, 0 beq- .loc_0x88 lwz r0, 0x4(r29) lwz r3, 0x0(r29) mullw r0, r0, r4 add r31, r3, r0 b .loc_0x7C .loc_0x54: lwz r0, 0x4(r29) li r4, -0x1 lwz r12, 0xC(r29) sub r31, r31, r0 mr r3, r31 mtctr r12 bctrl lwz r3, 0x10(r29) subi r0, r3, 0x1 stw r0, 0x10(r29) .loc_0x7C: lwz r0, 0x10(r29) cmplwi r0, 0 bne+ .loc_0x54 .loc_0x88: extsh. r0, r30 ble- .loc_0x98 mr r3, r29 bl -0x9D918 .loc_0x98: lwz r0, 0x24(r1) mr r3, r29 lwz r31, 0x1C(r1) lwz r30, 0x18(r1) lwz r29, 0x14(r1) mtlr r0 addi r1, r1, 0x20 blr */ } /* * --INFO-- * Address: 800C19F0 * Size: 000104 */ void __construct_new_array(void) { /* .loc_0x0: stwu r1, -0x40(r1) mflr r0 stw r0, 0x44(r1) stmw r27, 0x2C(r1) mr. 
r30, r3 mr r27, r4 mr r28, r6 mr r29, r7 beq- .loc_0xEC stw r28, 0x0(r30) cmplwi r27, 0 stw r29, 0x4(r30) addi r30, r30, 0x10 beq- .loc_0xEC stw r29, 0x18(r1) li r0, 0 mr r31, r30 stw r30, 0x8(r1) stw r28, 0xC(r1) stw r29, 0x10(r1) stw r5, 0x14(r1) stw r0, 0x18(r1) b .loc_0x80 .loc_0x5C: mr r12, r27 mr r3, r31 li r4, 0x1 mtctr r12 bctrl lwz r3, 0x18(r1) add r31, r31, r28 addi r0, r3, 0x1 stw r0, 0x18(r1) .loc_0x80: lwz r4, 0x18(r1) cmplw r4, r29 blt+ .loc_0x5C lwz r0, 0x10(r1) cmplw r4, r0 bge- .loc_0xEC lwz r0, 0x14(r1) cmplwi r0, 0 beq- .loc_0xEC lwz r0, 0xC(r1) lwz r3, 0x8(r1) mullw r0, r0, r4 add r31, r3, r0 b .loc_0xE0 .loc_0xB8: lwz r0, 0xC(r1) li r4, -0x1 lwz r12, 0x14(r1) sub r31, r31, r0 mr r3, r31 mtctr r12 bctrl lwz r3, 0x18(r1) subi r0, r3, 0x1 stw r0, 0x18(r1) .loc_0xE0: lwz r0, 0x18(r1) cmplwi r0, 0 bne+ .loc_0xB8 .loc_0xEC: mr r3, r30 lmw r27, 0x2C(r1) lwz r0, 0x44(r1) mtlr r0 addi r1, r1, 0x40 blr */ } /* * --INFO-- * Address: ........ * Size: 00022C */ void __throw_catch_compare(void) { // UNUSED FUNCTION } /* * --INFO-- * Address: ........ * Size: 000028 */ void std::unexpected() { // UNUSED FUNCTION } /* * --INFO-- * Address: ........ * Size: 000010 */ void std::set_unexpected(void (*) ()) { // UNUSED FUNCTION } /* * --INFO-- * Address: ........ * Size: 000028 */ void std::terminate() { // UNUSED FUNCTION } /* * --INFO-- * Address: ........ * Size: 000010 */ void std::set_terminate(void (*) ()) { // UNUSED FUNCTION } /* * --INFO-- * Address: ........ * Size: 000028 */ void std::duhandler() { // UNUSED FUNCTION } /* * --INFO-- * Address: ........ * Size: 000020 */ void std::dthandler() { // UNUSED FUNCTION }
philippzhang/leetcodeLearnJava
src/main/java/com/learn/java/leetcode/lc0233/Solution.java
// LeetCode 233: count the occurrences of the digit 1 in all
// non-negative integers <= n.
public class Solution {

    /**
     * Counts occurrences of the digit 1 in 1..n.
     *
     * Strategy (from "The Beauty of Programming"): split n into its highest
     * digit {@code first} and remainder {@code n % tmp}, where
     * {@code tmp = 10^(len-1)}.
     * <ul>
     * <li>Ones contributed by the highest position: if first == 1 there are
     *     n % tmp + 1 of them (a leading 1 followed by 0..n%tmp); if
     *     first &gt; 1 there are tmp of them (the full block
     *     10^(len-1)..2*10^(len-1)-1).</li>
     * <li>Ones in the lower len-1 positions across the {@code first}
     *     complete blocks of size tmp: first * (len-1) * (tmp / 10).</li>
     * <li>Ones inside the remaining partial block (0..n%tmp) are counted
     *     recursively.</li>
     * </ul>
     *
     * @param n upper bound (inclusive); values below 1 yield 0
     * @return number of digit-1 occurrences in 1..n
     */
    public int countDigitOne(int n) {
        if (n < 1) {
            return 0;
        }
        int len = getLenOfNum(n);
        if (len == 1) {
            // n in 1..9 contains exactly one 1 (the number 1 itself).
            return 1;
        }
        int tmp = (int) Math.pow(10, len - 1);
        // Highest decimal digit of n.
        int first = n / tmp;
        // Count of numbers <= n whose highest digit is 1.
        int firstOneNum = first == 1 ? n % tmp + 1 : tmp;
        // Ones in the lower positions of the `first` complete blocks.
        int otherOneNUm = first * (len - 1) * (tmp / 10);
        return firstOneNum + otherOneNUm + countDigitOne(n % tmp);
    }

    /** Returns the number of decimal digits of n (0 for n == 0). */
    private int getLenOfNum(int n) {
        int len = 0;
        while (n != 0) {
            len++;
            n /= 10;
        }
        return len;
    }
}
bluebackblue/brownie
source/project_test/common/common_sound_streamcallback.h
#pragma once /** * Copyright (c) blueback * Released under the MIT License * https://github.com/bluebackblue/brownie/blob/master/LICENSE.txt * http://bbbproject.sakura.ne.jp/wordpress/mitlicense * @brief コモン。ストリーミング再生。 */ /** include */ #pragma warning(push) #pragma warning(disable:4464) #include "../include.h" #pragma warning(pop) /** NTest::NCommon */ #if(BSYS_DSOUND_ENABLE) #pragma warning(push) #pragma warning(disable:4514 4710 4820) namespace NTest{namespace NCommon { /** SoundStreamCallback_Ogg */ class SoundStreamCallback_Ogg : public NBsys::NDsound::Dsound_StreamCallback_Base { private: /** lockobject */ LockObject lockobject; /** ogg_file */ sharedptr<NBsys::NFile::File_Object> ogg_file; /** stream */ sharedptr<NBsys::NWave::Wave_Stream> stream; /** playend */ bool playend; public: /** constructor */ SoundStreamCallback_Ogg(const sharedptr<NBsys::NFile::File_Object>& a_ogg_file) : //lockobject(), ogg_file(a_ogg_file), stream() { } /** destructor */ virtual ~SoundStreamCallback_Ogg() { } public: /** 初期化。 */ virtual NBsys::NWave::WaveType::Id Callback_Initialize() { AutoLock t_autolock(this->lockobject); this->stream = NBsys::NWave::CreateStream_Ogg(this->ogg_file->GetLoadData(),static_cast<s32>(this->ogg_file->GetLoadSize())); s32 t_channel = this->stream->GetChannel(); s32 t_bit = this->stream->GetBit(); s32 t_rate = this->stream->GetRate(); NBsys::NWave::WaveType::Id t_wavetype = NBsys::NWave::WaveType::None; if(t_rate == 44100){ if(t_bit == 8){ if(t_channel == 1){ t_wavetype = NBsys::NWave::WaveType::Mono_8_44100; }else if(t_channel == 2){ t_wavetype = NBsys::NWave::WaveType::Stereo_8_44100; } }else if(t_bit == 16){ if(t_channel == 1){ t_wavetype = NBsys::NWave::WaveType::Mono_16_44100; }else if(t_channel == 2){ t_wavetype = NBsys::NWave::WaveType::Stereo_16_44100; } } } return t_wavetype; } /** 再生。 */ virtual void Callback_Play() { this->stream->SeekStart(); this->playend = false; } /** データ取得。 */ virtual void Callback_GetData(RingBufferBase<u8>& 
a_buffer,s32 a_need_size,bool a_is_loop) { AutoLock t_autolock(this->lockobject); while(a_buffer.GetUseSize() < a_need_size){ bool t_ret = this->stream->Stream(a_buffer,a_is_loop); if(t_ret == false){ //終端。 this->playend = true; //残りの必要サイズ。 s32 t_need_size = a_need_size - a_buffer.GetUseSize(); s32 t_free_size = a_buffer.GetContinuousFreeSize(); if(t_free_size > t_need_size){ t_free_size = t_need_size; } if(t_free_size > 0){ NMemory::Set(a_buffer.GetItemFromFreeList(0),0,t_free_size); a_buffer.AddUse(t_free_size); } } } } /** 再生終了チェック。 */ virtual bool Callback_IsPlayEnd() { return this->playend; } }; }} #pragma warning(pop) #endif
bjhanfeng/qpid-jms-amqp-0-x
client/src/main/java/org/apache/qpid/transport/SessionHeader.java
package org.apache.qpid.transport;
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */

import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.qpid.transport.codec.Decoder;
import org.apache.qpid.transport.codec.Encoder;

/**
 * Session header struct: a single packing-flags byte whose lowest bit
 * carries the SYNC option.
 */
public final class SessionHeader extends Struct {

    public static final int TYPE = -1;

    public final int getStructType() {
        return TYPE;
    }

    public final int getSizeWidth() {
        return 1;
    }

    public final int getPackWidth() {
        return 1;
    }

    public final boolean hasPayload() {
        return false;
    }

    public final byte getEncodedTrack() {
        return -1;
    }

    public final boolean isConnectionControl() {
        return false;
    }

    /** Bit 0 = SYNC; all other bits unused. */
    private byte packing_flags = 0;

    public SessionHeader() {}

    /**
     * Builds a header from the given options; only SYNC and NONE are valid.
     *
     * @throws IllegalArgumentException for any other option
     */
    public SessionHeader(Option ... _options) {
        // Enhanced for-loop: the index itself was never used.
        for (Option option : _options) {
            switch (option) {
            case SYNC: packing_flags |= 1; break;
            case NONE: break;
            default: throw new IllegalArgumentException("invalid option: " + option);
            }
        }
    }

    public final boolean hasSync() {
        return (packing_flags & 1) != 0;
    }

    public final SessionHeader clearSync() {
        packing_flags &= ~1;
        setDirty(true);
        return this;
    }

    public final boolean getSync() {
        return hasSync();
    }

    public final SessionHeader setSync(boolean value) {
        if (value) {
            packing_flags |= 1;
        } else {
            packing_flags &= ~1;
        }
        setDirty(true);
        return this;
    }

    /** Fluent alias for {@link #setSync(boolean)}. */
    public final SessionHeader sync(boolean value) {
        return setSync(value);
    }

    /** Serializes the packing flags as a single unsigned byte. */
    public void write(Encoder enc) {
        enc.writeUint8(packing_flags);
    }

    /** Deserializes the packing flags from a single unsigned byte. */
    public void read(Decoder dec) {
        packing_flags = (byte) dec.readUint8();
    }

    /** Returns the set flags by name, in declaration order. */
    public Map<String,Object> getFields() {
        Map<String,Object> result = new LinkedHashMap<String,Object>();

        if ((packing_flags & 1) != 0) {
            result.put("sync", getSync());
        }

        return result;
    }
}
PuQian/WebRTC
common-utils/src/test/java/com/free4lab/utils/account/AccountUtilTest.java
package com.free4lab.utils.account;

/**
 * Manual smoke test for {@code AccountUtil.getAccessTokenInfo(String)}:
 * resolves a token and prints the resulting info string.
 */
public class AccountUtilTest {

    public static void main(String[] args) {
        // NOTE(review): hard-coded access token checked into source — rotate
        // the token and move it out of the repository.
        final String accessToken = "deb71e9d5e23488b941134507cb450ae";
        final String info = AccountUtil.getAccessTokenInfo(accessToken);
        System.out.println(info);
    }
}
shiva92/Contests
Topcoder/Solved/LastDigit.cpp
#include <vector>
#include <iostream>
#include <string>
#include <algorithm>
#include <cstdlib>
#include <cstdio>

#define vi vector<int>
#define all(v) v.begin(), v.end()
#define pii pair<int, int>
#define mp make_pair
#define ll long long

using namespace std;

// TopCoder problem: find x such that x plus all of its successive integer
// divisions by 10 sums to S.  (Duplicate #include <vector> removed.)
class LastDigit {
public:
    // f(x) = x + x/10 + x/100 + ... — strictly increasing in x, which makes
    // the binary search below valid and the solution (if any) unique.
    long long f(long long x) {
        long long res = x;
        while (x) {
            x /= 10;
            res += x;
        }
        return res;
    }

    // Binary-search the unique x in [1, 1e18] with f(x) == S; returns -1
    // when no such x exists.
    long long findX(long long S) {
        long long low = 1, high = 1e18;
        while (low <= high) {
            long long mid = low + (high - low) / 2;
            long long temp = f(mid);
            if (temp >= S) {
                high = mid - 1;
                if (temp == S) {
                    return mid;
                }
            } else {
                low = mid + 1;
            }
        }
        return -1;
    }
};

// BEGIN CUT HERE
#include <ctime>
#include <cmath>
#include <string>
#include <vector>
#include <sstream>
#include <iostream>
#include <algorithm>
using namespace std;

// TopCoder-style local test harness: with no args it re-invokes itself once
// per test case; with an index arg it runs that single case.
int main(int argc, char* argv[]) {
    if (argc == 1) {
        cout << "Testing LastDigit (500.0 points)" << endl << endl;
        for (int i = 0; i < 20; i++) {
            ostringstream s;
            s << argv[0] << " " << i;
            int exitCode = system(s.str().c_str());
            if (exitCode) cout << "#" << i << ": Runtime Error" << endl;
        }
        int T = time(NULL) - 1474903633;
        double PT = T / 60.0, TT = 75.0;
        cout.setf(ios::fixed, ios::floatfield);
        cout.precision(2);
        cout << endl;
        cout << "Time  : " << T / 60 << " minutes " << T % 60 << " secs" << endl;
        cout << "Score : " << 500.0 * (.3 + (.7 * TT * TT) / (10.0 * PT * PT + TT * TT)) << " points" << endl;
    } else {
        int _tc;
        istringstream(argv[1]) >> _tc;
        LastDigit _obj;
        long long _expected, _received;
        time_t _start = clock();
        switch (_tc) {
            case 0: {
                long long S = 564LL;
                _expected = 509LL;
                _received = _obj.findX(S);
                break;
            }
            case 1: {
                long long S = 565LL;
                _expected = -1LL;
                _received = _obj.findX(S);
                break;
            }
            case 2: {
                long long S = 3000LL;
                _expected = 2701LL;
                _received = _obj.findX(S);
                break;
            }
            case 3: {
                long long S = 137174210616796LL;
                _expected = 123456789555123LL;
                _received = _obj.findX(S);
                break;
            }
            case 4: {
                long long S = 837592744927492746LL;
                _expected = -1LL;
                _received = _obj.findX(S);
                break;
            }
            case 5: {
                long long S = 999999999999999999LL;
                _expected = 900000000000000000LL;
                _received = _obj.findX(S);
                break;
            }
            /*case 6: {
                long long S = LL;
                _expected = LL;
                _received = _obj.findX(S);
                break;
            }*/
            /*case 7: {
                long long S = LL;
                _expected = LL;
                _received = _obj.findX(S);
                break;
            }*/
            /*case 8: {
                long long S = LL;
                _expected = LL;
                _received = _obj.findX(S);
                break;
            }*/
            default:
                return 0;
        }
        cout.setf(ios::fixed, ios::floatfield);
        cout.precision(2);
        double _elapsed = (double)(clock() - _start) / CLOCKS_PER_SEC;
        if (_received == _expected)
            cout << "#" << _tc << ": Passed (" << _elapsed << " secs)" << endl;
        else {
            cout << "#" << _tc << ": Failed (" << _elapsed << " secs)" << endl;
            cout << "           Expected: " << _expected << endl;
            cout << "           Received: " << _received << endl;
        }
    }
}
// END CUT HERE
brendandburns/c
kubernetes/model/v1beta2_scale.h
/*
 * v1beta2_scale.h
 *
 * Scale represents a scaling request for a resource.
 */

#ifndef _v1beta2_scale_H_
#define _v1beta2_scale_H_

#include <string.h>
#include "../external/cJSON.h"
#include "../include/list.h"
#include "../include/keyValuePair.h"
#include "v1_object_meta.h"
#include "v1beta2_scale_spec.h"
#include "v1beta2_scale_status.h"

/* In-memory representation of a Kubernetes apps/v1beta2 Scale object. */
typedef struct v1beta2_scale_t {
    char *api_version; // string
    char *kind; // string
    struct v1_object_meta_t *metadata; //model
    struct v1beta2_scale_spec_t *spec; //model
    struct v1beta2_scale_status_t *status; //model

} v1beta2_scale_t;

/* Constructor from field values.
 * NOTE(review): ownership of the pointer arguments is presumably taken by
 * the object — confirm against the generated .c implementation. */
v1beta2_scale_t *v1beta2_scale_create(
    char *api_version,
    char *kind,
    v1_object_meta_t *metadata,
    v1beta2_scale_spec_t *spec,
    v1beta2_scale_status_t *status
);

/* Releases the object and its members. */
void v1beta2_scale_free(v1beta2_scale_t *v1beta2_scale);

/* Deserializes a scale object from parsed JSON. */
v1beta2_scale_t *v1beta2_scale_parseFromJSON(cJSON *v1beta2_scaleJSON);

/* Serializes a scale object to a cJSON tree. */
cJSON *v1beta2_scale_convertToJSON(v1beta2_scale_t *v1beta2_scale);

#endif /* _v1beta2_scale_H_ */
acidburn0zzz/llvm-project
libcxx/test/std/strings/string.view/string.view.template/contains.string_view.pass.cpp
<gh_stars>10-100 //===----------------------------------------------------------------------===// // // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. // See https://llvm.org/LICENSE.txt for license information. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception // //===----------------------------------------------------------------------===// // UNSUPPORTED: c++03, c++11, c++14, c++17, c++20 // <string_view> // constexpr bool contains(string_view x) const noexcept; #include <string_view> #include <cassert> #include "test_macros.h" constexpr bool test() { using SV = std::string_view; const char* s = "abcde"; SV sv0; SV sv1 {s + 1, 1}; SV sv2 {s + 1, 2}; SV sv3 {s + 1, 3}; SV sv4 {s + 1, 4}; SV sv5 {s , 5}; SV svNot {"xyz", 3}; SV svNot2 {"bd" , 2}; SV svNot3 {"dcb", 3}; ASSERT_NOEXCEPT(sv0.contains(sv0)); assert( sv0.contains(sv0)); assert(!sv0.contains(sv1)); assert( sv1.contains(sv0)); assert( sv1.contains(sv1)); assert(!sv1.contains(sv2)); assert(!sv1.contains(sv3)); assert(!sv1.contains(sv4)); assert(!sv1.contains(sv5)); assert(!sv1.contains(svNot)); assert(!sv1.contains(svNot2)); assert(!sv1.contains(svNot3)); assert( sv3.contains(sv0)); assert( sv3.contains(sv1)); assert( sv3.contains(sv2)); assert( sv3.contains(sv3)); assert(!sv3.contains(sv4)); assert(!sv3.contains(sv5)); assert(!sv3.contains(svNot)); assert(!sv3.contains(svNot2)); assert(!sv3.contains(svNot3)); assert( sv5.contains(sv0)); assert( sv5.contains(sv1)); assert( sv5.contains(sv2)); assert( sv5.contains(sv3)); assert( sv5.contains(sv4)); assert( sv5.contains(sv5)); assert(!sv5.contains(svNot)); assert(!sv5.contains(svNot2)); assert(!sv5.contains(svNot3)); assert( svNot.contains(sv0)); assert(!svNot.contains(sv1)); assert(!svNot.contains(sv2)); assert(!svNot.contains(sv3)); assert(!svNot.contains(sv4)); assert(!svNot.contains(sv5)); assert( svNot.contains(svNot)); assert(!svNot.contains(svNot2)); assert(!svNot.contains(svNot3)); return true; } int main(int, 
char**) { test(); static_assert(test()); return 0; }
m-nakagawa/sample
jena-3.0.1/jena-arq/src/main/java/org/apache/jena/riot/lang/TriX.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.jena.riot.lang;

/** TriX - see <a href="http://www.hpl.hp.com/techreports/2004/HPL-2004-56.html">HPL-2004-56</a>
 * Supported:
 * <li>Basic TriX as per the DTD in HPL-2004-56
 * <li>Typed literal rdf:XMLLiteral with inline XML.
 * <li>&lt;qname&gt; (on reading)
 */
public class TriX {
    // DTD for TriX : The schema is much longer.
    /*
<!-- TriX: RDF Triples in XML -->
<!ELEMENT TriX (graph*)>
<!ATTLIST TriX xmlns CDATA #FIXED "http://www.w3.org/2004/03/trix/trix-1/">
<!ELEMENT graph (uri*, triple*)>
<!ELEMENT triple ((id|uri|plainLiteral|typedLiteral), uri, (id|uri|plainLiteral|typedLiteral))>
<!ELEMENT id (#PCDATA)>
<!ELEMENT uri (#PCDATA)>
<!ELEMENT plainLiteral (#PCDATA)>
<!ATTLIST plainLiteral xml:lang CDATA #IMPLIED>
<!ELEMENT typedLiteral (#PCDATA)>
<!ATTLIST typedLiteral datatype CDATA #REQUIRED>
     */

    /* Constants for TriX: namespace, element tag names and attribute names
     * used by the reader/writer. */
    public final static String NS               = "http://www.w3.org/2004/03/trix/trix-1/" ;

    public final static String tagTriX          = "TriX" ;
    public final static String tagGraph         = "graph" ;
    public final static String tagTriple        = "triple" ;

    public final static String tagURI           = "uri" ;
    public final static String tagId            = "id" ;
    public final static String tagQName         = "qname" ;
    public final static String tagPlainLiteral  = "plainLiteral" ;
    public final static String tagTypedLiteral  = "typedLiteral" ;

    // Local names only: e.g. the "lang" part of xml:lang.
    public final static String attrXmlLang      = "lang" ;
    public final static String attrDatatype     = "datatype" ;
}
JRedOW/Minestom
src/autogenerated/java/net/minestom/server/potion/PotionType.java
package net.minestom.server.potion;

import net.kyori.adventure.key.Key;
import net.kyori.adventure.key.Keyed;
import net.minestom.server.registry.Registries;
import net.minestom.server.utils.NamespaceID;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

/**
 * AUTOGENERATED by PotionTypeGenerator
 */
public enum PotionType implements Keyed {
    EMPTY(NamespaceID.from("minecraft:empty")),

    WATER(NamespaceID.from("minecraft:water")),

    MUNDANE(NamespaceID.from("minecraft:mundane")),

    THICK(NamespaceID.from("minecraft:thick")),

    AWKWARD(NamespaceID.from("minecraft:awkward")),

    NIGHT_VISION(NamespaceID.from("minecraft:night_vision")),

    LONG_NIGHT_VISION(NamespaceID.from("minecraft:long_night_vision")),

    INVISIBILITY(NamespaceID.from("minecraft:invisibility")),

    LONG_INVISIBILITY(NamespaceID.from("minecraft:long_invisibility")),

    LEAPING(NamespaceID.from("minecraft:leaping")),

    LONG_LEAPING(NamespaceID.from("minecraft:long_leaping")),

    STRONG_LEAPING(NamespaceID.from("minecraft:strong_leaping")),

    FIRE_RESISTANCE(NamespaceID.from("minecraft:fire_resistance")),

    LONG_FIRE_RESISTANCE(NamespaceID.from("minecraft:long_fire_resistance")),

    SWIFTNESS(NamespaceID.from("minecraft:swiftness")),

    LONG_SWIFTNESS(NamespaceID.from("minecraft:long_swiftness")),

    STRONG_SWIFTNESS(NamespaceID.from("minecraft:strong_swiftness")),

    SLOWNESS(NamespaceID.from("minecraft:slowness")),

    LONG_SLOWNESS(NamespaceID.from("minecraft:long_slowness")),

    STRONG_SLOWNESS(NamespaceID.from("minecraft:strong_slowness")),

    TURTLE_MASTER(NamespaceID.from("minecraft:turtle_master")),

    LONG_TURTLE_MASTER(NamespaceID.from("minecraft:long_turtle_master")),

    STRONG_TURTLE_MASTER(NamespaceID.from("minecraft:strong_turtle_master")),

    WATER_BREATHING(NamespaceID.from("minecraft:water_breathing")),

    LONG_WATER_BREATHING(NamespaceID.from("minecraft:long_water_breathing")),

    HEALING(NamespaceID.from("minecraft:healing")),

    STRONG_HEALING(NamespaceID.from("minecraft:strong_healing")),

    HARMING(NamespaceID.from("minecraft:harming")),

    STRONG_HARMING(NamespaceID.from("minecraft:strong_harming")),

    POISON(NamespaceID.from("minecraft:poison")),

    LONG_POISON(NamespaceID.from("minecraft:long_poison")),

    STRONG_POISON(NamespaceID.from("minecraft:strong_poison")),

    REGENERATION(NamespaceID.from("minecraft:regeneration")),

    LONG_REGENERATION(NamespaceID.from("minecraft:long_regeneration")),

    STRONG_REGENERATION(NamespaceID.from("minecraft:strong_regeneration")),

    STRENGTH(NamespaceID.from("minecraft:strength")),

    LONG_STRENGTH(NamespaceID.from("minecraft:long_strength")),

    STRONG_STRENGTH(NamespaceID.from("minecraft:strong_strength")),

    WEAKNESS(NamespaceID.from("minecraft:weakness")),

    LONG_WEAKNESS(NamespaceID.from("minecraft:long_weakness")),

    LUCK(NamespaceID.from("minecraft:luck")),

    SLOW_FALLING(NamespaceID.from("minecraft:slow_falling")),

    LONG_SLOW_FALLING(NamespaceID.from("minecraft:long_slow_falling"));

    // Cached once; values() allocates a fresh array on every call.
    private static final PotionType[] VALUES = values();

    @NotNull
    private final NamespaceID id;

    /**
     * Registers the potion type in the global registry under its namespace id.
     */
    PotionType(@NotNull NamespaceID id) {
        this.id = id;
        Registries.potionTypes.put(id, this);
    }

    /**
     * Returns the Adventure {@link Key} of this potion type (its namespace id).
     */
    @Override
    @NotNull
    public Key key() {
        return this.id;
    }

    /**
     * Returns the protocol id of this potion type. The numeric id is the
     * declaration order of the enum constants (the ordinal).
     */
    public short getId() {
        return (short) ordinal();
    }

    /**
     * Returns the namespace id of this potion type, e.g. {@code minecraft:healing}.
     */
    @NotNull
    public NamespaceID getNamespaceID() {
        return this.id;
    }

    /**
     * Looks up a potion type by its protocol id.
     *
     * @param id the numeric id as used by {@link #getId()}
     * @return the matching potion type, or {@code null} when the id is out of range
     */
    @Nullable
    public static PotionType fromId(short id) {
        if(id >= 0 && id < VALUES.length) {
            return VALUES[id];
        }
        return null;
    }

    @NotNull
    @Override
    public String toString() {
        return "[" + this.id + "]";
    }
}
gausie/core-js
tests/tests/es.string.small.js
<reponame>gausie/core-js QUnit.test('String#small', assert => { const { small } = String.prototype; assert.isFunction(small); assert.arity(small, 0); assert.name(small, 'small'); assert.looksNative(small); assert.nonEnumerable(String.prototype, 'small'); assert.same('a'.small(), '<small>a</small>', 'lower case'); });
smart-cow/scow
cow-ac/cow-ac-client/src/org/wiredwidgets/cow/ac/client/utils/TaskListCellRenderer.java
package org.wiredwidgets.cow.ac.client.utils;

import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Component;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.JCheckBox;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.ListCellRenderer;
import javax.swing.UIDefaults;
import javax.swing.UIManager;
import org.wiredwidgets.cow.ac.client.controllers.TaskController;
import org.wiredwidgets.cow.server.api.service.Task;

/**
 * Decorates <code>Task</code>s a bit nicer and distinguishes between assigned and
 * available tasks. Could do much more.
 *
 * @author RYANMILLER
 * @see org.wiredwidgets.cow.ac.client.ui.TasksPanel
 */
public class TaskListCellRenderer extends JComponent implements ListCellRenderer {

    // Colors pulled once from the current look-and-feel so rendering matches the theme.
    static Color listForeground, listBackground, listSelectionForeground, listSelectionBackground;

    static {
        UIDefaults uid = UIManager.getLookAndFeel().getDefaults();
        listForeground = uid.getColor("List.foreground");
        listBackground = uid.getColor("List.background");
        listSelectionForeground = uid.getColor("List.selectionForeground");
        listSelectionBackground = uid.getColor("List.selectionBackground");
    }

    JLabel taskInfo;
    JCheckBox checkbox;
    Icon myTaskIcon;
    Icon availableTaskIcon;

    /**
     * Creates the CellRenderer and loads image resources. This initialize can
     * cause strange problems if the paths to the image resources aren't correct.
     */
    public TaskListCellRenderer() {
        setLayout(new BorderLayout());
        myTaskIcon = new ImageIcon(
                getClass().getResource("/org/wiredwidgets/cow/ac/client/images/assigned-task-icon.png"));
        availableTaskIcon = new ImageIcon(
                getClass().getResource("/org/wiredwidgets/cow/ac/client/images/available-task-icon_o.png"));

        // rdm 12/8/2011 Checkbox not used for now, so hiding
        //checkbox = new JCheckBox();
        //checkbox.setOpaque(true);

        taskInfo = new JLabel();
        taskInfo.setOpaque(true); // needed to make the background color show up
        //add(checkbox, BorderLayout.WEST);
        add(taskInfo, BorderLayout.WEST); // move to center if using checkbox
    }

    /**
     * Renders one list entry: "processInstanceId: name" for {@link Task} values
     * (with an icon distinguishing available from assigned tasks), or the value's
     * string form for anything else.
     */
    @Override
    public Component getListCellRendererComponent(JList list, Object value, int index,
            boolean isSelected, boolean cellHasFocus) {
        if (value instanceof Task) // use the name of the task as the primary display element
        {
            Task t = (Task) value;
            taskInfo.setText(t.getProcessInstanceId() + ": " + t.getName());
            // denote the task with some special markings
            if (TaskController.getInstance().getSpecializedTasksModel().isAvailableTask(t)) {
                taskInfo.setText(taskInfo.getText() + " (available)");
                taskInfo.setIcon(availableTaskIcon);
            } else {
                taskInfo.setIcon(myTaskIcon);
            }
        } else // allows the list to display other objects in a very basic way that might get thrown in
        {
            // String.valueOf tolerates a null value instead of throwing NPE.
            taskInfo.setText(String.valueOf(value));
        }

        // Don't actually want it to be selected when it's selected, as a check would indicate completeness
        //checkbox.setSelected(isSelected);

        // Apply selection-appropriate colors to every child component.
        for (Component comp : getComponents()) {
            comp.setForeground(isSelected ? listSelectionForeground : listForeground);
            comp.setBackground(isSelected ? listSelectionBackground : listBackground);
        }
        return this;
    }
}
ppartarr/azure-sdk-for-java
sdk/sql/mgmt-v2014_04_01/src/main/java/com/microsoft/azure/management/sql/v2014_04_01/implementation/ServerTableAuditingPolicyInner.java
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */

package com.microsoft.azure.management.sql.v2014_04_01.implementation;

import java.util.UUID;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.microsoft.rest.serializer.JsonFlatten;
import com.microsoft.azure.ProxyResource;

/**
 * A server table auditing policy.
 */
@JsonFlatten
public class ServerTableAuditingPolicyInner extends ProxyResource {
    /**
     * Resource kind.
     */
    @JsonProperty(value = "kind", access = JsonProperty.Access.WRITE_ONLY)
    private String kind;

    /**
     * Resource location.
     */
    @JsonProperty(value = "location", access = JsonProperty.Access.WRITE_ONLY)
    private String location;

    /**
     * The state of the policy.
     */
    @JsonProperty(value = "properties.auditingState")
    private String auditingState;

    /**
     * The audit logs table name.
     */
    @JsonProperty(value = "properties.auditLogsTableName")
    private String auditLogsTableName;

    /**
     * Comma-separated list of event types to audit.
     */
    @JsonProperty(value = "properties.eventTypesToAudit")
    private String eventTypesToAudit;

    /**
     * The full audit logs table name.
     */
    @JsonProperty(value = "properties.fullAuditLogsTableName")
    private String fullAuditLogsTableName;

    /**
     * The number of days to keep in the audit logs.
     */
    @JsonProperty(value = "properties.retentionDays")
    private String retentionDays;

    /**
     * The key of the auditing storage account.
     */
    @JsonProperty(value = "properties.storageAccountKey")
    private String storageAccountKey;

    /**
     * The table storage account name.
     */
    @JsonProperty(value = "properties.storageAccountName")
    private String storageAccountName;

    /**
     * The table storage account resource group name.
     */
    @JsonProperty(value = "properties.storageAccountResourceGroupName")
    private String storageAccountResourceGroupName;

    /**
     * The secondary key of the auditing storage account.
     */
    @JsonProperty(value = "properties.storageAccountSecondaryKey")
    private String storageAccountSecondaryKey;

    /**
     * The table storage subscription Id.
     */
    @JsonProperty(value = "properties.storageAccountSubscriptionId")
    private UUID storageAccountSubscriptionId;

    /**
     * The storage table endpoint.
     */
    @JsonProperty(value = "properties.storageTableEndpoint")
    private String storageTableEndpoint;

    /**
     * Get resource kind.
     *
     * @return the kind value
     */
    public String kind() {
        return this.kind;
    }

    /**
     * Get resource location.
     *
     * @return the location value
     */
    public String location() {
        return this.location;
    }

    /**
     * Get the state of the policy.
     *
     * @return the auditingState value
     */
    public String auditingState() {
        return this.auditingState;
    }

    /**
     * Set the state of the policy.
     *
     * @param auditingState the auditingState value to set
     * @return the ServerTableAuditingPolicyInner object itself.
     */
    public ServerTableAuditingPolicyInner withAuditingState(String auditingState) {
        this.auditingState = auditingState;
        return this;
    }

    /**
     * Get the audit logs table name.
     *
     * @return the auditLogsTableName value
     */
    public String auditLogsTableName() {
        return this.auditLogsTableName;
    }

    /**
     * Set the audit logs table name.
     *
     * @param auditLogsTableName the auditLogsTableName value to set
     * @return the ServerTableAuditingPolicyInner object itself.
     */
    public ServerTableAuditingPolicyInner withAuditLogsTableName(String auditLogsTableName) {
        this.auditLogsTableName = auditLogsTableName;
        return this;
    }

    /**
     * Get comma-separated list of event types to audit.
     *
     * @return the eventTypesToAudit value
     */
    public String eventTypesToAudit() {
        return this.eventTypesToAudit;
    }

    /**
     * Set comma-separated list of event types to audit.
     *
     * @param eventTypesToAudit the eventTypesToAudit value to set
     * @return the ServerTableAuditingPolicyInner object itself.
     */
    public ServerTableAuditingPolicyInner withEventTypesToAudit(String eventTypesToAudit) {
        this.eventTypesToAudit = eventTypesToAudit;
        return this;
    }

    /**
     * Get the full audit logs table name.
     *
     * @return the fullAuditLogsTableName value
     */
    public String fullAuditLogsTableName() {
        return this.fullAuditLogsTableName;
    }

    /**
     * Set the full audit logs table name.
     *
     * @param fullAuditLogsTableName the fullAuditLogsTableName value to set
     * @return the ServerTableAuditingPolicyInner object itself.
     */
    public ServerTableAuditingPolicyInner withFullAuditLogsTableName(String fullAuditLogsTableName) {
        this.fullAuditLogsTableName = fullAuditLogsTableName;
        return this;
    }

    /**
     * Get the number of days to keep in the audit logs.
     *
     * @return the retentionDays value
     */
    public String retentionDays() {
        return this.retentionDays;
    }

    /**
     * Set the number of days to keep in the audit logs.
     *
     * @param retentionDays the retentionDays value to set
     * @return the ServerTableAuditingPolicyInner object itself.
     */
    public ServerTableAuditingPolicyInner withRetentionDays(String retentionDays) {
        this.retentionDays = retentionDays;
        return this;
    }

    /**
     * Get the key of the auditing storage account.
     *
     * @return the storageAccountKey value
     */
    public String storageAccountKey() {
        return this.storageAccountKey;
    }

    /**
     * Set the key of the auditing storage account.
     *
     * @param storageAccountKey the storageAccountKey value to set
     * @return the ServerTableAuditingPolicyInner object itself.
     */
    public ServerTableAuditingPolicyInner withStorageAccountKey(String storageAccountKey) {
        this.storageAccountKey = storageAccountKey;
        return this;
    }

    /**
     * Get the table storage account name.
     *
     * @return the storageAccountName value
     */
    public String storageAccountName() {
        return this.storageAccountName;
    }

    /**
     * Set the table storage account name.
     *
     * @param storageAccountName the storageAccountName value to set
     * @return the ServerTableAuditingPolicyInner object itself.
     */
    public ServerTableAuditingPolicyInner withStorageAccountName(String storageAccountName) {
        this.storageAccountName = storageAccountName;
        return this;
    }

    /**
     * Get the table storage account resource group name.
     *
     * @return the storageAccountResourceGroupName value
     */
    public String storageAccountResourceGroupName() {
        return this.storageAccountResourceGroupName;
    }

    /**
     * Set the table storage account resource group name.
     *
     * @param storageAccountResourceGroupName the storageAccountResourceGroupName value to set
     * @return the ServerTableAuditingPolicyInner object itself.
     */
    public ServerTableAuditingPolicyInner withStorageAccountResourceGroupName(String storageAccountResourceGroupName) {
        this.storageAccountResourceGroupName = storageAccountResourceGroupName;
        return this;
    }

    /**
     * Get the secondary key of the auditing storage account.
     *
     * @return the storageAccountSecondaryKey value
     */
    public String storageAccountSecondaryKey() {
        return this.storageAccountSecondaryKey;
    }

    /**
     * Set the secondary key of the auditing storage account.
     *
     * @param storageAccountSecondaryKey the storageAccountSecondaryKey value to set
     * @return the ServerTableAuditingPolicyInner object itself.
     */
    public ServerTableAuditingPolicyInner withStorageAccountSecondaryKey(String storageAccountSecondaryKey) {
        this.storageAccountSecondaryKey = storageAccountSecondaryKey;
        return this;
    }

    /**
     * Get the table storage subscription Id.
     *
     * @return the storageAccountSubscriptionId value
     */
    public UUID storageAccountSubscriptionId() {
        return this.storageAccountSubscriptionId;
    }

    /**
     * Set the table storage subscription Id.
     *
     * @param storageAccountSubscriptionId the storageAccountSubscriptionId value to set
     * @return the ServerTableAuditingPolicyInner object itself.
     */
    public ServerTableAuditingPolicyInner withStorageAccountSubscriptionId(UUID storageAccountSubscriptionId) {
        this.storageAccountSubscriptionId = storageAccountSubscriptionId;
        return this;
    }

    /**
     * Get the storage table endpoint.
     *
     * @return the storageTableEndpoint value
     */
    public String storageTableEndpoint() {
        return this.storageTableEndpoint;
    }

    /**
     * Set the storage table endpoint.
     *
     * @param storageTableEndpoint the storageTableEndpoint value to set
     * @return the ServerTableAuditingPolicyInner object itself.
     */
    public ServerTableAuditingPolicyInner withStorageTableEndpoint(String storageTableEndpoint) {
        this.storageTableEndpoint = storageTableEndpoint;
        return this;
    }

}
zthang/code2vec_treelstm
raw_dataset/59862146@solve@OK.java
public void solve(int testNumber, InputReader in, OutputWriter out) { int n = in.nextInt(); int m = in.nextInt(); int[][] mat = new int[n - 1][3]; int i; for (i = 0; i < n - 1; i++) { mat[i][0] = in.nextInt(); mat[i][1] = in.nextInt(); mat[i][2] = in.nextInt(); } Arrays.sort(mat, (int[] o1, int[] o2) -> { return o1[2] - o2[2]; }); TreeMap<Integer, Long> tm = new TreeMap<>(); DisjointSet ds = new DisjointSet(); for (i = 1; i <= n; i++) ds.makeSet(i); tm.put(0, 0l); for (i = 0; i < n - 1; i++) { Node a = ds.findSet(mat[i][0]); Node b = ds.findSet(mat[i][1]); long cur = 1l * a.size * b.size; tm.merge(mat[i][2], cur, (x, y) -> x + y); ds.union(mat[i][0], mat[i][1]); } // out.println(tm); long sum = 0; Iterator it = tm.keySet().iterator(); while (it.hasNext()) { int key = (int) it.next(); long val = tm.get(key); sum += val; tm.put(key, sum); } for (i = 0; i < m; i++) { int q = in.nextInt(); long ans = tm.get(tm.floorKey(q)); out.print(ans + " "); } out.println(); }
rlugojr/goojs
test/unit/addons/physicspack/colliders/MeshCollider-test.js
<gh_stars>1000+ describe('MeshCollider', function () { var MeshCollider = require('../../../../../src/goo/addons/physicspack/colliders/MeshCollider'); var Vector3 = require('../../../../../src/goo/math/Vector3'); var Sphere = require('../../../../../src/goo/shapes/Sphere'); var Transform = require('../../../../../src/goo/math/Transform'); it('can clone', function () { var collider = new MeshCollider({ meshData: new Sphere(10, 10, 1), scale: new Vector3(2, 3, 4) }); var clone = collider.clone(); expect(collider).toEqual(clone); }); it('can transform', function () { var collider = new MeshCollider({ meshData: new Sphere(10, 10, 1), scale: new Vector3(2, 3, 4) }); var transform = new Transform(); transform.scale.setDirect(1, 2, 3); collider.transform(transform, collider); expect(collider.scale).toEqual(new Vector3(2, 6, 12)); }); });
JayCGildea/DMDA
src/main/java/uk/co/fivium/dmda/server/SMTPStart.java
package uk.co.fivium.dmda.server;

import org.apache.log4j.BasicConfigurator;
import org.apache.log4j.Logger;
import uk.co.fivium.dmda.antivirus.AVScannerFactory;
import uk.co.fivium.dmda.databaseconnection.DatabaseConnectionException;
import uk.co.fivium.dmda.databaseconnection.DatabaseConnectionHandler;
import uk.co.fivium.dmda.healthchecks.SMTPStatusHealthCheck;
import uk.co.fivium.dmda.healthchecks.AvStatusHealthCheck;
import uk.co.fivium.dmda.healthchecks.DatabaseStatusHealthCheck;
import uk.co.fivium.dmda.healthchecks.HealthCheckService;

import java.io.File;
import java.io.IOException;

/**
 * Application entry point: loads configuration, verifies the database and
 * anti-virus connections, then starts the SMTP server (and, when enabled,
 * the health-check HTTP service). A JVM shutdown hook performs cleanup.
 */
public class SMTPStart {
  private SMTPServerWrapper mSMTPServer;
  private HealthCheckService mHealthCheckService;

  public static void main(String[] args) {
    SMTPStart lSMTPStart = new SMTPStart();

    Thread lShutdownHook = new Thread(lSMTPStart::stop);
    lShutdownHook.setName("Shutdown Hook");
    Runtime.getRuntime().addShutdownHook(lShutdownHook);

    lSMTPStart.start();
  }

  /** Boots the server; any startup failure is logged and the process exits. */
  public void start(){
    BasicConfigurator.configure(); // Enable some sort of basic logging
    try {
      loadServerConfiguration();
      startSMTPServer();
    }
    catch (Exception ex){
      Logger.getRootLogger().error("Error during startup. Server shutting down.", ex);
    }
  }

  /**
   * Shutdown-hook callback. Fields may still be null if startup failed part-way
   * through, or if health checks are disabled, so each is guarded before use —
   * the original code would throw NullPointerException here in those cases.
   */
  public void stop() {
    DatabaseConnectionHandler.getInstance().shutDown();
    if (mSMTPServer != null) {
      mSMTPServer.stop();
    }
    if (mHealthCheckService != null) {
      mHealthCheckService.stopHealthCheckService();
    }
    Logger.getRootLogger().info("Shutdown signal received. Server shutting down.");
  }

  /** Verifies external dependencies, then brings up the SMTP listener. */
  private void startSMTPServer() throws ServerStartupException {
    try {
      DatabaseConnectionHandler.getInstance().createConnectionPools();
    }
    catch (DatabaseConnectionException ex) {
      throw new ServerStartupException("Failed to create database connection pools", ex);
    }

    try{
      AVScannerFactory.getScanner().testConnection();
    }
    catch (IOException ex) {
      throw new ServerStartupException("Failed to connect to anti-virus scanner", ex);
    }

    mSMTPServer = new SMTPServerWrapper();

    if (SMTPConfig.getInstance().isHealthCheckEnabled()) {
      mHealthCheckService = new HealthCheckService(SMTPConfig.getInstance().getHealthCheckPort());
      registerHealthChecks();
      mHealthCheckService.startHealthCheckService();
    }

    mSMTPServer.start();
  }

  /** Reads config.xml from the working directory into the SMTPConfig singleton. */
  private void loadServerConfiguration() throws ServerStartupException {
    try {
      SMTPConfig.getInstance().loadConfig(new File("config.xml"));
    }
    catch (ConfigurationException ex) {
      throw new ServerStartupException("Failed to load server configuration", ex);
    }
  }

  /** Exposes SMTP, database, and AV status endpoints on the health-check service. */
  private void registerHealthChecks() {
    mHealthCheckService.registerHealthCheck("/smtp-status", new SMTPStatusHealthCheck(mSMTPServer));
    mHealthCheckService.registerHealthCheck("/db-status", new DatabaseStatusHealthCheck());
    mHealthCheckService.registerHealthCheck("/av-status", new AvStatusHealthCheck());
  }
}
prckent/qmcpack
src/QMCWaveFunctions/Jastrow/eeI_JastrowBuilder.cpp
<reponame>prckent/qmcpack ////////////////////////////////////////////////////////////////////////////////////// // This file is distributed under the University of Illinois/NCSA Open Source License. // See LICENSE file in top directory for details. // // Copyright (c) 2016 <NAME> and QMCPACK developers. // // File developed by: <NAME>, <EMAIL>, University of Illinois at Urbana-Champaign // <NAME>, <EMAIL>, University of Illinois at Urbana-Champaign // <NAME>, <EMAIL>, University of Illinois at Urbana-Champaign // <NAME>, <EMAIL>, Oak Ridge National Laboratory // <NAME>, <EMAIL>, Oak Ridge National Laboratory // // File created by: <NAME>, <EMAIL>, University of Illinois at Urbana-Champaign ////////////////////////////////////////////////////////////////////////////////////// #include "Particle/DistanceTableData.h" #include "QMCWaveFunctions/Jastrow/eeI_JastrowBuilder.h" #ifndef ENABLE_SOA #include "QMCWaveFunctions/Jastrow/eeI_JastrowOrbital.h" #endif #include "QMCWaveFunctions/Jastrow/JeeIOrbitalSoA.h" #include "Utilities/ProgressReportEngine.h" #include "QMCWaveFunctions/Jastrow/PolynomialFunctor3D.h" namespace qmcplusplus { template<typename J3type> bool eeI_JastrowBuilder::putkids(xmlNodePtr kids, J3type& J3) { std::string jname = "JeeI"; SpeciesSet& iSet = sourcePtcl->getSpeciesSet(); SpeciesSet& eSet = targetPtcl.getSpeciesSet(); //read in xml while (kids != NULL) { std::string kidsname = (char*)kids->name; if (kidsname == "correlation") { RealType ee_cusp = 0.0; RealType eI_cusp = 0.0; std::string iSpecies, eSpecies1("u"), eSpecies2("u"); OhmmsAttributeSet rAttrib; rAttrib.add(iSpecies, "ispecies"); rAttrib.add(eSpecies1, "especies1"); rAttrib.add(eSpecies2, "especies2"); rAttrib.add(ee_cusp, "ecusp"); rAttrib.add(eI_cusp, "icusp"); rAttrib.put(kids); typedef typename J3type::FuncType FT; FT* functor = new FT(ee_cusp, eI_cusp); functor->iSpecies = iSpecies; functor->eSpecies1 = eSpecies1; functor->eSpecies2 = eSpecies2; int iNum = 
iSet.findSpecies(iSpecies); int eNum1 = eSet.findSpecies(eSpecies1); int eNum2 = eSet.findSpecies(eSpecies2); if (iNum == iSet.size()) { APP_ABORT("ion species " + iSpecies + " requested for Jastrow " + jname + " does not exist in ParticleSet " + sourcePtcl->getName()); } std::string illegal_eSpecies; if (eNum1 == eSet.size()) illegal_eSpecies = eSpecies1; if (eNum2 == eSet.size()) { if (illegal_eSpecies.size()) illegal_eSpecies += " and "; illegal_eSpecies += eSpecies2; } if (illegal_eSpecies.size()) APP_ABORT("electron species " + illegal_eSpecies + " requested for Jastrow " + jname + " does not exist in ParticleSet " + targetPtcl.getName()); functor->put(kids); if (sourcePtcl->Lattice.SuperCellEnum != SUPERCELL_OPEN) { const RealType WSRadius = sourcePtcl->Lattice.WignerSeitzRadius; if (functor->cutoff_radius > WSRadius) { if (functor->cutoff_radius - WSRadius > 1e-4) { APP_ABORT(" The eeI Jastrow cutoff specified should not be larger than Wigner-Seitz radius."); } else { app_log() << " The eeI Jastrow cutoff specified is slightly larger than the Wigner-Seitz radius."; app_log() << " Setting to Wigner-Seitz radius = " << WSRadius << ".\n"; functor->cutoff_radius = WSRadius; functor->reset(); } } if (functor->cutoff_radius < 1.0e-6) { app_log() << " eeI functor rcut is currently zero.\n" << " Setting to Wigner-Seitz radius = " << WSRadius << std::endl; functor->cutoff_radius = WSRadius; functor->reset(); } } else if (functor->cutoff_radius < 1.0e-6) { APP_ABORT(" eeI Jastrow cutoff unspecified. 
Cutoff must be given when using open boundary conditions"); } J3.addFunc(iNum, eNum1, eNum2, functor); } kids = kids->next; } //check that each ion species has up and down components J3.check_complete(); J3.setOptimizable(true); return true; } WaveFunctionComponent* eeI_JastrowBuilder::buildComponent(xmlNodePtr cur) { ReportEngine PRE(ClassName, "put(xmlNodePtr)"); xmlNodePtr kids = cur->xmlChildrenNode; // Create a three-body Jastrow if (sourcePtcl) { std::string ftype("polynomial"); OhmmsAttributeSet tAttrib; tAttrib.add(ftype, "function"); tAttrib.put(cur); SpeciesSet& iSet = sourcePtcl->getSpeciesSet(); if (ftype == "polynomial") { #ifdef ENABLE_SOA typedef JeeIOrbitalSoA<PolynomialFunctor3D> J3Type; #else typedef eeI_JastrowOrbital<PolynomialFunctor3D> J3Type; #endif J3Type* J3 = new J3Type(*sourcePtcl, targetPtcl, true); putkids(kids, *J3); return J3; } else { std::ostringstream err_msg; err_msg << "Unknown function \"" << ftype << "\" in" << " eeI_JastrowBuilder. Aborting.\n"; APP_ABORT(err_msg.str()); } } else APP_ABORT("You must specify the \"source\" particleset for a three-body Jastrow.\n"); return nullptr; } } // namespace qmcplusplus
mengxy/swc
crates/swc_ecma_minifier/tests/terser/compress/loops/issue_1648/input.js
<reponame>mengxy/swc function f() { x(); var b = 1; while (1); }
dillondow24/SleepApp
server/node_modules/@aws/dynamodb-batch-iterator/build/BatchGet.js
// Compiled (tsc, ES5 + tslib) output of BatchGet.ts from @aws/dynamodb-batch-iterator.
// Only the dataset residue "<filename>..." was removed; the generated code is
// otherwise unchanged.
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var tslib_1 = require("tslib");
var BatchOperation_1 = require("./BatchOperation");
exports.MAX_READ_BATCH_SIZE = 100;
/**
 * Retrieves items from DynamoDB in batches of 100 or fewer via one or more
 * BatchGetItem operations. The items may be from any number of tables.
 *
 * This method will automatically retry any get requests returned by DynamoDB as
 * unprocessed. Exponential backoff on unprocessed items is employed on a
 * per-table basis.
 */
var BatchGet = /** @class */ (function (_super) {
    tslib_1.__extends(BatchGet, _super);
    /**
     * @param client    The AWS SDK client with which to communicate with
     *                  DynamoDB.
     * @param items     A synchronous or asynchronous iterable of tuples
     *                  describing the reads to execute. The first member of the
     *                  tuple should be the name of the table from which to
     *                  read, and the second should be the marshalled key.
     * @param options   Additional options to apply to the operations executed.
     */
    function BatchGet(client, items, _a) {
        var _b = _a === void 0 ? {} : _a, ConsistentRead = _b.ConsistentRead, _c = _b.PerTableOptions, PerTableOptions = _c === void 0 ? {} : _c;
        var _this = _super.call(this, client, items) || this;
        _this.batchSize = exports.MAX_READ_BATCH_SIZE;
        _this.consistentRead = ConsistentRead;
        _this.options = PerTableOptions;
        return _this;
    }
    BatchGet.prototype.doBatchRequest = function () {
        return tslib_1.__awaiter(this, void 0, void 0, function () {
            var e_1, _a, e_2, _b, e_3, _c, operationInput, batchSize, _d, tableName, item, _e, projection, consistentRead, attributeNames, _f, _g, Responses, _h, UnprocessedKeys, unprocessedTables, _j, _k, table, _l, _m, table, tableData, _o, _p, item;
            return tslib_1.__generator(this, function (_q) {
                switch (_q.label) {
                    case 0:
                        operationInput = { RequestItems: {} };
                        batchSize = 0;
                        // Drain the send queue into a single BatchGetItem payload.
                        while (this.toSend.length > 0) {
                            _d = tslib_1.__read(this.toSend.shift(), 2), tableName = _d[0], item = _d[1];
                            if (operationInput.RequestItems[tableName] === undefined) {
                                _e = this.state[tableName], projection = _e.projection, consistentRead = _e.consistentRead, attributeNames = _e.attributeNames;
                                operationInput.RequestItems[tableName] = {
                                    Keys: [],
                                    ConsistentRead: consistentRead,
                                    ProjectionExpression: projection,
                                    ExpressionAttributeNames: attributeNames,
                                };
                            }
                            operationInput.RequestItems[tableName].Keys.push(item);
                            if (++batchSize === this.batchSize) {
                                break;
                            }
                        }
                        return [4 /*yield*/, this.client.batchGetItem(operationInput).promise()];
                    case 1:
                        _f = _q.sent(), _g = _f.Responses, Responses = _g === void 0 ? {} : _g, _h = _f.UnprocessedKeys, UnprocessedKeys = _h === void 0 ? {} : _h;
                        unprocessedTables = new Set();
                        try {
                            // Re-queue anything DynamoDB reported as unprocessed.
                            for (_j = tslib_1.__values(Object.keys(UnprocessedKeys)), _k = _j.next(); !_k.done; _k = _j.next()) {
                                table = _k.value;
                                unprocessedTables.add(table);
                                this.handleThrottled(table, UnprocessedKeys[table].Keys);
                            }
                        }
                        catch (e_1_1) { e_1 = { error: e_1_1 }; }
                        finally {
                            try {
                                if (_k && !_k.done && (_a = _j.return)) _a.call(_j);
                            }
                            finally { if (e_1) throw e_1.error; }
                        }
                        this.movePendingToThrottled(unprocessedTables);
                        try {
                            // Deliver successful reads and relax each table's backoff.
                            for (_l = tslib_1.__values(Object.keys(Responses)), _m = _l.next(); !_m.done; _m = _l.next()) {
                                table = _m.value;
                                tableData = this.state[table];
                                tableData.backoffFactor = Math.max(0, tableData.backoffFactor - 1);
                                try {
                                    for (_o = tslib_1.__values(Responses[table]), _p = _o.next(); !_p.done; _p = _o.next()) {
                                        item = _p.value;
                                        this.pending.push([table, item]);
                                    }
                                }
                                catch (e_3_1) { e_3 = { error: e_3_1 }; }
                                finally {
                                    try {
                                        if (_p && !_p.done && (_c = _o.return)) _c.call(_o);
                                    }
                                    finally { if (e_3) throw e_3.error; }
                                }
                            }
                        }
                        catch (e_2_1) { e_2 = { error: e_2_1 }; }
                        finally {
                            try {
                                if (_m && !_m.done && (_b = _l.return)) _b.call(_l);
                            }
                            finally { if (e_2) throw e_2.error; }
                        }
                        return [2 /*return*/];
                }
            });
        });
    };
    BatchGet.prototype.getInitialTableState = function (tableName) {
        var _a = this.options[tableName] || {}, ExpressionAttributeNames = _a.ExpressionAttributeNames, ProjectionExpression = _a.ProjectionExpression, _b = _a.ConsistentRead, ConsistentRead = _b === void 0 ? this.consistentRead : _b;
        return tslib_1.__assign({}, _super.prototype.getInitialTableState.call(this, tableName), { attributeNames: ExpressionAttributeNames, projection: ProjectionExpression, consistentRead: ConsistentRead });
    };
    return BatchGet;
}(BatchOperation_1.BatchOperation));
exports.BatchGet = BatchGet;
//# sourceMappingURL=BatchGet.js.map
doutriaux1/ocgis
src/ocgis/test/fragments/test_process_manager.py
import unittest import time from multiprocessing import Pool import numpy as np import itertools def iter_proc_args(): for ii in range(0,10): yield(ii) def operation(ii): time.sleep(np.random.randint(0,3)) return(ii*2) class SubsetOperation(object): def __init__(self,it_procs,serial=True,nprocs=1): self.it_procs = it_procs self.serial = serial self.nprocs = nprocs def __iter__(self): if self.serial: it = itertools.imap(operation,self.it_procs()) else: pool = Pool(processes=self.nprocs) it = pool.imap_unordered(operation,self.it_procs()) while True: try: yield(it.next()) except StopIteration: break def run(self): path = '/tmp/foo.txt' with open(path,'w') as f: for value in self: f.write(str(value)) return(path) class TestProcessManager(unittest.TestCase): def test(self): serial = False conv = SubsetOperation(iter_proc_args,serial=serial,nprocs=4) ret = conv.run() print ret if __name__ == "__main__": #import sys;sys.argv = ['', 'Test.testName'] unittest.main()