repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
aliyun/alibabacloud-maxcompute-tool-migrate | mma-server/src/main/java/com/aliyun/odps/mma/server/task/HiveToMcTableDataTransmissionTask.java | /*
* Copyright 1999-2021 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aliyun.odps.mma.server.task;
import java.util.List;
import com.aliyun.odps.mma.config.JobConfiguration;
import com.aliyun.odps.mma.server.action.ActionExecutionContext;
import com.aliyun.odps.mma.server.action.HiveToMcTableDataTransmissionAction;
import com.aliyun.odps.mma.server.action.HiveVerificationAction;
import com.aliyun.odps.mma.server.action.McVerificationAction;
import com.aliyun.odps.mma.server.action.VerificationAction;
import com.aliyun.odps.mma.server.job.Job;
import com.aliyun.odps.mma.meta.MetaSource.TableMetaModel;
/**
 * Task that moves one Hive table (or a set of its partitions) into MaxCompute and
 * verifies the result. The work is modeled as a small DAG of actions:
 *
 *   DataTransmission --> HiveDataVerification --+
 *                    \-> McDataVerification  ---+--> FinalVerification
 */
public class HiveToMcTableDataTransmissionTask extends TableDataTransmissionTask {

  public HiveToMcTableDataTransmissionTask(
      String id,
      String rootJobId,
      JobConfiguration config,
      TableMetaModel hiveTableMetaModel,
      TableMetaModel mcTableMetaModel,
      Job job,
      List<Job> subJobs) {
    super(id, rootJobId, config, hiveTableMetaModel, mcTableMetaModel, job, subJobs);
    init();
  }

  /** Builds the action DAG described in the class comment. */
  private void init() {
    // MC project used to run jobs; falls back to the destination catalog name.
    String mcProject = config.getOrDefault(
        JobConfiguration.JOB_EXECUTION_MC_PROJECT,
        config.get(JobConfiguration.DEST_CATALOG_NAME));

    // Credentials / endpoints shared by several actions below.
    String mcAccessKeyId = config.get(JobConfiguration.DATA_DEST_MC_ACCESS_KEY_ID);
    String mcAccessKeySecret = config.get(JobConfiguration.DATA_DEST_MC_ACCESS_KEY_SECRET);
    String mcEndpoint = config.get(JobConfiguration.DATA_DEST_MC_ENDPOINT);
    String hiveJdbcUrl = config.get(JobConfiguration.DATA_SOURCE_HIVE_JDBC_URL);
    String hiveJdbcUsername = config.get(JobConfiguration.DATA_SOURCE_HIVE_JDBC_USERNAME);
    String hiveJdbcPassword = config.get(JobConfiguration.DATA_SOURCE_HIVE_JDBC_PASSWORD);

    ActionExecutionContext context = new ActionExecutionContext(config);

    // Step 1: copy the data from Hive into MC.
    HiveToMcTableDataTransmissionAction transmission =
        new HiveToMcTableDataTransmissionAction(
            id + ".DataTransmission",
            mcAccessKeyId,
            mcAccessKeySecret,
            mcProject,
            mcEndpoint,
            hiveJdbcUrl,
            hiveJdbcUsername,
            hiveJdbcPassword,
            source,
            dest,
            this,
            context);
    dag.addVertex(transmission);

    // Step 2a: collect verification data on the Hive (source) side.
    HiveVerificationAction hiveVerification = new HiveVerificationAction(
        id + ".HiveDataVerification",
        hiveJdbcUrl,
        hiveJdbcUsername,
        hiveJdbcPassword,
        source,
        true,
        this,
        context);
    dag.addVertex(hiveVerification);

    // Step 2b: collect verification data on the MC (destination) side.
    McVerificationAction mcVerification = new McVerificationAction(
        id + ".McDataVerification",
        mcAccessKeyId,
        mcAccessKeySecret,
        mcProject,
        mcEndpoint,
        dest,
        false,
        this,
        context);
    dag.addVertex(mcVerification);

    // Step 3: compare both sides.
    VerificationAction finalVerification = new VerificationAction(
        id + ".FinalVerification",
        source,
        this,
        context);
    dag.addVertex(finalVerification);

    // Wire the steps together; both verifications depend on the transmission,
    // and the final comparison depends on both verifications.
    dag.addEdge(transmission, hiveVerification);
    dag.addEdge(transmission, mcVerification);
    dag.addEdge(hiveVerification, finalVerification);
    dag.addEdge(mcVerification, finalVerification);
  }

  @Override
  void updateMetadata() {
    job.setStatus(this);
  }
}
|
mindcloud69/Sakura_mod | src/main/java/cn/mcmod/sakura/client/model/ModelSamuraiArmors2.java | package cn.mcmod.sakura.client.model;
import net.minecraft.client.model.ModelRenderer;
import net.minecraft.entity.Entity;
import net.minecraft.inventory.EntityEquipmentSlot;
import org.lwjgl.opengl.GL11;
/**
 * Armor model for the samurai armor set (variant 2).
 *
 * One instance is built per equipment slot; {@link #state} selects how many of
 * the optional plates/panels are shown and {@link #armorSlot} restricts which
 * pieces render for the slot currently being drawn (see {@link #render}).
 */
public class ModelSamuraiArmors2
  extends ModelCustomArmor
{
    // --- Helmet pieces (children of bipedHead) ---
    ModelRenderer Helmet;
    ModelRenderer HelmetR;
    ModelRenderer HelmetL;
    ModelRenderer HelmetB;
    ModelRenderer capsthingy;
    // --- Belt / torso pieces (children of bipedBody) ---
    ModelRenderer BeltR;
    ModelRenderer Mbelt;
    ModelRenderer MbeltL;
    ModelRenderer MbeltR;
    ModelRenderer BeltL;
    ModelRenderer Chestplate;
    ModelRenderer Backplate;
    // --- Right arm pieces ---
    ModelRenderer ShoulderR;
    ModelRenderer GauntletR;
    ModelRenderer GauntletstrapR;
    ModelRenderer ShoulderplateRtop;
    ModelRenderer ShoulderplateR1;
    // --- Left arm pieces ---
    ModelRenderer ShoulderL;
    ModelRenderer GauntletL;
    ModelRenderer GauntletstrapL;
    ModelRenderer ShoulderplateLtop;
    ModelRenderer ShoulderplateL1;
    // --- Leg pieces (front/side/back skirt panels) ---
    ModelRenderer LegpanelR4;
    ModelRenderer LegpanelR5;
    // ModelRenderer LegpanelR6;
    ModelRenderer SidepanelR1;
    ModelRenderer SidepanelR2;
    // ModelRenderer SidepanelR3;
    ModelRenderer BackpanelR1;
    ModelRenderer BackpanelR2;
    // ModelRenderer BackpanelR3;
    // ModelRenderer BackpanelL3;
    ModelRenderer LegpanelL4;
    ModelRenderer LegpanelL5;
    // ModelRenderer LegpanelL6;
    ModelRenderer SidepanelL1;
    ModelRenderer SidepanelL2;
    // ModelRenderer SidepanelL3;
    ModelRenderer BackpanelL1;
    ModelRenderer BackpanelL2;
    // --- Feet pieces ---
    ModelRenderer ShoesL;
    ModelRenderer ShoesR;
    ModelRenderer ShoesPartL;
    ModelRenderer ShoesPartR;
    // Upgrade level of the armor; higher state reveals more panels in render().
    public int state = 0;
    // Slot this model instance is drawn for; used to hide pieces of other slots.
    public EntityEquipmentSlot armorSlot = EntityEquipmentSlot.CHEST;

    /**
     * @param slot        equipment slot this model instance renders
     * @param armor_state upgrade level controlling panel visibility
     * @param f           model inflation; also selects belt vs. chestplate group below
     */
    public ModelSamuraiArmors2(EntityEquipmentSlot slot,int armor_state,float f)
    {
        super(f, 0, 128, 64);
        this.textureWidth = 128;
        this.textureHeight = 64;
        this.state = armor_state;
        this.armorSlot=slot;
        // Helmet crown, side flaps (slightly rotated outward), back flap and brim.
        this.Helmet = new ModelRenderer(this, 41, 8);
        this.Helmet.addBox(-4.5F, -9.0F, -4.5F, 9, 4, 9);
        this.Helmet.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.Helmet.setTextureSize(128, 64);
        setRotation(this.Helmet, 0.0F, 0.0F, 0.0F);
        this.HelmetR = new ModelRenderer(this, 21, 13);
        this.HelmetR.addBox(-5.5F, -5.0F, -4.5F, 1, 5, 9);
        this.HelmetR.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.HelmetR.setTextureSize(128, 64);
        setRotation(this.HelmetR, 0.0F, 0.0F, 0.15235988F);
        this.HelmetL = new ModelRenderer(this, 21, 13);
        this.HelmetL.mirror = true;
        this.HelmetL.addBox(4.50F, -5.0F, -4.5F, 1, 5, 9);
        this.HelmetL.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.HelmetL.setTextureSize(128, 64);
        setRotation(this.HelmetL, 0.0F, 0.0F, -0.15235988F);
        this.HelmetB = new ModelRenderer(this, 41, 21);
        this.HelmetB.addBox(-4.5F, -3.0F, 5.5F, 9, 5, 1);
        this.HelmetB.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.HelmetB.setTextureSize(128, 64);
        setRotation(this.HelmetB, 0.5235988F, 0.0F, 0.0F);
        this.capsthingy = new ModelRenderer(this, 21, 0);
        this.capsthingy.addBox(-4.5F, -6.0F, -6.5F, 9, 1, 2);
        this.capsthingy.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.capsthingy.setTextureSize(128, 64);
        setRotation(this.capsthingy, 0.0F, 0.0F, 0.0F);
        // Upper belt sides and main (lower) belt ring.
        this.BeltR = new ModelRenderer(this, 76, 44);
        this.BeltR.addBox(-5.0F, 4.0F, -3.0F, 1, 3, 6);
        this.BeltR.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.BeltR.setTextureSize(128, 64);
        setRotation(this.BeltR, 0.0F, 0.0F, 0.0F);
        this.Mbelt = new ModelRenderer(this, 56, 55);
        this.Mbelt.addBox(-4.0F, 8.0F, -3.0F, 8, 4, 1);
        this.Mbelt.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.Mbelt.setTextureSize(128, 64);
        setRotation(this.Mbelt, 0.0F, 0.0F, 0.0F);
        this.MbeltL = new ModelRenderer(this, 76, 44);
        this.MbeltL.addBox(4.0F, 8.0F, -3.0F, 1, 3, 6);
        this.MbeltL.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.MbeltL.setTextureSize(128, 64);
        setRotation(this.MbeltL, 0.0F, 0.0F, 0.0F);
        this.MbeltR = new ModelRenderer(this, 76, 44);
        this.MbeltR.addBox(-5.0F, 8.0F, -3.0F, 1, 3, 6);
        this.MbeltR.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.MbeltR.setTextureSize(128, 64);
        setRotation(this.MbeltR, 0.0F, 0.0F, 0.0F);
        // Shoes get a hair of extra inflation (f + 0.0005) to avoid z-fighting
        // with the leg model underneath.
        this.ShoesL = new ModelRenderer(this, 84, 4);
        this.ShoesL.addBox(-2.0F, 5F, -2.0F, 4, 4, 4,f+0.0005f);
        this.ShoesL.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.ShoesL.setTextureSize(128, 64);
        setRotation(this.ShoesL, 0.0F, 0.0F, 0.0F);
        this.ShoesR = new ModelRenderer(this, 84, 4);
        this.ShoesR.addBox(-2.0F, 5F, -2.0F, 4, 4, 4,f+0.0005f);
        this.ShoesR.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.ShoesR.setTextureSize(128, 64);
        setRotation(this.ShoesR, 0.0F, 0.0F, 0.0F);
        this.ShoesPartL = new ModelRenderer(this, 100, 4);
        this.ShoesPartL.addBox(-2.5F, 10.5F, -3.6F, 5, 2, 6);
        this.ShoesPartL.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.ShoesPartL.setTextureSize(128, 64);
        setRotation(this.ShoesPartL, 0.0F, 0.0F, 0.0F);
        this.ShoesPartR = new ModelRenderer(this, 100, 4);
        this.ShoesPartR.addBox(-2.5F, 10.5F, -3.6F, 5, 2, 6);
        this.ShoesPartR.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.ShoesPartR.setTextureSize(128, 64);
        setRotation(this.ShoesPartR, 0.0F, 0.0F, 0.0F);
        this.BeltL = new ModelRenderer(this, 76, 44);
        this.BeltL.addBox(4.0F, 4.0F, -3.0F, 1, 3, 6);
        this.BeltL.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.BeltL.setTextureSize(128, 64);
        setRotation(this.BeltL, 0.0F, 0.0F, 0.0F);
        this.Chestplate = new ModelRenderer(this, 56, 45);
        this.Chestplate.addBox(-4.0F, 1.0F, -4.0F, 8, 7, 2);
        this.Chestplate.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.Chestplate.setTextureSize(128, 64);
        setRotation(this.Chestplate, 0.0F, 0.0F, 0.0F);
        this.Backplate = new ModelRenderer(this, 36, 45);
        this.Backplate.addBox(-4.0F, 1.0F, 2.0F, 8, 11, 2);
        this.Backplate.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.Backplate.setTextureSize(128, 64);
        setRotation(this.Backplate, 0.0F, 0.0F, 0.0F);
        // Right arm: shoulder pad, gauntlet, strap and the two angled plates.
        this.ShoulderR = new ModelRenderer(this, 56, 35);
        this.ShoulderR.addBox(-3.5F, -2.5F, -2.5F, 5, 5, 5);
        this.ShoulderR.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.ShoulderR.setTextureSize(128, 64);
        setRotation(this.ShoulderR, 0.0F, 0.0F, 0.0F);
        this.GauntletR = new ModelRenderer(this, 100, 26);
        this.GauntletR.addBox(-3.5F, 4F, -2.5F, 2, 5, 5);
        this.GauntletR.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.GauntletR.setTextureSize(128, 64);
        setRotation(this.GauntletR, 0.0F, 0.0F, 0.0F);
        this.GauntletstrapR = new ModelRenderer(this, 84, 31);
        this.GauntletstrapR.addBox(-1.5F, 2.5F, -2.5F, 3, 7, 5);
        this.GauntletstrapR.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.GauntletstrapR.setTextureSize(128, 64);
        setRotation(this.GauntletstrapR, 0.0F, 0.0F, 0.0F);
        this.ShoulderplateRtop = new ModelRenderer(this, 110, 37);
        this.ShoulderplateRtop.addBox(-5.5F, -2.5F, -3.5F, 2, 1, 7);
        this.ShoulderplateRtop.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.ShoulderplateRtop.setTextureSize(128, 64);
        setRotation(this.ShoulderplateRtop, 0.0F, 0.0F, 0.4363323F);
        this.ShoulderplateR1 = new ModelRenderer(this, 110, 45);
        this.ShoulderplateR1.addBox(-4.5F, -1.5F, -3.5F, 1, 4, 7);
        this.ShoulderplateR1.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.ShoulderplateR1.setTextureSize(128, 64);
        setRotation(this.ShoulderplateR1, 0.0F, 0.0F, 0.4363323F);
        // Left arm: mirrored copies of the right-arm pieces.
        this.ShoulderL = new ModelRenderer(this, 56, 35);
        this.ShoulderL.mirror = true;
        this.ShoulderL.addBox(-1.5F, -2.5F, -2.5F, 5, 5, 5);
        this.ShoulderL.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.ShoulderL.setTextureSize(128, 64);
        setRotation(this.ShoulderL, 0.0F, 0.0F, 0.0F);
        this.GauntletL = new ModelRenderer(this, 114, 26);
        this.GauntletL.addBox(1.5F, 4F, -2.5F, 2, 5, 5);
        this.GauntletL.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.GauntletL.setTextureSize(128, 64);
        setRotation(this.GauntletL, 0.0F, 0.0F, 0.0F);
        this.GauntletstrapL = new ModelRenderer(this, 84, 31);
        this.GauntletstrapL.mirror = true;
        this.GauntletstrapL.addBox(-1.5F, 2.5F, -2.5F, 3, 7, 5);
        this.GauntletstrapL.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.GauntletstrapL.setTextureSize(128, 64);
        setRotation(this.GauntletstrapL, 0.0F, 0.0F, 0.0F);
        this.ShoulderplateLtop = new ModelRenderer(this, 110, 37);
        this.ShoulderplateLtop.mirror = true;
        this.ShoulderplateLtop.addBox(3.5F, -2.5F, -3.5F, 2, 1, 7);
        this.ShoulderplateLtop.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.ShoulderplateLtop.setTextureSize(128, 64);
        setRotation(this.ShoulderplateLtop, 0.0F, 0.0F, -0.4363323F);
        this.ShoulderplateL1 = new ModelRenderer(this, 110, 45);
        this.ShoulderplateL1.mirror = true;
        this.ShoulderplateL1.addBox(3.5F, -1.5F, -3.5F, 1, 4, 7);
        this.ShoulderplateL1.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.ShoulderplateL1.setTextureSize(128, 64);
        setRotation(this.ShoulderplateL1, 0.0F, 0.0F, -0.4363323F);
        // Right leg skirt panels: front (Legpanel), side and back, all tilted
        // outward by ~25 degrees (0.4363323 rad).
        this.LegpanelR4 = new ModelRenderer(this, 0, 18);
        this.LegpanelR4.addBox(-3.0F, 0.5F, -3.5F, 5, 3, 1);
        this.LegpanelR4.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.LegpanelR4.setTextureSize(128, 64);
        setRotation(this.LegpanelR4, -0.4363323F, 0.0F, 0.0F);
        this.LegpanelR5 = new ModelRenderer(this, 0, 18);
        this.LegpanelR5.addBox(-3.0F, 2.5F, -2.5F, 5, 3, 1);
        this.LegpanelR5.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.LegpanelR5.setTextureSize(128, 64);
        setRotation(this.LegpanelR5, -0.4363323F, 0.0F, 0.0F);
        this.SidepanelR1 = new ModelRenderer(this, 0, 22);
        this.SidepanelR1.addBox(-2.5F, 0.5F, -2.5F, 1, 4, 5);
        this.SidepanelR1.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.SidepanelR1.setTextureSize(128, 64);
        setRotation(this.SidepanelR1, 0.0F, 0.0F, 0.4363323F);
        this.SidepanelR2 = new ModelRenderer(this, 0, 31);
        this.SidepanelR2.addBox(-1.5F, 3.5F, -2.5F, 1, 3, 5);
        this.SidepanelR2.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.SidepanelR2.setTextureSize(128, 64);
        setRotation(this.SidepanelR2, 0.0F, 0.0F, 0.4363323F);
        this.BackpanelR1 = new ModelRenderer(this, 0, 18);
        this.BackpanelR1.addBox(-3.0F, 0.5F, 2.5F, 5, 3, 1);
        this.BackpanelR1.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.BackpanelR1.setTextureSize(128, 64);
        setRotation(this.BackpanelR1, 0.4363323F, 0.0F, 0.0F);
        this.BackpanelR2 = new ModelRenderer(this, 0, 18);
        this.BackpanelR2.addBox(-3.0F, 2.5F, 1.5F, 5, 3, 1);
        this.BackpanelR2.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.BackpanelR2.setTextureSize(128, 64);
        setRotation(this.BackpanelR2, 0.4363323F, 0.0F, 0.0F);
        // Left leg skirt panels (mirrored).
        this.LegpanelL4 = new ModelRenderer(this, 0, 18);
        this.LegpanelL4.mirror = true;
        this.LegpanelL4.addBox(-2F, 0.5F, -3.5F, 5, 3, 1);
        this.LegpanelL4.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.LegpanelL4.setTextureSize(128, 64);
        setRotation(this.LegpanelL4, -0.4363323F, 0.0F, 0.0F);
        this.LegpanelL5 = new ModelRenderer(this, 0, 18);
        this.LegpanelL5.mirror = true;
        this.LegpanelL5.addBox(-2.0F, 2.5F, -2.5F, 5, 3, 1);
        this.LegpanelL5.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.LegpanelL5.setTextureSize(128, 64);
        setRotation(this.LegpanelL5, -0.4363323F, 0.0F, 0.0F);
        this.SidepanelL1 = new ModelRenderer(this, 0, 22);
        this.SidepanelL1.mirror = true;
        this.SidepanelL1.addBox(1.5F, 0.5F, -2.5F, 1, 4, 5);
        this.SidepanelL1.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.SidepanelL1.setTextureSize(128, 64);
        setRotation(this.SidepanelL1, 0.0F, 0.0F, -0.4363323F);
        this.SidepanelL2 = new ModelRenderer(this, 0, 31);
        this.SidepanelL2.mirror = true;
        this.SidepanelL2.addBox(0.5F, 3.5F, -2.5F, 1, 3, 5);
        this.SidepanelL2.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.SidepanelL2.setTextureSize(128, 64);
        setRotation(this.SidepanelL2, 0.0F, 0.0F, -0.4363323F);
        this.BackpanelL1 = new ModelRenderer(this, 0, 18);
        this.BackpanelL1.mirror = true;
        this.BackpanelL1.addBox(-2.0F, 0.5F, 2.5F, 5, 3, 1);
        this.BackpanelL1.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.BackpanelL1.setTextureSize(128, 64);
        setRotation(this.BackpanelL1, 0.4363323F, 0.0F, 0.0F);
        this.BackpanelL2 = new ModelRenderer(this, 0, 18);
        this.BackpanelL2.mirror = true;
        this.BackpanelL2.addBox(-2.0F, 2.5F, 1.5F, 5, 3, 1);
        this.BackpanelL2.setRotationPoint(0.0F, 0.0F, 0.0F);
        this.BackpanelL2.setTextureSize(128, 64);
        setRotation(this.BackpanelL2, 0.4363323F, 0.0F, 0.0F);
        // Attach every piece to the vanilla biped bones; clearing cubeList first
        // removes the default biped boxes so only the armor geometry renders.
        this.bipedHeadwear.cubeList.clear();
        this.bipedHead.cubeList.clear();
        this.bipedHead.addChild(this.Helmet);
        this.bipedHead.addChild(this.HelmetR);
        this.bipedHead.addChild(this.HelmetL);
        this.bipedHead.addChild(this.HelmetB);
        this.bipedHead.addChild(this.capsthingy);
        this.bipedBody.cubeList.clear();
        // The inflation value doubles as a group selector: the thin (f < 1)
        // instance gets the belt ring, the thick one gets belts + plates.
        if (f < 1.0F)
        {
            this.bipedBody.addChild(this.Mbelt);
            this.bipedBody.addChild(this.MbeltL);
            this.bipedBody.addChild(this.MbeltR);
        }
        else
        {
            this.bipedBody.addChild(this.BeltR);
            this.bipedBody.addChild(this.BeltL);
            this.bipedBody.addChild(this.Chestplate);
            this.bipedBody.addChild(this.Backplate);
        }
        this.bipedRightArm.cubeList.clear();
        this.bipedRightArm.addChild(this.ShoulderR);
        this.bipedRightArm.addChild(this.GauntletR);
        this.bipedRightArm.addChild(this.GauntletstrapR);
        this.bipedRightArm.addChild(this.ShoulderplateRtop);
        this.bipedRightArm.addChild(this.ShoulderplateR1);
        this.bipedLeftArm.cubeList.clear();
        this.bipedLeftArm.addChild(this.ShoulderL);
        this.bipedLeftArm.addChild(this.GauntletL);
        this.bipedLeftArm.addChild(this.GauntletstrapL);
        this.bipedLeftArm.addChild(this.ShoulderplateLtop);
        this.bipedLeftArm.addChild(this.ShoulderplateL1);
        this.bipedRightLeg.cubeList.clear();
        this.bipedRightLeg.addChild(this.LegpanelR4);
        this.bipedRightLeg.addChild(this.LegpanelR5);
        this.bipedRightLeg.addChild(this.SidepanelR1);
        this.bipedRightLeg.addChild(this.SidepanelR2);
        this.bipedRightLeg.addChild(this.BackpanelR1);
        this.bipedRightLeg.addChild(this.BackpanelR2);
        this.bipedRightLeg.addChild(this.ShoesR);
        this.bipedRightLeg.addChild(this.ShoesPartR);
        this.bipedLeftLeg.cubeList.clear();
        this.bipedLeftLeg.addChild(this.LegpanelL4);
        this.bipedLeftLeg.addChild(this.LegpanelL5);
        this.bipedLeftLeg.addChild(this.SidepanelL1);
        this.bipedLeftLeg.addChild(this.SidepanelL2);
        this.bipedLeftLeg.addChild(this.BackpanelL1);
        this.bipedLeftLeg.addChild(this.BackpanelL2);
        this.bipedLeftLeg.addChild(this.ShoesL);
        this.bipedLeftLeg.addChild(this.ShoesPartL);
    }

    /**
     * Renders the armor. Visibility is recomputed every frame:
     * leg panels need state >= 1 (first row) or state >= 2 (second row) AND
     * the LEGS slot; shoes need the FEET slot; gauntlets/shoulder plates need
     * state >= 2 regardless of slot; BeltL/BeltR are always hidden here.
     */
    public void render(Entity entity, float f, float f1, float f2, float f3, float f4, float f5)
    {
        this.LegpanelL4.isHidden = (state < 1)||(armorSlot!=EntityEquipmentSlot.LEGS);
        this.LegpanelL5.isHidden = (state < 2)||(armorSlot!=EntityEquipmentSlot.LEGS);
        this.Chestplate.isHidden = false;
        this.LegpanelR4.isHidden = (state < 1)||(armorSlot!=EntityEquipmentSlot.LEGS);
        this.LegpanelR5.isHidden = (state < 2)||(armorSlot!=EntityEquipmentSlot.LEGS);
        this.BackpanelL1.isHidden = (state < 1)||(armorSlot!=EntityEquipmentSlot.LEGS);
        this.BackpanelL2.isHidden = (state < 2)||(armorSlot!=EntityEquipmentSlot.LEGS);
        this.BackpanelR1.isHidden = (state < 1)||(armorSlot!=EntityEquipmentSlot.LEGS);
        this.BackpanelR2.isHidden = (state < 2)||(armorSlot!=EntityEquipmentSlot.LEGS);
        this.SidepanelR1.isHidden = (state < 1)||(armorSlot!=EntityEquipmentSlot.LEGS);
        this.SidepanelL1.isHidden = (state < 1)||(armorSlot!=EntityEquipmentSlot.LEGS);
        this.SidepanelR2.isHidden = (state < 2)||(armorSlot!=EntityEquipmentSlot.LEGS);
        this.SidepanelL2.isHidden = (state < 2)||(armorSlot!=EntityEquipmentSlot.LEGS);
        this.ShoesL.isHidden=(armorSlot!=EntityEquipmentSlot.FEET);
        this.ShoesR.isHidden=(armorSlot!=EntityEquipmentSlot.FEET);
        this.ShoesPartL.isHidden=(armorSlot!=EntityEquipmentSlot.FEET);
        this.ShoesPartR.isHidden=(armorSlot!=EntityEquipmentSlot.FEET);
        this.ShoulderplateLtop.isHidden = (state < 2);
        this.ShoulderplateL1.isHidden = (state < 2);
        this.ShoulderplateRtop.isHidden = (state < 2);
        this.ShoulderplateR1.isHidden = (state < 2);
        this.BeltL.isHidden = true;
        this.BeltR.isHidden = true;
        this.GauntletL.isHidden = (state < 2);
        this.GauntletR.isHidden = (state < 2);
        this.GauntletstrapL.isHidden = (state < 2);
        this.GauntletstrapR.isHidden = (state < 2);
        setRotationAngles(f, f1, f2, f3, f4, f5, entity);
        if (this.isChild)
        {
            // Child entities: shrink the head and body separately, like the
            // vanilla biped model does.
            float f6 = 2.0F;
            GL11.glPushMatrix();
            GL11.glScalef(1.5F / f6, 1.5F / f6, 1.5F / f6);
            GL11.glTranslatef(0.0F, 16.0F * f5, 0.0F);
            this.bipedHead.render(f5);
            GL11.glPopMatrix();
            GL11.glPushMatrix();
            GL11.glScalef(1.0F / f6, 1.0F / f6, 1.0F / f6);
            GL11.glTranslatef(0.0F, 24.0F * f5, 0.0F);
            this.bipedBody.render(f5);
            this.bipedRightArm.render(f5);
            this.bipedLeftArm.render(f5);
            this.bipedRightLeg.render(f5);
            this.bipedLeftLeg.render(f5);
            this.bipedHeadwear.render(f5);
            GL11.glPopMatrix();
        }
        else
        {
            // Adults: head rendered slightly enlarged so the helmet does not
            // clip into the skin layer.
            GL11.glPushMatrix();
            GL11.glScalef(1.01F, 1.01F, 1.01F);
            this.bipedHead.render(f5);
            GL11.glPopMatrix();
            this.bipedBody.render(f5);
            this.bipedRightArm.render(f5);
            this.bipedLeftArm.render(f5);
            this.bipedRightLeg.render(f5);
            this.bipedLeftLeg.render(f5);
            this.bipedHeadwear.render(f5);
        }
    }

    // Convenience setter for the three rotation angles of a part.
    private void setRotation(ModelRenderer model, float x, float y, float z)
    {
        model.rotateAngleX = x;
        model.rotateAngleY = y;
        model.rotateAngleZ = z;
    }
}
|
xuxiannian/knet-modules | knet-moudles-aipage/src/main/java/com/baidubce/services/aipage/model/SiteRenewItemModel.java | package com.baidubce.services.aipage.model;
import lombok.AllArgsConstructor;
import lombok.Data;
import java.util.List;
@Data
@AllArgsConstructor
public class SiteRenewItemModel {

    // Renewal configuration (site id + renewal duration).
    private SiteRenewItemConfigModel config;

    // Payment methods offered for the renewal.
    // NOTE(review): the (siteId, time) constructor below leaves this null;
    // confirm callers/serializers tolerate a null list.
    private List<PaymentMethodModel> paymentMethod;

    /**
     * Convenience constructor that only fills the renewal config;
     * {@code paymentMethod} is left unset (null).
     */
    public SiteRenewItemModel(String siteId, String time) {
        this.config = new SiteRenewItemConfigModel(siteId,time);
    }
}
|
suluner/tencentcloud-sdk-cpp | tcss/include/tencentcloud/tcss/v20201101/model/DescribeAssetImageScanStatusResponse.h | /*
* Copyright (c) 2017-2019 THL A29 Limited, a Tencent company. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef TENCENTCLOUD_TCSS_V20201101_MODEL_DESCRIBEASSETIMAGESCANSTATUSRESPONSE_H_
#define TENCENTCLOUD_TCSS_V20201101_MODEL_DESCRIBEASSETIMAGESCANSTATUSRESPONSE_H_
#include <string>
#include <vector>
#include <map>
#include <tencentcloud/core/AbstractModel.h>
namespace TencentCloud
{
namespace Tcss
{
namespace V20201101
{
namespace Model
{
/**
* DescribeAssetImageScanStatus返回参数结构体
*/
                /**
                * DescribeAssetImageScanStatus response structure.
                */
                class DescribeAssetImageScanStatusResponse : public AbstractModel
                {
                public:
                    DescribeAssetImageScanStatusResponse();
                    ~DescribeAssetImageScanStatusResponse() = default;
                    CoreInternalOutcome Deserialize(const std::string &payload);
                    std::string ToJsonString() const;

                    /**
                     * Get the total number of images.
                     * @return ImageTotal Total number of images.
                     */
                    uint64_t GetImageTotal() const;

                    /**
                     * Check whether the parameter ImageTotal has been set.
                     * @return true if ImageTotal has been set.
                     */
                    bool ImageTotalHasBeenSet() const;

                    /**
                     * Get the number of scanned images.
                     * @return ImageScanCnt Number of scanned images.
                     */
                    uint64_t GetImageScanCnt() const;

                    /**
                     * Check whether the parameter ImageScanCnt has been set.
                     * @return true if ImageScanCnt has been set.
                     */
                    bool ImageScanCntHasBeenSet() const;

                    /**
                     * Get the scan status.
                     * @return Status Scan status.
                     */
                    std::string GetStatus() const;

                    /**
                     * Check whether the parameter Status has been set.
                     * @return true if Status has been set.
                     */
                    bool StatusHasBeenSet() const;

                    /**
                     * Get the scan progress: ImageScanCnt/ImageTotal * 100.
                     * @return Schedule Scan progress, ImageScanCnt/ImageTotal * 100.
                     */
                    uint64_t GetSchedule() const;

                    /**
                     * Check whether the parameter Schedule has been set.
                     * @return true if Schedule has been set.
                     */
                    bool ScheduleHasBeenSet() const;

                    /**
                     * Get the number of images found safe.
                     * @return SuccessCount Number of images found safe.
                     */
                    uint64_t GetSuccessCount() const;

                    /**
                     * Check whether the parameter SuccessCount has been set.
                     * @return true if SuccessCount has been set.
                     */
                    bool SuccessCountHasBeenSet() const;

                    /**
                     * Get the number of images with risks.
                     * @return RiskCount Number of images with risks.
                     */
                    uint64_t GetRiskCount() const;

                    /**
                     * Check whether the parameter RiskCount has been set.
                     * @return true if RiskCount has been set.
                     */
                    bool RiskCountHasBeenSet() const;

                    /**
                     * Get the remaining scan time (seconds, per the field name).
                     * @return LeftSeconds Remaining scan time.
                     */
                    uint64_t GetLeftSeconds() const;

                    /**
                     * Check whether the parameter LeftSeconds has been set.
                     * @return true if LeftSeconds has been set.
                     */
                    bool LeftSecondsHasBeenSet() const;

                private:
                    /**
                     * Total number of images.
                     */
                    uint64_t m_imageTotal;
                    bool m_imageTotalHasBeenSet;

                    /**
                     * Number of scanned images.
                     */
                    uint64_t m_imageScanCnt;
                    bool m_imageScanCntHasBeenSet;

                    /**
                     * Scan status.
                     */
                    std::string m_status;
                    bool m_statusHasBeenSet;

                    /**
                     * Scan progress: ImageScanCnt/ImageTotal * 100.
                     */
                    uint64_t m_schedule;
                    bool m_scheduleHasBeenSet;

                    /**
                     * Number of images found safe.
                     */
                    uint64_t m_successCount;
                    bool m_successCountHasBeenSet;

                    /**
                     * Number of images with risks.
                     */
                    uint64_t m_riskCount;
                    bool m_riskCountHasBeenSet;

                    /**
                     * Remaining scan time.
                     */
                    uint64_t m_leftSeconds;
                    bool m_leftSecondsHasBeenSet;
                };
}
}
}
}
#endif // !TENCENTCLOUD_TCSS_V20201101_MODEL_DESCRIBEASSETIMAGESCANSTATUSRESPONSE_H_
|
joevandyk/monkeycharger | vendor/plugins/active_merchant/test/remote_tests/remote_linkpoint_test.rb | #
# In order for this test to pass, a valid store number and PEM file
# are required. Unfortunately, with LinkPoint YOU CAN'T JUST USE ANY
# OLD STORE NUMBER. Also, you can't just generate your own PEM file.
# You'll need to use a special PEM file provided by LinkPoint.
#
# Go to http://www.linkpoint.com/support/sup_teststore.asp to set up
# a test account. Once you receive your test account you can get your
# pem file by clicking the Support link on the navigation menu and then
# clicking the Download Center link.
#
# You will also want to change your test account's fraud settings
# while running these tests. Click the admin link at the top of
# LinkPoint Central. Then click "set lockout times" under Fraud Settings
# You will want to set Duplicate lockout time to 0 so that you can run
# the tests more than once without triggering this fraud detection.
require File.dirname(__FILE__) + '/../test_helper'

# The LinkPoint gateway authenticates with a client certificate, loaded once
# for the whole test run.
ActiveMerchant::Billing::LinkpointGateway.pem_file = File.read( File.dirname(__FILE__) + '/../mycert.pem' )

# Remote integration tests for the LinkPoint gateway. These hit LinkPoint's
# staging service, so they require the test store number and PEM file
# described in the header comment above.
class LinkpointTest < Test::Unit::TestCase
  def setup
    ActiveMerchant::Billing::Base.gateway_mode = :test
    # You can also pass in the complete certificate as a string
    # with the :pem option
    # Signup for a test LinkPoint account and use the Store Number
    # as the login argument.
    @gateway = LinkpointGateway.new(
      :login => '1909597035'
    )
    # Test credit card numbers
    # American Express: 371111111111111
    # Discover: 6011-1111-1111-1111
    # JCB: 311111111111111
    # MasterCard: 5111-1111-1111-1111
    # MasterCard: 5419-8400-0000-0003
    # Visa: 4111-1111-1111-1111
    @creditcard = CreditCard.new(
      :number => '4111111111111111',
      :month => Time.now.month.to_s,
      :year => (Time.now + 1.year).year,
      :first_name => 'Captain',
      :last_name => 'Jack',
      :verification_value => '123'
    )
    @address = {
      :address1 => '1313 lucky lane',
      :city => 'Lost Angeles',
      :state => 'ON',
      :zip => 'K2P2A6',
      :country => 'CA',
      :address2 => 'Apartment 1',
      :phone => '(555)555-5555'
    }
  end

  # Auth-only request: funds reserved but not captured.
  def test_successful_authorization
    assert response = @gateway.authorize(1000, @creditcard,
      :order_id => generate_order_id,
      :address => @address
    )
    assert_equal Response, response.class
    assert_equal true, response.success?
    assert_equal "APPROVED", response.params["approved"]
  end

  # Auth followed by capture of the same amount using the returned
  # authorization token.
  def test_successful_authorization_and_capture
    assert authorization = @gateway.authorize(100, @creditcard,
      :order_id => generate_order_id,
      :address => @address
    )
    assert authorization.success?
    assert authorization.test?
    assert capture = @gateway.capture(100, authorization.authorization)
    assert capture.success?
    assert_equal 'ACCEPTED', capture.message
  end

  # Purchases without CVV2 should still be approved; AVS comes back 'NNN'
  # (no CVV match component) rather than 'NNNM'.
  def test_successful_purchase_without_cvv2_code
    @creditcard.verification_value = nil
    assert response = @gateway.purchase(2400, @creditcard,
      :order_id => generate_order_id,
      :address => @address
    )
    assert_equal Response, response.class
    assert_equal true, response.success?
    assert_equal "APPROVED", response.params["approved"]
    assert_equal 'NNN', response.params["avs"]
  end

  def test_successful_purchase_with_cvv2_code
    assert response = @gateway.purchase(2400, @creditcard,
      :order_id => generate_order_id,
      :address => @address
    )
    assert_equal Response, response.class
    assert_equal true, response.success?
    assert_equal "APPROVED", response.params["approved"]
    assert_equal 'NNNM', response.params["avs"]
  end

  # A purchase can be voided with the authorization it returned.
  def test_successful_purchase_and_void
    purchase = @gateway.purchase(100, @creditcard,
      :order_id => generate_order_id,
      :address => @address
    )
    assert purchase.success?
    assert void = @gateway.void(purchase.authorization)
    assert void.success?
  end

  # NOTE(review): "successfull" is a typo; left as-is because Test::Unit
  # discovers tests by the test_ prefix, so renaming is a separate cleanup.
  def test_successfull_purchase_and_credit
    assert purchase = @gateway.purchase(2400, @creditcard,
      :order_id => generate_order_id,
      :address => @address
    )
    assert_equal true, purchase.success?
    assert credit = @gateway.credit(2400, purchase.authorization)
    assert credit.success?
  end

  # Recurring billing: 12 monthly installments starting immediately.
  def test_successful_recurring_payment
    assert response = @gateway.recurring(2400, @creditcard,
      :order_id => generate_order_id,
      :installments => 12,
      :startdate => "immediate",
      :periodicity => :monthly,
      :address => @address
    )
    assert_equal Response, response.class
    assert_equal true, response.success?
    assert_equal "APPROVED", response.params["approved"]
  end

  # An invalid card number must be declined, not raise.
  def test_declined_purchase_with_invalid_credit_card
    @creditcard.number = '1111111111111111'
    assert response = @gateway.purchase(100, @creditcard,
      :order_id => generate_order_id,
      :address => @address
    )
    assert_equal Response, response.class
    assert_equal false, response.success?
    assert_equal "DECLINED", response.params["approved"]
  end
end
|
aman1309/rest.li | r2-filter-compression/src/main/java/com/linkedin/r2/filter/compression/GzipCompressor.java | /*
Copyright (c) 2013 LinkedIn Corp.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.linkedin.r2.filter.compression;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
import org.apache.commons.io.IOUtils;
/**
* Wrapper class for gzip compression
* */
/**
 * Wrapper class for gzip compression.
 * */
public class GzipCompressor implements Compressor
{
  private static final String HTTP_NAME = "gzip";

  //Consider changing input param as streams rather than fixed bytes?
  /**
   * Decompresses a gzip stream into a byte array.
   *
   * @param data gzip-compressed input; this method closes the wrapping
   *             GZIPInputStream (which closes {@code data} as well)
   * @return the decompressed bytes
   * @throws CompressionException if the stream is not valid gzip or an I/O error occurs
   */
  @Override
  public byte[] inflate(InputStream data) throws CompressionException
  {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // try-with-resources replaces the old closeQuietly(): the stream is always
    // closed, and a failure during close is reported instead of swallowed.
    try (GZIPInputStream gzip = new GZIPInputStream(data))
    {
      IOUtils.copy(gzip, out);
    }
    catch (IOException e)
    {
      throw new CompressionException(CompressionConstants.DECODING_ERROR + getContentEncodingName(), e);
    }
    return out.toByteArray();
  }

  /**
   * Compresses the input stream with gzip.
   *
   * @param data raw input to compress; fully consumed
   * @return the gzip-compressed bytes, including the gzip trailer
   * @throws CompressionException on any I/O failure, including a failure while
   *                              finishing/closing the gzip stream (the old
   *                              closeQuietly() could silently yield truncated output)
   */
  @Override
  public byte[] deflate(InputStream data) throws CompressionException
  {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // close() must complete before toByteArray(): it flushes the deflater and
    // writes the gzip trailer. try-with-resources guarantees that ordering.
    try (GZIPOutputStream gzip = new GZIPOutputStream(out))
    {
      IOUtils.copy(data, gzip);
    }
    catch (IOException e)
    {
      // NOTE(review): reuses DECODING_ERROR on the encode path, exactly as the
      // original did; switch to an encoding-specific constant if
      // CompressionConstants defines one.
      throw new CompressionException(CompressionConstants.DECODING_ERROR + getContentEncodingName(), e);
    }
    return out.toByteArray();
  }

  @Override
  public String getContentEncodingName()
  {
    return HTTP_NAME;
  }
}
|
upframe/shopy | cmd/shopy/config.go | package main
import (
"encoding/json"
"os"
)
// config holds every runtime setting parsed from the JSON configuration file
// (see configFile). Field names match the JSON keys case-insensitively via
// encoding/json's default behaviour.
type config struct {
	Development    bool   // development-mode flag
	Key1           string
	Key2           string
	Domain         string
	Errors         string // NOTE(review): purpose not evident from this file — presumably an error-log/page path; confirm at call sites
	Port           int
	Scheme         string
	Assets         string
	InviteOnly     bool // when true, registration requires an invite
	DefaultInvites int
	// Database connection settings.
	Database struct {
		User     string
		Password string
		Host     string
		Port     string
		Name     string
	}
	// Outgoing mail (SMTP) settings.
	SMTP struct {
		User     string
		Password string
		Host     string
		Port     string
	}
	// PayPal API credentials.
	PayPal struct {
		Client string
		Secret string
	}
}
// configFile reads and parses the JSON configuration file at the given path.
// The returned *config is always non-nil; callers must still check err.
func configFile(path string) (*config, error) {
	file := &config{}

	// Bug fix: the previous version ignored the path parameter and always
	// opened "config.json".
	f, err := os.Open(path)
	if err != nil {
		return file, err
	}
	// Bug fix: the file handle was previously never closed.
	defer f.Close()

	err = json.NewDecoder(f).Decode(file)
	return file, err
}
|
erick-rivas/reactjs-web-template | src/seed/examples/views/scores/Form.js | /*
__Seed builder__
(Read_only) Example view
Be careful copying content
*/
import React from "react";
import PropTypes from "prop-types";
import { Formik, Field, Form } from "formik";
import { MultiField, FileField } from "seed/helpers";
const ScoreForm = ({ score= {}, players= [], matches= [], onSubmit, error }) =>
<div class="card">
{/* Header */}
<div class="card-header">
<h3 class="card-header-title">Score</h3>
</div>
{/* Body */}
<div class="card-body">
<div class="row">
<div class="col">
<Formik
initialValues={score}
onSubmit={onSubmit}>
{({ values, setFieldValue}) =>
<Form>
<div class="mb-3">
{/* Min */}
<div class="form-group">
<label class="input-label">Min</label>
<Field type="number" name="min"
class="form-control" />
</div>
{/* Player */}
<div class="form-group">
<div>
<label class="input-label">Player</label>
<Field component="select" name="player.id"
class="form-control" >
<option value="">Select an option</option>
{players.map((e, idx) => <option key={idx} value={e.id}>{e.id}</option>) }
</Field>
</div>
</div>
{/* Match */}
<div class="form-group">
<div>
<label class="input-label">Match</label>
<Field component="select" name="match.id"
class="form-control" >
<option value="">Select an option</option>
{matches.map((e, idx) => <option key={idx} value={e.id}>{e.id}</option>) }
</Field>
</div>
</div>
</div>
{error ? <div class="alert alert-soft-danger">{error}</div> : null}
<button type="submit" class="btn btn-block btn-primary">Send</button>
</Form> }
</Formik>
</div>
</div>
</div>
</div>;
ScoreForm.propTypes = {
score: PropTypes.object,
players: PropTypes.array,
matches: PropTypes.array,
onSubmit: PropTypes.func.isRequired,
error: PropTypes.string
};
export default ScoreForm; |
Crossroads-Development/Crossroads | src/main/java/com/Da_Technomancer/crossroads/items/itemSets/OreProfileItem.java | <filename>src/main/java/com/Da_Technomancer/crossroads/items/itemSets/OreProfileItem.java
package com.Da_Technomancer.crossroads.items.itemSets;
import com.Da_Technomancer.crossroads.items.CRItems;
import net.minecraft.core.NonNullList;
import net.minecraft.nbt.CompoundTag;
import net.minecraft.network.chat.Component;
import net.minecraft.network.chat.TranslatableComponent;
import net.minecraft.world.item.CreativeModeTab;
import net.minecraft.world.item.Item;
import net.minecraft.world.item.ItemStack;
import net.minecraftforge.api.distmarker.Dist;
import net.minecraftforge.api.distmarker.OnlyIn;
/**
 * Base item whose material variant is stored in stack NBT under {@link #KEY}.
 */
public class OreProfileItem extends Item{

    /** NBT key under which the backing material id is stored on each stack. */
    protected static final String KEY = "material";

    public OreProfileItem(Item.Properties prop){
        super(prop);
        // Queue this item for deferred registration by CRItems.
        CRItems.toRegister.add(this);
    }

    /**
     * Builds a stack of this item tagged with the given material.
     *
     * @param mat material profile; null falls back to the default material
     * @param count stack size
     * @return a new stack whose NBT records the material id under {@link #KEY}
     */
    public ItemStack withMaterial(OreSetup.OreProfile mat, int count){
        if(mat == null){
            mat = OreSetup.getDefaultMaterial();
        }
        ItemStack out = new ItemStack(this, count);
        //getOrCreateTag() is the idiomatic replacement for the
        //setTag(new CompoundTag())/getTag() pair on a fresh stack.
        out.getOrCreateTag().putString(KEY, mat.getId());
        return out;
    }

    /**
     * Resolves the material profile stored on an arbitrary stack.
     * Falls back to the default material for non-OreProfileItem stacks.
     */
    public static OreSetup.OreProfile getProfile(ItemStack stack){
        Item item = stack.getItem();
        if(item instanceof OreProfileItem){
            return ((OreProfileItem) item).getSelfProfile(stack);
        }
        return OreSetup.getDefaultMaterial();
    }

    /** Reads this stack's material id from NBT; default material when no tag is present. */
    protected OreSetup.OreProfile getSelfProfile(ItemStack stack){
        if(!stack.hasTag()){
            return OreSetup.getDefaultMaterial();
        }
        return OreSetup.findMaterial(stack.getTag().getString(KEY));
    }

    @Override
    public String getDescriptionId(ItemStack stack){
        //We 'cheat' here. Instead of returning the translation key, we return the translated text, w/ formatting applied.
        //This is because most things calling this method don't know to pass the material name as a formatter argument (and most things use getDisplayName instead)
        //This is mainly important for WAILA
        return getName(stack).getString();
    }

    @Override
    @OnlyIn(Dist.CLIENT)
    public Component getDescription(){
        //Incorrectly displays the default material for all variants- we don't have access to an itemstack/nbt to differentiate
        return getName(withMaterial(null, 1));
    }

    @Override
    public Component getName(ItemStack stack){
        OreSetup.OreProfile mat = getProfile(stack);
        //Note that we call super's getDescriptionId to prevent an infinite loop with getDescriptionId(ItemStack) above
        return new TranslatableComponent(super.getDescriptionId(stack), mat == null ? "INVALID" : mat.getName());
    }

    @Override
    public void fillItemCategory(CreativeModeTab group, NonNullList<ItemStack> items){
        if(allowdedIn(group)){
            //Add every material variant of this item to the creative tab
            for(OreSetup.OreProfile mat : OreSetup.getMaterials()){
                items.add(withMaterial(mat, 1));
            }
        }
    }
}
|
sgholamian/log-aware-clone-detection | NLPCCd/Hive/1851_2.java | <gh_stars>0
//,temp,sample_3150.java,2,19,temp,sample_4077.java,2,19
//,3
// NOTE(review): machine-generated clone-detection sample — `pathToAliases`,
// `lDrvStat`, `alias`, `isEmptyTable` and `log` are free variables, so this
// class does not compile on its own; it exists only as corpus data.
public class xxx {
// Dummy wrapper emitted by the extraction tool around the cloned snippet.
public void dummy_method(){
for (Map.Entry<Path, ArrayList<String>> e : pathToAliases) {
// Abort promptly if the driver reports cancellation.
if (lDrvStat != null && lDrvStat.isAborted()) {
throw new IOException("Operation is Canceled.");
}
Path file = e.getKey();
List<String> aliases = e.getValue();
if (aliases.contains(alias)) {
if (file != null) {
isEmptyTable = false;
} else {
log.info("found a null path for alias");
}
}
}
}
};
mozsakalli/GamaVM | classpath/src/main/java/java/net/URL.java | <gh_stars>0
/*
* Copyright (C) 2019 Digitoy Games.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package java.net;
import java.io.IOException;
import java.io.InputStream;
public final class URL {
//private final URLStreamHandler handler;
private String protocol;
private String host;
private int port;
private String file;
private String path;
private String query;
private String ref;
public URL(String s) throws MalformedURLException {
int colon = s.indexOf(':');
int slash = s.indexOf('/');
if (colon > 0 && (slash < 0 || colon < slash)) {
//handler = null;//findHandler(s.substring(0, colon));
//handler.parseURL(this, s, colon + 1, s.length());
} else {
throw new MalformedURLException(s);
}
}
/*
public String toString() {
return handler.toExternalForm(this);
}
*/
public String getProtocol() {
return protocol;
}
public String getHost() {
return host;
}
public int getPort() {
return port;
}
public String getFile() {
return file;
}
public String getRef() {
return ref;
}
public String getPath() {
return path;
}
public String getQuery() {
return query;
}
/*
public URLConnection openConnection() throws IOException {
return null;//handler.openConnection(this);
}
*/
public InputStream openStream() throws IOException {
return null;//openConnection().getInputStream();
}
public Object getContent() throws IOException {
return openStream();
}
public void set(String protocol, String host, int port, String file,
String ref)
{
this.protocol = protocol;
this.host = host;
this.port = port;
this.file = file;
this.ref = ref;
int q = file == null ? -1 : file.lastIndexOf('?');
if (q != -1) {
this.query = file.substring(q + 1);
this.path = file.substring(0, q);
} else {
this.path = file;
}
}
}
|
flo-l/systemds | src/main/java/org/apache/sysds/lops/compile/linearization/ILinearize.java | <gh_stars>100-1000
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysds.lops.compile.linearization;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.sysds.conf.ConfigurationManager;
import org.apache.sysds.conf.DMLConfig;
import org.apache.sysds.hops.OptimizerUtils;
import org.apache.sysds.lops.Lop;
/**
 * An interface for the linearization algorithms that order the DAG nodes into a sequence of instructions to execute.
 *
 * https://en.wikipedia.org/wiki/Linearizability#Linearization_points
 */
public interface ILinearize {
    // Shared logger for all linearization strategies.
    public static Log LOG = LogFactory.getLog(ILinearize.class.getName());

    // Available orderings, selected via the DMLConfig DAG_LINEARIZATION option.
    public enum DagLinearization {
        DEPTH_FIRST, BREADTH_FIRST, MIN_INTERMEDIATE
    }

    /**
     * Orders the given DAG nodes according to the configured linearization
     * strategy; falls back to depth-first ordering when the configuration is
     * missing or invalid.
     *
     * @param v list of lops to order
     * @return ordered list of lops
     */
    public static List<Lop> linearize(List<Lop> v) {
        try {
            DMLConfig dmlConfig = ConfigurationManager.getDMLConfig();
            DagLinearization linearization = DagLinearization
                .valueOf(dmlConfig.getTextValue(DMLConfig.DAG_LINEARIZATION).toUpperCase());

            switch(linearization) {
                case MIN_INTERMEDIATE:
                    return doMinIntermediateSort(v);
                case BREADTH_FIRST:
                    return doBreadthFirstSort(v);
                case DEPTH_FIRST:
                default:
                    return depthFirst(v);
            }
        }
        catch(Exception e) {
            LOG.warn("Invalid or failed DAG_LINEARIZATION, fallback to DEPTH_FIRST ordering");
            return depthFirst(v);
        }
    }

    /**
     * Sort lops depth-first
     *
     * previously called doTopologicalSortTwoLevelOrder
     *
     * @param v List of lops to sort
     * @return Sorted list of lops
     */
    private static List<Lop> depthFirst(List<Lop> v) {
        // partition nodes into leaf/inner nodes and dag root nodes,
        // + sort leaf/inner nodes by ID to force depth-first scheduling
        // + append root nodes in order of their original definition
        //   (which also preserves the original order of prints)
        List<Lop> nodes = Stream
            .concat(v.stream().filter(l -> !l.getOutputs().isEmpty()).sorted(Comparator.comparing(l -> l.getID())),
                v.stream().filter(l -> l.getOutputs().isEmpty()))
            .collect(Collectors.toList());

        // NOTE: in contrast to hadoop execution modes, we avoid computing the transitive
        // closure here to ensure linear time complexity because its unnecessary for CP and Spark
        return nodes;
    }

    /**
     * Sort lops by their precomputed DAG level (shallowest first).
     *
     * @param v List of lops to sort
     * @return Sorted list of lops
     */
    private static List<Lop> doBreadthFirstSort(List<Lop> v) {
        List<Lop> nodes = v.stream().sorted(Comparator.comparing(Lop::getLevel)).collect(Collectors.toList());
        return nodes;
    }

    /**
     * Sort lops to execute them in an order that minimizes the memory requirements of intermediates
     *
     * @param v List of lops to sort
     * @return Sorted list of lops
     */
    private static List<Lop> doMinIntermediateSort(List<Lop> v) {
        List<Lop> nodes = new ArrayList<>(v.size());
        // Get the lowest level in the tree to move upwards from
        List<Lop> lowestLevel = v.stream().filter(l -> l.getOutputs().isEmpty()).collect(Collectors.toList());
        // Traverse the tree bottom up, choose nodes with higher memory requirements, then reverse the list
        List<Lop> remaining = new LinkedList<>(v);
        sortRecursive(nodes, lowestLevel, remaining);

        // In some cases (function calls) some output lops are not in the list of nodes to be sorted.
        // With the next layer up having output lops, they are not added to the initial list of lops and are
        // subsequently never reached by the recursive sort.
        // We work around this issue by checking for remaining lops after the initial sort.
        while(!remaining.isEmpty()) {
            // Start with the lowest level lops, this time by level instead of no outputs
            int maxLevel = remaining.stream().mapToInt(Lop::getLevel).max().orElse(-1);
            List<Lop> lowestNodes = remaining.stream().filter(l -> l.getLevel() == maxLevel).collect(Collectors.toList());
            sortRecursive(nodes, lowestNodes, remaining);
        }

        // All lops were added bottom up, from highest to lowest memory consumption, now reverse this
        Collections.reverse(nodes);
        return nodes;
    }

    /**
     * Bottom-up recursive helper for {@link #doMinIntermediateSort(List)}.
     *
     * @param result    output list, filled in reverse execution order
     * @param input     current frontier of lops to schedule
     * @param remaining lops not yet placed into result; shrinks as lops are scheduled
     */
    private static void sortRecursive(List<Lop> result, List<Lop> input, List<Lop> remaining) {
        // Sort input lops by memory estimate
        // Lowest level nodes (those with no outputs) receive a memory estimate of 0 to preserve order
        // This affects prints, writes, ...
        List<Map.Entry<Lop, Long>> memEst = input.stream().distinct().map(l -> new AbstractMap.SimpleEntry<>(l,
            l.getOutputs().isEmpty() ? 0 : OptimizerUtils.estimateSizeExactSparsity(l.getOutputParameters().getNumRows(),
                l.getOutputParameters().getNumCols(), l.getOutputParameters().getNnz())))
            .sorted(Comparator.comparing(e -> ((Map.Entry<Lop, Long>) e).getValue())).collect(Collectors.toList());

        // Start with the highest memory estimate because the entire list is reversed later
        Collections.reverse(memEst);
        for(Map.Entry<Lop, Long> e : memEst) {
            // Skip if the node is already in the result list
            // Skip if one of the lop's outputs is not in the result list yet (will be added once the output lop is
            // traversed), but only if any of the output lops is bound to be added to the result at a later stage
            if(result.contains(e.getKey()) || (!result.containsAll(e.getKey().getOutputs()) &&
                remaining.stream().anyMatch(l -> e.getKey().getOutputs().contains(l))))
                continue;
            result.add(e.getKey());
            remaining.remove(e.getKey());
            // Add input lops recursively
            sortRecursive(result, e.getKey().getInputs(), remaining);
        }
    }
}
|
netfanely/cli | lib/dependencies.js | 'use strict';
let versionMap = require('./dependencies.json');
exports.getSupportedVersion = function(name) {
return versionMap[name] || 'latest';
};
|
Pandaaaa906/product_spider | product_spider/spiders/srinidhiindsynth_spider.py | import re
from scrapy import Request
from product_spider.items import RawData
from product_spider.utils.functions import strip
from product_spider.utils.spider_mixin import BaseSpider
# TODO extract info from text
class SrinidhiindSynthSpider(BaseSpider):
name = "srinidhiindsynth"
allowd_domains = ["srinidhiindsynth.com/"]
start_urls = ["http://srinidhiindsynth.com/products/"]
base_url = "http://srinidhiindsynth.com/"
def parse(self, response):
a_nodes = response.xpath('//p/a')
for a in a_nodes:
parent = a.xpath('./text()').get()
url = a.xpath('./@href').get()
yield Request(url, callback=self.parse_list, meta={'parent': parent})
def parse_list(self, response):
tables = response.xpath('//table')
for table in tables:
en_name = table.xpath('.//td[@class="info"]/h5[not(@class)]/strong//text()').get('')
short_desc = table.xpath('normalize-space(.//td[@class="info"]/h5[@class="short_desc"]/strong//text())').get('')
en_name = en_name.strip(' :')
tmp = short_desc.split(';')
tmp = map(str.strip, tmp)
tmp = tuple(filter(bool, tmp))
m_cas = re.search(r'\d+-\d{2}-\d', short_desc)
m_mw = re.search(r'Mol\. Wt\.: ([^;]+);', short_desc)
m_mf = re.search(r'CAS : [^;]+; ([^;]+)', short_desc)
d = {
'brand': 'srinidhiindsynth',
'parent': response.meta.get('parent'),
'cat_no': en_name,
'en_name': en_name,
'cas': m_cas and m_cas.group(),
'mf': m_mf and strip(m_mf.group(1)),
'mw': m_mw and strip(m_mw.group(1)),
'img_url': table.xpath('.//img/@src').get(),
'prd_url': response.url,
}
# yield RawData(**d)
|
gmYuan/project | mini_react/src/router/react-router-dom/index.js | export * from '../react-router';
export { default as HashRouter} from './HashRouter'
export { default as BrowserRouter } from './BrowserRouter';
export {default as Link} from './Link';
export {default as NavLink} from './NavLink'; |
lubo-ivanov/SoftUniAdvanced | setsAndMaps/exercise/Task1.java | package javaAdvanced.setsAndMaps.exercise;
import java.util.LinkedHashSet;
import java.util.Scanner;
/**
 * Reads a count N from stdin followed by N words, then prints the distinct
 * words in first-seen order, one per line.
 */
public class Task1 {
    public static void main(String[] args) {
        Scanner scanner = new Scanner(System.in);

        // First line: how many words follow.
        int n = Integer.parseInt(scanner.nextLine());

        // LinkedHashSet de-duplicates while preserving insertion order.
        LinkedHashSet<String> unique = new LinkedHashSet<>();
        for (int i = 0; i < n; i++) {
            unique.add(scanner.nextLine());
        }

        unique.forEach(System.out::println);
    }
}
|
SaltAPI/SaltAPI.github.io | Doxygen/html/search/functions_4.js | <reponame>SaltAPI/SaltAPI.github.io
// Auto-generated Doxygen search index: maps the search term "end" to
// HTTP::ProcessingList::end. Do not edit by hand; regenerate the docs instead.
var searchData=
[
  ['end',['end',['../class_h_t_t_p_1_1_processing_list.html#ae9e221da82b4aacf1b92f60c460a9c2b',1,'HTTP::ProcessingList']]]
];
|
MarlonDValencia/SistemaDeInformacionAcademica-DDD | notas/src/main/java/com/universidad/informacionacademica/domain/tutor/events/TesisCalificada.java | <reponame>MarlonDValencia/SistemaDeInformacionAcademica-DDD<gh_stars>0
package com.universidad.informacionacademica.domain.tutor.events;
import co.com.sofka.domain.generic.DomainEvent;
import com.universidad.informacionacademica.domain.tutor.TesisAsignada;
/**
 * Domain event emitted when an assigned thesis ({@code TesisAsignada}) has been graded.
 */
public class TesisCalificada extends DomainEvent {
    // The graded thesis assignment carried by the event.
    protected TesisAsignada tesisAsignada;

    public TesisCalificada(TesisAsignada tesisAsignada) {
        // Event type identifier used by the eventing infrastructure for routing.
        super("universidad.informacionacademica.tesiscalificada");
        this.tesisAsignada = tesisAsignada;
    }

    public TesisAsignada getTesisAsignada() {
        return tesisAsignada;
    }
}
|
ArcheGraphics/Arche-cpp | vox.render/ui/widgets/visual/separator.h | // Copyright (c) 2022 <NAME>
//
// I am making my contributions/submissions to this project solely in my
// personal capacity and am not conveying any rights to any intellectual
// property of any third parties.
#ifndef separator_hpp
#define separator_hpp

#include "ui/widgets/widget.h"

namespace vox {
namespace ui {
/**
 * Simple widget that displays a separator line.
 */
class Separator : public Widget {
protected:
    // Widget draw hook; the separator rendering lives in the matching .cpp.
    void _draw_Impl() override;
};

}
}
#endif /* separator_hpp */
|
GDVFox/dflow | lib/go-actionlib/input_test.go | <gh_stars>0
package actionlib
import (
"bytes"
"encoding/binary"
"testing"
"github.com/stretchr/testify/assert"
)
// TestReadMessage verifies that length-prefixed messages written to the
// package-level stdin reader are decoded back intact, including binary
// payloads and multi-byte UTF-8 text.
func TestReadMessage(t *testing.T) {
	var buff bytes.Buffer
	// Redirect the package's input source to an in-memory buffer.
	stdin = &buff

	messages := [][]byte{
		[]byte("Hello"),
		[]byte("World"),
		{0x1, 0x3, 0x3, 0x7},
		[]byte("🍪𓅿"),
	}

	// Frame each message as: uint32 big-endian length, then the raw bytes.
	for _, msg := range messages {
		assert.NoError(t, binary.Write(&buff, binary.BigEndian, uint32(len(msg))))
		assert.NoError(t, binary.Write(&buff, binary.BigEndian, msg))
	}

	// Each ReadMessage call should yield exactly one original payload, in order.
	for i, msg := range messages {
		data, err := ReadMessage()
		assert.NoError(t, err)
		assert.EqualValuesf(t, msg, data, "Failed #%d:", i)
	}
}
|
eternita/frontcache | frontcache-core/src/main/java/org/frontcache/hystrix/fr/FallbackResolverFactory.java | <gh_stars>1-10
/**
* Copyright 2017 Eternita LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.frontcache.hystrix.fr;
import org.apache.http.client.HttpClient;
import org.frontcache.FCConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Lazily-initialized singleton factory for the {@link FallbackResolver}
 * implementation, configurable via the "front-cache.fallback-resolver.impl"
 * property; defaults to {@link FileBasedFallbackResolver}.
 */
public class FallbackResolverFactory {

    private static final Logger logger = LoggerFactory.getLogger(FallbackResolverFactory.class);

    private FallbackResolverFactory() {}

    private static FallbackResolver instance;

    /**
     * @return the initialized singleton resolver
     * @throws RuntimeException if {@link #init(HttpClient)} has not been called yet
     */
    public static FallbackResolver getInstance(){
        if (null == instance) {
            throw new RuntimeException("FallbackResolver is not initialized.");
        }
        return instance;
    }

    /**
     * Creates and initializes the singleton on first call; subsequent calls
     * return the existing instance. Synchronized so that concurrent first
     * calls cannot create (and init) two instances.
     */
    public static synchronized FallbackResolver init(HttpClient client){
        if (null == instance) {
            instance = getFallbackResolver();
            instance.init(client);
        }
        return instance;
    }

    /**
     * Instantiates the configured resolver class, falling back to
     * {@link FileBasedFallbackResolver} when the property is absent, the class
     * cannot be instantiated, or it does not implement {@link FallbackResolver}.
     */
    private static FallbackResolver getFallbackResolver()
    {
        String implStr = FCConfig.getProperty("front-cache.fallback-resolver.impl");
        if (null == implStr)
        {
            logger.info("Default implementation is loaded: " + FileBasedFallbackResolver.class.getCanonicalName());
            return new FileBasedFallbackResolver();
        }

        try
        {
            Class<?> clazz = Class.forName(implStr); // Class<?> instead of a raw type
            // getDeclaredConstructor().newInstance() replaces the deprecated
            // Class.newInstance(), which swallowed checked constructor exceptions.
            Object obj = clazz.getDeclaredConstructor().newInstance();
            if (obj instanceof FallbackResolver)
            {
                logger.info("FallbackResolver implementation loaded: " + implStr);
                return (FallbackResolver) obj;
            }
            // Previously this case fell through silently; log it explicitly.
            logger.error("Configured class " + implStr + " does not implement FallbackResolver. Default implementation is loaded: "
                    + FileBasedFallbackResolver.class.getCanonicalName());
        } catch (Exception ex) {
            logger.error("Cant instantiate " + implStr + ". Default implementation is loaded: " + FileBasedFallbackResolver.class.getCanonicalName());
        }
        return new FileBasedFallbackResolver();
    }

    /** Drops the singleton so the next {@link #init(HttpClient)} recreates it. */
    public static synchronized void destroy()
    {
        instance = null;
    }
}
|
shuwenjin/dcwlt | dcwlt-common/dcwlt-common-pay/src/main/java/com/dcits/dcwlt/pay/api/domain/dcep/common/Amt.java | package com.dcits.dcwlt.pay.api.domain.dcep.common;
import com.alibaba.fastjson.annotation.JSONField;
import org.hibernate.validator.constraints.Length;
/**
 * Amount element with a currency code ({@code Ccy}) and a string value,
 * serialized via fastjson using the annotated field names.
 */
public class Amt {

    // Currency code; at most 3 characters per the validator.
    @Length(max = 3)
    private String ccy;

    // Amount value as a string; at most 18 characters per the validator.
    @Length(max = 18)
    private String value;

    @JSONField(name = "Ccy")
    public String getCcy() {
        return this.ccy;
    }

    public void setCcy(String ccy) {
        this.ccy = ccy;
    }

    @JSONField(name = "value")
    public String getValue() {
        return this.value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    @Override
    public String toString() {
        // Produces exactly: Amt [ ccy=<ccy>, value=<value>]
        return String.format("Amt [ ccy=%s, value=%s]", this.ccy, this.value);
    }
}
|
dvirtz/conan-center-index | recipes/qwt/all/conanfile.py | <gh_stars>1-10
import os
from conans import ConanFile, tools
required_conan_version = ">=1.33.0"
class QwtConan(ConanFile):
    """Conan recipe for Qwt, a Qt-based 2D plotting/widget library."""

    name = "qwt"
    license = "LGPL-2.1-or-later"
    url = "https://github.com/conan-io/conan-center-index"
    homepage = "https://qwt.sourceforge.io/"
    # Bug fix: topics previously read ("conan", "archive", "compression") --
    # copy-pasted from an unrelated compression recipe; qwt is a Qt plotting/widget library.
    topics = ("conan", "qwt", "qt", "plot", "widgets", "charts")
    description = (
        "The Qwt library contains GUI Components and utility classes which are primarily useful for programs "
        "with a technical background. Beside a framework for 2D plots it provides scales, sliders, dials, compasses, "
        "thermometers, wheels and knobs to control or display values, arrays, or ranges of type double."
    )
    settings = "os", "compiler", "build_type", "arch"
    options = {
        "shared": [True, False],
        "fPIC": [True, False],
        "plot": [True, False],
        "widgets": [True, False],
        "svg": [True, False],
        "opengl": [True, False],
        "mathml": [True, False],
        "designer": [True, False]
    }
    default_options = {
        "shared": False,
        "fPIC": True,
        "plot": True,
        "widgets": True,
        "opengl": True,
        "designer": True,
        "mathml": False,
        "svg": False
    }
    generators = "qmake"

    @property
    def _source_subfolder(self):
        return "source_subfolder"

    def build_requirements(self):
        # jom provides parallel nmake-compatible builds on MSVC.
        if self.settings.os == "Windows" and self.settings.compiler == "Visual Studio":
            self.build_requires("jom/1.1.3")

    def config_options(self):
        if self.settings.os == "Windows":
            del self.options.fPIC

    def configure(self):
        # fPIC is meaningless for shared builds.
        if self.options.shared:
            del self.options.fPIC

    def requirements(self):
        self.requires("qt/5.15.2")

    def source(self):
        tools.get(**self.conan_data["sources"][self.version], strip_root=True, destination=self._source_subfolder)

    def _patch_qwt_config_files(self):
        """Rewrite qwtconfig.pri/qwtbuild.pri so qmake picks up the Conan
        toolchain and the feature set selected through the recipe options."""
        # qwtconfig.pri: inject the Conan setup and toggle feature flags.
        qwtconfig_path = os.path.join(self.source_folder, self._source_subfolder, "qwtconfig.pri")
        qwtconfig = tools.load(qwtconfig_path)
        qwtconfig = "CONFIG += conan_basic_setup\ninclude(../conanbuildinfo.pri)\n" + qwtconfig
        # Each recipe option maps to a QWT_CONFIG flag (+= enables, -= disables).
        qwtconfig += "QWT_CONFIG {}= QwtDll\n".format("+" if self.options.shared else "-")
        qwtconfig += "QWT_CONFIG {}= QwtPlot\n".format("+" if self.options.plot else "-")
        qwtconfig += "QWT_CONFIG {}= QwtWidgets\n".format("+" if self.options.widgets else "-")
        qwtconfig += "QWT_CONFIG {}= QwtSvg\n".format("+" if self.options.svg else "-")
        qwtconfig += "QWT_CONFIG {}= QwtOpenGL\n".format("+" if self.options.opengl else "-")
        qwtconfig += "QWT_CONFIG {}= QwtMathML\n".format("+" if self.options.mathml else "-")
        qwtconfig += "QWT_CONFIG {}= QwtDesigner\n".format("+" if self.options.designer else "-")
        tools.save(qwtconfig_path, qwtconfig)

        # qwtbuild.pri: pin a single-configuration build matching the Conan build_type.
        qwtbuild_path = os.path.join(self.source_folder, self._source_subfolder, "qwtbuild.pri")
        qwtbuild = tools.load(qwtbuild_path)
        qwtbuild += "CONFIG -= debug_and_release\n"
        qwtbuild += "CONFIG -= build_all\n"
        qwtbuild += "CONFIG -= release\n"
        qwtbuild += "CONFIG += {}\n".format("debug" if self.settings.build_type == "Debug" else "release")
        if self.settings.build_type == "RelWithDebInfo":
            qwtbuild += "CONFIG += force_debug_info\n"
        tools.save(qwtbuild_path, qwtbuild)

    def build(self):
        self._patch_qwt_config_files()
        if self.settings.compiler == "Visual Studio":
            vcvars = tools.vcvars_command(self.settings)
            self.run("{} && qmake {}".format(vcvars, self._source_subfolder), run_environment=True)
            self.run("{} && jom".format(vcvars))
        else:
            self.run("qmake {}".format(self._source_subfolder), run_environment=True)
            self.run("make -j {}".format(tools.cpu_count()))

    def package(self):
        self.copy("COPYING", src=self._source_subfolder, dst="licenses")
        self.copy("*.h", dst="include", src=os.path.join(self._source_subfolder, "src"))
        self.copy("*.dll", dst="bin", keep_path=False)
        self.copy("*.lib", dst="lib", keep_path=False)
        self.copy("*.so*", dst="lib", keep_path=False, symlinks=True)
        self.copy("*.dylib", dst="lib", keep_path=False)
        self.copy("*.a", dst="lib", keep_path=False)

    def package_info(self):
        # Qwt appends a debug suffix on some platforms ("d" on Windows, "_debug" on macOS).
        postfix = ""
        if self.settings.build_type == "Debug":
            if self.settings.os == "Windows":
                postfix += "d"
            elif self.settings.os == "Macos":
                postfix += "_debug"
        self.cpp_info.libs = ["qwt" + postfix]
        self.env_info.QT_PLUGIN_PATH.append(os.path.join(self.package_folder, 'bin'))
        self.env_info.QT_PLUGIN_PATH.append(os.path.join(self.package_folder, 'lib'))
        self.cpp_info.defines = ['HAVE_QWT', 'QWT_DLL'] if self.options.shared else ['HAVE_QWT']
|
ObjectivitySRC/PVGPlugins | Filters/SumBillboard/vtkSumBillboard.h | <reponame>ObjectivitySRC/PVGPlugins
#ifndef __vtkSumBillboard_h
#define __vtkSumBillboard_h

#include "vtkTableAlgorithm.h"

// Table algorithm producing billboard-style text from a formatted value.
// NOTE(review): exact output semantics live in the .cxx implementation — confirm there.
class VTK_EXPORT vtkSumBillboard : public vtkTableAlgorithm
{
public:
  static vtkSumBillboard* New();
  vtkTypeRevisionMacro(vtkSumBillboard, vtkTableAlgorithm);
  void PrintSelf(ostream& os, vtkIndent indent);

  // Description:
  // Get/Set the text string to generate in the output.
  vtkSetStringMacro(Format);
  vtkGetStringMacro(Format);

  // Description:
  // When the component mode is UseSelected, this ivar indicated the selected
  // component. The default value is 0.
  vtkSetClampMacro(SelectedComponent,int,0,VTK_INT_MAX);
  vtkGetMacro(SelectedComponent,int);

  // Description:
  // Decimal precision applied when formatting the displayed number (presumably;
  // confirm in the implementation).
  vtkSetMacro(NDecimals, int);
  vtkGetMacro(NDecimals, int);

protected:
  vtkSumBillboard();
  ~vtkSumBillboard();

  // Standard VTK pipeline hooks.
  virtual int RequestData(vtkInformation* request, vtkInformationVector** inputVector, vtkInformationVector* outputVector);
  virtual int FillInputPortInformation(int port, vtkInformation* info);

  char* Format;           // text template (see Format accessors above)
  int SelectedComponent;  // component index used in UseSelected mode
  int NDecimals;          // decimal precision setting

private:
  vtkSumBillboard(const vtkSumBillboard&); // Not implemented
  void operator=(const vtkSumBillboard&); // Not implemented
};
#endif
|
eventstorm-projects/eventstorm | eventstorm-sql/src/test/java/eu/eventstorm/sql/builder/SelectBuilderFromSubSelectTest.java | package eu.eventstorm.sql.builder;
import static com.google.common.collect.ImmutableList.of;
import static eu.eventstorm.sql.dialect.Dialects.h2;
import static eu.eventstorm.sql.expression.AggregateFunctions.rowNumber;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.when;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mockito;
import eu.eventstorm.sql.Database;
import eu.eventstorm.sql.Module;
import eu.eventstorm.sql.RawSqlExecutor;
import eu.eventstorm.sql.desc.SqlColumn;
import eu.eventstorm.sql.desc.SqlSingleColumn;
import eu.eventstorm.sql.desc.SqlTable;
import eu.eventstorm.sql.expression.Expressions;
import eu.eventstorm.sql.expression.OverPartitions;
import eu.eventstorm.test.LoggerInstancePostProcessor;
/**
 * Verifies the SQL rendered by {@code SelectBuilderFromSubSelect} when wrapping
 * an inner SELECT, with aliases, window functions and joins, against the H2 dialect.
 */
@ExtendWith(LoggerInstancePostProcessor.class)
class SelectBuilderFromSubSelectTest {

    // Fixture tables T1..T3 with table aliases a/b/c.
    private static final SqlTable TABLE_T1 = new SqlTable("T1", "a");
    private static final SqlTable TABLE_T2 = new SqlTable("T2", "b");
    private static final SqlTable TABLE_T3 = new SqlTable("T3", "c");

    private static final SqlColumn COL_T1_01 = new SqlSingleColumn(TABLE_T1, "col_T1_01", false, true, true);
    private static final SqlColumn COL_T1_02 = new SqlSingleColumn(TABLE_T1, "col_T1_02", false, true, true);
    private static final SqlColumn COL_T1_03 = new SqlSingleColumn(TABLE_T1, "col_T1_03", false, true, true);
    private static final SqlColumn COL_T2_01 = new SqlSingleColumn(TABLE_T2, "col_T2_01", false, true, true);

    private Database database;

    @BeforeEach
    void before() {
        // Mock database wired with the H2 dialect; all fixture tables share one module.
        database = Mockito.mock(Database.class);
        Module module = new Module("test") {
        };
        when(database.dialect()).thenReturn(h2(database));
        when(database.getModule(TABLE_T1)).thenReturn(module);
        when(database.getModule(TABLE_T2)).thenReturn(module);
        when(database.getModule(TABLE_T3)).thenReturn(module);
        Mockito.when(database.rawSqlExecutor()).thenReturn(Mockito.mock(RawSqlExecutor.class));
    }

    // Plain sub-select: SELECT * wraps the inner projection unchanged.
    @Test
    void testSelect() {
        SelectBuilder builder = new SelectBuilder(database, of(COL_T1_01, COL_T1_02, COL_T1_03));
        builder.from(TABLE_T1);

        SelectBuilderFromSubSelect sbuilder = new SelectBuilderFromSubSelect(database,
                SubSelects.from(builder.build()));
        assertEquals("SELECT * FROM (SELECT col_T1_01,col_T1_02,col_T1_03 FROM T1)", sbuilder.build().sql());
    }

    // Column aliases inside the inner select are preserved.
    @Test
    void testSelectWithAlias() {
        SelectBuilder builder = new SelectBuilder(database, of(COL_T1_01, COL_T1_02, COL_T1_03.as("toto")));
        builder.from(TABLE_T1);

        SelectBuilderFromSubSelect sbuilder = new SelectBuilderFromSubSelect(database,
                SubSelects.from(builder.build()));
        assertEquals("SELECT * FROM (SELECT col_T1_01,col_T1_02,col_T1_03 toto FROM T1)", sbuilder.build().sql());
    }

    // ROW_NUMBER() OVER (PARTITION BY ...) in the inner select, then filtered on the outer query.
    @Test
    void testSelectWithOver() {
        SelectBuilder builder = new SelectBuilder(database, of(COL_T1_01, COL_T1_02,rowNumber(OverPartitions.by(COL_T1_03, "maxRowNumber"))));
        builder.from(TABLE_T1);

        SelectBuilderFromSubSelect sbuilder = new SelectBuilderFromSubSelect(database, SubSelects.from(builder.build()));
        assertEquals(
                "SELECT * FROM (SELECT col_T1_01,col_T1_02,ROW_NUMBER() OVER (PARTITION BY col_T1_03) maxRowNumber FROM T1)",
                sbuilder.build().sql());

        sbuilder.where(Expressions.raw("maxRowNumber = 1"));
        assertEquals(
                "SELECT * FROM (SELECT col_T1_01,col_T1_02,ROW_NUMBER() OVER (PARTITION BY col_T1_03) maxRowNumber FROM T1) WHERE maxRowNumber = 1",
                sbuilder.build().sql());
    }

    // Inner join in the sub-select: table aliases (a/b) appear in the rendered SQL.
    @Test
    void testSelectWithInnerJoin() {
        SelectBuilder builder = new SelectBuilder(database, of(COL_T1_01, COL_T1_02, COL_T1_03.as("alias_03"),rowNumber(OverPartitions.by(COL_T1_03, "maxRowNumber"))));
        builder.from(TABLE_T1);
        builder.innerJoin(TABLE_T2, COL_T2_01, COL_T1_01);

        SelectBuilderFromSubSelect sbuilder = new SelectBuilderFromSubSelect(database, SubSelects.from(builder.build(), "toto"));
        sbuilder.where(Expressions.raw("maxRowNumber = 1"));
        assertEquals("SELECT * FROM (SELECT a.col_T1_01,a.col_T1_02,a.col_T1_03 alias_03,ROW_NUMBER() OVER (PARTITION BY a.col_T1_03) maxRowNumber FROM T1 a INNER JOIN T2 b ON b.col_T2_01=a.col_T1_01) WHERE maxRowNumber = 1",
                sbuilder.build().sql());
    }
}
simonzcx/quinn-service-framework | quinn-service-biz/src/main/java/com/quinn/framework/configuration/JsonSerializerConfiguration.java | <filename>quinn-service-biz/src/main/java/com/quinn/framework/configuration/JsonSerializerConfiguration.java
package com.quinn.framework.configuration;
import com.fasterxml.jackson.datatype.jsr310.deser.LocalDateDeserializer;
import com.fasterxml.jackson.datatype.jsr310.deser.LocalDateTimeDeserializer;
import com.fasterxml.jackson.datatype.jsr310.ser.LocalDateSerializer;
import com.fasterxml.jackson.datatype.jsr310.ser.LocalDateTimeSerializer;
import com.quinn.framework.api.CustomSerializable;
import com.quinn.framework.component.serializer.BaseResultSerializer;
import com.quinn.framework.component.serializer.BatchResultSerializer;
import com.quinn.framework.component.serializer.CustomSerializer;
import com.quinn.util.base.model.BaseResult;
import com.quinn.util.constant.DateConstant;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.jackson.Jackson2ObjectMapperBuilderCustomizer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
/**
* JSON序列化配置类
*
* @author Qunhua.Liao
* @since 2020-04-02
*/
@Configuration
public class JsonSerializerConfiguration {
@Value("${com.quinn-service.date-pattern:" + DateConstant.DEFAULT_DATE_FORMAT + "}")
private String datePattern;
@Value("${com.quinn-service.datetime-pattern:" + DateConstant.DEFAULT_DATE_TIME_FORMAT + "}")
private String dateTimePattern;
@Bean
public LocalDateTimeSerializer localDateTimeSerializer() {
return new LocalDateTimeSerializer(DateTimeFormatter.ofPattern(dateTimePattern));
}
@Bean
public LocalDateSerializer localDateSerializer() {
return new LocalDateSerializer(DateTimeFormatter.ofPattern(datePattern));
}
@Bean
public LocalDateTimeDeserializer localDateTimeDeserializer() {
return new LocalDateTimeDeserializer(DateTimeFormatter.ofPattern(dateTimePattern));
}
@Bean
public LocalDateDeserializer localDateDeserializer() {
return new LocalDateDeserializer(DateTimeFormatter.ofPattern(datePattern));
}
@Bean
public CustomSerializer customSerializer() {
return new CustomSerializer();
}
@Bean
public BaseResultSerializer baseResultSerializer() {
return new BaseResultSerializer();
}
@Bean
public BatchResultSerializer batchResultSerializer() {
return new BatchResultSerializer();
}
@Bean
public Jackson2ObjectMapperBuilderCustomizer jackson2ObjectMapperBuilderCustomizer() {
return builder -> builder
.serializerByType(LocalDateTime.class, localDateTimeSerializer())
.serializerByType(LocalDate.class, localDateSerializer())
.deserializerByType(LocalDateTime.class, localDateTimeDeserializer())
.deserializerByType(LocalDate.class, localDateDeserializer())
.serializerByType(CustomSerializable.class, customSerializer())
.serializerByType(BaseResult.class, baseResultSerializer())
.serializerByType(BatchResultSerializer.class, batchResultSerializer())
;
}
} |
fyndiq/fyndiq-ui | packages/fyndiq-ui-test/stories/component-message.js | <filename>packages/fyndiq-ui-test/stories/component-message.js
import React from 'react'
import { storiesOf } from '@storybook/react'
import Button, { Wrapper as ButtonWrapper } from 'fyndiq-component-button'
import { Message, Wrapper, addMessage } from 'fyndiq-component-message'
import { Error, Truck, Warning, Checkmark } from 'fyndiq-icons'
// Storybook stories for the Message component and the addMessage() helper.
storiesOf('Message', module)
  // Plain message with default (info) styling.
  .addWithInfo('default', () => <Message>Content</Message>)
  // One message per supported "type" prop value, each with a matching icon.
  .addWithInfo('color themes', () => (
    <div>
      <div>
        <Message icon={<Truck />}>Type info</Message>
      </div>
      <div>
        <Message icon={<Checkmark />} type="confirm">
          Type confirm
        </Message>
      </div>
      <div>
        <Message icon={<Warning />} type="warn">
          Type warn
        </Message>
      </div>
      <div>
        <Message icon={<Error />} type="error">
          Type error
        </Message>
      </div>
    </div>
  ))
  // addMessage() pushes a Message into the mounted <Wrapper /> imperatively.
  .addWithInfo('addMessage utility', () => (
    <React.Fragment>
      <Wrapper />
      <Button
        onClick={() =>
          addMessage(
            <Message icon={<Checkmark />} type="confirm">
              The message has been successfully shown
            </Message>,
          )
        }
      >
        Show message
      </Button>
    </React.Fragment>
  ))
  // The "timeout" prop (milliseconds) controls how long a message stays visible.
  .addWithInfo('custom timeout', () => (
    <React.Fragment>
      <Wrapper />
      <ButtonWrapper>
        <Button
          onClick={() =>
            addMessage(
              <Message icon={<Warning />} type="warn" timeout={1000}>
                I don't stay very long
              </Message>,
            )
          }
        >
          Show 1s message
        </Button>
        <Button
          onClick={() =>
            addMessage(
              <Message timeout={10000}>You OK if I stay a bit long?</Message>,
            )
          }
        >
          Show 10s message
        </Button>
      </ButtonWrapper>
    </React.Fragment>
  ))
|
marcofavorito/agents-aea | packages/fetchai/protocols/http/message.py | # -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2021 fetchai
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""This module contains http's message definition."""
import logging
from typing import Set, Tuple, cast
from aea.configurations.base import PublicId
from aea.exceptions import AEAEnforceError, enforce
from aea.protocols.base import Message
_default_logger = logging.getLogger("aea.packages.fetchai.protocols.http.message")
DEFAULT_BODY_SIZE = 4
class HttpMessage(Message):
    """A protocol for HTTP requests and responses."""

    # NOTE(review): this class follows the layout of AEA protocol-generator
    # output (exact type() checks, content counting); limit edits to comments
    # so the module can be regenerated safely.

    protocol_id = PublicId.from_str("fetchai/http:0.11.0")

    class Performative(Message.Performative):
        """Performatives for the http protocol."""

        REQUEST = "request"
        RESPONSE = "response"

        def __str__(self):
            """Get the string representation."""
            return str(self.value)

    _performatives = {"request", "response"}
    __slots__: Tuple[str, ...] = tuple()

    class _SlotsCls:
        # Names of all fields a message of this protocol may carry.
        __slots__ = (
            "body",
            "dialogue_reference",
            "headers",
            "message_id",
            "method",
            "performative",
            "status_code",
            "status_text",
            "target",
            "url",
            "version",
        )

    def __init__(
        self,
        performative: Performative,
        dialogue_reference: Tuple[str, str] = ("", ""),
        message_id: int = 1,
        target: int = 0,
        **kwargs,
    ):
        """
        Initialise an instance of HttpMessage.

        :param message_id: the message id.
        :param dialogue_reference: the dialogue reference.
        :param target: the message target.
        :param performative: the message performative.
        """
        super().__init__(
            dialogue_reference=dialogue_reference,
            message_id=message_id,
            target=target,
            performative=HttpMessage.Performative(performative),
            **kwargs,
        )

    @property
    def valid_performatives(self) -> Set[str]:
        """Get valid performatives."""
        return self._performatives

    @property
    def dialogue_reference(self) -> Tuple[str, str]:
        """Get the dialogue_reference of the message."""
        enforce(self.is_set("dialogue_reference"), "dialogue_reference is not set.")
        return cast(Tuple[str, str], self.get("dialogue_reference"))

    @property
    def message_id(self) -> int:
        """Get the message_id of the message."""
        enforce(self.is_set("message_id"), "message_id is not set.")
        return cast(int, self.get("message_id"))

    @property
    def performative(self) -> Performative:  # type: ignore # noqa: F821
        """Get the performative of the message."""
        enforce(self.is_set("performative"), "performative is not set.")
        return cast(HttpMessage.Performative, self.get("performative"))

    @property
    def target(self) -> int:
        """Get the target of the message."""
        enforce(self.is_set("target"), "target is not set.")
        return cast(int, self.get("target"))

    @property
    def body(self) -> bytes:
        """Get the 'body' content from the message."""
        enforce(self.is_set("body"), "'body' content is not set.")
        return cast(bytes, self.get("body"))

    @property
    def headers(self) -> str:
        """Get the 'headers' content from the message."""
        enforce(self.is_set("headers"), "'headers' content is not set.")
        return cast(str, self.get("headers"))

    @property
    def method(self) -> str:
        """Get the 'method' content from the message."""
        enforce(self.is_set("method"), "'method' content is not set.")
        return cast(str, self.get("method"))

    @property
    def status_code(self) -> int:
        """Get the 'status_code' content from the message."""
        enforce(self.is_set("status_code"), "'status_code' content is not set.")
        return cast(int, self.get("status_code"))

    @property
    def status_text(self) -> str:
        """Get the 'status_text' content from the message."""
        enforce(self.is_set("status_text"), "'status_text' content is not set.")
        return cast(str, self.get("status_text"))

    @property
    def url(self) -> str:
        """Get the 'url' content from the message."""
        enforce(self.is_set("url"), "'url' content is not set.")
        return cast(str, self.get("url"))

    @property
    def version(self) -> str:
        """Get the 'version' content from the message."""
        enforce(self.is_set("version"), "'version' content is not set.")
        return cast(str, self.get("version"))

    def _is_consistent(self) -> bool:
        """Check that the message follows the http protocol."""
        # Validates header-field types, the performative, performative-specific
        # content types, the exact content count, and the message-id/target
        # ordering rules. Any violation is logged and reported as False.
        try:
            enforce(
                type(self.dialogue_reference) == tuple,
                "Invalid type for 'dialogue_reference'. Expected 'tuple'. Found '{}'.".format(
                    type(self.dialogue_reference)
                ),
            )
            enforce(
                type(self.dialogue_reference[0]) == str,
                "Invalid type for 'dialogue_reference[0]'. Expected 'str'. Found '{}'.".format(
                    type(self.dialogue_reference[0])
                ),
            )
            enforce(
                type(self.dialogue_reference[1]) == str,
                "Invalid type for 'dialogue_reference[1]'. Expected 'str'. Found '{}'.".format(
                    type(self.dialogue_reference[1])
                ),
            )
            enforce(
                type(self.message_id) == int,
                "Invalid type for 'message_id'. Expected 'int'. Found '{}'.".format(
                    type(self.message_id)
                ),
            )
            enforce(
                type(self.target) == int,
                "Invalid type for 'target'. Expected 'int'. Found '{}'.".format(
                    type(self.target)
                ),
            )

            # Light Protocol Rule 2
            # Check correct performative
            enforce(
                type(self.performative) == HttpMessage.Performative,
                "Invalid 'performative'. Expected either of '{}'. Found '{}'.".format(
                    self.valid_performatives, self.performative
                ),
            )

            # Check correct contents
            # Number of protocol-specific contents = total slots minus the
            # DEFAULT_BODY_SIZE header fields shared by every message.
            actual_nb_of_contents = len(self._body) - DEFAULT_BODY_SIZE
            expected_nb_of_contents = 0
            if self.performative == HttpMessage.Performative.REQUEST:
                expected_nb_of_contents = 5
                enforce(
                    type(self.method) == str,
                    "Invalid type for content 'method'. Expected 'str'. Found '{}'.".format(
                        type(self.method)
                    ),
                )
                enforce(
                    type(self.url) == str,
                    "Invalid type for content 'url'. Expected 'str'. Found '{}'.".format(
                        type(self.url)
                    ),
                )
                enforce(
                    type(self.version) == str,
                    "Invalid type for content 'version'. Expected 'str'. Found '{}'.".format(
                        type(self.version)
                    ),
                )
                enforce(
                    type(self.headers) == str,
                    "Invalid type for content 'headers'. Expected 'str'. Found '{}'.".format(
                        type(self.headers)
                    ),
                )
                enforce(
                    type(self.body) == bytes,
                    "Invalid type for content 'body'. Expected 'bytes'. Found '{}'.".format(
                        type(self.body)
                    ),
                )
            elif self.performative == HttpMessage.Performative.RESPONSE:
                expected_nb_of_contents = 5
                enforce(
                    type(self.version) == str,
                    "Invalid type for content 'version'. Expected 'str'. Found '{}'.".format(
                        type(self.version)
                    ),
                )
                enforce(
                    type(self.status_code) == int,
                    "Invalid type for content 'status_code'. Expected 'int'. Found '{}'.".format(
                        type(self.status_code)
                    ),
                )
                enforce(
                    type(self.status_text) == str,
                    "Invalid type for content 'status_text'. Expected 'str'. Found '{}'.".format(
                        type(self.status_text)
                    ),
                )
                enforce(
                    type(self.headers) == str,
                    "Invalid type for content 'headers'. Expected 'str'. Found '{}'.".format(
                        type(self.headers)
                    ),
                )
                enforce(
                    type(self.body) == bytes,
                    "Invalid type for content 'body'. Expected 'bytes'. Found '{}'.".format(
                        type(self.body)
                    ),
                )

            # Check correct content count
            enforce(
                expected_nb_of_contents == actual_nb_of_contents,
                "Incorrect number of contents. Expected {}. Found {}".format(
                    expected_nb_of_contents, actual_nb_of_contents
                ),
            )

            # Light Protocol Rule 3
            # The first message of a dialogue targets nothing; later messages
            # must target a strictly earlier message.
            if self.message_id == 1:
                enforce(
                    self.target == 0,
                    "Invalid 'target'. Expected 0 (because 'message_id' is 1). Found {}.".format(
                        self.target
                    ),
                )
            else:
                enforce(
                    0 < self.target < self.message_id,
                    "Invalid 'target'. Expected an integer between 1 and {} inclusive. Found {}.".format(
                        self.message_id - 1, self.target,
                    ),
                )
        except (AEAEnforceError, ValueError, KeyError) as e:
            _default_logger.error(str(e))
            return False

        return True
|
bhkjersten/autopsy | Core/src/org/sleuthkit/autopsy/coreutils/ModuleSettings.java | /*
* Autopsy Forensic Browser
*
* Copyright 2011 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.coreutils;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.logging.Level;
/**
 * This class contains the framework to read, add, update, and remove from the
 * property files located at %USERDIR%/Config/x.properties
 */
public class ModuleSettings {

    // The directory where the properties files are located.
    private final static String moduleDirPath = PlatformUtil.getUserConfigDirectory();
    public static final String DEFAULT_CONTEXT = "GeneralContext"; //NON-NLS
    public static final String MAIN_SETTINGS = "Case"; //NON-NLS
    public static final String CURRENT_CASE_TYPE = "Current_Case_Type"; //NON-NLS

    /**
     * Private constructor: this is a static utility class and must not be
     * instantiated.
     */
    private ModuleSettings() {
    }

    /**
     * Makes a new config file of the specified name. Do not include the
     * extension.
     *
     * @param moduleName - The name of the config file to make
     *
     * @return True if successfully created, false if already exists or an error
     *         is thrown.
     */
    public static boolean makeConfigFile(String moduleName) {
        if (configExists(moduleName)) {
            return false;
        }
        File propPath = new File(moduleDirPath + File.separator + moduleName + ".properties");
        File parent = new File(propPath.getParent());
        if (!parent.exists()) {
            parent.mkdirs();
        }

        Properties props = new Properties();
        try {
            propPath.createNewFile();
            // try-with-resources guarantees the stream is closed even if
            // store() throws (the original leaked it on that path).
            try (FileOutputStream fos = new FileOutputStream(propPath)) {
                props.store(fos, "");
            }
        } catch (IOException e) {
            Logger.getLogger(ModuleSettings.class.getName()).log(Level.WARNING, "Was not able to create a new properties file.", e); //NON-NLS
            return false;
        }
        return true;
    }

    /**
     * Determines if a given properties file exists or not.
     *
     * @param moduleName - The name of the config file to evaluate
     *
     * @return true if the config exists, false otherwise.
     */
    public static boolean configExists(String moduleName) {
        File f = new File(moduleDirPath + File.separator + moduleName + ".properties");
        return f.exists();
    }

    /**
     * Determines whether the given setting exists in the given config file.
     *
     * @param moduleName  - The name of the config file to evaluate
     * @param settingName - The setting to look up
     *
     * @return true if the config exists and contains the setting.
     */
    public static boolean settingExists(String moduleName, String settingName) {
        if (!configExists(moduleName)) {
            return false;
        }
        try {
            Properties props = fetchProperties(moduleName);
            return (props.getProperty(settingName) != null);
        } catch (IOException e) {
            return false;
        }
    }

    /**
     * Returns the path of the given properties file.
     *
     * @param moduleName - The name of the config file to evaluate
     *
     * @return The path of the given config file. Returns null if the config
     *         file doesn't exist.
     */
    private static String getPropertyPath(String moduleName) {
        if (configExists(moduleName)) {
            return moduleDirPath + File.separator + moduleName + ".properties"; //NON-NLS
        }
        return null;
    }

    /**
     * Returns the given properties file's setting as specified by settingName.
     * Creates the config file if it does not yet exist.
     *
     * @param moduleName  - The name of the config file to read from.
     * @param settingName - The setting name to retrieve.
     *
     * @return the value associated with the setting, or null on error.
     */
    public static String getConfigSetting(String moduleName, String settingName) {
        if (!configExists(moduleName)) {
            makeConfigFile(moduleName);
            Logger.getLogger(ModuleSettings.class.getName()).log(Level.INFO, "File did not exist. Created file [" + moduleName + ".properties]"); //NON-NLS
        }
        try {
            Properties props = fetchProperties(moduleName);
            return props.getProperty(settingName);
        } catch (IOException e) {
            Logger.getLogger(ModuleSettings.class.getName()).log(Level.WARNING, "Could not read config file [" + moduleName + "]", e); //NON-NLS
            return null;
        }
    }

    /**
     * Returns the given properties file's map of settings. Creates the config
     * file if it does not yet exist.
     *
     * @param moduleName - the name of the config file to read from.
     *
     * @return the map of all key:value pairs representing the settings of the
     *         config, or null on error (callers rely on this, so the null
     *         return is preserved).
     */
    public static Map<String, String> getConfigSettings(String moduleName) {
        if (!configExists(moduleName)) {
            makeConfigFile(moduleName);
            Logger.getLogger(ModuleSettings.class.getName()).log(Level.INFO, "File did not exist. Created file [" + moduleName + ".properties]"); //NON-NLS
        }
        try {
            Properties props = fetchProperties(moduleName);
            Set<String> keys = props.stringPropertyNames();
            Map<String, String> map = new HashMap<>();
            for (String s : keys) {
                map.put(s, props.getProperty(s));
            }
            return map;
        } catch (IOException e) {
            Logger.getLogger(ModuleSettings.class.getName()).log(Level.WARNING, "Could not read config file [" + moduleName + "]", e); //NON-NLS
            return null;
        }
    }

    /**
     * Sets the given properties file to the given setting map.
     *
     * @param moduleName - The name of the module to be written to.
     * @param settings   - The mapping of all key:value pairs of settings to add
     *                   to the config.
     */
    public static synchronized void setConfigSettings(String moduleName, Map<String, String> settings) {
        if (!configExists(moduleName)) {
            makeConfigFile(moduleName);
            Logger.getLogger(ModuleSettings.class.getName()).log(Level.INFO, "File did not exist. Created file [" + moduleName + ".properties]"); //NON-NLS
        }
        try {
            Properties props = fetchProperties(moduleName);
            for (Map.Entry<String, String> kvp : settings.entrySet()) {
                props.setProperty(kvp.getKey(), kvp.getValue());
            }
            File path = new File(getPropertyPath(moduleName));
            try (FileOutputStream fos = new FileOutputStream(path)) {
                props.store(fos, "Changed config settings(batch)"); //NON-NLS
            }
        } catch (IOException e) {
            Logger.getLogger(ModuleSettings.class.getName()).log(Level.WARNING, "Property file exists for [" + moduleName + "] at [" + getPropertyPath(moduleName) + "] but could not be loaded.", e); //NON-NLS
        }
    }

    /**
     * Sets the given properties file to the given settings.
     *
     * @param moduleName  - The name of the module to be written to.
     * @param settingName - The name of the setting to be modified.
     * @param settingVal  - the value to set the setting to.
     */
    public static synchronized void setConfigSetting(String moduleName, String settingName, String settingVal) {
        if (!configExists(moduleName)) {
            makeConfigFile(moduleName);
            Logger.getLogger(ModuleSettings.class.getName()).log(Level.INFO, "File did not exist. Created file [" + moduleName + ".properties]"); //NON-NLS
        }
        try {
            Properties props = fetchProperties(moduleName);
            props.setProperty(settingName, settingVal);
            File path = new File(getPropertyPath(moduleName));
            try (FileOutputStream fos = new FileOutputStream(path)) {
                props.store(fos, "Changed config settings(single)"); //NON-NLS
            }
        } catch (IOException e) {
            Logger.getLogger(ModuleSettings.class.getName()).log(Level.WARNING, "Property file exists for [" + moduleName + "] at [" + getPropertyPath(moduleName) + "] but could not be loaded.", e); //NON-NLS
        }
    }

    /**
     * Removes the given key from the given properties file.
     *
     * @param moduleName - The name of the properties file to be modified.
     * @param key        - the name of the key to remove.
     */
    public static synchronized void removeProperty(String moduleName, String key) {
        try {
            if (getConfigSetting(moduleName, key) != null) {
                Properties props = fetchProperties(moduleName);
                props.remove(key);
                File path = new File(getPropertyPath(moduleName));
                try (FileOutputStream fos = new FileOutputStream(path)) {
                    props.store(fos, "Removed " + key); //NON-NLS
                }
            }
        } catch (IOException e) {
            Logger.getLogger(ModuleSettings.class.getName()).log(Level.WARNING, "Could not remove property from file, file not found", e); //NON-NLS
        }
    }

    /**
     * Returns the properties file as specified by moduleName.
     *
     * @param moduleName
     *
     * @return Properties loaded from the module's config file.
     *
     * @throws IOException if the file cannot be read.
     */
    private static Properties fetchProperties(String moduleName) throws IOException {
        Properties props = new Properties();
        // try-with-resources: the original closed the stream only on success.
        try (InputStream inputStream = new FileInputStream(getPropertyPath(moduleName))) {
            props.load(inputStream);
        }
        return props;
    }

    /**
     * Gets the property file as specified.
     *
     * @param moduleName
     *
     * @return A new file handle, returns null if the file does not exist.
     */
    public static File getPropertyFile(String moduleName) {
        String path = getPropertyPath(moduleName);
        if (path == null) {
            return null;
        }
        return new File(path);
    }
}
|
gracielundell/scienceBlog | tmp/babel-output_path-eHmlLazt.tmp/modules/liquid-fire/is-browser.js | export default isBrowser;
// Detects whether the code is executing in a browser-like environment.
// Preserves the original short-circuit semantics exactly: a missing `window`
// binding yields `false`; a falsy `window` yields that falsy value; otherwise
// the result of the equivalent `document` check is returned (typically the
// `document` object itself when present).
function isBrowser() {
  const windowPart = typeof window !== 'undefined' && window
  if (!windowPart) {
    return windowPart
  }
  return typeof document !== 'undefined' && document
}
knightjdr/prohits-viz-analysis | pkg/stats/mean_test.go | <reponame>knightjdr/prohits-viz-analysis<gh_stars>1-10
package stats_test
import (
. "github.com/knightjdr/prohits-viz-analysis/pkg/stats"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
// Specs for MeanString, which parses a slice of numeric strings and returns
// their arithmetic mean as a float64.
var _ = Describe("Mean string", func() {
	It("Should return mean of a slice of strings", func() {
		slice := []string{"7", "6", "3", "9", "11"}
		Expect(MeanString(slice)).To(Equal(7.2))
	})

	// Edge case: an empty slice yields 0 rather than NaN / divide-by-zero.
	It("Should return zero for empty slice", func() {
		slice := []string{}
		Expect(MeanString(slice)).To(Equal(float64(0)))
	})

	// Edge case: any unparsable element makes the whole result 0.
	It("Should return zero for slice containing a value that cannot be parsed", func() {
		slice := []string{"7", "6", "a", "9", "11"}
		Expect(MeanString(slice)).To(Equal(float64(0)))
	})
})
|
leopardoooo/cambodia | boss-report/src/main/java/com/ycsoft/report/dao/config/RepTOsdSqlDao.java | <reponame>leopardoooo/cambodia
package com.ycsoft.report.dao.config;
import java.sql.PreparedStatement;
import org.springframework.stereotype.Component;
import com.ycsoft.beans.config.TOsdSql;
import com.ycsoft.commons.exception.ReportException;
import com.ycsoft.daos.abstracts.BaseEntityDao;
@Component
public class RepTOsdSqlDao extends BaseEntityDao<TOsdSql> {

    public RepTOsdSqlDao() {
    }

    /**
     * Persists an OSD SQL configuration record into T_OSD_SQL.
     *
     * @param o the record to insert; create_time is filled in by the database
     *          (sysdate)
     * @throws ReportException if the insert fails for any reason
     */
    public void saveRepTOsdSql(TOsdSql o) throws ReportException {
        // try-with-resources closes the statement on every path; the original
        // closed it in a finally block whose empty catch swallowed errors.
        try (PreparedStatement pst = this.getConnection().prepareStatement(
                "INSERT INTO T_OSD_SQL(query_id,title,sql_content,status,optr_id,create_time) VALUES(?,?,?,?,?,sysdate)")) {
            pst.setString(1, o.getQuery_id());
            pst.setString(2, o.getTitle());
            pst.setString(3, o.getSql_content());
            pst.setString(4, o.getStatus());
            pst.setString(5, o.getOptr_id());
            // Single-row insert: executeUpdate() replaces the needless
            // addBatch()/executeBatch() pair with identical effect.
            pst.executeUpdate();
        } catch (Exception e) {
            throw new ReportException(e);
        }
    }
}
|
5GinFIRE/eu.5ginfire.nbi.osm5java | src/main/java/osm5/ns/yang/nfvo/mano/types/rev170208/guest/epa/guest/epa/numa/policy/numa/aware/numa/node/policy/node/om/numa/type/PairedThreads.java | package osm5.ns.yang.nfvo.mano.types.rev170208.guest.epa.guest.epa.numa.policy.numa.aware.numa.node.policy.node.om.numa.type;
import javax.annotation.Nullable;
import org.opendaylight.yangtools.yang.binding.Augmentable;
import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.common.QName;
import osm5.ns.yang.nfvo.mano.types.rev170208.$YangModuleInfoImpl;
import osm5.ns.yang.nfvo.mano.types.rev170208.guest.epa.guest.epa.numa.policy.numa.aware.numa.node.policy.node.OmNumaType;
/**
*
* <p>
* This class represents the following YANG schema fragment defined in module <b>mano-types</b>
* <pre>
* case paired-threads {
* container paired-threads {
* leaf num-paired-threads {
* type uint8;
* }
* list paired-thread-ids {
* max-elements 16;
* key thread-a;
* leaf thread-a {
* type uint8;
* }
* leaf thread-b {
* type uint8;
* }
* }
* }
* }
* </pre>The schema path to identify an instance is
* <i>mano-types/guest-epa/guest-epa/numa-policy/numa-aware/numa-node-policy/node/om-numa-type/paired-threads</i>
*
*/
// NOTE(review): appears to be a YANG-generated binding (mano-types.yang);
// prefer regenerating over hand-editing anything beyond comments.
public interface PairedThreads
    extends
    DataObject,
    Augmentable<PairedThreads>,
    OmNumaType
{

    /** QName identifying the "paired-threads" schema node this binding represents. */
    public static final QName QNAME = $YangModuleInfoImpl.qnameOf("paired-threads");

    /**
     * @return <code>org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.nfvo.mano.types.rev170208.guest.epa.guest.epa.numa.policy.numa.aware.numa.node.policy.node.om.numa.type.paired.threads.PairedThreads</code> <code>pairedThreads</code>, or <code>null</code> if not present
     */
    @Nullable
    osm5.ns.yang.nfvo.mano.types.rev170208.guest.epa.guest.epa.numa.policy.numa.aware.numa.node.policy.node.om.numa.type.paired.threads.PairedThreads getPairedThreads();

}
|
syin2/openthread | third_party/silabs/gecko_sdk_suite/v1.0/protocol/thread_2.2/app/util/bootload/tftp/tftp.c | // File: tftp.c
//
// Description: TFTP Bootloader headers and defines
//
// Copyright 2013 by Silicon Laboratories. All rights reserved. *80*
#include PLATFORM_HEADER
#include "stack/include/ember.h"
#include "stack/core/log.h"
#include "hal/hal.h"
#include "plugin/serial/serial.h"
#include "app/util/bootload/tftp/tftp.h"
#include <stdlib.h>
// TFTP session state.  The local TID is used as the UDP listen port (see
// emTftpListen); 0xFFFF marks both TIDs as "unset".
uint16_t emTftpRemoteTid = 0xFFFF;
uint16_t emTftpLocalTid = 0xFFFF;
// Sequence number of the current TFTP data block.
uint16_t emTftpBlockNumber = 0;
// Address of the remote TFTP endpoint.
EmberIpv6Address emTftpRemoteIp = {{0}};
// Block size in use; initialized to the maximum (renegotiation presumed to
// happen elsewhere -- confirm against the option-handling code).
uint16_t emTftpBlockSize = TFTP_MAX_BLOCK_SIZE;
// When true, emTftpListen skips the real UDP listen and its log output.
bool emTftpScripting = false;
// Restore the TFTP session globals to their initial (idle) values.
// Note: emTftpRemoteIp and emTftpScripting are deliberately left untouched.
void emReallyResetTftp(void)
{
  emTftpLocalTid = 0xFFFF;
  emTftpRemoteTid = 0xFFFF;
  emTftpBlockNumber = 0;
  emTftpBlockSize = TFTP_MAX_BLOCK_SIZE;
}
// Begin listening for TFTP traffic on emTftpLocalTid (also the UDP port).
//
// If randomizeTid is true, a fresh random TID is chosen; on a port collision
// the loop retries with new random TIDs up to a fixed bound.
//
// Fix: the original performed the emberGetLocalIpAddress() call and the
// retry-counter increment (++i) inside assert() expressions.  assert()
// compiles away when NDEBUG is defined, which would silently skip the address
// lookup and make the retry loop unbounded.  Side effects now happen outside
// the assertions.
void emTftpListen(bool randomizeTid)
{
  if (randomizeTid) {
    emTftpLocalTid = rand() % 0xFFFF;
  }

  uint8_t maxTries = 15;
  uint8_t i = 0;
  EmberIpv6Address localAddress = {{0}};
  bool haveLocalAddress = emberGetLocalIpAddress(0, &localAddress);
  assert(haveLocalAddress);
  (void) haveLocalAddress;  // silence "unused" warning under NDEBUG

  if (! emTftpScripting) {
    while (emberUdpListen(emTftpLocalTid, localAddress.bytes) != EMBER_SUCCESS) {
      // Only a randomized TID may be retried, and only a bounded number of times.
      i++;
      assert(randomizeTid
             && i < maxTries);
      emTftpLocalTid = rand() % 0xFFFF;
    }
    emLogLine(BOOTLOAD, "TFTP Listening on port %u", emTftpLocalTid);
  }

  emTftpListenStatusHandler(emTftpLocalTid, &localAddress);
}
// CLI command: print a farewell and terminate the host process.
void quitCommand(void)
{
  emberSerialPrintfLine(APP_SERIAL, "Bye");
  exit(0);
}
// CLI command: select the network interface (host builds only; no-op on SoC).
// With no argument, 0xFF is passed -- presumably a "choose default/prompt"
// sentinel; confirm against emTftpReallyChooseInterface.
void chooseInterfaceCommand(void)
{
#ifdef UNIX_HOST
  emTftpReallyChooseInterface((emberCommandArgumentCount() == 1
                               ? emberUnsignedCommandArgument(0)
                               : 0xFF),
                              NULL);
#endif
}
|
reki2000/rekicc | test/062-void-return/test.c | void sub() {
return;
}
/* Compiler test driver: exercises a call to a void function with a bare
   "return;".  Relies on C99's implicit "return 0" from main.  Do not
   change the code -- it is the test fixture itself. */
int main() {
    sub();
}
|
Leafly-com/ad-hok-js | src/__tests__/addPropTypes.js | <filename>src/__tests__/addPropTypes.js
/* eslint-disable no-console */
import React from 'react'
import {render} from 'react-testing-library'
import 'jest-dom/extend-expect'
import PropTypes from 'prop-types'
import {addPropTypes, addProps, flowMax} from '..'
// Component with addPropTypes as the FIRST (initial) step of the flowMax chain.
const Comp = flowMax(addPropTypes({a: PropTypes.number.isRequired}), ({a}) => (
  <div>
    <div data-testid="a">{a}</div>
  </div>
))

// Component with addPropTypes in a NON-initial position (after addProps),
// to verify prop-type checking still fires mid-chain.
const Comp2 = flowMax(
  addProps({b: 3}),
  addPropTypes({c: PropTypes.number.isRequired}),
  ({c}) => (
    <div>
      <div data-testid="c">{c}</div>
    </div>
  )
)

describe('addPropTypes', () => {
  // React reports prop-type violations via console.error, so the tests spy on
  // it: no call means the props validated; a call means a violation was caught.
  beforeAll(() => {
    jest.spyOn(console, 'error').mockImplementation(() => {})
  })

  afterAll(() => {
    console.error.mockRestore()
  })

  afterEach(() => {
    console.error.mockClear()
  })

  test('non-initial works', () => {
    render(<Comp2 c={3} />)
    expect(console.error).not.toHaveBeenCalled()
  })

  test('non-initial works with wrong prop types', () => {
    render(<Comp2 c />)
    expect(console.error).toHaveBeenCalled()
  })

  test('works', () => {
    render(<Comp a={3} />)
    expect(console.error).not.toHaveBeenCalled()
  })

  test('works with wrong prop types', () => {
    render(<Comp a />)
    expect(console.error).toHaveBeenCalled()
  })
})
|
babbel/aws-record-extensions | lib/aws-record-extensions/nested_documents/object_attr.rb | module AwsRecordExtensions
module NestedDocuments
module ObjectAttr
extend ActiveSupport::Concern
class_methods do
def object_attr(
name,
type_cast:,
# By default assume that type from type_cast has
# a pair of methods from_h/to_h:
serializer: AwsRecordExtensions::HashConversion::ObjectSerializers::SelfHashSerializer.new(type_cast),
**opts
)
marshaller = ObjectMarshaller.new(
type_cast: type_cast,
serializer: serializer
)
attr(name, marshaller, { dynamodb_type: 'M' }.merge(opts))
AwsRecordExtensions::HashConversion::ObjectSerializers::TypecastOnSetAttribute.extend_attr_setter_with_typecast(name, marshaller)
define_method("#{name}?") { send(name).present? }
end
end
end
class ObjectMarshaller
def initialize(type_cast:, serializer:)
@type_cast = type_cast
@serializer = serializer
end
def type_cast(raw_value)
if raw_value.nil?
nil
elsif raw_value.is_a?(@type_cast)
raw_value
else
@serializer.from_primitive_type(raw_value)
end
end
def serialize(raw_value)
obj = type_cast(raw_value)
obj && @serializer.to_primitive_type(obj)
end
end
end
end
|
gonayl/Chaste | heart/src/odes/ionicmodels/noncardiac/CorriasBuistSMCModified.hpp | <gh_stars>0
/*
Copyright (c) 2005-2017, University of Oxford.
All rights reserved.
University of Oxford means the Chancellor, Masters and Scholars of the
University of Oxford, having an administrative office at Wellington
Square, Oxford OX1 2JD, UK.
This file is part of Chaste.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the University of Oxford nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef CorriasBuistSMCModified_HPP_
#define CorriasBuistSMCModified_HPP_
#include "ChasteSerialization.hpp"
#include <boost/serialization/base_object.hpp>
#include "AbstractCardiacCell.hpp"
#include "AbstractStimulusFunction.hpp"
/**
* This class is a modified version of the model of a gastric
* Smooth Muscle Cell.
*
* Reference publication is:
*
* <NAME>, Buist ML.
* "A quantitative model of gastric smooth muscle cellular activation."
* Ann Biomed Eng. 2007 Sep;35(9):1595-607. Epub 2007 May 8.
*
* Modifications include:
* - ability to include/exclude built-in fake ICC stimulus
* - ability to set K+ channels-affecting CO concentrations
*/
class CorriasBuistSMCModified : public AbstractCardiacCell
{
friend class boost::serialization::access;
/**
* Boost Serialization method for archiving/checkpointing.
* Archives the object and its member variables.
*
* @param archive The boost archive.
* @param version The current version of this class.
*/
template<class Archive>
void serialize(Archive & archive, const unsigned int version)
{
archive & boost::serialization::base_object<AbstractCardiacCell >(*this);
}
private:
/**
* Scale factor for CO-affected currents
* Note that this the number that multiply the currents, hence it is not [CO],
* but a function of [CO] (for example, 2.8*[CO] - 0.1)
*/
double mScaleFactorCarbonMonoxide;
/**
* True if the fake built-in ICC stimulus is present
*/
bool mFakeIccStimulusPresent;
double Cm;/**< membrane capacitance, pF*/
double Asurf_in_cm_square;/**< Surface area in cm^2*/
double Asurf;/**< surface area (mm^2)*/
double VolCell;/**< cell volume (mm^3)*/
double hCa;/**< conc for half inactivation of fCa */
double sCa;/**< slope factor for inactivation of fCa */
/* concentrations */
double Ki; /**< intra K conc (mM)*/
double Nai; /**< intra Na conc (mM)*/
double ACh; /**< acetylcholine conc (mM)*/
double CaiRest; /**< baseline Ca conc (mM)*/
/* maximum conductances*/
double gLVA_max; /**< max conductance of ILVA*/ // (0.18 nS) * 1e-6 (mS/nS) / Asurf (mm2) = mS/mm2
double gCaL_max; /**< max conductance of ICaL*/ // (65.0 nS) * 1e-6 (mS/nS) / Asurf (mm2) = mS/mm2
double gBK_max; /**< max conductance of IBK)*/ // (45.7 nS) * 1e-6 (mS/nS) / Asurf (mm2) = mS/mm2
double gKb_max; /**< max conductance of IKb*/ // (0.0144 nS) * 1e-6 (mS/nS) / Asurf (mm2) = mS/mm2
double gKA_max; /**< max conductance of IKA*/ // (9.0 nS) * 1e-6 (mS/nS) / Asurf (mm2) = mS/mm2
double gKr_max; /**< max conductance of IKr*/ // (35.0 nS) * 1e-6 (mS/nS) / Asurf (mm2) = mS/mm2
double gNa_max; /**< max conductance of INa*/ // (3.0 nS) * 1e-6 (mS/nS) / Asurf (mm2) = mS/mm2
double gnsCC_max; /**< max conductance of InsCC*/ // (50.0 nS) * 1e-6 (mS/nS) / Asurf (mm2) = mS/mm2
double gcouple; /**< coupling conductance bewteen fake ICC and SMC*/ // 1.3 nS * 1e-6 (mS/nS) / Asurf (mm2) = mS/mm2
double JCaExt_max; /**< max flux of CaSR (mM/ms)*/
/* Temperature corrections */
double Q10Ca; /**< (dim)*/
double Q10K; /**< (dim)*/ //1.365
double Q10Na; /**< (dim)*/
double Texp; /**< (degK)*/
double T_correct_Ca ;/**< temperature correction for Ca (dim)*/
double T_correct_K ; /**< temperature correction for K (dim)*/
double T_correct_Na;/**< temperature correction for Na (dim)*/
double T_correct_gBK; /**< temperature correction for gBK*/ // (nS) * 1e-6 (mS/nS) / Asurf (mm2) = mS/mm2
/* Nernst potentials */
double EK; /**< Nernst potential for K (mV)*/
double ENa ; /**< Nernst potential for Na (mV)*/
double EnsCC; /**< Nernst potential for nsCC (mV)*/
double Ca_o; /**< mM */
double K_o; /**< mM */
double Na_o; /**< mM */
/* Nernst parameters */
double R; /**< pJ/nmol/K*/
double T; /**< degK*/
double F; /**< nC/nmol*/
double FoRT; /**< 1/mV*/
double RToF; /**< mV*/
public:
/**
* Constructor
*
* @param pSolver is a pointer to the ODE solver
* @param pIntracellularStimulus is a pointer to the intracellular stimulus
*/
CorriasBuistSMCModified(boost::shared_ptr<AbstractIvpOdeSolver> pSolver, boost::shared_ptr<AbstractStimulusFunction> pIntracellularStimulus);
/**
* Destructor
*/
~CorriasBuistSMCModified();
/**
* Now empty
*/
void VerifyStateVariables();
/**
* Calculates the ionic current
*
* @param pStateVariables the state variables of this model
* @return the total ionic current
*/
double GetIIonic(const std::vector<double>* pStateVariables=NULL);
/**
* Compute the RHS of the FitHugh-Nagumo system of ODEs
*
* @param time the current time, in milliseconds
* @param rY current values of the state variables
* @param rDY to be filled in with derivatives
*/
void EvaluateYDerivatives(double time, const std::vector<double>& rY, std::vector<double>& rDY);
/**
* Set whether we want the fake ICC stimulus or not.
* It changes the member variable mFakeIccStimulusPresent (which is true by default).
*
* @param present - true if we want the fake ICC stimulus, false otherwise
*/
void SetFakeIccStimulusPresent(bool present);
/**
* @return true if the fake ICC stimulus is present
*/
bool GetFakeIccStimulusPresent();
/**
* @return the Carbon Monoxide scale for
*/
double SetCarbonMonoxideScaleFactor();
/**
* Set the carbon monoxide scale factor.
* This will multiply the following currents: I_kr, I_Ka, Ibk
*
* @param scaleFactor the scale factor that multiply the currents.
*/
void SetCarbonMonoxideScaleFactor(double scaleFactor);
/**
* @return the Carbon Monoxide scale factor
*/
double GetCarbonMonoxideScaleFactor();
};
// Needs to be included last
#include "SerializationExportWrapper.hpp"
CHASTE_CLASS_EXPORT(CorriasBuistSMCModified)
namespace boost
{
namespace serialization
{
/**
 * Archive the constructor arguments (ODE solver and intracellular stimulus)
 * of a CorriasBuistSMCModified so the object can be re-created on load.
 *
 * @param ar the boost archive to write to
 * @param t the cell instance being saved
 * @param fileVersion archive file version (unused)
 */
template<class Archive>
inline void save_construct_data(
    Archive & ar, const CorriasBuistSMCModified * t, const unsigned int fileVersion)
{
    const boost::shared_ptr<AbstractIvpOdeSolver> p_solver = t->GetSolver();
    const boost::shared_ptr<AbstractStimulusFunction> p_stimulus = t->GetStimulusFunction();
    ar << p_solver;
    ar << p_stimulus;
}

/**
 * Read back the archived solver and stimulus and construct the cell in place
 * at t (placement new), mirroring save_construct_data above.
 *
 * @param ar the boost archive to read from
 * @param t uninitialised storage for the cell being loaded
 * @param fileVersion archive file version (unused)
 */
template<class Archive>
inline void load_construct_data(
    Archive & ar, CorriasBuistSMCModified * t, const unsigned int fileVersion)
{
    boost::shared_ptr<AbstractIvpOdeSolver> p_solver;
    boost::shared_ptr<AbstractStimulusFunction> p_stimulus;
    ar >> p_solver;
    ar >> p_stimulus;
    // Construct in already-allocated storage; boost takes ownership.
    ::new(t)CorriasBuistSMCModified(p_solver, p_stimulus);
}
}
}
#endif // CorriasBuistSMCModified_HPP_
|
vietruyn/ShareFile | app/src/main/java/com/zjk/wifiproject/util/FileUtils.java | package com.zjk.wifiproject.util;
import android.content.ContentResolver;
import android.content.Context;
import android.database.Cursor;
import android.net.Uri;
import android.os.Environment;
import android.provider.MediaStore;
import com.orhanobut.logger.Logger;
import com.zjk.wifiproject.entity.FileStyle;
import com.zjk.wifiproject.entity.WFile;
import com.zjk.wifiproject.music.MusicEntity;
import com.zjk.wifiproject.picture.PictureEntity;
import com.zjk.wifiproject.picture.PictureFolderEntity;
import com.zjk.wifiproject.vedio.VedioEntity;
import java.io.File;
import java.io.IOException;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
/**
 * File-system and MediaStore helpers: SD-card checks, file/directory creation
 * and deletion, human-readable size formatting, and queries for pictures,
 * music and videos on the device.
 *
 * Fixes over the previous revision: cursors are always closed (getVedioList
 * leaked its cursor), {@code ContentResolver.query} null returns are handled,
 * {@code File.list()/listFiles()} null returns are guarded, and the redundant
 * double-delete in {@code delAllFile} was removed.
 */
public class FileUtils {

    /**
     * @return true if external storage (SD card) is mounted and writable
     */
    public static boolean isSdcardExist() {
        return Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED);
    }

    /**
     * @return the root directory of the primary external storage
     */
    public static File isSdcard0Exist() {
        return Environment.getExternalStorageDirectory();
    }

    /**
     * Creates a directory (including any missing parents) if it does not exist.
     *
     * @param path directory path
     */
    public static void createDirFile(String path) {
        File dir = new File(path);
        if (!dir.exists()) {
            dir.mkdirs();
        }
    }

    /**
     * Creates an empty file at the given path if it does not already exist.
     *
     * @param path file path
     * @return the file, or null if creation failed with an I/O error
     */
    public static File createNewFile(String path) {
        File file = new File(path);
        if (!file.exists()) {
            try {
                file.createNewFile();
            } catch (IOException e) {
                return null; // caller treats null as "could not create"
            }
        }
        return file;
    }

    /**
     * Recursively deletes a folder and everything inside it.
     *
     * @param folderPath path of the folder to delete
     */
    public static void delFolder(String folderPath) {
        delAllFile(folderPath); // empty the folder first
        new File(folderPath).delete();
    }

    /**
     * Deletes all files and sub-folders inside the given directory
     * (the directory itself is kept).
     *
     * @param path directory path
     */
    public static void delAllFile(String path) {
        File file = new File(path);
        if (!file.exists() || !file.isDirectory()) {
            return;
        }
        String[] children = file.list();
        if (children == null) {
            // list() returns null on I/O error; nothing we can delete
            return;
        }
        for (String child : children) {
            File temp = path.endsWith(File.separator)
                    ? new File(path + child)
                    : new File(path + File.separator + child);
            if (temp.isFile()) {
                temp.delete();
            } else if (temp.isDirectory()) {
                // delFolder already calls delAllFile internally, so the old
                // explicit delAllFile+delFolder pair deleted twice.
                delFolder(path + "/" + child);
            }
        }
    }

    /**
     * @param path file path
     * @return a file:// Uri for the given path
     */
    public static Uri getUriFromFile(String path) {
        return Uri.fromFile(new File(path));
    }

    /**
     * Formats a byte count as a human-readable size (B/K/M/G).
     *
     * @param size size in bytes
     * @return formatted size string, e.g. "1.50M"
     */
    public static String formatFileSize(long size) {
        DecimalFormat df = new DecimalFormat("#.00");
        if (size < 1024) {
            return df.format((double) size) + "B";
        } else if (size < 1048576) {
            return df.format((double) size / 1024) + "K";
        } else if (size < 1073741824) {
            return df.format((double) size / 1048576) + "M";
        }
        return df.format((double) size / 1073741824) + "G";
    }

    /**
     * @param fullpath absolute path of a file
     * @return the parent-directory portion of the path (text before the last separator)
     */
    public static String getPathByFullPath(String fullpath) {
        return fullpath.substring(0, fullpath.lastIndexOf(File.separator));
    }

    /**
     * @param path absolute path of a file
     * @return the file-name portion of the path (text after the last separator)
     */
    public static String getNameByPath(String path) {
        return path.substring(path.lastIndexOf(File.separator) + 1);
    }

    /**
     * @param path file path (may be null)
     * @return true if a file exists at the path
     */
    public static boolean isFileExists(String path) {
        try {
            return new File(path).exists();
        } catch (Exception e) {
            return false; // e.g. null path or SecurityException
        }
    }

    /**
     * @return the external-storage root path when the SD card is mounted,
     *         otherwise the internal data directory path
     */
    public static String getSDPath() {
        boolean sdCardExist = Environment.getExternalStorageState().equals(
                android.os.Environment.MEDIA_MOUNTED);
        if (sdCardExist) {
            return Environment.getExternalStorageDirectory().getAbsolutePath();
        }
        return Environment.getDataDirectory().getAbsolutePath();
    }

    /**
     * Scans MediaStore for all images and groups them by parent folder.
     *
     * @param context any context, used to obtain a ContentResolver
     * @return list of picture folders, newest images first; empty on failure
     */
    public static List<PictureFolderEntity> getPictureFolderList(Context context) {
        List<PictureFolderEntity> list = new ArrayList<PictureFolderEntity>();
        // dir path -> index into list, to avoid re-scanning the same folder
        HashMap<String, Integer> dirIndex = new HashMap<String, Integer>();
        ContentResolver mContentResolver = context.getContentResolver();
        Cursor mCursor = mContentResolver.query(MediaStore.Images.Media.EXTERNAL_CONTENT_URI,
                new String[]{MediaStore.Images.ImageColumns.DATA}, "", null,
                MediaStore.MediaColumns.DATE_ADDED + " DESC");
        if (mCursor == null) {
            // query() may return null (e.g. provider unavailable)
            return list;
        }
        try {
            int dataColumn = mCursor.getColumnIndex(MediaStore.Images.Media.DATA);
            while (mCursor.moveToNext()) {
                String path = mCursor.getString(dataColumn);
                File parentFile = new File(path).getParentFile();
                if (parentFile == null) {
                    continue;
                }
                String dirPath = parentFile.getAbsolutePath();
                PictureFolderEntity folder;
                if (!dirIndex.containsKey(dirPath)) {
                    // first image seen in this folder: create its entry
                    folder = new PictureFolderEntity();
                    folder.setDir(dirPath);
                    folder.setFirstImagePath(path);
                    list.add(folder);
                    dirIndex.put(dirPath, list.size() - 1);
                } else {
                    folder = list.get(dirIndex.get(dirPath));
                }
                folder.images.add(new PictureEntity(path));
            }
        } finally {
            mCursor.close(); // always release the cursor
        }
        return list;
    }

    /**
     * Queries MediaStore for audio tracks, skipping clips shorter than one
     * minute (ringtones, notification sounds, ...).
     *
     * @param context any context, used to obtain a ContentResolver
     * @return list of music entries; empty on failure
     */
    public static List<MusicEntity> getMusicList(Context context) {
        List<MusicEntity> list = new ArrayList<MusicEntity>();
        ContentResolver resolver = context.getContentResolver();
        Cursor cursor = resolver.query(MediaStore.Audio.Media.EXTERNAL_CONTENT_URI,
                new String[] { MediaStore.Audio.Media.TITLE, // was Video.Media.TITLE; same column, fixed for consistency
                        MediaStore.Audio.Media.DURATION,
                        MediaStore.Audio.Media.ARTIST,
                        MediaStore.Audio.Media._ID,
                        MediaStore.Audio.Media.DISPLAY_NAME,
                        MediaStore.Audio.Media.DATA
                }, null,
                null,
                null);
        if (cursor == null) {
            return list;
        }
        try {
            while (cursor.moveToNext()) {
                MusicEntity music = new MusicEntity(cursor.getString(5));
                music.setTitle(cursor.getString(0));
                music.setDuration(cursor.getLong(1));
                music.setArtist(cursor.getString(2));
                music.setId(cursor.getInt(3));
                music.setDisplayName(cursor.getString(4));
                if (music.getDuration() < 60 * 1000) {
                    continue; // skip short clips
                }
                list.add(music);
            }
        } finally {
            cursor.close();
        }
        return list;
    }

    /**
     * Queries MediaStore for videos.
     *
     * @param context any context, used to obtain a ContentResolver
     * @return list of video entries; empty on failure
     */
    public static List<VedioEntity> getVedioList(Context context) {
        List<VedioEntity> list = new ArrayList<VedioEntity>();
        ContentResolver resolver = context.getContentResolver();
        Cursor cursor = resolver.query(MediaStore.Video.Media.EXTERNAL_CONTENT_URI,
                new String[] { MediaStore.Video.Media._ID,
                        MediaStore.Video.Media.DATA,
                        MediaStore.Video.Media.DURATION,
                        MediaStore.Video.Media.DISPLAY_NAME,
                        MediaStore.Video.Media.SIZE
                }, null,
                null,
                null);
        if (cursor == null) {
            return list;
        }
        try {
            while (cursor.moveToNext()) {
                VedioEntity vedio = new VedioEntity(cursor.getString(1));
                vedio.setId(cursor.getInt(0));
                vedio.setDuration(cursor.getLong(2));
                vedio.setDisplayName(cursor.getString(3));
                // vedio.setSize(cursor.getLong(4));
                L.i(vedio.toString());
                list.add(vedio);
            }
        } finally {
            // previous revision never closed this cursor (resource leak)
            cursor.close();
        }
        return list;
    }

    /**
     * Lists the entries of a directory as sorted WFile objects.
     *
     * @param path directory path
     * @return sorted file list, or null when path is null, not a directory,
     *         or cannot be listed
     */
    public static List<WFile> getCurrentFileList(String path) {
        Logger.d("查询路径" + path);
        if (path == null) {
            return null;
        }
        File file = new File(path);
        if (!file.exists() || !file.isDirectory()) {
            return null;
        }
        File[] children = file.listFiles();
        if (children == null) {
            // listFiles() returns null on I/O error / permission denial
            return null;
        }
        List<WFile> list = new ArrayList<>();
        for (File f : children) {
            L.d("-----" + f.getAbsolutePath());
            list.add(new WFile(f.getAbsolutePath()));
        }
        Collections.sort(list, new FileStyle());
        return list;
    }

    /**
     * @return the app's working directory on external storage, created on demand
     */
    public static String getProjectDir() {
        File file = new File(getSDPath() + "/WifiProject");
        if (!file.exists()) {
            file.mkdirs();
        }
        return file.getAbsolutePath();
    }

    /**
     * @return the app's picture cache directory; existing files are purged
     *         each call so only freshly written pictures remain
     */
    public static String getProjectPictureDir() {
        File file = new File(getProjectDir() + "/pictures");
        if (!file.exists()) {
            file.mkdirs();
        } else {
            File[] stale = file.listFiles();
            if (stale != null) { // guard against listFiles() null return
                for (File f : stale) {
                    f.delete();
                }
            }
        }
        return file.getAbsolutePath();
    }
}
|
lanche = ('Hambúrguer', 'Suco', 'Pizza', 'Pudim')
# Indexing and slicing on a tuple
print(lanche[1])
print(lanche[-2])
print(lanche[1:3])
print(lanche[:2])
print(lanche[2:])
print(lanche[-2:])
print(lanche[-3:])
print(lanche)
# 1st way: iterate directly over the items
print('\n')
for item in lanche:
    print(f'O {item} é delicioso')
print('Todos muito bons!')
print('\n')
print(len(lanche))
# 2nd way: iterate by index
print('\n')
for idx in range(len(lanche)):
    print(lanche[idx])
# Tuples are immutable:
# lanche[1] = 'refrigerante'
# print(lanche[1])
# 3rd way: index-based loop, also showing the position
print('\n')
for idx in range(len(lanche)):
    print(f'{lanche[idx]} na posição {idx}')
# 4th way: enumerate yields (position, item) pairs
print('\n')
for pos, item in enumerate(lanche):
    print(f'Eu vou comer {item} na posição {pos}')
# sorted() returns an ordered list; the tuple itself is unchanged
print('\n')
print(sorted(lanche))
# Tuples holding numbers
print('\n')
a = (2, 3, 4)
b = (5, 6, 7, 8, 5)
# Concatenation
c = a + b
print(c)
# Length
print(len(c))
# How many times a value appears
print(c.count(2))
# Index of the first occurrence
print(c.index(5))
print('\n')
# Index search starting from an offset
print(c.index(5, 4))
# Tuples may mix element types
pessoa = ('Henrique', 22, 'M', 83, 50)
# del(pessoa) would remove the variable entirely
print(pessoa)
|
# Demonstrates in-place list edits: item assignment, append, concatenation,
# deletion and sorting.
score = [10, 56, 67, 88, 98, 3, 21]
print(f"{score}")
score[0] = 20
print(f"{score}")
score.append(56)
print(f"{score}")
score = score + [77, 88, 99]
print(f"{score}")
del score[3]
print(f"{score}")
score.sort()
print(f"{score}")
score.sort(reverse=True)
print(f"{score}")
print("-" * 30)
|
baihongwei4/leetcode-exercise-playground | src/hongwei/leetcode/playground/other/na2/Question1.java | <filename>src/hongwei/leetcode/playground/other/na2/Question1.java
package hongwei.leetcode.playground.other.na2;
public class Question1 {
    /**
     * Decides whether the string could be made a palindrome by changing at
     * most one character.
     *
     * @param s the string to inspect
     * @return "YES" if at most one mirrored character pair differs, "NO" otherwise
     */
    public String canBePalindromeByChangingOneCharacter(String s) {
        int mismatches = 0;
        // Walk the string from both ends toward the middle, counting pairs
        // that disagree; more than one disagreement cannot be repaired by a
        // single character change.
        for (int left = 0, right = s.length() - 1; left < right; left++, right--) {
            if (s.charAt(left) != s.charAt(right)) {
                mismatches++;
                if (mismatches > 1) {
                    return "NO";
                }
            }
        }
        return "YES";
    }
}
|
cvisionai/tator-py | test/test_job_cluster.py | <gh_stars>1-10
import random
import uuid
import pytest
import tator
def random_job_cluster_spec():
    """Return a randomized job-cluster spec dict for CRUD tests.

    Fix: the ``token`` value previously contained a redacted ``<PASSWORD>``
    placeholder, which is not valid Python; it is now a uid-derived dummy
    token, matching the style of the ``cert`` field.
    """
    uid = str(uuid.uuid4())
    return {
        "name": f"Job Cluster {uid}",
        "host": "test-host",
        "port": random.randint(4000, 6000),
        "token": f"{uid}.token",
        "cert": f"{uid}.cert",
    }
def test_job_cluster_crud(host, token, organization):
    """End-to-end create/read/update/delete check for job clusters.

    Uses the ``host``/``token``/``organization`` pytest fixtures. The current
    user is granted an Admin affiliation first so the cluster endpoints are
    accessible.
    """
    tator_api = tator.get_api(host, token)
    user = tator_api.whoami()
    tator_api.create_affiliation(organization, {"permission": "Admin", "user": user.id})
    job_cluster_spec = random_job_cluster_spec()
    # Test creation: every field of the spec must round-trip through the API
    response = tator_api.create_job_cluster(organization, job_cluster_spec)
    assert hasattr(response, "id")
    jc_id = response.id
    response = tator_api.get_job_cluster(jc_id)
    assert response.id == jc_id
    for k, v in job_cluster_spec.items():
        assert getattr(response, k) == v
    # Test read: the listing should contain exactly the one cluster we created
    jc_list = tator_api.get_job_cluster_list(organization)
    assert len(jc_list) == 1
    job_cluster = jc_list[0]
    assert job_cluster.id == jc_id
    for k, v in job_cluster_spec.items():
        assert getattr(job_cluster, k) == v
    # Test update: push a fresh random spec and confirm all fields changed over
    job_cluster_spec = random_job_cluster_spec()
    response = tator_api.update_job_cluster(jc_id, job_cluster_spec)
    assert response.message == f"Job Cluster {jc_id} successfully updated!"
    response = tator_api.get_job_cluster(jc_id)
    assert response.id == jc_id
    for k, v in job_cluster_spec.items():
        assert getattr(response, k) == v
    # Test delete: a subsequent read must fail with an ApiException
    response = tator_api.delete_job_cluster(jc_id)
    assert response.message == "Job cluster deleted successfully!"
    with pytest.raises(tator.openapi.tator_openapi.exceptions.ApiException):
        tator_api.get_job_cluster(jc_id)
|
trgswe/fs2open.github.com | qtfred/src/mission/dialogs/MissionGoalsDialogModel.cpp | //
//
#include "MissionGoalsDialogModel.h"
namespace fso {
namespace fred {
namespace dialogs {

// Dialog model for editing mission goals: keeps a working copy (m_goals) of
// the global Mission_goals array plus a signature array (m_sig) mapping each
// working goal back to its original index (-1 for newly created goals).
MissionGoalsDialogModel::MissionGoalsDialogModel(QObject* parent, fso::fred::EditorViewport* viewport) :
	AbstractDialogModel(parent, viewport) {
}

// Commit the working copy back into the global mission state: rebuild each
// goal's sexp formula from the tree control, rename sexp references for
// renamed goals, and invalidate references to deleted goals.
bool MissionGoalsDialogModel::apply()
{
	// The formulas will be re-saved from the tree below, so release the old ones first.
	for (int i = 0; i < Num_goals; i++)
		free_sexp2(Mission_goals[i].formula);

	// Must be computed before the globals are overwritten.
	auto changes_detected = query_modified();

	SCP_vector<std::pair<SCP_string, SCP_string>> names;

	for (int i = 0; i < Num_goals; i++)
		Mission_goals[i].satisfied = 0;	// use this as a processed flag

	// rename all sexp references to old events
	for (int i = 0; i < m_num_goals; i++)
		if (m_sig[i] >= 0) {
			names.emplace_back(Mission_goals[m_sig[i]].name, m_goals[i].name);
			Mission_goals[m_sig[i]].satisfied = 1;
		}

	// invalidate all sexp references to deleted events.
	for (int i = 0; i < Num_goals; i++)
		if (!Mission_goals[i].satisfied) {
			names.emplace_back(Mission_goals[i].name, SCP_string("<") + Mission_goals[i].name + ">");
		}

	Num_goals = m_num_goals;
	for (int i = 0; i < Num_goals; i++) {
		Mission_goals[i] = m_goals[i];
		Mission_goals[i].formula = _sexp_tree->save_tree(Mission_goals[i].formula);
		if (The_mission.game_type & MISSION_TYPE_MULTI_TEAMS) {
			Assert(Mission_goals[i].team != -1);
		}
	}

	// now update all sexp references
	for (const auto& entry : names) {
		update_sexp_references(entry.first.c_str(), entry.second.c_str(), OPF_GOAL_NAME);
	}

	// Only fire the signal after the changes have been applied to make sure the other parts of the code see the updated
	// state
	if (changes_detected) {
		_editor->missionChanged();
	}

	return true;
}

void MissionGoalsDialogModel::reject() {
	// Nothing to do here
}

// Accessor for the goal currently selected in the dialog; asserts validity.
mission_goal& MissionGoalsDialogModel::getCurrentGoal() {
	Assertion(cur_goal >= 0 && cur_goal < m_num_goals, "Current goal index is not valid!");
	return m_goals[cur_goal];
}

// True when cur_goal points at an existing working-copy goal.
bool MissionGoalsDialogModel::isCurrentGoalValid() const {
	return cur_goal >= 0 && cur_goal < m_num_goals;
}

// Populate the working copy from the global Mission_goals array.
void MissionGoalsDialogModel::initializeData() {
	m_num_goals = Num_goals;
	for (auto i = 0; i < Num_goals; i++) {
		m_goals[i] = Mission_goals[i];
		m_sig[i] = i;

		// Give unnamed goals a visible placeholder in the UI.
		if (strlen(m_goals[i].name) <= 0) {
			strcpy_s(m_goals[i].name, "<unnamed>");
		}
	}
	cur_goal = -1;
	modelChanged();
}

std::array<mission_goal, MAX_GOALS>& MissionGoalsDialogModel::getGoals() {
	return m_goals;
}

void MissionGoalsDialogModel::setCurrentGoal(int index) {
	cur_goal = index;
	modelChanged();
}

// A goal is shown when its category matches the currently displayed type.
bool MissionGoalsDialogModel::isGoalVisible(const mission_goal& goal) const {
	return (goal.type & GOAL_TYPE_MASK) == m_display_goal_types;
}

int MissionGoalsDialogModel::getNumGoals() const {
	return m_num_goals;
}

void MissionGoalsDialogModel::setGoalDisplayType(int type) {
	m_display_goal_types = type;
}

// True if the working copy differs from the global mission goals in any
// field the editor can change (name, message, type, score, team).
bool MissionGoalsDialogModel::query_modified() {
	int i;

	if (modified)
		return true;

	if (Num_goals != m_num_goals)
		return true;

	for (i=0; i<Num_goals; i++) {
		if (stricmp(Mission_goals[i].name, m_goals[i].name))
			return true;
		if (stricmp(Mission_goals[i].message, m_goals[i].message))
			return true;
		if (Mission_goals[i].type != m_goals[i].type)
			return true;
		if ( Mission_goals[i].score != m_goals[i].score )
			return true;
		if ( Mission_goals[i].team != m_goals[i].team )
			return true;
	}

	return false;
}

void MissionGoalsDialogModel::setTreeControl(sexp_tree* tree) {
	_sexp_tree = tree;
}

// Remove the goal whose sexp formula handle matches, shifting later goals
// (and their signatures) down to fill the gap.
void MissionGoalsDialogModel::deleteGoal(int formula) {
	int goal;

	for (goal=0; goal<m_num_goals; goal++){
		if (m_goals[goal].formula == formula){
			break;
		}
	}

	Assert(goal < m_num_goals);
	while (goal < m_num_goals - 1) {
		m_goals[goal] = m_goals[goal + 1];
		m_sig[goal] = m_sig[goal + 1];
		goal++;
	}

	m_num_goals--;
	modelChanged();
}

// Re-point the goal that owned old_form at new_form (the tree control hands
// out a new root handle after edits).
void MissionGoalsDialogModel::changeFormula(int old_form, int new_form) {
	int i;

	for (i=0; i<m_num_goals; i++){
		if (m_goals[i].formula == old_form){
			break;
		}
	}

	Assert(i < m_num_goals);
	m_goals[i].formula = new_form;
	modelChanged();
}

// Append a default-initialised goal of the currently displayed category and
// return a reference to it; its signature is -1 (not yet in Mission_goals).
mission_goal& MissionGoalsDialogModel::createNewGoal() {
	Assert(m_num_goals < MAX_GOALS);
	m_goals[m_num_goals].type = m_display_goal_types;			// this also marks the goal as valid since bit not set
	m_sig[m_num_goals] = -1;
	strcpy_s(m_goals[m_num_goals].name, "Goal name");
	strcpy_s(m_goals[m_num_goals].message, "Mission goal text");

	m_goals[m_num_goals].score = 0;

	// team defaults to the first team.
	m_goals[m_num_goals].team = 0;

	auto new_goal = m_num_goals++;

	return m_goals[new_goal];
}

void MissionGoalsDialogModel::setCurrentGoalMessage(const char* text) {
	Assertion(isCurrentGoalValid(), "Current goal is not valid!");

	strcpy_s(getCurrentGoal().message, text);
	modelChanged();
}

// NOTE(review): unlike the other setCurrentGoal* setters this does not assert
// isCurrentGoalValid() before indexing m_goals[cur_goal] — confirm whether the
// assertion was omitted intentionally.
void MissionGoalsDialogModel::setCurrentGoalCategory(int type) {
	// change the type being sure to keep the invalid bit if set
	auto otype = m_goals[cur_goal].type;
	m_goals[cur_goal].type = type;
	if ( otype & INVALID_GOAL ){
		m_goals[cur_goal].type |= INVALID_GOAL;
	}
	modelChanged();
}

void MissionGoalsDialogModel::setCurrentGoalScore(int value) {
	Assertion(isCurrentGoalValid(), "Current goal is not valid!");

	getCurrentGoal().score = value;
	modelChanged();
}

void MissionGoalsDialogModel::setCurrentGoalName(const char* name) {
	Assertion(isCurrentGoalValid(), "Current goal is not valid!");

	strcpy_s(getCurrentGoal().name, name);
	modelChanged();
}

// NOTE(review): the three setters below do not call modelChanged(), unlike
// the setters above — confirm whether that is intentional.
void MissionGoalsDialogModel::setCurrentGoalInvalid(bool invalid) {
	Assertion(isCurrentGoalValid(), "Current goal is not valid!");

	if (invalid) {
		getCurrentGoal().type |= INVALID_GOAL;
	} else {
		getCurrentGoal().type &= ~INVALID_GOAL;
	}
}

void MissionGoalsDialogModel::setCurrentGoalNoMusic(bool noMusic) {
	Assertion(isCurrentGoalValid(), "Current goal is not valid!");

	if (noMusic) {
		getCurrentGoal().type |= MGF_NO_MUSIC;
	} else {
		getCurrentGoal().type &= ~MGF_NO_MUSIC;
	}
}

void MissionGoalsDialogModel::setCurrentGoalTeam(int team) {
	Assertion(isCurrentGoalValid(), "Current goal is not valid!");

	getCurrentGoal().team = team;
}

}
}
}
|
sahin88/Django_Rest_Framework_Redux_React_Estate_App_FullStack | node_modules/redex/test/adhoc/thunk.js | <gh_stars>1-10
import fs from 'fs';

/**
 * Read a file, resolving with its contents.
 *
 * Bug fix: the previous version was an async function returning a thunk
 * (`cb => fs.readFile(file, cb)`). `await` does not execute thunks, so
 * callers received the thunk function itself and the file was never read.
 * Wrapping the callback API in a Promise makes `await readFile(...)` yield
 * the actual data.
 *
 * @param {string} file path of the file to read
 * @returns {Promise<Buffer>} resolves with the file contents
 */
function readFile(file) {
  return new Promise((resolve, reject) => {
    fs.readFile(file, (err, data) => (err ? reject(err) : resolve(data)));
  });
}

async function test() {
  return await readFile('README.md');
}

test()
  .then(result => {
    console.log('done', result);
  })
  .catch(err => {
    // Without this handler a missing README.md would crash the process with
    // an unhandled rejection.
    console.error('failed', err);
  });
|
hustjl22/accumulo | test/src/test/java/org/apache/accumulo/test/DeleteRowsIT.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.test;
import static org.junit.Assert.assertEquals;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.test.functional.FunctionalTestUtils;
import org.apache.accumulo.test.functional.SimpleMacIT;
import org.junit.Test;
/**
 * Integration test: {@code deleteRows} with null start and end rows (i.e. the
 * entire row range) on a freshly created table must leave the table empty and
 * must not fail.
 */
public class DeleteRowsIT extends SimpleMacIT {

    // Give the mini-cluster operations up to 5 minutes before failing.
    @Test(timeout = 5 * 60 * 1000)
    public void test() throws Exception {
        Connector c = getConnector();
        String[] tableNames = this.getUniqueNames(20);
        for (String tableName : tableNames) {
            c.tableOperations().create(tableName);
            // null..null covers every row; on an empty table this is a no-op.
            c.tableOperations().deleteRows(tableName, null, null);
            Scanner scanner = c.createScanner(tableName, Authorizations.EMPTY);
            assertEquals(0, FunctionalTestUtils.count(scanner));
        }
    }
}
|
yacob21/Century | app/src/main/java/com/example/user/century/UnggahActivity.java | <filename>app/src/main/java/com/example/user/century/UnggahActivity.java<gh_stars>0
package com.example.user.century;
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.net.Uri;
import android.os.Build;
import android.os.Environment;
import android.provider.MediaStore;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.Toast;
import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.firebase.storage.FirebaseStorage;
import com.google.firebase.storage.OnProgressListener;
import com.google.firebase.storage.StorageReference;
import com.google.firebase.storage.UploadTask;
import com.squareup.picasso.Picasso;
import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import static android.R.attr.data;
public class UnggahActivity extends AppCompatActivity {
protected static final int SELECT_PHOTO = 100;
protected static final int CAPTURE_IMAGE_ACTIVITY_REQUEST_CODE = 90;
Uri selectedImage,imageUri;
FirebaseStorage storage;
StorageReference storageRef,imageRef;
ProgressDialog progressDialog;
UploadTask uploadTask;
ImageView imageView;
LinearLayout btnGallery,btnKamera,btnUnggah;
Context context;
Bitmap bitmap;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_unggah);
context = this;
imageView = (ImageView) findViewById(R.id.imageView);
btnGallery = (LinearLayout) findViewById(R.id.btnGallery);
btnKamera = (LinearLayout) findViewById(R.id.btnKamera);
btnUnggah = (LinearLayout) findViewById(R.id.btnUpload);
storage = FirebaseStorage.getInstance();
//creates a storage reference
storageRef = storage.getReference();
String[] PERMISSIONS = {android.Manifest.permission.CAMERA,android.Manifest.permission.WRITE_EXTERNAL_STORAGE};
if (!hasPermissions(context, PERMISSIONS)) {
ActivityCompat.requestPermissions((Activity) context, PERMISSIONS, 0 );
}
btnUnggah.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if(selectedImage != null) {
uploadImage(v);
}
else{
Toast.makeText(context, "Harap masukkan gambar terlebih dahulu", Toast.LENGTH_SHORT).show();
}
}
});
btnKamera.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (Build.VERSION.SDK_INT >= 23) {
String[] PERMISSIONS = {android.Manifest.permission.CAMERA};
if (!hasPermissions(context, PERMISSIONS)) {
ActivityCompat.requestPermissions((Activity) context, PERMISSIONS, CAPTURE_IMAGE_ACTIVITY_REQUEST_CODE );
} else {
takeImage();
}
} else {
takeImage();
}
}
});
btnGallery.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (Build.VERSION.SDK_INT >= 23) {
String[] PERMISSIONS = {android.Manifest.permission.WRITE_EXTERNAL_STORAGE};
if (!hasPermissions(context, PERMISSIONS)) {
ActivityCompat.requestPermissions((Activity) context, PERMISSIONS, SELECT_PHOTO );
} else {
selectImage(v);
}
} else {
selectImage(v);
}
}
});
}
public void selectImage(View view) {
Intent photoPickerIntent = new Intent(Intent.ACTION_PICK);
photoPickerIntent.setType("image/*");
startActivityForResult(photoPickerIntent, SELECT_PHOTO);
}
public void takeImage(){
Intent i = getIntent();
Intent intent = new Intent(android.provider.MediaStore.ACTION_IMAGE_CAPTURE);
File photo = new File(Environment.getExternalStorageDirectory(),"picture"+i.getStringExtra("kode")+".jpg");
imageUri = Uri.fromFile(photo);
startActivityForResult(intent, CAPTURE_IMAGE_ACTIVITY_REQUEST_CODE);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent imageReturnedIntent) {
super.onActivityResult(requestCode, resultCode, imageReturnedIntent);
if ( requestCode == SELECT_PHOTO && resultCode == RESULT_OK) {
Toast.makeText(UnggahActivity.this,"Image selected, click on upload button",Toast.LENGTH_SHORT).show();
selectedImage = imageReturnedIntent.getData();
Toast.makeText(context, String.valueOf(selectedImage), Toast.LENGTH_SHORT).show();
Picasso.with(UnggahActivity.this).load(selectedImage).into(imageView);
}
else if (requestCode == CAPTURE_IMAGE_ACTIVITY_REQUEST_CODE && resultCode == RESULT_OK && imageReturnedIntent != null && imageReturnedIntent.getData() != null) {
Toast.makeText(UnggahActivity.this,"Image selected, click on upload button",Toast.LENGTH_SHORT).show();
selectedImage = imageReturnedIntent.getData();
Toast.makeText(context, String.valueOf(selectedImage), Toast.LENGTH_SHORT).show();
try {
bitmap = MediaStore.Images.Media.getBitmap(getContentResolver(), selectedImage);
imageView.setImageBitmap(bitmap);
} catch (IOException e) {
e.printStackTrace();
}
}
}
public void uploadImage(View view) {
//create reference to images folder and assing a name to the file that will be uploaded
Intent i = getIntent();
imageRef = storageRef.child("images/"+i.getStringExtra("kode"));
//creating and showing progress dialog
progressDialog = new ProgressDialog(this);
progressDialog.setMax(100);
progressDialog.setMessage("Uploading...");
progressDialog.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL);
progressDialog.show();
progressDialog.setCancelable(false);
//starting upload
uploadTask = imageRef.putFile(selectedImage);
// Observe state change events such as progress, pause, and resume
uploadTask.addOnProgressListener(new OnProgressListener<UploadTask.TaskSnapshot>() {
@Override
public void onProgress(UploadTask.TaskSnapshot taskSnapshot) {
double progress = (100.0 * taskSnapshot.getBytesTransferred()) / taskSnapshot.getTotalByteCount();
//sets and increments value of progressbar
progressDialog.incrementProgressBy((int) progress);
}
});
// Register observers to listen for when the download is done or if it fails
uploadTask.addOnFailureListener(new OnFailureListener() {
@Override
public void onFailure(@NonNull Exception exception) {
// Handle unsuccessful uploads
Toast.makeText(UnggahActivity.this,"Error in uploading!",Toast.LENGTH_SHORT).show();
progressDialog.dismiss();
}
}).addOnSuccessListener(new OnSuccessListener<UploadTask.TaskSnapshot>() {
@Override
public void onSuccess(UploadTask.TaskSnapshot taskSnapshot) {
// taskSnapshot.getMetadata() contains file metadata such as size, content-type, and download URL.
// Uri downloadUrl = taskSnapshot.getDownloadUrl();
Toast.makeText(UnggahActivity.this,"Upload successful",Toast.LENGTH_SHORT).show();
progressDialog.dismiss();
finish();
//showing the uploaded image in ImageView using the download url
// Picasso.with(UnggahActivity.this).load(downloadUrl).into(imageView);
}
});
}
/**
 * Receives the results of runtime-permission requests for the camera
 * capture and gallery selection flows.
 *
 * <p>BUGFIX: the previous switch had no {@code break} statements, so the
 * capture case fell through into the gallery case. The grant branches are
 * intentionally empty — the user re-triggers the action after granting.
 */
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    switch (requestCode) {
        case CAPTURE_IMAGE_ACTIVITY_REQUEST_CODE:
            if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                // Camera permission granted; nothing to resume automatically.
            }
            break;
        case SELECT_PHOTO:
            if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                // Storage permission granted; nothing to resume automatically.
            }
            break;
        default:
            // Unrelated request code: ignore.
            break;
    }
}
/**
 * Returns whether every permission in {@code permissions} is currently
 * granted. Below Android M (or with a null context/array) runtime
 * permissions do not apply, so the check trivially succeeds.
 */
private static boolean hasPermissions(Context context, String... permissions) {
    boolean runtimeChecksApply =
            Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && context != null && permissions != null;
    if (!runtimeChecksApply) {
        return true;
    }
    for (String perm : permissions) {
        boolean granted =
                ActivityCompat.checkSelfPermission(context, perm) == PackageManager.PERMISSION_GRANTED;
        if (!granted) {
            return false;
        }
    }
    return true;
}
}
|
yangyimincn/ucloud-sdk-go | private/utils/patch.go | package utils
import (
"regexp"
)
// Patch is the interface implemented by converters that rewrite a raw
// byte payload (e.g. an API response body) before further processing.
type Patch interface {
	// Patch transforms the given bytes and returns the patched result.
	Patch([]byte) []byte
}
// RegexpPatcher is a Patch implementation that rewrites payloads by
// applying a regular-expression find/replace.
type RegexpPatcher struct {
	pattern     *regexp.Regexp // compiled search pattern
	replacement string         // replacement text; may reference capture groups ($1, ...)
}
// NewRegexpPatcher returns a patcher that applies the given regular
// expression with the given replacement. It panics if the expression
// does not compile (regexp.MustCompile semantics).
func NewRegexpPatcher(regex string, repl string) *RegexpPatcher {
	patcher := &RegexpPatcher{
		pattern:     regexp.MustCompile(regex),
		replacement: repl,
	}
	return patcher
}
// Patch converts one byte slice into another by applying the patcher's
// replacement rule; it round-trips through a string and delegates to
// PatchString.
func (p *RegexpPatcher) Patch(body []byte) []byte {
	// TODO: ensure why the pattern will be disabled when there are multiple goroutines for bytes replacement
	return []byte(p.PatchString(string(body)))
}
// PatchString applies the configured regular-expression replacement to
// body and returns the rewritten string.
func (p *RegexpPatcher) PatchString(body string) string {
	patched := p.pattern.ReplaceAllString(body, p.replacement)
	return patched
}
// RetCodePatcher rewrites a string-typed `RetCode` JSON field into an integer.
var RetCodePatcher = NewRegexpPatcher(`"RetCode":\s?"(\d+)"`, `"RetCode": $1`)

// PortPatcher rewrites a string-typed `Port` JSON field into an integer.
var PortPatcher = NewRegexpPatcher(`"Port":\s?"(\d+)"`, `"Port": $1`)

// FrequencePatcher rewrites a string-typed `Frequence` JSON field into a float64.
var FrequencePatcher = NewRegexpPatcher(`"Frequence":\s?"([\d.]+)"`, `"Frequence": $1`)
|
Yeep/zeebe | clients/java/src/main/java/io/camunda/zeebe/client/impl/ZeebeClientCredentials.java | <reponame>Yeep/zeebe
/*
* Copyright © 2017 camunda services GmbH (<EMAIL>)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.camunda.zeebe.client.impl;
import com.fasterxml.jackson.annotation.JsonAlias;
import com.fasterxml.jackson.annotation.JsonGetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonSetter;
import java.time.Instant;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Objects;
/**
 * OAuth credential payload for the Zeebe client: access token, its expiry,
 * and the token type, deserialized from either snake_case or lowercase
 * JSON field names.
 *
 * <p>Equality is defined on {@code accessToken} and {@code tokenType} only;
 * {@code hashCode} is kept consistent with that (previously it also hashed
 * {@code expiry}, violating the equals/hashCode contract).
 */
public final class ZeebeClientCredentials {
  @JsonAlias({"accesstoken", "access_token"})
  private String accessToken;

  private ZonedDateTime expiry;

  @JsonAlias({"tokentype", "token_type"})
  private String tokenType;

  /** No-arg constructor required by Jackson deserialization. */
  public ZeebeClientCredentials() {}

  public ZeebeClientCredentials(
      final String accessToken, final ZonedDateTime expiry, final String tokenType) {
    this.accessToken = accessToken;
    this.expiry = expiry;
    this.tokenType = tokenType;
  }

  public String getAccessToken() {
    return accessToken;
  }

  public String getTokenType() {
    return tokenType;
  }

  /** Parses an ISO zoned date-time, e.g. {@code 2021-01-01T00:00:00+01:00}. */
  @JsonSetter("expiry")
  public void setExpiry(final String expiry) {
    this.expiry = ZonedDateTime.parse(expiry);
  }

  /** Derives an absolute expiry from a relative lifetime in seconds. */
  @JsonSetter("expires_in")
  public void setExpiresIn(final String expiresIn) {
    expiry = ZonedDateTime.now().plusSeconds(Long.parseLong(expiresIn));
  }

  // NOTE(review): throws NullPointerException if neither expiry setter was
  // called before serialization — confirm callers always populate expiry.
  @JsonGetter("expiry")
  public String getExpiry() {
    return expiry.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
  }

  /** Returns true while the token has not yet expired. */
  @JsonIgnore
  public boolean isValid() {
    return expiry.toInstant().isAfter(Instant.now());
  }

  @Override
  public int hashCode() {
    // BUGFIX: must hash exactly the fields equals() compares; expiry was
    // previously included, so equal instances could have different hashes.
    return Objects.hash(accessToken, tokenType);
  }

  @Override
  public boolean equals(final Object o) {
    if (o == null || !o.getClass().equals(getClass())) {
      return false;
    }
    final ZeebeClientCredentials other = (ZeebeClientCredentials) o;
    // Objects.equals is null-safe; partially deserialized instances may hold nulls.
    return Objects.equals(accessToken, other.accessToken)
        && Objects.equals(tokenType, other.tokenType);
  }
}
|
imvu/bluesteel | app/logic/bluesteel/controllers/__init__.py | """ Automatic file """
from app.logic.bluesteel.controllers.BluesteelLayoutController import BluesteelLayoutController
from app.logic.bluesteel.controllers.BluesteelProjectController import BluesteelProjectController
|
qingqibing/go-genproto | googleapis/ads/googleads/v2/resources/operating_system_version_constant.pb.go | // Code generated by protoc-gen-go. DO NOT EDIT.
// source: google/ads/googleads/v2/resources/operating_system_version_constant.proto
package resources
import (
fmt "fmt"
math "math"
proto "github.com/golang/protobuf/proto"
wrappers "github.com/golang/protobuf/ptypes/wrappers"
enums "google.golang.org/genproto/googleapis/ads/googleads/v2/enums"
_ "google.golang.org/genproto/googleapis/api/annotations"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
// A mobile operating system version or a range of versions, depending on
// `operator_type`. List of available mobile platforms at
// https://developers.google.com/adwords/api/docs/appendix/codes-formats#mobile-platforms
type OperatingSystemVersionConstant struct {
// Output only. The resource name of the operating system version constant.
// Operating system version constant resource names have the form:
//
// `operatingSystemVersionConstants/{criterion_id}`
ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"`
// Output only. The ID of the operating system version.
Id *wrappers.Int64Value `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"`
// Output only. Name of the operating system.
Name *wrappers.StringValue `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"`
// Output only. The OS Major Version number.
OsMajorVersion *wrappers.Int32Value `protobuf:"bytes,4,opt,name=os_major_version,json=osMajorVersion,proto3" json:"os_major_version,omitempty"`
// Output only. The OS Minor Version number.
OsMinorVersion *wrappers.Int32Value `protobuf:"bytes,5,opt,name=os_minor_version,json=osMinorVersion,proto3" json:"os_minor_version,omitempty"`
// Output only. Determines whether this constant represents a single version or a range of
// versions.
OperatorType enums.OperatingSystemVersionOperatorTypeEnum_OperatingSystemVersionOperatorType `protobuf:"varint,6,opt,name=operator_type,json=operatorType,proto3,enum=google.ads.googleads.v2.enums.OperatingSystemVersionOperatorTypeEnum_OperatingSystemVersionOperatorType" json:"operator_type,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *OperatingSystemVersionConstant) Reset() { *m = OperatingSystemVersionConstant{} }
func (m *OperatingSystemVersionConstant) String() string { return proto.CompactTextString(m) }
func (*OperatingSystemVersionConstant) ProtoMessage() {}
func (*OperatingSystemVersionConstant) Descriptor() ([]byte, []int) {
return fileDescriptor_fc4e1bfff4c9e1bd, []int{0}
}
func (m *OperatingSystemVersionConstant) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_OperatingSystemVersionConstant.Unmarshal(m, b)
}
func (m *OperatingSystemVersionConstant) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_OperatingSystemVersionConstant.Marshal(b, m, deterministic)
}
func (m *OperatingSystemVersionConstant) XXX_Merge(src proto.Message) {
xxx_messageInfo_OperatingSystemVersionConstant.Merge(m, src)
}
func (m *OperatingSystemVersionConstant) XXX_Size() int {
return xxx_messageInfo_OperatingSystemVersionConstant.Size(m)
}
func (m *OperatingSystemVersionConstant) XXX_DiscardUnknown() {
xxx_messageInfo_OperatingSystemVersionConstant.DiscardUnknown(m)
}
var xxx_messageInfo_OperatingSystemVersionConstant proto.InternalMessageInfo
func (m *OperatingSystemVersionConstant) GetResourceName() string {
if m != nil {
return m.ResourceName
}
return ""
}
func (m *OperatingSystemVersionConstant) GetId() *wrappers.Int64Value {
if m != nil {
return m.Id
}
return nil
}
func (m *OperatingSystemVersionConstant) GetName() *wrappers.StringValue {
if m != nil {
return m.Name
}
return nil
}
func (m *OperatingSystemVersionConstant) GetOsMajorVersion() *wrappers.Int32Value {
if m != nil {
return m.OsMajorVersion
}
return nil
}
func (m *OperatingSystemVersionConstant) GetOsMinorVersion() *wrappers.Int32Value {
if m != nil {
return m.OsMinorVersion
}
return nil
}
func (m *OperatingSystemVersionConstant) GetOperatorType() enums.OperatingSystemVersionOperatorTypeEnum_OperatingSystemVersionOperatorType {
if m != nil {
return m.OperatorType
}
return enums.OperatingSystemVersionOperatorTypeEnum_UNSPECIFIED
}
func init() {
proto.RegisterType((*OperatingSystemVersionConstant)(nil), "google.ads.googleads.v2.resources.OperatingSystemVersionConstant")
}
func init() {
proto.RegisterFile("google/ads/googleads/v2/resources/operating_system_version_constant.proto", fileDescriptor_fc4e1bfff4c9e1bd)
}
var fileDescriptor_fc4e1bfff4c9e1bd = []byte{
// 531 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0xdd, 0x6a, 0xd4, 0x40,
0x14, 0x26, 0xd9, 0xb6, 0x60, 0x6c, 0x8b, 0xe4, 0x6a, 0xad, 0xa5, 0x6e, 0x95, 0xc2, 0x5e, 0xcd,
0x48, 0xea, 0x0f, 0xc6, 0x0b, 0xc9, 0x16, 0x29, 0x2d, 0x68, 0xcb, 0x56, 0x72, 0x21, 0x0b, 0x61,
0x76, 0x33, 0x4d, 0x47, 0x36, 0x73, 0xc2, 0xcc, 0x64, 0x65, 0x11, 0x05, 0xdf, 0x40, 0x7c, 0x02,
0xf1, 0xd2, 0x47, 0xf1, 0x29, 0x7a, 0xdd, 0x47, 0xf0, 0x4a, 0x92, 0xcc, 0xa4, 0x11, 0xdd, 0x54,
0x7b, 0x77, 0x76, 0xbf, 0x9f, 0xf3, 0xcd, 0x9c, 0x39, 0x71, 0x0e, 0x12, 0x80, 0x64, 0x4a, 0x31,
0x89, 0x25, 0xae, 0xca, 0xa2, 0x9a, 0x79, 0x58, 0x50, 0x09, 0xb9, 0x98, 0x50, 0x89, 0x21, 0xa3,
0x82, 0x28, 0xc6, 0x93, 0x48, 0xce, 0xa5, 0xa2, 0x69, 0x34, 0xa3, 0x42, 0x32, 0xe0, 0xd1, 0x04,
0xb8, 0x54, 0x84, 0x2b, 0x94, 0x09, 0x50, 0xe0, 0x6e, 0x57, 0x7a, 0x44, 0x62, 0x89, 0x6a, 0x2b,
0x34, 0xf3, 0x50, 0x6d, 0xb5, 0x71, 0xb8, 0xa8, 0x1b, 0xe5, 0x79, 0xda, 0xd2, 0xa9, 0x02, 0x40,
0x44, 0x6a, 0x9e, 0xd1, 0xaa, 0xdd, 0xc6, 0x5d, 0xe3, 0x95, 0x31, 0x7c, 0xca, 0xe8, 0x34, 0x8e,
0xc6, 0xf4, 0x8c, 0xcc, 0x18, 0x08, 0x4d, 0xb8, 0xdd, 0x20, 0x98, 0x08, 0x1a, 0xda, 0xd2, 0x50,
0xf9, 0x6b, 0x9c, 0x9f, 0xe2, 0x77, 0x82, 0x64, 0x19, 0x15, 0x52, 0xe3, 0x9b, 0x0d, 0x29, 0xe1,
0x1c, 0x14, 0x51, 0x0c, 0xb8, 0x46, 0xef, 0x7d, 0x5d, 0x76, 0xb6, 0x8e, 0x4c, 0xd4, 0x93, 0x32,
0x69, 0x58, 0x05, 0xdd, 0xd3, 0x37, 0xe2, 0xc6, 0xce, 0x9a, 0x69, 0x19, 0x71, 0x92, 0xd2, 0xae,
0xd5, 0xb3, 0xfa, 0x37, 0x06, 0xcf, 0xcf, 0x83, 0xce, 0xcf, 0xe0, 0xa9, 0xf3, 0xe4, 0xf2, 0x7e,
0x74, 0x95, 0x31, 0x89, 0x26, 0x90, 0xe2, 0x76, 0xdf, 0xe1, 0xaa, 0x71, 0x7d, 0x45, 0x52, 0xea,
0x3e, 0x70, 0x6c, 0x16, 0x77, 0xed, 0x9e, 0xd5, 0xbf, 0xe9, 0xdd, 0xd1, 0x4e, 0xc8, 0x9c, 0x09,
0x1d, 0x70, 0xf5, 0xf8, 0x61, 0x48, 0xa6, 0x39, 0x1d, 0x74, 0xce, 0x83, 0xce, 0xd0, 0x66, 0xb1,
0xfb, 0xc8, 0x59, 0x2a, 0xe3, 0x74, 0x4a, 0xcd, 0xe6, 0x1f, 0x9a, 0x13, 0x25, 0x18, 0x4f, 0x1a,
0xa2, 0x92, 0xee, 0x1e, 0x3a, 0xb7, 0x40, 0x46, 0x29, 0x79, 0x0b, 0xc2, 0xcc, 0xa4, 0xbb, 0xb4,
0xb8, 0xed, 0xae, 0xd7, 0x70, 0x58, 0x07, 0xf9, 0xb2, 0x10, 0xea, 0xa3, 0x18, 0x2f, 0xc6, 0x1b,
0x5e, 0xcb, 0xff, 0xe1, 0x55, 0x08, 0x8d, 0xd7, 0x17, 0xcb, 0x59, 0xfb, 0xed, 0x6d, 0x74, 0x57,
0x7a, 0x56, 0x7f, 0xdd, 0x3b, 0x43, 0x8b, 0xde, 0x62, 0xf9, 0xd0, 0xd0, 0xdf, 0x6f, 0xf9, 0x48,
0x3b, 0xbd, 0x9e, 0x67, 0xf4, 0x05, 0xcf, 0xd3, 0x7f, 0xa0, 0x55, 0xb1, 0x56, 0xa1, 0xf1, 0x97,
0xff, 0xc9, 0xba, 0x08, 0x3e, 0x5e, 0x7b, 0xc4, 0xee, 0x1e, 0xb4, 0xe2, 0x12, 0xbf, 0xbf, 0x72,
0x21, 0x3f, 0x0c, 0x3e, 0xdb, 0xce, 0xce, 0x04, 0x52, 0x74, 0xe5, 0x4a, 0x0e, 0xee, 0xb7, 0xc7,
0x39, 0x2e, 0x46, 0x71, 0x6c, 0xbd, 0xd1, 0x9b, 0x8b, 0x12, 0x98, 0x12, 0x9e, 0x20, 0x10, 0x09,
0x4e, 0x28, 0x2f, 0x07, 0x85, 0x2f, 0xcf, 0xd7, 0xf2, 0x19, 0x79, 0x56, 0x57, 0xdf, 0xec, 0xce,
0x7e, 0x10, 0x7c, 0xb7, 0xb7, 0xf7, 0x2b, 0xcb, 0x20, 0x96, 0xa8, 0x2a, 0x8b, 0x2a, 0xf4, 0xd0,
0xd0, 0x30, 0x7f, 0x18, 0xce, 0x28, 0x88, 0xe5, 0xa8, 0xe6, 0x8c, 0x42, 0x6f, 0x54, 0x73, 0x2e,
0xec, 0x9d, 0x0a, 0xf0, 0xfd, 0x20, 0x96, 0xbe, 0x5f, 0xb3, 0x7c, 0x3f, 0xf4, 0x7c, 0xbf, 0xe6,
0x8d, 0x57, 0xca, 0xb0, 0xbb, 0xbf, 0x02, 0x00, 0x00, 0xff, 0xff, 0xf8, 0x6c, 0x79, 0x28, 0xf2,
0x04, 0x00, 0x00,
}
|
Cataldir/koalixcrm | koalixcrm/crm/migrations/0027_auto_20180606_2034.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2018-06-06 20:34
from __future__ import unicode_literals
import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('crm', '0026_auto_20180507_1957'),
]
operations = [
migrations.CreateModel(
name='Call',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('description', models.TextField(verbose_name='Description')),
('date_of_creation', models.DateTimeField(auto_now_add=True, verbose_name='Created at')),
('date_due', models.DateTimeField(blank=True, default=datetime.datetime.now, verbose_name='Date due')),
('last_modification', models.DateTimeField(auto_now=True, verbose_name='Last modified')),
('status', models.CharField(choices=[('P', 'Planned'), ('D', 'Delayed'), ('R', 'ToRecall'), ('F', 'Failed'), ('S', 'Success')], default='P', max_length=1, verbose_name='Status')),
],
),
migrations.CreateModel(
name='ContactPersonAssociation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('contact', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='person_association', to='crm.Contact')),
],
options={
'verbose_name': 'Contacts',
'verbose_name_plural': 'Contacts',
},
),
migrations.CreateModel(
name='Person',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('prefix', models.CharField(blank=True, choices=[('F', 'Company'), ('W', 'Mrs'), ('H', 'Mr'), ('G', 'Ms')], max_length=1, null=True, verbose_name='Prefix')),
('name', models.CharField(blank=True, max_length=100, null=True, verbose_name='Name')),
('prename', models.CharField(blank=True, max_length=100, null=True, verbose_name='Prename')),
('email', models.EmailField(max_length=200, verbose_name='Email Address')),
('phone', models.CharField(max_length=20, verbose_name='Phone Number')),
('role', models.CharField(blank=True, max_length=100, null=True, verbose_name='Role')),
('companies', models.ManyToManyField(blank=True, through='crm.ContactPersonAssociation', to='crm.Contact', verbose_name='Works at')),
],
options={
'verbose_name': 'Person',
'verbose_name_plural': 'People',
},
),
migrations.AddField(
model_name='customer',
name='is_lead',
field=models.BooleanField(default=True),
),
migrations.CreateModel(
name='CallForContact',
fields=[
('call_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='crm.Call')),
('purpose', models.CharField(choices=[('F', 'First commercial call'), ('S', 'Planned commercial call'), ('A', 'Assistance call')], max_length=1, verbose_name='Purpose')),
('company', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='crm.Contact')),
('cperson', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='crm.Person', verbose_name='Person')),
],
options={
'verbose_name': 'Call',
'verbose_name_plural': 'Calls',
},
bases=('crm.call',),
),
migrations.CreateModel(
name='VisitForContact',
fields=[
('call_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='crm.Call')),
('purpose', models.CharField(choices=[('F', 'First commercial visit'), ('S', 'Installation')], max_length=1, verbose_name='Purpose')),
('company', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='crm.Contact')),
('cperson', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='crm.Person', verbose_name='Person')),
('ref_call', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='crm.CallForContact', verbose_name='Reference Call')),
],
options={
'verbose_name': 'Visit',
'verbose_name_plural': 'Visits',
},
bases=('crm.call',),
),
migrations.AddField(
model_name='contactpersonassociation',
name='person',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='contact_association', to='crm.Person'),
),
migrations.AddField(
model_name='call',
name='last_modified_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='db_calllstmodified', to=settings.AUTH_USER_MODEL, verbose_name='Last modified by'),
),
migrations.AddField(
model_name='call',
name='staff',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='db_relcallstaff', to=settings.AUTH_USER_MODEL, verbose_name='Staff'),
),
]
|
aliyun/aliyun-openapi-cpp-sdk | dts/include/alibabacloud/dts/model/CreateSynchronizationJobRequest.h | <reponame>aliyun/aliyun-openapi-cpp-sdk<gh_stars>10-100
/*
* Copyright 2009-2017 Alibaba Cloud All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ALIBABACLOUD_DTS_MODEL_CREATESYNCHRONIZATIONJOBREQUEST_H_
#define ALIBABACLOUD_DTS_MODEL_CREATESYNCHRONIZATIONJOBREQUEST_H_
#include <string>
#include <vector>
#include <alibabacloud/core/RpcServiceRequest.h>
#include <alibabacloud/dts/DtsExport.h>
namespace AlibabaCloud
{
namespace Dts
{
namespace Model
{
class ALIBABACLOUD_DTS_EXPORT CreateSynchronizationJobRequest : public RpcServiceRequest
{
public:
CreateSynchronizationJobRequest();
~CreateSynchronizationJobRequest();
std::string getClientToken()const;
void setClientToken(const std::string& clientToken);
std::string getNetworkType()const;
void setNetworkType(const std::string& networkType);
std::string getSourceEndpointInstanceType()const;
void setSourceEndpointInstanceType(const std::string& sourceEndpointInstanceType);
std::string getAccessKeyId()const;
void setAccessKeyId(const std::string& accessKeyId);
std::string getAccountId()const;
void setAccountId(const std::string& accountId);
std::string getRegionId()const;
void setRegionId(const std::string& regionId);
std::string getSynchronizationJobClass()const;
void setSynchronizationJobClass(const std::string& synchronizationJobClass);
std::string getPeriod()const;
void setPeriod(const std::string& period);
std::string getDestRegion()const;
void setDestRegion(const std::string& destRegion);
std::string getTopology()const;
void setTopology(const std::string& topology);
std::string getOwnerId()const;
void setOwnerId(const std::string& ownerId);
int getUsedTime()const;
void setUsedTime(int usedTime);
int getDBInstanceCount()const;
void setDBInstanceCount(int dBInstanceCount);
std::string getSourceRegion()const;
void setSourceRegion(const std::string& sourceRegion);
std::string getPayType()const;
void setPayType(const std::string& payType);
std::string getDestinationEndpointInstanceType()const;
void setDestinationEndpointInstanceType(const std::string& destinationEndpointInstanceType);
private:
std::string clientToken_;
std::string networkType_;
std::string sourceEndpointInstanceType_;
std::string accessKeyId_;
std::string accountId_;
std::string regionId_;
std::string synchronizationJobClass_;
std::string period_;
std::string destRegion_;
std::string topology_;
std::string ownerId_;
int usedTime_;
int dBInstanceCount_;
std::string sourceRegion_;
std::string payType_;
std::string destinationEndpointInstanceType_;
};
}
}
}
#endif // !ALIBABACLOUD_DTS_MODEL_CREATESYNCHRONIZATIONJOBREQUEST_H_ |
ohtu-aikavali2/aikavali2-front | src/app/components/common/FeedbackBar.js | import React from 'react'
const FeedbackBar = ({ topLeftContent, topRightContent }) => {
return (
<div style={{ width: '100%', height: 30, display: 'flex', flexDirection: 'row', justifyContent: 'center' }}>
<div style={{ flex: 1, display: 'flex', justifyContent: 'flex-end', alignItems: 'center' }}>
{topLeftContent}
</div>
<div style={{ flex: 0.5 }} />
<div style={{ flex: 1, display: 'flex', justifyContent: 'flex-start', alignItems: 'center' }}>
{topRightContent}
</div>
</div>
)
}
export default FeedbackBar
|
wuyazero/Elastos.App.UnionSquare.iOS | ELA/ThirdSDK/WOCrashProtector/WOContainer/NSMutableString+WOCrash.h | //
// NSMutableString+WOCrash.h
// GridGovernance
//
// Created by 吴欧 on 2017/12/21.
// Copyright © 2017年 Bitvalue. All rights reserved.
//
/**
 * Methods whose crashes this category protects against:
 *
 * 1. Since NSMutableString inherits from NSString, the methods already
 *    covered by the NSString category are not repeated here.
 * 2. - (void)replaceCharactersInRange:(NSRange)range withString:(NSString *)aString
 * 3. - (void)insertString:(NSString *)aString atIndex:(NSUInteger)loc
 * 4. - (void)deleteCharactersInRange:(NSRange)range
 *
 */
#import <Foundation/Foundation.h>
@interface NSMutableString (WOCrash)
+ (void)wo_enableMutableStringProtector;
@end
|
Process-Creative/Slate | packages/slate-ajax-collection/src/filter/facet/decorators/SelectFacet.js | import { FACET_SELECT_SELECTOR } from './../../../constant/Constants';
// Class decorator: extends a facet class with <select>-based UI behaviour,
// rendering the facet's options as a dropdown and applying the chosen value
// on change. Relies on jQuery event delegation via this.container.
export const decorateSelectFacet = FacetType => {
  return class extends FacetType {
    constructor(params) {
      super(params);
      // Delegate change events from the rendered <select> to this facet.
      this.container.on('change', FACET_SELECT_SELECTOR, e => this.onChange(e));
    }
    // Builds the dropdown markup for the facet's drawable options.
    // NOTE(review): option values and names are interpolated without HTML
    // escaping — confirm the option data is trusted upstream.
    getPrint(e) {
      let options = this.getOptionsToDraw();
      return `<select data-facet-select class="c-collection-faceted-nav__select">
      ${options.reduce((x,o) => (
        `${x}<option value="${o}">${this.getOptionName(o)}</option>`
      ),'')}
      </select>`;
    }
    // Applies the selected value as the facet's single active option.
    onChange(e) {
      this.setOptions([ $(e.currentTarget).val() ]);
    }
  }
}
TeamSPoon/CYC_JRTL_with_CommonLisp_OLD | platform/main/server-4q/com/cyc/cycjava/cycl/inference/modules/removal/removal_modules_mean_stdev.java | <reponame>TeamSPoon/CYC_JRTL_with_CommonLisp_OLD
package com.cyc.cycjava.cycl.inference.modules.removal;
import static com.cyc.tool.subl.jrtl.nativeCode.type.core.SubLObjectFactory.makeKeyword;
import static com.cyc.tool.subl.jrtl.nativeCode.type.core.SubLObjectFactory.makeString;
import static com.cyc.tool.subl.jrtl.nativeCode.type.core.SubLObjectFactory.makeSymbol;
import com.cyc.cycjava.cycl.constant_handles;
import com.cyc.cycjava.cycl.number_utilities;
import com.cyc.cycjava.cycl.utilities_macros;
import com.cyc.cycjava.cycl.inference.harness.inference_modules;
import com.cyc.cycjava.cycl.inference.modules.preference_modules;
import com.cyc.tool.subl.jrtl.nativeCode.subLisp.ConsesLow;
import com.cyc.tool.subl.jrtl.nativeCode.subLisp.Values;
import com.cyc.tool.subl.jrtl.nativeCode.type.core.SubLList;
import com.cyc.tool.subl.jrtl.nativeCode.type.core.SubLObject;
import com.cyc.tool.subl.jrtl.nativeCode.type.symbol.SubLSymbol;
import com.cyc.tool.subl.util.SubLFile;
import com.cyc.tool.subl.util.SubLFiles;
import com.cyc.tool.subl.util.SubLTranslatedFile;
public final class removal_modules_mean_stdev
extends
SubLTranslatedFile
{
public static final SubLFile me;
public static final String myName = "com.cyc.cycjava.cycl.inference.modules.removal.removal_modules_mean_stdev";
public static final String myFingerPrint = "d846f69ab60440df89f992b7576d7c8e93bdd5721f6cd6470614c7656f576f02";
private static final SubLObject $const0$averageOfListElementsWithStandard;
private static final SubLSymbol $kw1$POS;
private static final SubLSymbol $kw2$REMOVAL_MEAN_STDEV_UNIFY_ARG1_ARG2;
private static final SubLList $list3;
private static final SubLSymbol $sym4$REMOVAL_STANDARD_DEVIATION_TUPLE;
@SubLTranslatedFile.SubL(source = "cycl/inference/modules/removal/removal-modules-mean-stdev.lisp", position = 2272L)
public static SubLObject removal_standard_deviation_tuple(final SubLObject numbers)
{
return Values.arg2( Values.resetMultipleValues(), Values.multiple_value_list( number_utilities.standard_deviation_from_population( numbers ) ) );
}
public static SubLObject declare_removal_modules_mean_stdev_file()
{
SubLFiles.declareFunction( me, "removal_standard_deviation_tuple", "REMOVAL-STANDARD-DEVIATION-TUPLE", 1, 0, false );
return NIL;
}
public static SubLObject init_removal_modules_mean_stdev_file()
{
return NIL;
}
public static SubLObject setup_removal_modules_mean_stdev_file()
{
inference_modules.register_solely_specific_removal_module_predicate( $const0$averageOfListElementsWithStandard );
preference_modules.doomed_unless_arg_bindable( $kw1$POS, $const0$averageOfListElementsWithStandard, ONE_INTEGER );
inference_modules.inference_removal_module( $kw2$REMOVAL_MEAN_STDEV_UNIFY_ARG1_ARG2, $list3 );
utilities_macros.note_funcall_helper_function( $sym4$REMOVAL_STANDARD_DEVIATION_TUPLE );
return NIL;
}
@Override
public void declareFunctions()
{
declare_removal_modules_mean_stdev_file();
}
@Override
public void initializeVariables()
{
init_removal_modules_mean_stdev_file();
}
@Override
public void runTopLevelForms()
{
setup_removal_modules_mean_stdev_file();
}
static
{
me = new removal_modules_mean_stdev();
$const0$averageOfListElementsWithStandard = constant_handles.reader_make_constant_shell( makeString( "averageOfListElementsWithStandardDeviation" ) );
$kw1$POS = makeKeyword( "POS" );
$kw2$REMOVAL_MEAN_STDEV_UNIFY_ARG1_ARG2 = makeKeyword( "REMOVAL-MEAN-STDEV-UNIFY-ARG1-ARG2" );
$list3 = ConsesLow.list( new SubLObject[] { makeKeyword( "SENSE" ), makeKeyword( "POS" ), makeKeyword( "PREDICATE" ), constant_handles.reader_make_constant_shell( makeString(
"averageOfListElementsWithStandardDeviation" ) ), makeKeyword( "REQUIRED-PATTERN" ), ConsesLow.list( constant_handles.reader_make_constant_shell( makeString( "averageOfListElementsWithStandardDeviation" ) ),
makeKeyword( "FULLY-BOUND" ), makeKeyword( "NOT-FULLY-BOUND" ), makeKeyword( "NOT-FULLY-BOUND" ) ), makeKeyword( "COST-EXPRESSION" ), ONE_INTEGER, makeKeyword( "COMPLETENESS" ), makeKeyword( "COMPLETE" ),
makeKeyword( "INPUT-EXTRACT-PATTERN" ), ConsesLow.list( makeKeyword( "TEMPLATE" ), ConsesLow.list( constant_handles.reader_make_constant_shell( makeString( "averageOfListElementsWithStandardDeviation" ) ),
ConsesLow.list( makeKeyword( "BIND" ), makeSymbol( "INPUT-EL-LIST" ) ), makeKeyword( "ANYTHING" ), makeKeyword( "ANYTHING" ) ), makeKeyword( "SUCCESS" ) ), makeKeyword( "INPUT-ENCODE-PATTERN" ), ConsesLow.list(
makeKeyword( "TEMPLATE" ), makeKeyword( "ANYTHING" ), ConsesLow.list( makeKeyword( "CALL" ), makeSymbol( "EL-LIST-ITEMS" ), ConsesLow.list( makeKeyword( "VALUE" ), makeSymbol( "INPUT-EL-LIST" ) ) ) ),
makeKeyword( "OUTPUT-GENERATE-PATTERN" ), ConsesLow.list( ConsesLow.list( makeKeyword( "CALL" ), makeSymbol( "REMOVAL-STANDARD-DEVIATION-TUPLE" ), makeKeyword( "INPUT" ) ) ), makeKeyword(
"OUTPUT-CONSTRUCT-PATTERN" ), ConsesLow.list( makeKeyword( "TUPLE" ), ConsesLow.list( makeSymbol( "STDEV" ), makeSymbol( "AVERAGE" ) ), ConsesLow.list( constant_handles.reader_make_constant_shell( makeString(
"averageOfListElementsWithStandardDeviation" ) ), ConsesLow.list( makeKeyword( "VALUE" ), makeSymbol( "INPUT-EL-LIST" ) ), ConsesLow.list( makeKeyword( "VALUE" ), makeSymbol( "AVERAGE" ) ), ConsesLow.list(
makeKeyword( "VALUE" ), makeSymbol( "STDEV" ) ) ) ), makeKeyword( "SUPPORT-MODULE" ), makeKeyword( "OPAQUE" ), makeKeyword( "SUPPORT-STRENGTH" ), makeKeyword( "MONOTONIC" ), makeKeyword( "SUPPORT-MT" ),
constant_handles.reader_make_constant_shell( makeString( "BaseKB" ) ), makeKeyword( "DOCUMENTATION" ), makeString( "(#$averageOfListElementsWithStandardDeviation :fully-bound :not-fully-bound :not-fully-bound)" ),
makeKeyword( "EXAMPLE" ), makeString( "(#$averageOfListElementsWithStandardDeviation (#$TheList 1 2 3 4 5) ?MEAN ?STDEV)\nwill bind ?MEAN to 3, and ?STDEV to 1.4142135623730951." )
} );
$sym4$REMOVAL_STANDARD_DEVIATION_TUPLE = makeSymbol( "REMOVAL-STANDARD-DEVIATION-TUPLE" );
}
}
/*
*
* Total time: 18 ms
*
*/ |
alexsicart/BravaKin | client/src/components/Homepage.js | import React from 'react';
import { connect } from 'react-redux';
import { Redirect } from 'react-router-dom';
import conf from '../private/conf';
import FontAwesome from 'react-fontawesome';
import { Row, Col, Button } from 'reactstrap';
import './style.css';
// Landing page: unauthenticated users see the Instagram login button;
// users holding an auth token are redirected straight to /preferences.
class Homepage extends React.Component {
  // Starts the Instagram OAuth authorization-code flow via a full-page
  // redirect; the configured callback URL completes login server-side.
  loginClick = () => {
    window.location.href = `https://api.instagram.com/oauth/authorize/?client_id=${conf.INSTAGRAM_CLIENT_ID}&redirect_uri=${conf.OAUTH_CB_URL}&response_type=code`
  }
  render () {
    if (this.props.auth_token) {
      return <Redirect to="/preferences" />;
    } else {
      return (
        <Row>
          <Col xs="6" className="pic">
            <div className="hero">
              <h1> Bravaklin </h1>
              <h5> Grow Your Instagram Followers Responsibly. </h5>
            </div>
          </Col>
          <Col xs="6">
            <div className="signin">
              <Button color="secondary" size="lg" className="instagram" onClick={this.loginClick}>
                <FontAwesome name="instagram" />
                <span> Login with Instagram</span>
              </Button>{' '}
            </div>
          </Col>
        </Row>
      );
    }
  }
}
// Expose the stored auth token so render() can decide whether to redirect.
const mapStateToProps = (state) => ({
  auth_token: state.authorization.auth_token
});
// No action creators are dispatched from this page yet.
const mapDispatchToProps = (dispatch) => ({
});
export default connect(mapStateToProps, mapDispatchToProps)(Homepage);
|
ManoloAlvarezForo/abadon-server | src/app.js | import '@babel/polyfill';
import express from 'express';
import mongoose from 'mongoose';
// import path from 'path';
import { ApolloServer } from 'apollo-server-express';
import schema from './graphql/schema';
// Subscriptions
import { PubSub } from 'graphql-subscriptions';
// import fs from 'fs';
// import { createServer as createServerHttps } from 'https';
import { createServer } from 'http';
import { getUserAuthenticated } from './utils/authentication';
export const pubsub = new PubSub();
// Apollo GraphQL server wired to the combined schema.
// NOTE(review): introspection and playground are force-enabled regardless
// of NODE_ENV — confirm this is intended for production deployments.
const apolloServer = new ApolloServer({
  introspection: true,
  playground: true,
  schema,
  // HTTP requests authenticate via the Authorization header; websocket
  // (subscription) requests reuse the context built in onConnect below.
  context: async ({ req, connection }) => {
    if (connection) {
      // check connection for metadata
      return connection.context;
    } else {
      const token = req.headers.authorization || '';
      const user = await getUserAuthenticated(token);
      return { user };
    }
  },
  subscriptions: {
    // Runs once per websocket connection; rejects sockets that present no
    // authorization token.
    onConnect: async (connectionParams, webSocket) => {
      if (connectionParams.authorization) {
        const token = connectionParams.authorization;
        const user = await getUserAuthenticated(token);
        return { user };
      }
      throw new Error('You are not Authenticated!');
    }
  }
});
const app = express();
apolloServer.applyMiddleware({ app });
const server = createServer(app);
// Add subscription support
apolloServer.installSubscriptionHandlers(server);
//Db connection.
mongoose.Promise = global.Promise;
// Heroku Database configuration.
// var promise = mongoose.connect(
// 'mongodb://heroku_83d9bs84:<EMAIL>:21696/heroku_83d9bs84',
// {
// useNewUrlParser: true,
// }
// );
// Local Database configuration.
var mongoPromise = mongoose.connect('mongodb://localhost/abadon', {
useNewUrlParser: true,
useUnifiedTopology: true
});
const port = process.env.PORT || 4000;
// const hostname = process.env.hostname;
const environment = process.env.NODE_ENV || 'development';
mongoPromise.then(() => {
server.listen(port, () =>
console.log(
`🚀 Abadon Server [${environment}] environment running at port:${port}`
)
);
});
|
AlisonZXQ/micro-pratice | kk-react/src/components/Tool/components/stepTwo.js | <filename>kk-react/src/components/Tool/components/stepTwo.js
import React, { Component } from 'react';
import { Button, Row, Table, message } from 'antd';
import { connect } from 'dva';
import { importAdviseFile, getAdviseCustomList } from '@services/advise';
import { importRequirementFile, getRequirementCustomList } from '@services/requirement';
import { importTaskFile, getTaskCustomList } from '@services/task';
import { importBugFile, getBugCustomList } from '@services/bug';
import { importObjectiveFile, getObjectiveCustomList } from '@services/objective';
import { importTicketFile, getTicketCustomList } from '@services/ticket';
import BusinessHOC from '@components/BusinessHOC';
import { adviseColumns, requirementColumns, taskColumns, bugColumns, objectiveColumns, ticketColumns } from '../shared/commonConfig';
import styles from './import.less';
class StepTwo extends Component {
constructor(props) {
super(props);
this.state = {
data: {},
mapData: [
{ key: 'error', value: '以下为系统处理异常数据' },
{ key: 'incomplete', value: '以下为不完整或格式非法数据' },
{ key: 'repeat', value: '以下为重复数据' },
{ key: 'success', value: '以下为可导入数据' },
],
columns: [],
};
}
componentDidMount(){
const { key } = this.props.type;
const { id } = this.props.lastProduct;
const params = {
productid: id,
};
if(key === 'advise') {
const newAdviseColumns = JSON.parse(JSON.stringify(adviseColumns));
getAdviseCustomList(params).then((res) => { //获取自定义字段
if (res.code !== 200) { return message.error(res.msg) }
const data = res.result;
newAdviseColumns.splice(newAdviseColumns.length-1, 0, ...data);
this.setInitValue(newAdviseColumns);
}).catch((err) => {
return message.error(err || err.message);
});
}else if(key === 'requirement') {
const newRequirementColumns = JSON.parse(JSON.stringify(requirementColumns));
getRequirementCustomList(params).then((res) => { //获取自定义字段
if (res.code !== 200) { return message.error(res.msg) }
const data = res.result;
newRequirementColumns.splice(newRequirementColumns.length-1, 0, ...data);
this.setInitValue(newRequirementColumns);
}).catch((err) => {
return message.error(err || err.message);
});
}else if(key === 'task') {
const newTaskColumns = JSON.parse(JSON.stringify(taskColumns));
getTaskCustomList(params).then((res) => { //获取自定义字段
if (res.code !== 200) { return message.error(res.msg) }
const data = res.result;
newTaskColumns.splice(newTaskColumns.length-1, 0, ...data);
this.setInitValue(newTaskColumns);
}).catch((err) => {
return message.error(err || err.message);
});
}else if(key === 'bug') {
const newBugColumns = JSON.parse(JSON.stringify(bugColumns));
getBugCustomList(params).then((res) => { //获取自定义字段
if (res.code !== 200) { return message.error(res.msg) }
const data = res.result;
newBugColumns.splice(newBugColumns.length-1, 0, ...data);
this.setInitValue(newBugColumns);
}).catch((err) => {
return message.error(err || err.message);
});
}else if(key === 'objective') {
const newObjectiveColumns = JSON.parse(JSON.stringify(objectiveColumns));
getObjectiveCustomList(params).then((res) => { //获取自定义字段
if (res.code !== 200) { return message.error(res.msg) }
const data = res.result;
newObjectiveColumns.splice(newObjectiveColumns.length-1, 0, ...data);
this.setInitValue(newObjectiveColumns);
}).catch((err) => {
return message.error(err || err.message);
});
}else if(key === 'ticket') {
const newTicketColumns = JSON.parse(JSON.stringify(ticketColumns));
getTicketCustomList(params).then((res) => { //获取自定义字段
if (res.code !== 200) { return message.error(res.msg) }
const data = res.result;
newTicketColumns.splice(newTicketColumns.length-1, 0, ...data);
this.setInitValue(newTicketColumns);
}).catch((err) => {
return message.error(err || err.message);
});
}
}
componentWillReceiveProps(nextProps) {
if (this.props.secondList !== nextProps.secondList) {
this.setState({ data: nextProps.secondList });
}
}
setInitValue = (newColumns) => {
const arr = [];
newColumns.map((item) => {
if(item.key === 'message'){
arr.push({
title: item.value,
dataIndex: item.key,
key: item.key,
render: (text, record) => {
if (record.message) {
return (
<span style={{ color: '#F04646' }}>{record.message}</span>
);
} else if (record.missFields && record.missFields.length !== 0) {
return (
<span style={{ color: '#F04646' }}>缺少必填字段:{record.missFields.map((item) => (
<span>{item} </span>
))}</span>
);
}else {
return <div style={{minWidth: '50px'}}></div>;
}
}
});
}else if(item.name) { //自定义字段
const { id, name } = item;
arr.push({
title: name,
dataIndex: id,
key: id,
width: 200,
render: (text, record) => {
return <span className='f-ib' style={{minWidth: '100px'}}>
{record.customfieldList4excel && record.customfieldList4excel[id]}
</span>;
}
});
}else if(!(item.key === 'jirakey4excel' && this.props.isBusiness)){
arr.push({
title: item.value,
dataIndex: item.key,
key: item.key,
render: (text, record) => {
return <span className='f-ib' style={{minWidth: '100px'}}>
{text}
</span>;
}
});
}
});
this.setState({ columns: arr });
}
setCurrent = (val, data) => {
if(val === 0) {
this.props.setCurrent(val);
}else if( val === 2){
this.nextStep(val, data);
}
}
nextStep = (val, data) => {
const params = {
success: data
};
const { key } = this.props.type;
if(key === 'advise'){
importAdviseFile(params).then((res) => {
if (res.code !== 200) return message.error(res.msg);
this.props.setCurrent(val, res.result);
message.success(`导入成功`);
}).catch((err) => {
return message.error(`${err || err.msg}导入文件异常`);
});
}else if(key === 'requirement') {
importRequirementFile(params).then((res) => {
if (res.code !== 200) return message.error(res.msg);
this.props.setCurrent(val, res.result);
message.success(`导入成功`);
}).catch((err) => {
return message.error(`${err || err.msg}导入文件异常`);
});
}
else if(key === 'task') {
importTaskFile(params).then((res) => {
if (res.code !== 200) return message.error(res.msg);
this.props.setCurrent(val, res.result);
message.success(`导入成功`);
}).catch((err) => {
return message.error(`${err || err.msg}导入文件异常`);
});
}
else if(key === 'bug') {
importBugFile(params).then((res) => {
if (res.code !== 200) return message.error(res.msg);
this.props.setCurrent(val, res.result);
message.success(`导入成功`);
}).catch((err) => {
return message.error(`${err || err.msg}导入文件异常`);
});
}
else if(key === 'objective') {
importObjectiveFile(params).then((res) => {
if (res.code !== 200) return message.error(res.msg);
this.props.setCurrent(val, res.result);
message.success(`导入成功`);
}).catch((err) => {
return message.error(`${err || err.msg}导入文件异常`);
});
}
else if(key === 'ticket') {
importTicketFile(params).then((res) => {
if (res.code !== 200) return message.error(res.msg);
this.props.setCurrent(val, res.result);
message.success(`导入成功`);
}).catch((err) => {
return message.error(`${err || err.msg}导入文件异常`);
});
}
}
render() {
const { data, mapData } = this.state;
return (<div className={styles.contentSecond}>
<div className={styles.tip}>
文件中包含<span>{(data && data.summary && data.summary.total) || 0}</span>条数据,
其中可导入<span>{(data && data.summary && data.summary.success) || 0}</span>条,
不可导入<span>{(data && data.summary && data.summary.failure) || 0}</span>条
</div>
<div className={styles.warningTip}>
包含<span>{(data && data.summary && data.summary.repeat) || 0}</span>条重复,
<span>{(data && data.summary && data.summary.incomplete) || 0}</span>条数据不完整或格式不合法,
<span>{(data && data.summary && data.summary.error) || 0}</span>条系统处理异常
</div>
{mapData.map((item) => (
data && data[item.key] && !!data[item.key].length && <Row>
<span className={styles.title}>{item.value}</span>
<Table
className={styles.table}
dataSource={data[item.key]}
columns={ this.state.columns } />
</Row>
))}
<Row style={{ marginTop: '40px' }} className='f-tar'>
<Button type='primary'disabled={data && data.success && !data.success.length} onClick={() => this.setCurrent(2, data.success)}>确认并导入可用数据</Button>
<Button className='u-mgl10' onClick={() => this.setCurrent(0)}>重新选择</Button>
</Row>
</div>);
}
}
// Only the last-selected product is needed from the dva store.
function mapStateToProps(state) {
  return { lastProduct: state.product.lastProduct };
}

export default connect(mapStateToProps)(BusinessHOC()(StepTwo));
|
dejakob/react-native-alegrify-offline | src/components/create-thought-dialog/CreateThoughtDialog.js | import React, { PureComponent } from 'react';
import { Platform } from 'react-native';
import { Button, Dialog, Input, Label, SegmentedControl, Padding } from 'react-native-alegrify-ui';
import { translate } from '../../services/language';
import { MOODS, MOOD_ELEMENTS } from '../thought/Thought';
class CreateThoughtDialog extends PureComponent {
constructor() {
super();
this.handleMoodChange = this.handleMoodChange.bind(this);
this.handleMoodScoreChange = this.handleMoodScoreChange.bind(this);
this.handleThoughtChange = this.handleThoughtChange.bind(this);
this.handleEventChange = this.handleEventChange.bind(this);
this.handleSubmit = this.handleSubmit.bind(this);
}
componentWillMount() {
this.setState({
mood: null,
score: 5,
thought: '',
event: '',
hasSubmitted: false,
});
}
componentWillReceiveProps(newProps) {
if (newProps.show && !this.props.show) {
this.setState({ hasSubmitted: false });
}
else if (!newProps.show && this.props.show) {
this.componentWillMount();
}
}
get validationErrors() {
return {
mood: this.state.mood ? null : translate('DASHBOARD.VALIDATION_ERRORS.MOOD'),
score: this.state.score > 0 ? null : translate('DASHBOARD.VALIDATION_ERRORS.SCORE'),
thought: (typeof this.state.thought === 'string' && this.state.thought.trim().length > 0) ?
null :
translate('DASHBOARD.VALIDATION_ERRORS.THOUGHT'),
event: (typeof this.state.event === 'string' && this.state.event.trim().length > 0) ?
null :
translate('DASHBOARD.VALIDATION_ERRORS.EVENT')
};
}
get isValid() {
const { validationErrors } = this;
return Object
.keys(validationErrors)
.every(key => !validationErrors[key]);
}
handleMoodChange(element) {
const { mood } = element.props;
this.setState({ mood });
}
handleMoodScoreChange(score) {
this.setState({ score: Math.round(score * 10) / 10 });
}
handleThoughtChange(thought) {
this.setState({ thought });
}
handleEventChange(event) {
this.setState({ event });
}
handleSubmit() {
if (this.isValid) {
this.props.onComplete({
mood: this.state.mood,
score: this.state.score,
thought: this.state.thought,
event: this.state.event
});
this.componentWillMount();
}
else {
this.setState({ hasSubmitted: true });
}
}
render() {
const { validationErrors } = this;
return (
<Dialog
show={this.props.show}
onHide={this.props.onHide}
scrollDisabled={this.state.isScrollDisabled}
testID="createThoughtDialog__dialog"
>
<SegmentedControl
items={MOOD_ELEMENTS}
active={MOOD_ELEMENTS[Object.keys(MOODS).indexOf(this.state.mood)]}
onItemPress={this.handleMoodChange}
testID="createThoughtDialog__moodTypes"
/>
{this.state.hasSubmitted && validationErrors.mood ? (
<Label
error
>
{validationErrors.mood}
</Label>
) : null}
<Padding m />
<Input
type="range"
value={this.state.score}
onValueChange={this.handleMoodScoreChange}
onSlidingStart={() => this.setState({ isScrollDisabled: true })}
onSlidingComplete={() => this.setState({ isScrollDisabled: false })}
error={this.state.hasSubmitted && validationErrors.score}
testID="createThoughtDialog__score"
spaceL
/>
<Input
multiline
label={translate('DASHBOARD.WHATS_ON_YOUR_MIND')}
value={this.state.thought}
onChangeText={this.handleThoughtChange}
onBlur={Platform.OS === 'ios' ? e => this.handleThoughtChange(e.nativeEvent.text) : () => {}}
error={this.state.hasSubmitted && validationErrors.thought}
testID="createThoughtDialog__thought"
spaceL
/>
<Input
multiline
label={translate('DASHBOARD.WHAT_HAPPENED')}
value={this.state.event}
onChangeText={this.handleEventChange}
onBlur={Platform.OS === 'ios' ? e => this.handleEventChange(e.nativeEvent.text) : () => {}}
error={this.state.hasSubmitted && validationErrors.event}
testID="createThoughtDialog__event"
spaceL
/>
<Button
primary
onPress={this.handleSubmit}
disabled={this.state.hasSubmitted && !this.isValid}
testID="createThoughtDialog__submit"
>
{translate('DASHBOARD.ADD_THOUGHT')}
</Button>
</Dialog>
);
}
}
export default CreateThoughtDialog;
|
wxmerkt/ihmc-open-robotics-software | IHMCQuadrupedRobotics/src/us/ihmc/quadrupedRobotics/controller/ControllerEvent.java | package us.ihmc.quadrupedRobotics.controller;
/**
 * Outcome events emitted by quadruped controllers.
 */
public enum ControllerEvent
{
   /** The controller finished its task. */
   DONE,
   /** The controller terminated unsuccessfully. */
   FAIL,
}
|
wkoszek/ncurses_guide | book-ref/hline.c | <reponame>wkoszek/ncurses_guide<gh_stars>10-100
#include <ncurses.h>
/*
 * Draw a centered triangle of horizontal lines: each screen row y gets a
 * line of length 2*(y+1), centered on the middle column.
 */
int main(void)
{
	int rows, cols, center, row;

	initscr();
	getmaxyx(stdscr, rows, cols);
	center = cols / 2;		/* middle column of the screen */

	for (row = 0; row < rows; row++) {
		int half = row + 1;	/* half-width grows by one each row */
		mvhline(row, center - half, 0, half * 2);
	}

	refresh();
	getch();			/* wait for a key before tearing down */
	endwin();
	return 0;
}
|
mrinalini-m/travelog | client/src/utils/auth.utils.js | <gh_stars>0
import axios from 'axios'
import { createErrorMessage } from '@/utils'
// Build the Authorization header from the locally stored user, if any.
// Returns an empty object when no user/token is persisted.
const authHeader = () => {
  const user = JSON.parse(localStorage.getItem('user'))
  return user && user.token
    ? { Authorization: 'Bearer ' + user.token }
    : {}
}
// Drop the persisted user session from local storage.
function _logout() {
  localStorage.removeItem('user')
}
// PUT the given user to /users/:id with the auth header attached.
// Resolves to the handled response body, or a "<status>: <message>"
// string when the request fails.
const _update = async (user) => {
  try {
    const res = await axios({
      method: 'PUT',
      headers: { ...authHeader(), 'Content-Type': 'application/json' },
      data: user,
      url: `/users/${user.id}`
    })
    return handleResponse(res)
  } catch (error) {
    return `${error.response.status}: ${createErrorMessage(error)}`
  }
}
// DELETE /users/:id with the auth header attached. Resolves to the
// handled response body, or a "<status>: <message>" string on failure.
const _delete = async (id) => {
  try {
    const res = await axios({
      method: 'DELETE',
      headers: authHeader(),
      url: `/users/${id}`
    })
    return handleResponse(res)
  } catch (error) {
    return `${error.response.status}: ${createErrorMessage(error)}`
  }
}
// Post-process an axios response: on a missing body or an unsuccessful
// payload with HTTP 401, clear the session and reload. Returns the body.
// Fixed: the original condition `!data && !data.success` threw a TypeError
// whenever data was falsy and could never be true otherwise.
const handleResponse = response => {
  const data = response.data
  if (!data || !data.success) {
    if (response.status === 401) {
      // Session no longer valid: log out and force a full reload.
      _logout()
      location.reload(true)
    }
  }
  return data
}
export { _logout, _update, _delete, handleResponse }
|
edmund-troche/mbl-core | ci/lava/tests/test-provision-mbl.py | <gh_stars>1-10
#!/usr/bin/env python3
# Copyright (c) 2019 Arm Limited and Contributors. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
"""Tests for MBL provisioning."""
import os
import time
from pathlib import Path
class TestProvisionMbl:
    """Class to encapsulate the testing of mbl-cli provisioning of a DUT.

    The test methods form an ordered pipeline (setup -> manifest init ->
    key rejection -> provision -> status check -> client restart), so
    shared state is stashed on class attributes by ``test_setup``.
    """

    # Device-under-test address and certificate id shared across the tests.
    dut_addr = ""
    certificate = ""

    def test_setup(self, dut_addr, execute_helper):
        """Setup environment for tests execution."""
        # Save the dut_addr in the class
        TestProvisionMbl.dut_addr = dut_addr

        # Work out a unique certificate id - the first part of the path is the
        # lava job number.
        # NOTE(review): "".join() of a single string is a no-op, and parts[1]
        # is the first component below the filesystem root -- confirm this
        # really yields the lava job number.
        current_dir = Path().cwd()
        TestProvisionMbl.certificate = "".join(current_dir.parts[1])

        # Create a working directory for the following tests
        directory = "/tmp/update-resources"
        if not os.path.exists(directory):
            os.mkdir(directory)
        os.chdir(directory)

    def test_manifest_init(self, execute_helper):
        """Test manifest-tool init command."""
        # Initialise update resources for vendor "arm.com", model
        # "dev-device"; produces update_default_resources.c in the cwd.
        exit_code, stdout, stderr = execute_helper.execute_command(
            [
                "manifest-tool",
                "init",
                "-q",
                "-d",
                "arm.com",
                "-m",
                "dev-device",
            ]
        )
        # Echo the output only on failure to keep logs readable.
        if exit_code:
            print(stdout)
            print(stderr)
        assert exit_code == 0

    def test_reject_invalid_key(self, execute_helper):
        """Test invalid API key saving using mbl-cli."""
        exit_code, stdout, stderr = execute_helper.send_mbl_cli_command(
            ["save-api-key", "invalid_key"], TestProvisionMbl.dut_addr
        )
        assert "API key not recognised by Pelion Device Management" in stderr

    def test_pelion_not_configured(self, execute_helper):
        """Test get-pelion-status command when the device is not configured."""
        exit_code, stdout, stderr = execute_helper.send_mbl_cli_command(
            ["get-pelion-status"], TestProvisionMbl.dut_addr
        )
        assert (
            "Your device is not correctly configured for Pelion Device "
            "Management." in stderr
        )

    def test_pelion_provisioned(self, execute_helper):
        """Test the actual provisioning of the device."""
        # Uses the certificate id derived in test_setup and the update
        # resources generated by test_manifest_init.
        exit_code, stdout, stderr = execute_helper.send_mbl_cli_command(
            [
                "provision-pelion",
                "-c",
                TestProvisionMbl.certificate,
                "anupdatecert",
                "-p",
                "/tmp/update-resources/update_default_resources.c",
            ],
            TestProvisionMbl.dut_addr,
        )
        assert "Provisioning process completed without error." in stdout

    def test_pelion_configured(self, execute_helper):
        """Check if the device has been provisioned correctly."""
        exit_code, stdout, stderr = execute_helper.send_mbl_cli_command(
            ["get-pelion-status"], TestProvisionMbl.dut_addr
        )
        assert (
            "Device is configured correctly. You can connect to Pelion Cloud!"
            in stdout
        )

    def test_restart_mbl_cloud_client(self, execute_helper):
        """Restart the cloud client and wait 30 seconds to allow connection."""
        exit_code, stdout, stderr = execute_helper.send_mbl_cli_command(
            ["shell", "systemctl restart mbl-cloud-client"],
            TestProvisionMbl.dut_addr,
        )
        # Give the client time to reconnect to Pelion before later checks.
        time.sleep(30)
        assert exit_code == 0
|
lemkova/Yorozuya | library/ATF/__respawn_monster.hpp | // This file auto generated by plugin for ida pro. Generated code only for x64. Please, dont change manually
#pragma once
#include <common/common.h>
#include <_react_area.hpp>
#include <_react_obj.hpp>
START_ATF_NAMESPACE
// Respawn descriptor for a monster: reaction object/area plus respawn
// timing and limit parameters. Generated header -- the field order and
// types must match the binary layout; do not reorder.
struct __respawn_monster
{
    _react_obj ReactObj;        // reaction/spawn object data
    _react_area ReactArea;      // area associated with the respawn
    unsigned int dwTermMSec;    // respawn term in milliseconds (per "MSec" suffix)
    int nLim;                   // limit count -- presumably max spawns; confirm
    bool bCallEvent;            // whether an event is fired -- TODO confirm semantics
    char *pszDefineCode;        // define-code string identifying the monster
public:
    __respawn_monster();
    void ctor___respawn_monster();   // generated out-of-line constructor body
};
END_ATF_NAMESPACE
|
FantLab/go-kit | http/mux/builder.go | <reponame>FantLab/go-kit
package mux
import "net/http"
// Middleware appends fn to the group's middleware chain.
func (g *Group) Middleware(fn func(http.Handler) http.Handler) {
	g.Middlewares = append(g.Middlewares, fn)
}
// Endpoint registers handler for the given HTTP method and path on the group.
func (g *Group) Endpoint(method, path string, handler http.Handler) {
	g.Endpoints = append(g.Endpoints, &Endpoint{
		Method: method,
		Path: path,
		Handler: handler,
	})
}
// Subgroup creates a nested group, lets fn configure it, and attaches it
// to the receiver's list of subgroups.
func (g *Group) Subgroup(fn func(g *Group)) {
	sg := new(Group)
	fn(sg)
	g.Subgroups = append(g.Subgroups, sg)
}
|
trumank/DRG-Mods | Source/FSD/Public/PipelineExtractorPodAnimInstance.h | <reponame>trumank/DRG-Mods
#pragma once
#include "CoreMinimal.h"
#include "Animation/AnimInstance.h"
#include "ERessuplyPodState.h"
#include "EPipelineExtractorPodAnimState.h"
#include "EPipelineBuildState.h"
#include "ERefineryState.h"
#include "PipelineExtractorPodAnimInstance.generated.h"
class APipelineExtractorPod;
class APipelineStart;
class APipelineSegment;
class AFSDRefinery;
// Anim instance backing the pipeline extractor pod's animation blueprint.
// The state enums mirror the pod / pipeline / refinery actors referenced
// below; values are exposed to the blueprint via UPROPERTY.
UCLASS(Abstract, Blueprintable, NonTransient)
class FSD_API UPipelineExtractorPodAnimInstance : public UAnimInstance {
    GENERATED_BODY()
public:
protected:
    // Rotation of the pipeline connection -- presumably degrees; confirm.
    UPROPERTY(BlueprintReadWrite, EditAnywhere, meta=(AllowPrivateAccess=true))
    float ConnectionRotation;

    // Current animation state of the extractor pod.
    UPROPERTY(BlueprintReadWrite, EditAnywhere, meta=(AllowPrivateAccess=true))
    EPipelineExtractorPodAnimState AnimState;

    // Mirrored state of the resupply-style pod actor.
    UPROPERTY(BlueprintReadWrite, EditAnywhere, meta=(AllowPrivateAccess=true))
    ERessuplyPodState PodState;

    // Build state of the connected pipeline.
    UPROPERTY(BlueprintReadWrite, EditAnywhere, meta=(AllowPrivateAccess=true))
    EPipelineBuildState PipelineState;

    // Mirrored state of the refinery this pod feeds.
    UPROPERTY(BlueprintReadWrite, EditAnywhere, meta=(AllowPrivateAccess=true))
    ERefineryState RefineryState;

    // Speed used when rotating -- units unclear from this header; confirm.
    UPROPERTY(BlueprintReadWrite, EditAnywhere, meta=(AllowPrivateAccess=true))
    float RotationSpeed;

    // Weak references to the observed gameplay actors (not serialized).
    UPROPERTY(EditAnywhere, Transient)
    TWeakObjectPtr<APipelineExtractorPod> Pod;

    UPROPERTY(EditAnywhere, Transient)
    TWeakObjectPtr<APipelineSegment> ConnectedSegment;

    UPROPERTY(EditAnywhere, Transient)
    TWeakObjectPtr<APipelineStart> PipelineStart;

    UPROPERTY(EditAnywhere, Transient)
    TWeakObjectPtr<AFSDRefinery> Refinery;

public:
    UPipelineExtractorPodAnimInstance();
};
|
noelc-s/amber_developer_stack | eigen_utilities/include/eigen_utilities/operator_utilities.hpp | /**
* @author <NAME> <<EMAIL>>, member of Dr. Aaron
* Ames's AMBER Lab
*/
#ifndef EIGEN_OPERATOR_UTILITIES_H
// Fixed: the guard previously #define'd a misspelled macro
// (EIGEN_OPERTAOR_UTILITIES_h), so the #ifndef never matched and the
// header was not protected against multiple inclusion.
#define EIGEN_OPERATOR_UTILITIES_H

#include <Eigen/Dense>

namespace Eigen
{

/// In-place subtraction of a scalar from every coefficient of X.
template<typename Derived>
Eigen::MatrixBase<Derived>& operator-=(Eigen::MatrixBase<Derived> &X, const typename Derived::Scalar &a)
{
    // Acceptably inefficient: materializes a temporary all-ones matrix.
    X -= a * Matrix<typename Derived::Scalar, -1, -1>::Ones(X.rows(), X.cols());
    return X;
}

/// In-place addition of a scalar to every coefficient of X.
template<typename Derived>
Eigen::MatrixBase<Derived>& operator+=(Eigen::MatrixBase<Derived> &X, const typename Derived::Scalar &a)
{
    X += a * Matrix<typename Derived::Scalar, -1, -1>::Ones(X.rows(), X.cols());
    return X;
}

}

#endif // EIGEN_OPERATOR_UTILITIES_H
|
lockie/HiveGame | HiveGame/MyGUI/Tools/LayoutEditor/PropertyFieldFileName.h | /*!
@file
@author <NAME>
@date 12/2010
*/
#ifndef __PROPERTY_FIELD_FILE_NAME_H__
#define __PROPERTY_FIELD_FILE_NAME_H__
#include "EditorToolTip.h"
#include "PropertyFieldEditBox.h"
namespace tools
{
	/// Layout-editor property field for file-name values. Reuses the
	/// plain edit-box field and only overrides value validation.
	class PropertyFieldFileName :
		public PropertyFieldEditBox
	{
	public:
		PropertyFieldFileName(MyGUI::Widget* _parent);
		virtual ~PropertyFieldFileName();

	protected:
		// Validates the current edit-box content -- presumably checks that
		// the text names a usable file; confirm in the .cpp.
		virtual bool onCheckValue();
	};
} // namespace tools
#endif // __PROPERTY_FIELD_FILE_NAME_H__
|
Glidias/yuka | src/perception/memory/MemorySystem.js | <gh_stars>0
import { MemoryRecord } from './MemoryRecord.js';
/**
* Class for representing the memory system of a game entity. It is used for managing,
* filtering, and remembering sensory input.
*
* @author {@link https://github.com/Mugen87|Mugen87}
*/
/**
* Class for representing the memory system of a game entity. It is used for managing,
* filtering, and remembering sensory input.
*
* @author {@link https://github.com/Mugen87|Mugen87}
*/
class MemorySystem {

	/**
	* Constructs a new memory system.
	*
	* @param {GameEntity} owner - The game entity that owns this memory system.
	*/
	constructor( owner = null ) {

		/**
		* The game entity that owns this memory system.
		* @type GameEntity
		*/
		this.owner = owner;

		/**
		* Used to simulate memory of sensory events. It contains {@link MemoryRecord memory records}
		* of all relevant game entities in the environment. The records are usually update by
		* the owner of the memory system.
		* @type Array
		*/
		this.records = new Array();

		/**
		* Same as {@link MemorySystem#records} but used for fast access via the game entity.
		* @type Map
		*/
		this.recordsMap = new Map();

		/**
		* Represents the duration of the game entities short term memory in seconds.
		* When a bot requests a list of all recently sensed game entities, this value
		* is used to determine if the bot is able to remember a game entity or not.
		* @type Number
		* @default 1
		*/
		this.memorySpan = 1;

	}

	/**
	* Returns the memory record of the given game entity, or undefined when
	* no record exists.
	*
	* @param {GameEntity} entity - The game entity.
	* @return {MemoryRecord} The memory record for this game entity.
	*/
	getRecord( entity ) {

		return this.recordsMap.get( entity );

	}

	/**
	* Creates a memory record for the given game entity.
	*
	* @param {GameEntity} entity - The game entity.
	* @return {MemorySystem} A reference to this memory system.
	*/
	createRecord( entity ) {

		const record = new MemoryRecord( entity );

		this.records.push( record );
		this.recordsMap.set( entity, record );

		return this;

	}

	/**
	* Deletes the memory record for the given game entity. A no-op when the
	* entity has no record.
	*
	* @param {GameEntity} entity - The game entity.
	* @return {MemorySystem} A reference to this memory system.
	*/
	deleteRecord( entity ) {

		const record = this.getRecord( entity );

		// Guard against unknown entities: previously indexOf returned -1 and
		// splice( -1, 1 ) removed the *last* record instead of doing nothing.
		if ( record !== undefined ) {

			const index = this.records.indexOf( record );
			this.records.splice( index, 1 );

			this.recordsMap.delete( entity );

		}

		return this;

	}

	/**
	* Returns true if there is a memory record for the given game entity.
	*
	* @param {GameEntity} entity - The game entity.
	* @return {Boolean} Whether the game entity has a memory record or not.
	*/
	hasRecord( entity ) {

		return this.recordsMap.has( entity );

	}

	/**
	* Removes all memory records from the memory system.
	*
	* @return {MemorySystem} A reference to this memory system.
	*/
	clear() {

		this.records.length = 0;
		this.recordsMap.clear();

		return this;

	}

	/**
	* Determines all valid memory record and stores the result in the given array.
	* A record is valid when it was sensed within the last memorySpan seconds.
	*
	* @param {Number} currentTime - The current elapsed time.
	* @param {Array} result - The result array.
	* @return {Array} The result array.
	*/
	getValidMemoryRecords( currentTime, result ) {

		const records = this.records;

		result.length = 0;

		for ( let i = 0, l = records.length; i < l; i ++ ) {

			const record = records[ i ];

			if ( ( currentTime - record.timeLastSensed ) <= this.memorySpan ) {

				result.push( record );

			}

		}

		return result;

	}

	/**
	* Transforms this instance into a JSON object.
	*
	* @return {Object} The JSON object.
	*/
	toJSON() {

		const json = {
			type: this.constructor.name,
			owner: this.owner.uuid,
			records: new Array(),
			memorySpan: this.memorySpan
		};

		const records = this.records;

		for ( let i = 0, l = records.length; i < l; i ++ ) {

			const record = records[ i ];
			json.records.push( record.toJSON() );

		}

		return json;

	}

	/**
	* Restores this instance from the given JSON object. Note that owner and
	* recordsMap hold UUIDs until resolveReferences() is called.
	*
	* @param {Object} json - The JSON object.
	* @return {MemorySystem} A reference to this memory system.
	*/
	fromJSON( json ) {

		this.owner = json.owner; // uuid
		this.memorySpan = json.memorySpan;

		const recordsJSON = json.records;

		for ( let i = 0, l = recordsJSON.length; i < l; i ++ ) {

			const recordJSON = recordsJSON[ i ];
			const record = new MemoryRecord().fromJSON( recordJSON );

			this.records.push( record );

		}

		return this;

	}

	/**
	* Restores UUIDs with references to GameEntity objects.
	*
	* @param {Map} entities - Maps game entities to UUIDs.
	* @return {MemorySystem} A reference to this memory system.
	*/
	resolveReferences( entities ) {

		this.owner = entities.get( this.owner ) || null;

		// records

		const records = this.records;

		for ( let i = 0, l = records.length; i < l; i ++ ) {

			const record = records[ i ];
			record.resolveReferences( entities );
			this.recordsMap.set( record.entity, record );

		}

		return this;

	}

}
export { MemorySystem };
|
leedalmogbel/ace | src/app/SignedURL/GenerateModelSignedUrl.js | const { Operation } = require('@amberjs/core');
const signURL = require('src/infra/services/signedUrl');
// Persisted lifecycle states for a StandardModel row.
const modelStatus = {
  COMPLETED : 'Completed',
  FAILED : 'Failed',
  PROCESSING : 'Processing'
};

/**
 * Operation that generates signed upload URLs for trained model
 * artifacts -- one per keypoint grouping -- and upserts the resulting
 * links into the StandardModel repository.
 */
class GenerateModelSignedUrl extends Operation {
  constructor({ ThirdPartyApis, ClipRepository, StandardModelRepository, logger }) {
    super();
    this.ThirdPartyApis = ThirdPartyApis;
    this.ClipRepository = ClipRepository;
    this.StandardModelRepository = StandardModelRepository;
    this.logger = logger;
  }

  async execute(data) {
    const { SUCCESS, ERROR, VALIDATION_ERROR } = this.events;
    this.logger.info(`Generate Model SignedUrl DATA : ${JSON.stringify(data)}`);
    // One signed URL / model row is produced per keypoint grouping.
    const keypointArr = ['all', 'balance', 'ball_striking', 'movement'];
    //add validation to data
    try {
      // Upstream training failed: just mark the row and stop.
      if(data.status == modelStatus.FAILED){
        await this.StandardModelRepository.update(modelStatus.FAILED, {
          userId : data.userId,
          scenarioId : data.scenarioId
        });
        return this.emit(SUCCESS, 'Updated Status to Failed');
      }
      // existing scenario Id and userId
      let resultArr= await Promise.all(
        keypointArr.map(keypoint => {
          // Key is namespaced by env/user/scenario, timestamped, spaces stripped.
          let keyName = `models/${process.env.NODE_ENV}/${data.userId}/${data.scenarioId}/${String(Date.now())}_${keypoint}`.replace(/\s/g, '');
          let key = `${keyName}.h5`;
          let generatedData = signURL.generateSignedUrlForModel(key);
          generatedData.keypointMap = keypoint;
          // NOTE(review): this upsert promise is not awaited, so write
          // failures are silently dropped and SUCCESS may be emitted before
          // the rows are persisted -- confirm this is intentional.
          this.StandardModelRepository.upsert({userId: data.userId, scenarioId: data.scenarioId, modelLink:generatedData.pathUrl, keypointMap: keypoint, keypointUrl: data.keypointUrl, status:modelStatus.COMPLETED});
          return generatedData;
        })
      );
      this.logger.info(`Generate Model SignedUrl RESULT : ${JSON.stringify(resultArr)}`);
      return this.emit(SUCCESS, resultArr);
    } catch(error) {
      this.logger.error(`Generate Model SignedUrl ERROR : ${JSON.stringify(error)}`);
      if(error.message === 'ValidationError') {
        return this.emit(VALIDATION_ERROR, error);
      }
      return this.emit(ERROR, error);
    }
  }
}

GenerateModelSignedUrl.setEvents(['SUCCESS', 'ERROR', 'VALIDATION_ERROR', 'NOT_FOUND']);

module.exports = GenerateModelSignedUrl;
|
tianyapiaozi/tensorflow | tensorflow/contrib/lite/testing/tf_driver_test.cc | <filename>tensorflow/contrib/lite/testing/tf_driver_test.cc
/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/contrib/lite/testing/tf_driver.h"
#include <gmock/gmock.h>
#include <gtest/gtest.h>
namespace tflite {
namespace testing {
namespace {
using ::testing::ElementsAre;
TEST(TfDriverTest, SimpleTest) {
  // Driver configured with four float inputs of shape 1x8x8x3 and two
  // named outputs (x, y).
  std::unique_ptr<TfDriver> runner(
      new TfDriver({"a", "b", "c", "d"}, {"float", "float", "float", "float"},
                   {"1,8,8,3", "1,8,8,3", "1,8,8,3", "1,8,8,3"}, {"x", "y"}));
  runner->LoadModel(
      "third_party/tensorflow/contrib/lite/testdata/multi_add.pb");
  EXPECT_TRUE(runner->IsValid()) << runner->GetErrorMessage();
  ASSERT_THAT(runner->GetInputs(), ElementsAre(0, 1, 2, 3));
  ASSERT_THAT(runner->GetOutputs(), ElementsAre(0, 1));
  // Reshape every input to 1x2x2x1 before feeding data.
  for (int i : {0, 1, 2, 3}) {
    runner->ReshapeTensor(i, "1,2,2,1");
  }
  ASSERT_TRUE(runner->IsValid());
  runner->SetInput(0, "0.1,0.2,0.3,0.4");
  runner->SetInput(1, "0.001,0.002,0.003,0.004");
  runner->SetInput(2, "0.001,0.002,0.003,0.004");
  runner->SetInput(3, "0.01,0.02,0.03,0.04");
  // Input 2 is zeroed out, so only inputs 0, 1 and 3 contribute to the sums.
  runner->ResetTensor(2);
  runner->Invoke();
  ASSERT_EQ(runner->ReadOutput(0), "0.101,0.202,0.303,0.404");
  ASSERT_EQ(runner->ReadOutput(1), "0.011,0.022,0.033,0.044");
}
} // namespace
} // namespace testing
} // namespace tflite
|
CameronMcWilliam/odo | vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/controller/operators/olm/overrides/inject_test.go | <gh_stars>1-10
package overrides_test
import (
"testing"
"github.com/operator-framework/operator-lifecycle-manager/pkg/controller/operators/olm/overrides"
"github.com/stretchr/testify/assert"
corev1 "k8s.io/api/core/v1"
)
var (
	// defaultEnvVars is a representative proxy configuration used as
	// injection input in the env-var tests.
	defaultEnvVars = []corev1.EnvVar{
		corev1.EnvVar{
			Name: "HTTP_PROXY",
			Value: "http://foo.com:8080",
		},
		corev1.EnvVar{
			Name: "HTTPS_PROXY",
			Value: "https://foo.com:443",
		},
		corev1.EnvVar{
			Name: "NO_PROXY",
			Value: "a.com,b.com",
		},
	}

	// defaultVolumeMounts is a single mount ("foo" -> /bar) used as the
	// injected VolumeMount in the tests below.
	defaultVolumeMounts = []corev1.VolumeMount{
		corev1.VolumeMount{
			Name: "foo",
			MountPath: "/bar",
		},
	}

	// defaultVolumes is the volume definition matching defaultVolumeMounts.
	defaultVolumes = []corev1.Volume{
		corev1.Volume{
			Name: "foo",
			VolumeSource: corev1.VolumeSource{},
		},
	}
)
// TestInjectVolumeMountIntoDeployment verifies that InjectVolumeMountsIntoDeployment
// merges injected VolumeMounts into each container: new mounts are appended and
// mounts with a conflicting name are overwritten.
func TestInjectVolumeMountIntoDeployment(t *testing.T) {
	tests := []struct {
		name         string
		podSpec      *corev1.PodSpec
		volumeMounts []corev1.VolumeMount
		expected     *corev1.PodSpec
	}{
		{
			// The container does not define a VolumeMount and is injected with an empty list of VolumeMounts.
			// Expected: The container's VolumeMount list remains empty.
			name: "EmptyVolumeMounts",
			podSpec: &corev1.PodSpec{
				Containers: []corev1.Container{
					corev1.Container{},
				},
			},
			volumeMounts: []corev1.VolumeMount{},
			expected: &corev1.PodSpec{
				Containers: []corev1.Container{
					corev1.Container{},
				},
			},
		},
		{
			// The container does not define a VolumeMount and is injected with a single VolumeMount.
			// Expected: The container contains the injected VolumeMount.
			name: "WithContainerHasNoVolumeMounts",
			podSpec: &corev1.PodSpec{
				Containers: []corev1.Container{
					corev1.Container{},
				},
			},
			volumeMounts: defaultVolumeMounts,
			expected: &corev1.PodSpec{
				Containers: []corev1.Container{
					corev1.Container{
						VolumeMounts: defaultVolumeMounts,
					},
				},
			},
		},
		{
			// The container defines a single VolumeMount which is injected with an empty VolumeMount list.
			// Expected: The container's VolumeMount list is unchanged.
			name: "WithContainerHasVolumeMountsEmptyDefaults",
			podSpec: &corev1.PodSpec{
				Containers: []corev1.Container{
					corev1.Container{
						VolumeMounts: defaultVolumeMounts,
					},
				},
			},
			volumeMounts: []corev1.VolumeMount{},
			expected: &corev1.PodSpec{
				Containers: []corev1.Container{
					corev1.Container{
						VolumeMounts: defaultVolumeMounts,
					},
				},
			},
		},
		{
			// The container defines a single VolumeMount and is injected with a new VolumeMount.
			// Expected: The container's VolumeMount list is updated to contain both VolumeMounts.
			// (Renamed from "WithContainerHasNonOverlappingEnvVar": this case is
			// about VolumeMounts, not env vars.)
			name: "WithContainerHasNonOverlappingVolumeMounts",
			podSpec: &corev1.PodSpec{
				Containers: []corev1.Container{
					corev1.Container{
						VolumeMounts: []corev1.VolumeMount{
							corev1.VolumeMount{
								Name: "bar",
								MountPath: "/foo",
							},
						},
					},
				},
			},
			volumeMounts: defaultVolumeMounts,
			expected: &corev1.PodSpec{
				Containers: []corev1.Container{
					corev1.Container{
						VolumeMounts: []corev1.VolumeMount{
							corev1.VolumeMount{
								Name: "bar",
								MountPath: "/foo",
							},
							corev1.VolumeMount{
								Name: "foo",
								MountPath: "/bar",
							},
						},
					},
				},
			},
		},
		{
			// The container defines a single VolumeMount that has a name conflict with
			// a VolumeMount being injected.
			// Expected: The VolumeMount is overwritten.
			name: "WithContainerHasOverlappingVolumeMounts",
			podSpec: &corev1.PodSpec{
				Containers: []corev1.Container{
					corev1.Container{
						VolumeMounts: []corev1.VolumeMount{
							corev1.VolumeMount{
								Name: "foo",
								MountPath: "/barbar",
							},
						},
					},
				},
			},
			volumeMounts: defaultVolumeMounts,
			expected: &corev1.PodSpec{
				Containers: []corev1.Container{
					corev1.Container{
						VolumeMounts: []corev1.VolumeMount{
							corev1.VolumeMount{
								Name: "foo",
								MountPath: "/bar",
							},
						},
					},
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			overrides.InjectVolumeMountsIntoDeployment(tt.podSpec, tt.volumeMounts)

			podSpecWant := tt.expected
			podSpecGot := tt.podSpec

			assert.Equal(t, podSpecWant, podSpecGot)
		})
	}
}
// TestInjectVolumeIntoDeployment verifies that
// overrides.InjectVolumesIntoDeployment merges the given Volumes into a
// PodSpec: new volumes are appended, and a volume whose name conflicts with
// an injected one is overwritten.
func TestInjectVolumeIntoDeployment(t *testing.T) {
	tests := []struct {
		name     string
		podSpec  *corev1.PodSpec
		volumes  []corev1.Volume
		expected *corev1.PodSpec
	}{
		{
			// Injecting an empty list leaves the PodSpec unchanged.
			name: "EmptyVolumeMounts",
			podSpec: &corev1.PodSpec{
				Containers: []corev1.Container{},
			},
			volumes: []corev1.Volume{},
			expected: &corev1.PodSpec{
				Containers: []corev1.Container{},
			},
		},
		{
			// A PodSpec without volumes receives the injected volumes verbatim.
			name: "WithContainerHasNoVolumeMounts",
			podSpec: &corev1.PodSpec{},
			volumes: defaultVolumes,
			expected: &corev1.PodSpec{
				Volumes: defaultVolumes,
			},
		},
		{
			// Injecting an empty list leaves existing volumes unchanged.
			name: "WithContainerHasVolumeMountsEmptyDefaults",
			podSpec: &corev1.PodSpec{
				Volumes: defaultVolumes,
			},
			volumes: []corev1.Volume{},
			expected: &corev1.PodSpec{
				Volumes: defaultVolumes,
			},
		},
		{
			// A non-conflicting volume is appended after the existing ones.
			name: "WithContainerHasNonOverlappingEnvVar",
			podSpec: &corev1.PodSpec{
				Volumes: []corev1.Volume{
					{Name: "bar", VolumeSource: corev1.VolumeSource{}},
				},
			},
			volumes: defaultVolumes,
			expected: &corev1.PodSpec{
				Volumes: []corev1.Volume{
					{Name: "bar", VolumeSource: corev1.VolumeSource{}},
					{Name: "foo", VolumeSource: corev1.VolumeSource{}},
				},
			},
		},
		{
			// A volume with a conflicting name is overwritten by the injected one.
			name: "WithContainerHasOverlappingVolumeMounts",
			podSpec: &corev1.PodSpec{
				Volumes: []corev1.Volume{
					{Name: "foo"},
				},
			},
			volumes: defaultVolumes,
			expected: &corev1.PodSpec{
				Volumes: []corev1.Volume{
					{Name: "foo", VolumeSource: corev1.VolumeSource{}},
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			overrides.InjectVolumesIntoDeployment(tt.podSpec, tt.volumes)
			assert.Equal(t, tt.expected, tt.podSpec)
		})
	}
}
// TestInjectEnvIntoDeployment verifies that overrides.InjectEnvIntoDeployment
// merges the given env vars into every container of a PodSpec: new vars are
// appended, and vars whose name matches an existing one have their value
// overwritten (including overwriting with an empty value).
func TestInjectEnvIntoDeployment(t *testing.T) {
	tests := []struct {
		name     string
		podSpec  *corev1.PodSpec
		envVar   []corev1.EnvVar
		expected *corev1.PodSpec
	}{
		{
			// A container with no env vars receives all injected vars.
			name: "WithContainerHasNoEnvVar",
			podSpec: &corev1.PodSpec{
				Containers: []corev1.Container{{}},
			},
			envVar: defaultEnvVars,
			expected: &corev1.PodSpec{
				Containers: []corev1.Container{{Env: defaultEnvVars}},
			},
		},
		{
			// Existing non-conflicting vars are kept; injected ones are appended.
			name: "WithContainerHasNonOverlappingEnvVar",
			podSpec: &corev1.PodSpec{
				Containers: []corev1.Container{{
					Env: []corev1.EnvVar{
						{Name: "foo", Value: "foo_value"},
					},
				}},
			},
			envVar: defaultEnvVars,
			expected: &corev1.PodSpec{
				Containers: []corev1.Container{{
					Env: append([]corev1.EnvVar{
						{Name: "foo", Value: "foo_value"},
					}, defaultEnvVars...),
				}},
			},
		},
		{
			// Conflicting vars are overwritten with the injected values.
			name: "WithContainerHasOverlappingEnvVar",
			podSpec: &corev1.PodSpec{
				Containers: []corev1.Container{{
					Env: []corev1.EnvVar{
						{Name: "foo", Value: "foo_value"},
						{Name: "bar", Value: "bar_value"},
					},
				}},
			},
			envVar: []corev1.EnvVar{
				{Name: "foo", Value: "new_foo_value"},
				{Name: "bar", Value: "new_bar_value"},
			},
			expected: &corev1.PodSpec{
				Containers: []corev1.Container{{
					Env: []corev1.EnvVar{
						{Name: "foo", Value: "new_foo_value"},
						{Name: "bar", Value: "new_bar_value"},
					},
				}},
			},
		},
		{
			// Injecting an empty value clears the existing var's value.
			name: "WithContainerEnvVarBeingUnset",
			podSpec: &corev1.PodSpec{
				Containers: []corev1.Container{{
					Env: []corev1.EnvVar{
						{Name: "foo", Value: "foo_value"},
						{Name: "bar", Value: "bar_value"},
					},
				}},
			},
			envVar: []corev1.EnvVar{
				{Name: "bar", Value: ""},
			},
			expected: &corev1.PodSpec{
				Containers: []corev1.Container{{
					Env: []corev1.EnvVar{
						{Name: "foo", Value: "foo_value"},
						{Name: "bar", Value: ""},
					},
				}},
			},
		},
		{
			// Every container of a multi-container PodSpec is updated independently.
			name: "WithMultipleContainers",
			podSpec: &corev1.PodSpec{
				Containers: []corev1.Container{
					{},
					{Env: []corev1.EnvVar{{Name: "foo", Value: "foo_value"}}},
					{Env: []corev1.EnvVar{{Name: "bar", Value: "bar_value"}}},
				},
			},
			envVar: defaultEnvVars,
			expected: &corev1.PodSpec{
				Containers: []corev1.Container{
					{Env: defaultEnvVars},
					{Env: append([]corev1.EnvVar{{Name: "foo", Value: "foo_value"}}, defaultEnvVars...)},
					{Env: append([]corev1.EnvVar{{Name: "bar", Value: "bar_value"}}, defaultEnvVars...)},
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			overrides.InjectEnvIntoDeployment(tt.podSpec, tt.envVar)
			assert.Equal(t, tt.expected, tt.podSpec)
		})
	}
}
|
pu2clr/PCF8574 | extras/apidoc/html/search/all_4.js | <filename>extras/apidoc/html/search/all_4.js
// Doxygen-generated search index for the HTML API docs — do not edit by hand.
var searchData=
[
  ['lookfordevice_7',['lookForDevice',['../group__group01.html#gabb38aa941e8b607557fa8f3f98bf9fc5',1,'PCF']]]
];
|
wreiner/Office365-REST-Python-Client | office365/sharepoint/client_context.py | <filename>office365/sharepoint/client_context.py
import copy
import adal
from office365.runtime.auth.authentication_context import AuthenticationContext
from office365.runtime.auth.client_credential import ClientCredential
from office365.runtime.auth.providers.saml_token_provider import resolve_base_url
from office365.runtime.auth.token_response import TokenResponse
from office365.runtime.auth.user_credential import UserCredential
from office365.runtime.client_runtime_context import ClientRuntimeContext
from office365.runtime.http.http_method import HttpMethod
from office365.runtime.http.request_options import RequestOptions
from office365.runtime.odata.json_light_format import JsonLightFormat
from office365.runtime.odata.odata_batch_request import ODataBatchRequest
from office365.runtime.odata.odata_metadata_level import ODataMetadataLevel
from office365.runtime.odata.odata_request import ODataRequest
from office365.runtime.queries.delete_entity_query import DeleteEntityQuery
from office365.runtime.queries.update_entity_query import UpdateEntityQuery
from office365.sharepoint.sites.site import Site
from office365.sharepoint.webs.context_web_information import ContextWebInformation
from office365.sharepoint.webs.web import Web
def get_tenant_info(url):
    """Derive the Azure AD tenant descriptor from a SharePoint url.

    :param str url: absolute url, e.g. ``https://contoso.sharepoint.com/sites/x``
    :return: dict with ``base_url`` (scheme + host) and ``name``
        (``<first-host-label>.onmicrosoft.com``)
    """
    parts = url.split('://')
    scheme = parts[0]
    host_name = parts[1].split("/")[0]
    first_label = host_name.split(".")[0]
    return {
        "base_url": "{0}://{1}".format(scheme, host_name),
        "name": "{0}.onmicrosoft.com".format(first_label),
    }
class ClientContext(ClientRuntimeContext):
    """SharePoint client context"""

    def __init__(self, base_url, auth_context=None):
        """
        :type base_url: str
        :type auth_context: AuthenticationContext or None
        """
        # Normalize the url: strip one trailing slash so later joins stay consistent.
        if base_url.endswith("/"):
            base_url = base_url[:len(base_url) - 1]
        super(ClientContext, self).__init__(auth_context)
        # Lazily-created root client objects (see the `web` / `site` properties).
        self.__web = None
        self.__site = None
        self._base_url = base_url
        # Cached form-digest info; populated on first POST (see ensure_form_digest).
        self._contextWebInformation = None
        self._pendingRequest = ODataRequest(self, JsonLightFormat(ODataMetadataLevel.Verbose))
        # Attach SharePoint-specific headers (digest, X-HTTP-Method) before every request.
        self._pendingRequest.beforeExecute += self._build_modification_query

    @staticmethod
    def from_url(abs_url):
        """
        Constructs ClientContext from absolute Url
        :param str abs_url: Absolute Url to resource
        :return: ClientContext
        """
        base_url = resolve_base_url(abs_url)
        ctx = ClientContext(base_url)
        result = Web.get_web_url_from_page_url(ctx, abs_url)

        def _init_context_for_web(resp):
            # Swap in the resolved web url once the lookup query has executed.
            ctx._base_url = result.value

        ctx.after_execute(_init_context_for_web)
        return ctx

    @staticmethod
    def connect_with_credentials(base_url, credentials):
        """
        Creates authenticated SharePoint context via user or client credentials
        :param str base_url: Url to Site or Web
        :param ClientCredential or UserCredential credentials: Credentials object """
        ctx = ClientContext(base_url).with_credentials(credentials)
        # Acquire the token eagerly so auth failures surface here, not on first request.
        ctx.authentication_context.acquire_token_func()
        return ctx

    @staticmethod
    def connect_with_certificate(base_url, client_id, thumbprint, cert_path):
        """Creates authenticated SharePoint context via certificate credentials

        :param str cert_path: Path to A PEM encoded certificate private key.
        :param str thumbprint: Hex encoded thumbprint of the certificate.
        :param str client_id: The OAuth client id of the calling application.
        :param str base_url: Url to Site or Web
        """

        def acquire_token():
            # Resolve the AAD tenant/authority from the SharePoint host name.
            tenant_info = get_tenant_info(base_url)
            authority_url = 'https://login.microsoftonline.com/{0}'.format(tenant_info['name'])
            auth_ctx = adal.AuthenticationContext(authority_url)
            resource = tenant_info['base_url']
            with open(cert_path, 'r') as file:
                key = file.read()
            json_token = auth_ctx.acquire_token_with_client_certificate(
                resource,
                client_id,
                key,
                thumbprint)
            return TokenResponse(**json_token)

        ctx_auth = AuthenticationContext(url=base_url)
        ctx_auth.set_token(acquire_token())
        ctx = ClientContext(base_url, ctx_auth)
        return ctx

    def with_credentials(self, credentials):
        """
        Assigns credentials

        :type credentials: UserCredential or ClientCredential
        """
        self._auth_context = AuthenticationContext(url=self._base_url)

        def _acquire_token():
            # Only fetch a token once; subsequent calls are no-ops while authenticated.
            if not self.authentication_context.is_authenticated:
                if isinstance(credentials, ClientCredential):
                    return self.authentication_context.acquire_token_for_app(credentials.clientId,
                                                                             credentials.clientSecret)
                elif isinstance(credentials, UserCredential):
                    return self.authentication_context.acquire_token_for_user(credentials.userName,
                                                                              credentials.password)
                else:
                    raise ValueError("Unknown credential type")

        self._auth_context.acquire_token_func = _acquire_token
        return self

    def execute_batch(self):
        """Construct and submit a batch request"""
        batch_request = ODataBatchRequest(self)

        def _prepare_batch_request(request):
            # Batch POSTs also require a valid form digest header.
            self.ensure_form_digest(request)

        batch_request.beforeExecute += _prepare_batch_request
        batch_request.execute_query()

    def build_request(self):
        # Build the base request, then decorate it with SharePoint control headers.
        request = super(ClientContext, self).build_request()
        self._build_modification_query(request)
        return request

    def pending_request(self):
        """
        :return: ODataRequest
        """
        return self._pendingRequest

    def ensure_form_digest(self, request_options):
        """
        :type request_options: RequestOptions
        """
        # Fetch the digest lazily and cache it for the lifetime of this context.
        if not self._contextWebInformation:
            self._contextWebInformation = ContextWebInformation()
            self.request_form_digest()
        request_options.set_header('X-RequestDigest', self._contextWebInformation.FormDigestValue)

    def request_form_digest(self):
        """Request Form Digest"""
        request = RequestOptions(self.service_root_url() + "contextInfo")
        request.method = HttpMethod.Post
        response = self.execute_request_direct(request)
        json = response.json()
        json_format = JsonLightFormat()
        # The payload is wrapped under the GetContextWebInformation function tag.
        json_format.function_tag_name = "GetContextWebInformation"
        self.pending_request().map_json(json, self._contextWebInformation, json_format)

    def clone(self, url, clear_queries=True):
        """
        Creates a clone of ClientContext

        :param bool clear_queries:
        :param str url: Site Url
        :return ClientContext
        """
        ctx = copy.deepcopy(self)
        ctx._base_url = url
        if clear_queries:
            ctx.clear_queries()
        return ctx

    def _build_modification_query(self, request):
        """
        Constructs SharePoint specific modification OData request

        :type request: RequestOptions
        """
        query = self.current_query
        # Every POST to SharePoint REST requires a form digest header.
        if request.method == HttpMethod.Post:
            self.ensure_form_digest(request)
        # set custom SharePoint control headers
        if isinstance(self._pendingRequest.json_format, JsonLightFormat):
            if isinstance(query, DeleteEntityQuery):
                request.ensure_header("X-HTTP-Method", "DELETE")
                request.ensure_header("IF-MATCH", '*')
            elif isinstance(query, UpdateEntityQuery):
                request.ensure_header("X-HTTP-Method", "MERGE")
                request.ensure_header("IF-MATCH", '*')

    @property
    def web(self):
        """Get Web client object"""
        if not self.__web:
            self.__web = Web(self)
        return self.__web

    @property
    def site(self):
        """Get Site client object"""
        if not self.__site:
            self.__site = Site(self)
        return self.__site

    @property
    def base_url(self):
        # Root url of the site/web this context talks to (no trailing slash).
        return self._base_url

    @property
    def authentication_context(self):
        return self._auth_context

    def service_root_url(self):
        # Base endpoint of the SharePoint REST API for this site.
        return "{0}/_api/".format(self.base_url)
|
leilinen/flink | flink-state-backends/flink-statebackend-changelog/src/main/java/org/apache/flink/state/changelog/AbstractStateChangeLogger.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.state.changelog;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;
import org.apache.flink.runtime.state.RegisteredKeyValueStateBackendMetaInfo;
import org.apache.flink.runtime.state.RegisteredPriorityQueueStateBackendMetaInfo;
import org.apache.flink.runtime.state.RegisteredStateMetaInfoBase;
import org.apache.flink.runtime.state.changelog.StateChangelogWriter;
import org.apache.flink.runtime.state.heap.InternalKeyContext;
import org.apache.flink.runtime.state.metainfo.StateMetaInfoSnapshot;
import org.apache.flink.runtime.state.metainfo.StateMetaInfoSnapshotReadersWriters;
import org.apache.flink.util.function.ThrowingConsumer;
import javax.annotation.Nullable;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import static org.apache.flink.runtime.state.metainfo.StateMetaInfoSnapshot.BackendStateType.KEY_VALUE;
import static org.apache.flink.runtime.state.metainfo.StateMetaInfoSnapshot.BackendStateType.PRIORITY_QUEUE;
import static org.apache.flink.runtime.state.metainfo.StateMetaInfoSnapshotReadersWriters.CURRENT_STATE_META_INFO_SNAPSHOT_VERSION;
import static org.apache.flink.state.changelog.StateChangeOperation.ADD;
import static org.apache.flink.state.changelog.StateChangeOperation.ADD_ELEMENT;
import static org.apache.flink.state.changelog.StateChangeOperation.ADD_OR_UPDATE_ELEMENT;
import static org.apache.flink.state.changelog.StateChangeOperation.CLEAR;
import static org.apache.flink.state.changelog.StateChangeOperation.METADATA;
import static org.apache.flink.state.changelog.StateChangeOperation.REMOVE_ELEMENT;
import static org.apache.flink.state.changelog.StateChangeOperation.SET;
import static org.apache.flink.state.changelog.StateChangeOperation.SET_INTERNAL;
import static org.apache.flink.util.Preconditions.checkNotNull;
/**
 * Base {@link StateChangeLogger} that serializes state changes and appends them to a
 * {@link StateChangelogWriter}. A versioned snapshot of the state's metadata is lazily
 * written exactly once, before the first data change, so that recovery can re-create
 * the state descriptor before replaying changes.
 */
abstract class AbstractStateChangeLogger<Key, Value, Ns> implements StateChangeLogger<Value, Ns> {
    /** Pseudo key-group for records not scoped to a single key group (metadata). */
    static final int COMMON_KEY_GROUP = -1;
    protected final StateChangelogWriter<?> stateChangelogWriter;
    protected final InternalKeyContext<Key> keyContext;
    protected final RegisteredStateMetaInfoBase metaInfo;
    private final StateMetaInfoSnapshot.BackendStateType stateType;
    // Whether the one-time metadata record has already been appended.
    private boolean metaDataWritten = false;

    public AbstractStateChangeLogger(
            StateChangelogWriter<?> stateChangelogWriter,
            InternalKeyContext<Key> keyContext,
            RegisteredStateMetaInfoBase metaInfo) {
        this.stateChangelogWriter = checkNotNull(stateChangelogWriter);
        this.keyContext = checkNotNull(keyContext);
        this.metaInfo = checkNotNull(metaInfo);
        // Derive the backend state type from the meta-info subclass; only
        // key/value and priority-queue states are supported by this logger.
        if (metaInfo instanceof RegisteredKeyValueStateBackendMetaInfo) {
            this.stateType = KEY_VALUE;
        } else if (metaInfo instanceof RegisteredPriorityQueueStateBackendMetaInfo) {
            this.stateType = PRIORITY_QUEUE;
        } else {
            throw new IllegalArgumentException("Unsupported state type: " + metaInfo);
        }
    }

    /** Logs a full-value update; a {@code null} value is treated as clearing the state. */
    @Override
    public void valueUpdated(Value newValue, Ns ns) throws IOException {
        if (newValue == null) {
            valueCleared(ns);
        } else {
            log(SET, out -> serializeValue(newValue, out), ns);
        }
    }

    /** Same as {@link #valueUpdated}, but recorded as an internal (backend-initiated) update. */
    @Override
    public void valueUpdatedInternal(Value newValue, Ns ns) throws IOException {
        if (newValue == null) {
            valueCleared(ns);
        } else {
            log(SET_INTERNAL, out -> serializeValue(newValue, out), ns);
        }
    }

    /** Writes {@code value} to {@code out} using the state's value serializer. */
    protected abstract void serializeValue(Value value, DataOutputViewStreamWrapper out)
            throws IOException;

    @Override
    public void valueAdded(Value addedValue, Ns ns) throws IOException {
        log(ADD, out -> serializeValue(addedValue, out), ns);
    }

    @Override
    public void valueCleared(Ns ns) throws IOException {
        log(CLEAR, ns);
    }

    @Override
    public void valueElementAdded(
            ThrowingConsumer<DataOutputViewStreamWrapper, IOException> dataSerializer, Ns ns)
            throws IOException {
        log(ADD_ELEMENT, dataSerializer, ns);
    }

    @Override
    public void valueElementAddedOrUpdated(
            ThrowingConsumer<DataOutputViewStreamWrapper, IOException> dataSerializer, Ns ns)
            throws IOException {
        log(ADD_OR_UPDATE_ELEMENT, dataSerializer, ns);
    }

    @Override
    public void valueElementRemoved(
            ThrowingConsumer<DataOutputViewStreamWrapper, IOException> dataSerializer, Ns ns)
            throws IOException {
        log(REMOVE_ELEMENT, dataSerializer, ns);
    }

    /** Logs an operation without a payload (e.g. CLEAR) for the current key group. */
    protected void log(StateChangeOperation op, Ns ns) throws IOException {
        logMetaIfNeeded();
        stateChangelogWriter.append(keyContext.getCurrentKeyGroupIndex(), serialize(op, ns, null));
    }

    /** Logs an operation with an optional payload for the current key group. */
    protected void log(
            StateChangeOperation op,
            @Nullable ThrowingConsumer<DataOutputViewStreamWrapper, IOException> dataWriter,
            Ns ns)
            throws IOException {
        logMetaIfNeeded();
        stateChangelogWriter.append(
                keyContext.getCurrentKeyGroupIndex(), serialize(op, ns, dataWriter));
    }

    // Writes the state's metadata snapshot once, before any data change is logged.
    private void logMetaIfNeeded() throws IOException {
        if (!metaDataWritten) {
            // todo: add StateChangelogWriter.append() version without a keygroup
            //  when all callers and implementers are merged (FLINK-21356 or later)
            stateChangelogWriter.append(
                    COMMON_KEY_GROUP,
                    serializeRaw(
                            out -> {
                                out.writeByte(METADATA.getCode());
                                out.writeInt(CURRENT_STATE_META_INFO_SNAPSHOT_VERSION);
                                StateMetaInfoSnapshotReadersWriters.getWriter()
                                        .writeStateMetaInfoSnapshot(metaInfo.snapshot(), out);
                                writeDefaultValueAndTtl(out);
                            }));
            metaDataWritten = true;
        }
    }

    /** Hook for subclasses to append a default value / TTL config to the metadata record. */
    protected void writeDefaultValueAndTtl(DataOutputViewStreamWrapper out) throws IOException {}

    // Encodes one change record: op code, state name, state type, scope (namespace),
    // then the optional payload. Field order must match the reader side.
    private byte[] serialize(
            StateChangeOperation op,
            Ns ns,
            @Nullable ThrowingConsumer<DataOutputViewStreamWrapper, IOException> dataWriter)
            throws IOException {
        return serializeRaw(
                wrapper -> {
                    wrapper.writeByte(op.getCode());
                    // todo: optimize in FLINK-22944 by either writing short code or grouping and
                    //  writing once (same for key, ns)
                    wrapper.writeUTF(metaInfo.getName());
                    wrapper.writeByte(stateType.getCode());
                    serializeScope(ns, wrapper);
                    if (dataWriter != null) {
                        dataWriter.accept(wrapper);
                    }
                });
    }

    /** Writes the change's scope (e.g. current key and namespace) to {@code out}. */
    protected abstract void serializeScope(Ns ns, DataOutputViewStreamWrapper out)
            throws IOException;

    // Runs the writer against a fresh in-memory stream and returns the bytes.
    private byte[] serializeRaw(
            ThrowingConsumer<DataOutputViewStreamWrapper, IOException> dataWriter)
            throws IOException {
        // todo: optimize performance
        try (ByteArrayOutputStream out = new ByteArrayOutputStream();
                DataOutputViewStreamWrapper wrapper = new DataOutputViewStreamWrapper(out)) {
            dataWriter.accept(wrapper);
            return out.toByteArray();
        }
    }
}
|
jonjomckay/jooby | jooby/src/main/java/io/jooby/internal/handler/SendAttachment.java | <gh_stars>0
/**
* Jooby https://jooby.io
* Apache License Version 2.0 https://jooby.io/LICENSE.txt
* Copyright 2014 <NAME>
*/
package io.jooby.internal.handler;
import io.jooby.AttachedFile;
import io.jooby.Context;
import io.jooby.Route;
import javax.annotation.Nonnull;
/**
 * Decorator handler that renders an {@link AttachedFile} produced by the
 * downstream handler by streaming it to the client. If the downstream handler
 * already started the response, its result is returned untouched; any failure
 * is routed to {@link Context#sendError(Throwable)}.
 */
public class SendAttachment implements LinkedHandler {

  private final Route.Handler next;

  public SendAttachment(Route.Handler next) {
    this.next = next;
  }

  @Nonnull @Override public Object apply(@Nonnull Context ctx) {
    try {
      Object value = next.apply(ctx);
      return ctx.isResponseStarted()
          ? value
          : ctx.send((AttachedFile) value);
    } catch (Throwable cause) {
      return ctx.sendError(cause);
    }
  }

  @Override public Route.Handler next() {
    return next;
  }
}
|
PraydE007/uChat | client/src/mx_init_signal_10.c | #include "client.h"
/*
** Wires GTK "clicked" callbacks for the profile-related widgets (part 10 of
** the GUI signal setup): profile close/apply buttons, the password-change
** button (opens the password window) and the profile-image chooser.
** Each handler receives the shared gui state (or the target window) as data.
*/
void mx_init_signal_10(t_s_glade *gui) {
    g_signal_connect(gui->b_p_close, "clicked",
                     G_CALLBACK(mx_close_profile), gui);
    g_signal_connect(gui->b_p_apply, "clicked",
                     G_CALLBACK(mx_change_profile), gui);
    g_signal_connect(gui->b_password, "clicked",
                     G_CALLBACK(mx_open_window), gui->w_password);
    g_signal_connect(gui->b_p_f_image, "clicked",
                     G_CALLBACK(mx_set_profile_img), gui);
}
|
chyzman/victus | src/main/java/com/glisco/victus/hearts/content/GoldenAspect.java | <reponame>chyzman/victus
package com.glisco.victus.hearts.content;
import com.glisco.victus.Victus;
import com.glisco.victus.hearts.HeartAspect;
import com.glisco.victus.hearts.HeartAspectComponent;
import net.minecraft.entity.damage.DamageSource;
import net.minecraft.entity.effect.StatusEffectInstance;
import net.minecraft.entity.effect.StatusEffects;
import net.minecraft.entity.player.PlayerEntity;
/** Heart aspect that grants Absorption when the heart it occupies breaks. */
public class GoldenAspect extends HeartAspect {
    // Aspect registration: id "golden", numeric parameters 16 and 100 whose meaning
    // is defined by HeartAspect.Type (NOTE(review): presumably texture index and
    // recharge duration — confirm against HeartAspect), display color 0xFFF77B.
    public static final Type TYPE = new Type(Victus.id("golden"), 16, 100, 0xFFF77B, GoldenAspect::new);

    public GoldenAspect(PlayerEntity player) {
        super(player, TYPE);
    }

    // Applies Absorption for 600 ticks when this heart breaks; earlier hearts in the
    // player's list yield a stronger amplifier. Returns false — NOTE(review): return
    // semantics are defined by HeartAspect (presumably "do not cancel the break").
    @Override
    public boolean handleBreak(DamageSource source, float damage, float originalHealth) {
        final var aspects = Victus.ASPECTS.get(player);
        int index = findIndex(aspects);
        // Fraction of hearts after this one: maxHealth/2 == heart count.
        // NOTE(review): if findIndex returns -1 this exceeds 1 — assumed unreachable
        // since handleBreak should only fire for an attached aspect; confirm.
        float percentage = 1f - ((index + 0f) / (player.getMaxHealth() / 2));
        // Map the fraction onto amplifier levels, clamped to at least 0 (Absorption I).
        int level = Math.max(0, Math.round(percentage * 5) - 1);
        player.addStatusEffect(new StatusEffectInstance(StatusEffects.ABSORPTION, 600, level));
        return false;
    }

    // Linear scan for this aspect's slot in the component; -1 if not present.
    private int findIndex(HeartAspectComponent component) {
        for (int i = 0; i < component.effectiveSize(); i++) {
            if (component.getAspect(i) == this) return i;
        }
        return -1;
    }
}
|
Akarin-project/Paper2Srg | src/main/java/net/minecraft/server/dedicated/ServerHangWatchdog.java | package net.minecraft.server.dedicated;
import java.io.File;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Timer;
import java.util.TimerTask;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import net.minecraft.crash.CrashReport;
import net.minecraft.crash.CrashReportCategory;
import net.minecraft.server.MinecraftServer;
/**
 * Watchdog thread for a dedicated server: while the server reports itself running,
 * it compares the time of the last completed tick against a configured limit and,
 * when exceeded, dumps all threads into a crash report and force-kills the JVM.
 * (Obfuscated MCP field/method names are annotated where their role is evident.)
 */
public class ServerHangWatchdog implements Runnable {

    private static final Logger field_180251_a = LogManager.getLogger();
    // The watched server instance.
    private final DedicatedServer field_180249_b;
    // Max allowed single-tick duration in milliseconds (from server config).
    private final long field_180250_c;

    public ServerHangWatchdog(DedicatedServer dedicatedserver) {
        this.field_180249_b = dedicatedserver;
        this.field_180250_c = dedicatedserver.func_175593_aQ();
    }

    public void run() {
        while (this.field_180249_b.func_71278_l()) { // while the server is running
            long i = this.field_180249_b.func_175587_aJ(); // last tick timestamp (ms)
            long j = MinecraftServer.func_130071_aq();     // current time (ms)
            long k = j - i;                                // elapsed since last tick

            if (k > this.field_180250_c) {
                ServerHangWatchdog.field_180251_a.fatal("A single server tick took {} seconds (should be max {})", String.format("%.2f", new Object[] { Float.valueOf((float) k / 1000.0F)}), String.format("%.2f", new Object[] { Float.valueOf(0.05F)}));
                ServerHangWatchdog.field_180251_a.fatal("Considering it to be crashed, server will forcibly shutdown.");
                // Capture a full thread dump; attach the server thread's stack as
                // the crash report's "cause" so it shows where the hang occurred.
                ThreadMXBean threadmxbean = ManagementFactory.getThreadMXBean();
                ThreadInfo[] athreadinfo = threadmxbean.dumpAllThreads(true, true);
                StringBuilder stringbuilder = new StringBuilder();
                Error error = new Error();
                ThreadInfo[] athreadinfo1 = athreadinfo;
                int l = athreadinfo.length;

                for (int i1 = 0; i1 < l; ++i1) {
                    ThreadInfo threadinfo = athreadinfo1[i1];

                    if (threadinfo.getThreadId() == this.field_180249_b.func_175583_aK().getId()) {
                        error.setStackTrace(threadinfo.getStackTrace());
                    }

                    stringbuilder.append(threadinfo);
                    stringbuilder.append("\n");
                }

                CrashReport crashreport = new CrashReport("Watching Server", error);
                this.field_180249_b.func_71230_b(crashreport); // let the server add its details
                CrashReportCategory crashreportsystemdetails = crashreport.func_85058_a("Thread Dump");

                crashreportsystemdetails.func_71507_a("Threads", (Object) stringbuilder);
                File file = new File(new File(this.field_180249_b.func_71238_n(), "crash-reports"), "crash-" + (new SimpleDateFormat("yyyy-MM-dd_HH.mm.ss")).format(new Date()) + "-server.txt");

                if (crashreport.func_147149_a(file)) { // write the report to disk
                    ServerHangWatchdog.field_180251_a.error("This crash report has been saved to: {}", file.getAbsolutePath());
                } else {
                    ServerHangWatchdog.field_180251_a.error("We were unable to save this crash report to disk.");
                }

                this.func_180248_a(); // force shutdown — normally does not return
            }

            try {
                // Sleep until the next check is due.
                // NOTE(review): if the hang branch ever returned, (i + limit - j) would be
                // negative here and sleep would throw IllegalArgumentException (not caught
                // below) — in practice unreachable because func_180248_a exits the JVM.
                Thread.sleep(i + this.field_180250_c - j);
            } catch (InterruptedException interruptedexception) {
                // NOTE(review): interrupt is swallowed without re-interrupting the thread;
                // acceptable only because this loop is the thread's sole activity.
                ;
            }
        }
    }

    // Force-terminates the JVM: schedules a hard halt(1) in 10s as a backstop,
    // then attempts a regular System.exit(1); any failure falls back to halt(1).
    private void func_180248_a() {
        try {
            Timer timer = new Timer();
            timer.schedule(new TimerTask() {
                public void run() {
                    Runtime.getRuntime().halt(1);
                }
            }, 10000L);
            System.exit(1);
        } catch (Throwable throwable) {
            Runtime.getRuntime().halt(1);
        }
    }
}
|
esagecloudOS/esage-esage-etk | lib/abiquo-etk.rb | <reponame>esagecloudOS/esage-esage-etk
# Prepend each bundled vendor gem's lib/ directory to Ruby's load path so the
# requires below resolve without Bundler.
Dir[File.dirname(__FILE__) + '/../vendor/*'].each do |dir|
  $: << dir + '/lib'
end
require 'logger'
require 'rubygems'
require 'term/ansicolor'
require 'rpm-utils'
require 'logger'
require 'nokogiri'
require 'mixlib/cli'
require 'abiquo'
#
# CONFIG CONSTANTS
#
ENV['LANG'] = 'C' # force the C locale so parsed command output is stable
# Well-known installation paths.
JAVA_BIN = "/usr/java/default/bin/java"
ABIQUO_BASE_DIR='/opt/abiquo'
TOMCAT_DIR='/opt/abiquo/tomcat'
TOMCAT_PID_FILE = '/opt/abiquo/tomcat/work/catalina.pid'
ABIQUO_VERSION = "1.6"
# Per-component XML configuration files.
ABIQUO_SERVER_CONFIG = '/opt/abiquo/config/server.xml'
ABIQUO_VIRTUALFACTORY_CONFIG = '/opt/abiquo/config/virtualfactory.xml'
ABIQUO_VSM_CONFIG = '/opt/abiquo/config/vsm.xml'
ABIQUO_NODECOLLECTOR_CONFIG = '/opt/abiquo/config/nodecollector.xml'
ABIQUO_AM_CONFIG = '/opt/abiquo/config/am.xml'
ABIQUO_BPMASYNC_CONFIG = '/opt/abiquo/config/bpm-async.xml'
# Placeholder: edition detection is not implemented yet; always returns nil.
def abiquo_edition
end
# Parses a "key=value" properties file into a Hash of stripped strings.
# Blank lines are skipped; a line without '=' maps its key to ''. For a line
# with several '=', only the text between the first two is kept as the value.
def abiquo_server_settings(file = '/opt/abiquo/config/abiquo.properties')
  settings = {}
  File.read(file).each_line do |line|
    entry = line.strip.chomp
    next if entry.empty?
    key, val = entry.split('=')
    settings[key.strip.chomp] = val ? val.strip.chomp : ''
  end
  settings
end
# Remote-services settings live in the same properties file as the server
# settings. Bug fix: the +file+ argument was previously accepted but silently
# ignored; it is now forwarded (default behavior is unchanged).
def abiquo_rs_settings(file = '/opt/abiquo/config/abiquo.properties')
  abiquo_server_settings(file)
end
# Root directory of the Abiquo installation.
def abiquo_base_dir
  return ABIQUO_BASE_DIR
end
# Directory of the Tomcat instance bundled with Abiquo.
def tomcat_base_dir
  return TOMCAT_DIR
end
# True when the Abiquo base directory exists AND the abiquo-core RPM is installed.
def abiquo_installed?
  return (File.directory?('/opt/abiquo') && RPMUtils.rpm_installed?('abiquo-core'))
end
# Evaluates +path+ (XPath) against the root of +config+ (a Nokogiri document)
# and returns the matched text with surrounding whitespace removed.
def config_property(config, path)
  config.root.xpath(path).text.chomp.strip
end
# Names of the webapps deployed under Tomcat (one directory per component).
def abiquo_components_installed
  c = Dir["#{TOMCAT_DIR}/webapps/*"].find_all { |d| File.directory? d }
  c.map { |d| d.split('/').last }
end
# True when chkconfig reports +service+ enabled for runlevel 3.
# NOTE(review): +service+ is interpolated into a shell command line — callers
# must pass trusted service names only.
def system_service_on?(service)
  not `/sbin/chkconfig --list #{service}|grep 3:on`.empty?
end
# True when a SysV init script exists for +service_name+.
def service_installed?(service_name)
  File.exist?("/etc/rc.d/init.d/#{service_name}")
end
# Parses the Abiquo server XML config; returns a Nokogiri document, or nil
# when the file does not exist.
def abiquo_server_config
  cfg = nil
  if File.exist? ABIQUO_SERVER_CONFIG
    cfg = Nokogiri::XML(File.new(ABIQUO_SERVER_CONFIG))
  end
  return cfg
end
# Parses the virtualfactory XML config; returns a Nokogiri document or nil.
def abiquo_virtualfactory_config
  cfg = nil
  if File.exist? ABIQUO_VIRTUALFACTORY_CONFIG
    cfg= Nokogiri::XML(File.new(ABIQUO_VIRTUALFACTORY_CONFIG))
  end
  return cfg
end
# Parses the VSM XML config; returns a Nokogiri document or nil.
def abiquo_vsm_config
  cfg = nil
  if File.exist? ABIQUO_VSM_CONFIG
    cfg = Nokogiri::XML(File.new(ABIQUO_VSM_CONFIG))
  end
  return cfg
end
# Parses the nodecollector XML config; returns a Nokogiri document or nil.
def abiquo_nodecollector_config
  cfg = nil
  if File.exist? ABIQUO_NODECOLLECTOR_CONFIG
    cfg = Nokogiri::XML(File.new(ABIQUO_NODECOLLECTOR_CONFIG))
  end
  return cfg
end
# Parses the AM (Appliance Manager) XML config; returns a Nokogiri document,
# or nil when the file does not exist. Consistency fix: an explicit
# `return cfg` was added to match the sibling *_config helpers (the previous
# implicit if-expression return yielded the same values).
def abiquo_am_config
  cfg = nil
  if File.exist? ABIQUO_AM_CONFIG
    cfg = Nokogiri::XML(File.new(ABIQUO_AM_CONFIG))
  end
  return cfg
end
# Parses the bpm-async XML config; returns a Nokogiri document or nil.
def abiquo_bpmasync_config
  cfg = nil
  if File.exist? ABIQUO_BPMASYNC_CONFIG
    cfg = Nokogiri::XML(File.new(ABIQUO_BPMASYNC_CONFIG))
  end
  return cfg
end
module AETK
class Log
def self.debug(mgs)
instance.debug msg
end
def self.info(msg)
instance.info msg
end
def self.error(msg)
instance.error msg
end
def self.warn(msg)
instance.warn msg
end
def self.instance(file = '/var/log/abiquo-etk.log')
begin
@@logger ||= Logger.new file
rescue Exception
@@logger ||= Logger.new $stderr
end
end
end
module OutputFormatters
def two_cols(first, second, justification = 40)
puts "#{first}".ljust(justification) + "#{second}"
end
end
def detect_install_type
AETK::System.detect_install_type
end
# Host-introspection helpers: installed Abiquo version and install-type
# detection based on which webapps are deployed under the Abiquo Tomcat.
#
# Refactor: the original repeated a fragile `found = [...].each { break
# if ... }` truthiness idiom ten times (truthy only when the loop was not
# broken, i.e. every directory existed). Extracted it into the
# `webapps_deployed?` helper using `all?`, which states the intent
# directly. Detection order and returned symbols are unchanged.
class System
  # Version string parsed from /etc/abiquo-release, or nil when the file
  # is missing or does not match the expected "Version: ..." format.
  def self.abiquo_version
    File.read('/etc/abiquo-release').match(/Version:(.*)/)[1].strip.split(/(-|\s)/)[0].to_s.strip.chomp rescue nil
  end

  # True when every webapp named in +dirs+ is deployed under the Abiquo
  # Tomcat webapps directory.
  def self.webapps_deployed?(dirs)
    dirs.all? { |dir| File.directory?(ABIQUO_BASE_DIR + "/tomcat/webapps/#{dir}") }
  end

  # Classify this host as a single install type. Order matters: the more
  # specific webapp combinations are checked first.
  def self.detect_install_type
    return :monolithic      if webapps_deployed?(['bpm-async', 'am', 'server'])
    return :rs_plus_v2v     if webapps_deployed?(['am', 'virtualfactory', 'bpm-async'])
    return :remote_services if webapps_deployed?(['am', 'virtualfactory'])
    return :server          if webapps_deployed?(['server', 'api'])
    return :v2v             if webapps_deployed?(['bpm-async'])
    return :unknown
  end

  # Like detect_install_type, but collects ALL matching roles (a host may
  # match several), plus cloud-node variants detected via installed RPMs.
  # Returns [:unknown] when nothing matches.
  def self.detect_install_type2
    itype = []
    itype << :monolithic      if webapps_deployed?(['bpm-async', 'am', 'server'])
    itype << :rs_plus_v2v     if webapps_deployed?(['am', 'virtualfactory', 'bpm-async'])
    itype << :remote_services if webapps_deployed?(['am', 'virtualfactory'])
    itype << :server          if webapps_deployed?(['server', 'api'])
    itype << :v2v             if webapps_deployed?(['bpm-async'])
    itype << :cloudnode_vbox if RPMUtils.rpm_installed?('abiquo-virtualbox')
    itype << :cloudnode_kvm if RPMUtils.rpm_installed?('abiquo-cloud-node') and \
      RPMUtils.rpm_installed?('kvm') and not RPMUtils.rpm_installed?('xen')
    itype << :cloudnode_xen if RPMUtils.rpm_installed?('abiquo-cloud-node') and \
      RPMUtils.rpm_installed?('xen')
    itype.empty? ? [:unknown] : itype
  end
end
# Load the check plugins matching the installed Abiquo version, plus any
# extra plugins from +extra_plugins_dir+ when given.
#
# Fix: `$stdout.sync = true` was executed inside the plugin loop on every
# iteration although it is loop-invariant; hoisted it before the loop.
def self.load_plugins(extra_plugins_dir = nil)
  puts "Loading plugins...".yellow.bold
  version = System.abiquo_version
  plugins = Dir[File.dirname(__FILE__) + "/checks/#{version}/*.rb"].sort
  if extra_plugins_dir and File.directory? extra_plugins_dir
    puts "Loading extra plugins...".yellow.bold
    plugins.concat( Dir[extra_plugins_dir + '/*.rb'].sort )
  end
  log = Log.instance
  if log.level == Logger::DEBUG
    plugins.each do |p|
      log.debug " #{File.basename(p,'.rb')}..."
    end
  end
  # Unbuffer stdout once so plugin output appears immediately.
  $stdout.sync = true
  plugins.each do |p|
    load p
  end
end
end
|
amintasvrp/database-as-a-service | dbaas/physical/admin/__init__.py | <gh_stars>100-1000
# -*- coding:utf-8 -*-
from django.contrib import admin
from .. import models
from .databaseinfra import DatabaseInfraAdmin
from .engine import EngineAdmin
from .engine_type import EngineTypeAdmin
from .plan import PlanAdmin
from .host import HostAdmin
from .environment import EnvironmentAdmin
from .environment_group import EnvironmentGroupAdmin
from .replication_topology import ReplicationTopologyAdmin
from .disk_offering import DiskOfferingAdmin
from .parameter import ParameterAdmin
from .offering import OfferingAdmin
from .cloud import CloudAdmin
from .pool import PoolAdmin
from .core_replication_topology import CoreReplicationTopologyAdmin
from .ip import IpAdmin
from .vip import VipAdmin
from .vip_instance_group import VipInstanceGroupAdmin
# Model -> ModelAdmin registrations for the "physical" app, kept in the
# original registration order. A None admin class means the model is
# registered with Django's default ModelAdmin.
_REGISTRATIONS = (
    (models.DatabaseInfra, DatabaseInfraAdmin),
    (models.Engine, EngineAdmin),
    (models.EngineType, EngineTypeAdmin),
    (models.Plan, PlanAdmin),
    (models.Host, HostAdmin),
    (models.Offering, OfferingAdmin),
    (models.Environment, EnvironmentAdmin),
    (models.EnvironmentGroup, EnvironmentGroupAdmin),
    (models.ReplicationTopology, ReplicationTopologyAdmin),
    (models.DiskOffering, DiskOfferingAdmin),
    (models.Parameter, ParameterAdmin),
    (models.Cloud, CloudAdmin),
    (models.Script, None),
    (models.Pool, PoolAdmin),
    (models.CoreReplicationTopology, CoreReplicationTopologyAdmin),
    (models.Ip, IpAdmin),
    (models.Vip, VipAdmin),
    (models.VipInstanceGroup, VipInstanceGroupAdmin),
)

for _model, _admin_class in _REGISTRATIONS:
    if _admin_class is None:
        admin.site.register(_model)
    else:
        admin.site.register(_model, _admin_class)
|
dplbsd/soc2013 | head/sys/sys/buf.h | /*-
* Copyright (c) 1982, 1986, 1989, 1993
* The Regents of the University of California. All rights reserved.
* (c) UNIX System Laboratories, Inc.
* All or some portions of this file are derived from material licensed
* to the University of California by American Telephone and Telegraph
* Co. or Unix System Laboratories, Inc. and are reproduced herein with
* the permission of UNIX System Laboratories, Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 4. Neither the name of the University nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* @(#)buf.h 8.9 (Berkeley) 3/30/95
* $FreeBSD: soc2013/dpl/head/sys/sys/buf.h 252377 2013-05-31 00:43:41Z jeff $
*/
#ifndef _SYS_BUF_H_
#define _SYS_BUF_H_
#include <sys/bufobj.h>
#include <sys/queue.h>
#include <sys/lock.h>
#include <sys/lockmgr.h>
struct bio;
struct buf;
struct bufobj;
struct mount;
struct vnode;
struct uio;
/*
* To avoid including <ufs/ffs/softdep.h>
*/
LIST_HEAD(workhead, worklist);
/*
* These are currently used only by the soft dependency code, hence
* are stored once in a global variable. If other subsystems wanted
* to use these hooks, a pointer to a set of bio_ops could be added
* to each buffer.
*/
extern struct bio_ops {
void (*io_start)(struct buf *);
void (*io_complete)(struct buf *);
void (*io_deallocate)(struct buf *);
int (*io_countdeps)(struct buf *, int);
} bioops;
struct vm_object;
typedef unsigned char b_xflags_t;
/*
* The buffer header describes an I/O operation in the kernel.
*
* NOTES:
* b_bufsize, b_bcount. b_bufsize is the allocation size of the
* buffer, either DEV_BSIZE or PAGE_SIZE aligned. b_bcount is the
* originally requested buffer size and can serve as a bounds check
* against EOF. For most, but not all uses, b_bcount == b_bufsize.
*
* b_dirtyoff, b_dirtyend. Buffers support piecemeal, unaligned
* ranges of dirty data that need to be written to backing store.
* The range is typically clipped at b_bcount ( not b_bufsize ).
*
* b_resid. Number of bytes remaining in I/O. After an I/O operation
* completes, b_resid is usually 0 indicating 100% success.
*
* All fields are protected by the buffer lock except those marked:
* V - Protected by owning bufobj lock
* Q - Protected by the buf queue lock
* D - Protected by an dependency implementation specific lock
*/
/*
 * The buffer header itself.  See the NOTES comment above for the
 * b_bcount/b_bufsize, b_dirtyoff/b_dirtyend and b_resid contracts, and
 * for which locks protect the (V)/(Q)/(D) annotated fields.
 */
struct buf {
	struct bufobj *b_bufobj;	/* Owning buffer object. */
	long b_bcount;			/* Originally requested size (see NOTES). */
	void *b_caller1;		/* Opaque pointer for the caller's use. */
	caddr_t b_data;			/* Address of the data area. */
	int b_error;			/* I/O error value. */
	uint8_t b_iocmd;		/* I/O command — presumably BIO_* ops; confirm. */
	uint8_t b_ioflags;		/* I/O flags — presumably BIO_* flags; confirm. */
	off_t b_iooffset;		/* Byte offset of the I/O. */
	long b_resid;			/* Bytes remaining in I/O (see NOTES). */
	void (*b_iodone)(struct buf *);	/* I/O completion callback. */
	daddr_t b_blkno; /* Underlying physical block number. */
	off_t b_offset; /* Offset into file. */
	TAILQ_ENTRY(buf) b_bobufs; /* (V) Buffer's associated vnode. */
	uint32_t b_vflags; /* (V) BV_* flags */
	TAILQ_ENTRY(buf) b_freelist; /* (Q) Free list position inactive. */
	unsigned short b_qindex; /* (Q) buffer queue index */
	uint32_t b_flags; /* B_* flags. */
	b_xflags_t b_xflags; /* extra flags */
	struct lock b_lock; /* Buffer lock */
	long b_bufsize; /* Allocated buffer size. */
	long b_runningbufspace; /* when I/O is running, pipelining */
	caddr_t b_kvabase; /* base kva for buffer */
	caddr_t b_kvaalloc; /* allocated kva for B_KVAALLOC */
	int b_kvasize; /* size of kva for buffer */
	daddr_t b_lblkno; /* Logical block number. */
	struct vnode *b_vp; /* Device vnode. */
	int b_dirtyoff; /* Offset in buffer of dirty region. */
	int b_dirtyend; /* Offset of end of dirty region. */
	struct ucred *b_rcred; /* Read credentials reference. */
	struct ucred *b_wcred; /* Write credentials reference. */
	void *b_saveaddr; /* Original b_addr for physio. */
	union pager_info {
		int pg_reqpage;
	} b_pager;			/* Pager-private state. */
	union cluster_info {
		TAILQ_HEAD(cluster_list_head, buf) cluster_head;
		TAILQ_ENTRY(buf) cluster_entry;
	} b_cluster;			/* Clustered-I/O linkage (see cluster_save). */
	struct vm_page *b_pages[btoc(MAXPHYS)];	/* Pages backing the buffer. */
	int b_npages;			/* Number of valid entries in b_pages. */
	struct workhead b_dep; /* (D) List of filesystem dependencies. */
	void *b_fsprivate1;		/* Filesystem-private slots. */
	void *b_fsprivate2;
	void *b_fsprivate3;
	int b_pin_count;		/* NOTE(review): pin refcount, see bpin/bunpin. */
};
#define b_object b_bufobj->bo_object
/*
* These flags are kept in b_flags.
*
* Notes:
*
* B_ASYNC VOP calls on bp's are usually async whether or not
* B_ASYNC is set, but some subsystems, such as NFS, like
* to know what is best for the caller so they can
* optimize the I/O.
*
* B_PAGING Indicates that bp is being used by the paging system or
* some paging system and that the bp is not linked into
* the b_vp's clean/dirty linked lists or ref counts.
* Buffer vp reassignments are illegal in this case.
*
* B_CACHE This may only be set if the buffer is entirely valid.
* The situation where B_DELWRI is set and B_CACHE is
* clear MUST be committed to disk by getblk() so
* B_DELWRI can also be cleared. See the comments for
* getblk() in kern/vfs_bio.c. If B_CACHE is clear,
* the caller is expected to clear BIO_ERROR and B_INVAL,
* set BIO_READ, and initiate an I/O.
*
* The 'entire buffer' is defined to be the range from
* 0 through b_bcount.
*
* B_MALLOC Request that the buffer be allocated from the malloc
* pool, DEV_BSIZE aligned instead of PAGE_SIZE aligned.
*
* B_CLUSTEROK This flag is typically set for B_DELWRI buffers
* by filesystems that allow clustering when the buffer
* is fully dirty and indicates that it may be clustered
* with other adjacent dirty buffers. Note the clustering
* may not be used with the stage 1 data write under NFS
* but may be used for the commit rpc portion.
*
* B_VMIO Indicates that the buffer is tied into an VM object.
* The buffer's data is always PAGE_SIZE aligned even
* if b_bufsize and b_bcount are not. ( b_bufsize is
* always at least DEV_BSIZE aligned, though ).
*
* B_DIRECT Hint that we should attempt to completely free
* the pages underlying the buffer. B_DIRECT is
* sticky until the buffer is released and typically
* only has an effect when B_RELBUF is also set.
*
*/
#define B_AGE 0x00000001 /* Move to age queue when I/O done. */
#define B_NEEDCOMMIT 0x00000002 /* Append-write in progress. */
#define B_ASYNC 0x00000004 /* Start I/O, do not wait. */
#define B_DIRECT 0x00000008 /* direct I/O flag (pls free vmio) */
#define B_DEFERRED 0x00000010 /* Skipped over for cleaning */
#define B_CACHE 0x00000020 /* Bread found us in the cache. */
#define B_VALIDSUSPWRT 0x00000040 /* Valid write during suspension. */
#define B_DELWRI 0x00000080 /* Delay I/O until buffer reused. */
#define B_PERSISTENT 0x00000100 /* Perm. ref'ed while EXT2FS mounted. */
#define B_DONE 0x00000200 /* I/O completed. */
#define B_EINTR 0x00000400 /* I/O was interrupted */
#define B_UNMAPPED 0x00000800 /* KVA is not mapped. */
#define B_KVAALLOC 0x00001000 /* But allocated. */
#define B_INVAL 0x00002000 /* Does not contain valid info. */
#define B_BARRIER 0x00004000 /* Write this and all preceeding first. */
#define B_NOCACHE 0x00008000 /* Do not cache block after use. */
#define B_MALLOC 0x00010000 /* malloced b_data */
#define B_CLUSTEROK 0x00020000 /* Pagein op, so swap() can count it. */
#define B_000400000 0x00040000 /* Available flag. */
#define B_000800000 0x00080000 /* Available flag. */
#define B_00100000 0x00100000 /* Available flag. */
#define B_DIRTY 0x00200000 /* Needs writing later (in EXT2FS). */
#define B_RELBUF 0x00400000 /* Release VMIO buffer. */
#define B_00800000 0x00800000 /* Available flag. */
#define B_NOCOPY 0x01000000 /* Don't copy-on-write this buf. */
#define B_INFREECNT 0x02000000 /* buf is counted in numfreebufs */
#define B_PAGING 0x04000000 /* volatile paging I/O -- bypass VMIO */
#define B_MANAGED 0x08000000 /* Managed by FS. */
#define B_RAM 0x10000000 /* Read ahead mark (flag) */
#define B_VMIO 0x20000000 /* VMIO flag */
#define B_CLUSTER 0x40000000 /* pagein op, so swap() can count it */
#define B_REMFREE 0x80000000 /* Delayed bremfree */
#define PRINT_BUF_FLAGS "\20\40remfree\37cluster\36vmio\35ram\34managed" \
"\33paging\32infreecnt\31nocopy\30b23\27relbuf\26dirty\25b20" \
"\24b19\23b18\22clusterok\21malloc\20nocache\17b14\16inval" \
"\15b12\14b11\13eintr\12done\11persist\10delwri\7validsuspwrt" \
"\6cache\5deferred\4direct\3async\2needcommit\1age"
/*
* These flags are kept in b_xflags.
*/
#define BX_VNDIRTY 0x00000001 /* On vnode dirty list */
#define BX_VNCLEAN 0x00000002 /* On vnode clean list */
#define BX_BKGRDWRITE 0x00000010 /* Do writes in background */
#define BX_BKGRDMARKER 0x00000020 /* Mark buffer for splay tree */
#define BX_ALTDATA 0x00000040 /* Holds extended data */
#define PRINT_BUF_XFLAGS "\20\7altdata\6bkgrdmarker\5bkgrdwrite\2clean\1dirty"
#define NOOFFSET (-1LL) /* No buffer offset calculated yet */
/*
* These flags are kept in b_vflags.
*/
#define BV_SCANNED 0x00000001 /* VOP_FSYNC funcs mark written bufs */
#define BV_BKGRDINPROG 0x00000002 /* Background write in progress */
#define BV_BKGRDWAIT 0x00000004 /* Background write waiting */
#define PRINT_BUF_VFLAGS "\20\3bkgrdwait\2bkgrdinprog\1scanned"
#ifdef _KERNEL
/*
* Buffer locking
*/
extern const char *buf_wmesg; /* Default buffer lock message */
#define BUF_WMESG "bufwait"
#include <sys/proc.h> /* XXX for curthread */
#include <sys/mutex.h>
/*
* Initialize a lock.
*/
#define BUF_LOCKINIT(bp) \
lockinit(&(bp)->b_lock, PRIBIO + 4, buf_wmesg, 0, 0)
/*
*
* Get a lock sleeping non-interruptably until it becomes available.
*/
#define BUF_LOCK(bp, locktype, interlock) \
_lockmgr_args_rw(&(bp)->b_lock, (locktype), (interlock), \
LK_WMESG_DEFAULT, LK_PRIO_DEFAULT, LK_TIMO_DEFAULT, \
LOCK_FILE, LOCK_LINE)
/*
* Get a lock sleeping with specified interruptably and timeout.
*/
#define BUF_TIMELOCK(bp, locktype, interlock, wmesg, catch, timo) \
_lockmgr_args_rw(&(bp)->b_lock, (locktype) | LK_TIMELOCK, \
(interlock), (wmesg), (PRIBIO + 4) | (catch), (timo), \
LOCK_FILE, LOCK_LINE)
/*
* Release a lock. Only the acquiring process may free the lock unless
* it has been handed off to biodone.
*/
#define BUF_UNLOCK(bp) do { \
KASSERT(((bp)->b_flags & B_REMFREE) == 0, \
("BUF_UNLOCK %p while B_REMFREE is still set.", (bp))); \
\
(void)_lockmgr_args(&(bp)->b_lock, LK_RELEASE, NULL, \
LK_WMESG_DEFAULT, LK_PRIO_DEFAULT, LK_TIMO_DEFAULT, \
LOCK_FILE, LOCK_LINE); \
} while (0)
/*
* Check if a buffer lock is recursed.
*/
#define BUF_LOCKRECURSED(bp) \
lockmgr_recursed(&(bp)->b_lock)
/*
* Check if a buffer lock is currently held.
*/
#define BUF_ISLOCKED(bp) \
lockstatus(&(bp)->b_lock)
/*
* Free a buffer lock.
*/
#define BUF_LOCKFREE(bp) \
lockdestroy(&(bp)->b_lock)
/*
* Print informations on a buffer lock.
*/
#define BUF_LOCKPRINTINFO(bp) \
lockmgr_printinfo(&(bp)->b_lock)
/*
* Buffer lock assertions.
*/
#if defined(INVARIANTS) && defined(INVARIANT_SUPPORT)
#define BUF_ASSERT_LOCKED(bp) \
_lockmgr_assert(&(bp)->b_lock, KA_LOCKED, LOCK_FILE, LOCK_LINE)
#define BUF_ASSERT_SLOCKED(bp) \
_lockmgr_assert(&(bp)->b_lock, KA_SLOCKED, LOCK_FILE, LOCK_LINE)
#define BUF_ASSERT_XLOCKED(bp) \
_lockmgr_assert(&(bp)->b_lock, KA_XLOCKED, LOCK_FILE, LOCK_LINE)
#define BUF_ASSERT_UNLOCKED(bp) \
_lockmgr_assert(&(bp)->b_lock, KA_UNLOCKED, LOCK_FILE, LOCK_LINE)
#define BUF_ASSERT_HELD(bp)
#define BUF_ASSERT_UNHELD(bp)
#else
#define BUF_ASSERT_LOCKED(bp)
#define BUF_ASSERT_SLOCKED(bp)
#define BUF_ASSERT_XLOCKED(bp)
#define BUF_ASSERT_UNLOCKED(bp)
#define BUF_ASSERT_HELD(bp)
#define BUF_ASSERT_UNHELD(bp)
#endif
#ifdef _SYS_PROC_H_ /* Avoid #include <sys/proc.h> pollution */
/*
* When initiating asynchronous I/O, change ownership of the lock to the
* kernel. Once done, the lock may legally released by biodone. The
* original owning process can no longer acquire it recursively, but must
* wait until the I/O is completed and the lock has been freed by biodone.
*/
#define BUF_KERNPROC(bp) \
_lockmgr_disown(&(bp)->b_lock, LOCK_FILE, LOCK_LINE)
#endif
/*
* Find out if the lock has waiters or not.
*/
#define BUF_LOCKWAITERS(bp) \
lockmgr_waiters(&(bp)->b_lock)
#endif /* _KERNEL */
/*
 * Head of a queue of buffers plus per-queue bookkeeping.
 * NOTE(review): last_pblkno and the insert/switch pointers look like
 * legacy disksort state — confirm against remaining users.
 */
struct buf_queue_head {
	TAILQ_HEAD(buf_queue, buf) queue;	/* The buffers themselves. */
	daddr_t last_pblkno;			/* Last physical block queued. */
	struct buf *insert_point;
	struct buf *switch_point;
};
/*
* This structure describes a clustered I/O. It is stored in the b_saveaddr
* field of the buffer on which I/O is done. At I/O completion, cluster
* callback uses the structure to parcel I/O's to individual buffers, and
* then free's this structure.
*/
struct cluster_save {
long bs_bcount; /* Saved b_bcount. */
long bs_bufsize; /* Saved b_bufsize. */
void *bs_saveaddr; /* Saved b_addr. */
int bs_nchildren; /* Number of associated buffers. */
struct buf **bs_children; /* List of associated buffers. */
};
#ifdef _KERNEL
static __inline int
bwrite(struct buf *bp)
{
KASSERT(bp->b_bufobj != NULL, ("bwrite: no bufobj bp=%p", bp));
KASSERT(bp->b_bufobj->bo_ops != NULL, ("bwrite: no bo_ops bp=%p", bp));
KASSERT(bp->b_bufobj->bo_ops->bop_write != NULL,
("bwrite: no bop_write bp=%p", bp));
return (BO_WRITE(bp->b_bufobj, bp));
}
static __inline void
bstrategy(struct buf *bp)
{
KASSERT(bp->b_bufobj != NULL, ("bstrategy: no bufobj bp=%p", bp));
KASSERT(bp->b_bufobj->bo_ops != NULL,
("bstrategy: no bo_ops bp=%p", bp));
KASSERT(bp->b_bufobj->bo_ops->bop_strategy != NULL,
("bstrategy: no bop_strategy bp=%p", bp));
BO_STRATEGY(bp->b_bufobj, bp);
}
static __inline void
buf_start(struct buf *bp)
{
if (bioops.io_start)
(*bioops.io_start)(bp);
}
static __inline void
buf_complete(struct buf *bp)
{
if (bioops.io_complete)
(*bioops.io_complete)(bp);
}
static __inline void
buf_deallocate(struct buf *bp)
{
if (bioops.io_deallocate)
(*bioops.io_deallocate)(bp);
}
static __inline int
buf_countdeps(struct buf *bp, int i)
{
if (bioops.io_countdeps)
return ((*bioops.io_countdeps)(bp, i));
else
return (0);
}
#endif /* _KERNEL */
/*
* Zero out the buffer's data area.
*/
#define clrbuf(bp) { \
bzero((bp)->b_data, (u_int)(bp)->b_bcount); \
(bp)->b_resid = 0; \
}
/*
* Flags for getblk's last parameter.
*/
#define GB_LOCK_NOWAIT 0x0001 /* Fail if we block on a buf lock. */
#define GB_NOCREAT 0x0002 /* Don't create a buf if not found. */
#define GB_NOWAIT_BD 0x0004 /* Do not wait for bufdaemon. */
#define GB_UNMAPPED 0x0008 /* Do not mmap buffer pages. */
#define GB_KVAALLOC 0x0010 /* But allocate KVA. */
#ifdef _KERNEL
extern int nbuf; /* The number of buffer headers */
extern long maxswzone; /* Max KVA for swap structures */
extern long maxbcache; /* Max KVA for buffer cache */
extern long runningbufspace;
extern long hibufspace;
extern int dirtybufthresh;
extern int bdwriteskip;
extern int dirtybufferflushes;
extern int altbufferflushes;
extern struct buf *buf; /* The buffer headers. */
extern struct buf *swbuf; /* Swap I/O buffer headers. */
extern int nswbuf; /* Number of swap I/O buffer headers. */
extern int cluster_pbuf_freecnt; /* Number of pbufs for clusters */
extern int vnode_pbuf_freecnt; /* Number of pbufs for vnode pager */
extern caddr_t unmapped_buf;
void runningbufwakeup(struct buf *);
void waitrunningbufspace(void);
caddr_t kern_vfs_bio_buffer_alloc(caddr_t v, long physmem_est);
void bufinit(void);
void bdata2bio(struct buf *bp, struct bio *bip);
void bwillwrite(void);
int buf_dirty_count_severe(void);
void bremfree(struct buf *);
void bremfreef(struct buf *); /* XXX Force bremfree, only for nfs. */
#define bread(vp, blkno, size, cred, bpp) \
breadn_flags(vp, blkno, size, NULL, NULL, 0, cred, 0, bpp)
#define bread_gb(vp, blkno, size, cred, gbflags, bpp) \
breadn_flags(vp, blkno, size, NULL, NULL, 0, cred, \
gbflags, bpp)
#define breadn(vp, blkno, size, rablkno, rabsize, cnt, cred, bpp) \
breadn_flags(vp, blkno, size, rablkno, rabsize, cnt, cred, 0, bpp)
int breadn_flags(struct vnode *, daddr_t, int, daddr_t *, int *, int,
struct ucred *, int, struct buf **);
void breada(struct vnode *, daddr_t *, int *, int, struct ucred *);
void bdwrite(struct buf *);
void bawrite(struct buf *);
void babarrierwrite(struct buf *);
int bbarrierwrite(struct buf *);
void bdirty(struct buf *);
void bundirty(struct buf *);
void bufstrategy(struct bufobj *, struct buf *);
void brelse(struct buf *);
void bqrelse(struct buf *);
int vfs_bio_awrite(struct buf *);
struct buf * getpbuf(int *);
struct buf *incore(struct bufobj *, daddr_t);
struct buf *gbincore(struct bufobj *, daddr_t);
struct buf *getblk(struct vnode *, daddr_t, int, int, int, int);
struct buf *geteblk(int, int);
int bufwait(struct buf *);
int bufwrite(struct buf *);
void bufdone(struct buf *);
void bufdone_finish(struct buf *);
void bd_speedup(void);
int cluster_read(struct vnode *, u_quad_t, daddr_t, long,
struct ucred *, long, int, int, struct buf **);
int cluster_wbuild(struct vnode *, long, daddr_t, int, int);
void cluster_write(struct vnode *, struct buf *, u_quad_t, int, int);
void vfs_bio_bzero_buf(struct buf *bp, int base, int size);
void vfs_bio_set_valid(struct buf *, int base, int size);
void vfs_bio_clrbuf(struct buf *);
void vfs_busy_pages(struct buf *, int clear_modify);
void vfs_unbusy_pages(struct buf *);
int vmapbuf(struct buf *, int);
void vunmapbuf(struct buf *);
void relpbuf(struct buf *, int *);
void brelvp(struct buf *);
void bgetvp(struct vnode *, struct buf *);
void pbgetbo(struct bufobj *bo, struct buf *bp);
void pbgetvp(struct vnode *, struct buf *);
void pbrelbo(struct buf *);
void pbrelvp(struct buf *);
int allocbuf(struct buf *bp, int size);
void reassignbuf(struct buf *);
struct buf *trypbuf(int *);
void bwait(struct buf *, u_char, const char *);
void bdone(struct buf *);
void bpin(struct buf *);
void bunpin(struct buf *);
void bunpin_wait(struct buf *);
#endif /* _KERNEL */
#endif /* !_SYS_BUF_H_ */
|
carloseduardomarques/java-fundamentals-poo | 2Disciplina/2Disciplina/src/unidade3/grafico/GenericsApp.java | <reponame>carloseduardomarques/java-fundamentals-poo
package unidade3.grafico;
import java.util.ArrayList;
import java.util.List;
/**
 * Small demo of generics: a typed {@code List<Shape>} holding concrete
 * subtypes, then viewed through an unbounded wildcard reference.
 */
public class GenericsApp {
    public static void main(String[] args) {
        List<Shape> shapes = new ArrayList<>();
        shapes.add(new Circle());
        shapes.add(new Rectangle());
        // Unbounded wildcard view of the same list; kept (though unused)
        // because demonstrating List<?> assignment is the point of the demo.
        List<?> wildcardView = shapes;
    }
}
|
smarter/scalameta | tests/src/test/scala/show/ScalaSuite.scala | import org.scalatest._
import scala.meta._
import scala.meta.internal.{ ast => impl }
import scala.meta.dialects.Scala211
class ScalaSuite extends InferSuite {
def templStatForceInfer(code: String)(implicit dialect: Dialect) = forceInferAll(super.templStat(code))
def tpeForceInfer(code: String)(implicit dialect: Dialect) = forceInferAll(super.tpe(code))
def sourceForceInfer(code: String)(implicit dialect: Dialect) = forceInferAll(super.source(code))
test("val x: Int (raw)") {
val tree = templStatForceInfer("val x: Int")
assert(forceInferAll(tree).show[Structure] === "Decl.Val(Nil, List(Pat.Var.Term(Term.Name(\"x\"))), Type.Name(\"Int\"))")
}
test("val x: Int (code)") {
val tree = templStatForceInfer("val x: Int")
assert(forceInferAll(tree).show[Syntax] === "val x: Int")
}
test("~(1 + 2) + ~x.y(z) + (~x).y(z)") {
val tree = templStatForceInfer("~(1 + 2) + ~x.y(z) + (~x).y(z)")
assert(forceInferAll(tree.asInstanceOf[impl.Tree]).show[Syntax] === "~(1 + 2) + ~x.y(z) + (~x).y(z)")
}
/*test("(a + b + c) && (a + (b + c)) && (a :: b :: c) && ((a :: b) :: c)") {
val tree = templStatForceInfer("(a + b + c) && (a + (b + c)) && (a :: b :: c) && ((a :: b) :: c)")
assert(tree.show[Syntax] === "a + b + c && a + (b + c) && (a :: b :: c) && ((a :: b) :: c)")
}*/
test("(x map y).foo") {
val tree = templStatForceInfer("(x map y).foo")
assert(forceInferAll(tree).show[Syntax] === "(x map y).foo")
}
test("string literals with newlines and double quotes") {
val tree = templStatForceInfer("""{
val x = QQQ
x
QQQ
val y = "\""
}""".replace("QQQ", "\"\"\""))
assert(tree.show[Structure] === """Term.Block(List(Defn.Val(Nil, List(Pat.Var.Term(Term.Name("x"))), None, Lit.String("%n x%n ")), Defn.Val(Nil, List(Pat.Var.Term(Term.Name("y"))), None, Lit.String("\""))))""".replace("%n", escapedEOL))
assert(forceInferAll(tree).show[Syntax] === """
|{
| val x = QQQ
| x
| QQQ
| val y = "\""
|}
""".trim.stripMargin.replace("QQQ", "\"\"\""))
}
test("interpolations") {
val tree = templStatForceInfer("""{
val x = q"123 + $x + ${foo(123)} + 456"
val y = QQQ
$x
$y
..$z
QQQ
}""".replace("QQQ", "\"\"\""))
assert(tree.show[Structure] === """Term.Block(List(Defn.Val(Nil, List(Pat.Var.Term(Term.Name("x"))), None, Term.Interpolate(Term.Name("q"), List(Lit.String("123 + "), Lit.String(" + "), Lit.String(" + 456")), List(Term.Name("x"), Term.Apply(Term.Name("foo"), List(Lit.Int(123)))))), Defn.Val(Nil, List(Pat.Var.Term(Term.Name("y"))), None, Lit.String("%n $x%n $y%n ..$z%n "))))""".replace("%n", escapedEOL))
assert(forceInferAll(tree).show[Syntax] === """
|{
| val x = q"123 + $x + ${foo(123)} + 456"
| val y = QQQ
| $x
| $y
| ..$z
| QQQ
|}
""".trim.stripMargin.replace("QQQ", "\"\"\""))
}
test("foo.bar(bar) { baz }") {
val tree = templStatForceInfer("foo.bar(bar) { baz }")
assert(tree.show[Syntax] === """
|foo.bar(bar) {
| baz
|}
""".trim.stripMargin)
}
test("Template.self stringifications") {
assert(templStatForceInfer("new { val x = 2 }").show[Syntax] === "new { val x = 2 }")
assert(templStatForceInfer("new { self => val x = 2 }").show[Syntax] === "new { self => val x = 2 }")
assert(templStatForceInfer("new { self: Int => val x = 2 }").show[Syntax] === "new { self: Int => val x = 2 }")
assert(templStatForceInfer("""
new {
val x = 2
val y = 3
}
""").show[Syntax] === """
|new {
| val x = 2
| val y = 3
|}
""".trim.stripMargin)
assert(templStatForceInfer("""
new { self =>
val x = 2
val y = 3
}
""").show[Syntax] === """
|new { self =>
| val x = 2
| val y = 3
|}
""".trim.stripMargin)
assert(templStatForceInfer("""
new { self: Int =>
val x = 2
val y = 3
}
""").show[Syntax] === """
|new { self: Int =>
| val x = 2
| val y = 3
|}
""".trim.stripMargin)
assert(templStatForceInfer("class B { x: B => }").show[Syntax] === "class B { x: B => }")
}
test("new X") {
assert(templStatForceInfer("new X").show[Syntax] === "new X")
assert(templStatForceInfer("new X {}").show[Syntax] === "new X {}")
}
test("ascribe and annotate") {
assert(templStatForceInfer("_: Int").show[Syntax] === "_: Int")
assert(templStatForceInfer("(_: Int) + 2").show[Syntax] === "(_: Int) + 2")
assert(templStatForceInfer("x: @foo").show[Syntax] === "x: @foo")
assert(templStatForceInfer("(x: @foo) + 2").show[Syntax] === "(x: @foo) + 2")
}
test("compound types") {
assert(tpeForceInfer("Foo").show[Syntax] === "Foo")
// TODO: Commented, as InferSuite does not replac names
// Revisit once InferSuite replaces names (will require another TQL func. for inferAndReparseSuite)
//assert(tpeForceInfer("Foo {}").show[Syntax] === "Foo")
assert(tpeForceInfer("Foo { type T = Int }").show[Syntax] === "Foo { type T = Int }")
assert(tpeForceInfer("Foo { type T = Int; type U <: String }").show[Syntax] === "Foo { type T = Int; type U <: String }")
assert(tpeForceInfer("Foo with Bar").show[Syntax] === "Foo with Bar")
// TODO: revisit this once we have trivia in place
// assert(tpe("Foo with Bar {}").show[Syntax] === "Foo with Bar {}")
assert(tpeForceInfer("Foo with Bar {}").show[Syntax] === "Foo with Bar")
assert(tpeForceInfer("Foo with Bar { type T = Int }").show[Syntax] === "Foo with Bar { type T = Int }")
assert(tpeForceInfer("Foo with Bar { type T = Int; type U <: String }").show[Syntax] === "Foo with Bar { type T = Int; type U <: String }")
}
test("packages") {
assert(sourceForceInfer("package foo.bar; class C").show[Syntax] === s"package foo.bar${EOL}class C")
assert(sourceForceInfer("package foo.bar; class C; class D").show[Syntax] === s"package foo.bar${EOL}class C${EOL}class D")
// TODO: revisit this once we have trivia in place
// assert(source("package foo.bar { class C }").show[Syntax] === s"package foo.bar {${EOL} class C${EOL}}")
// assert(source("package foo.bar { class C; class D }").show[Syntax] === s"package foo.bar {${EOL} class C${EOL} class D${EOL}}")
assert(sourceForceInfer("package foo.bar { class C }").show[Syntax] === s"package foo.bar${EOL}class C")
assert(sourceForceInfer("package foo.bar { class C; class D }").show[Syntax] === s"package foo.bar${EOL}class C${EOL}class D")
}
test("type parameter mods") {
assert(sourceForceInfer("class C[@foo T]").show[Syntax] === "class C[@foo T]")
assert(sourceForceInfer("class C[+T]").show[Syntax] === "class C[+T]")
assert(sourceForceInfer("class C[@foo +T]").show[Syntax] === "class C[@foo +T]")
}
test("primary constructor mods") {
assert(sourceForceInfer("class C").show[Syntax] === "class C")
assert(sourceForceInfer("class C private").show[Syntax] === "class C private")
assert(sourceForceInfer("class C @foo(x)").show[Syntax] === "class C @foo(x)")
assert(sourceForceInfer("class C @foo(x) private").show[Syntax] === "class C @foo(x) private")
assert(sourceForceInfer("class C(x: Int)").show[Syntax] === "class C(x: Int)")
assert(sourceForceInfer("class C private (x: Int)").show[Syntax] === "class C private (x: Int)")
assert(sourceForceInfer("class C @foo(x) (x: Int)").show[Syntax] === "class C @foo(x) (x: Int)")
assert(sourceForceInfer("class C @foo(x) private (x: Int)").show[Syntax] === "class C @foo(x) private (x: Int)")
}
test("parentheses in patterns") {
assert(templStatForceInfer("x match { case (xs: List[Int]) :+ x => ??? }").show[Syntax] === """
|x match {
| case (xs: List[Int]) :+ x => ???
|}
""".trim.stripMargin)
}
test("List(x, y) :: z") {
assert(templStatForceInfer("List(x, y) :: z").show[Syntax] == "List(x, y) :: z")
assert(templStatForceInfer("x match { case List(x, y) :: z => ??? }").show[Syntax] === """
|x match {
| case List(x, y) :: z => ???
|}
""".trim.stripMargin)
}
test("secondary ctor - expr") {
assert(sourceForceInfer("class C(x: Int) { def this() = this(2) }").show[Syntax] === "class C(x: Int) { def this() = this(2) }")
}
test("secondary ctor - block") {
assert(sourceForceInfer("class C(x: Int) { def this() { this(2); println(\"OBLIVION!!!\") } }").show[Syntax] === """
|class C(x: Int) {
| def this() {
| this(2)
| println("OBLIVION!!!")
| }
|}
""".trim.stripMargin)
}
test("case semicolons") {
assert(templStatForceInfer("x match { case y => foo1; foo2 }").show[Syntax] === """
|x match {
| case y =>
| foo1
| foo2
|}
""".trim.stripMargin)
}
test("assorted literals") {
assert(templStatForceInfer("true").show[Syntax] === "true")
assert(templStatForceInfer("false").show[Syntax] === "false")
assert(templStatForceInfer("0").show[Syntax] === "0")
assert(templStatForceInfer("0l").show[Syntax] === "0L")
assert(templStatForceInfer("0L").show[Syntax] === "0L")
assert(templStatForceInfer("0f").show[Syntax] === "0.0F")
assert(templStatForceInfer("0F").show[Syntax] === "0.0F")
assert(templStatForceInfer("0.0").show[Syntax] === "0.0")
assert(templStatForceInfer("0d").show[Syntax] === "0.0")
assert(templStatForceInfer("0D").show[Syntax] === "0.0")
assert(templStatForceInfer("'0'").show[Syntax] === "'0'")
assert(templStatForceInfer("\"0\"").show[Syntax] === "\"0\"")
assert(templStatForceInfer("'zero").show[Syntax] === "'zero")
assert(templStatForceInfer("null").show[Syntax] === "null")
assert(templStatForceInfer("()").show[Syntax] === "()")
}
test("context and view bounds") {
assert(templStatForceInfer("class C[T: List, U <% Int]").show[Syntax] === "class C[T: List, U <% Int]")
assert(templStatForceInfer("def m[T: List, U <% Int] = ???").show[Syntax] === "def m[T: List, U <% Int] = ???")
}
test("some tricky parenthesization") {
assert(templStatForceInfer("if (1) 2 else 3 + 4").show[Syntax] === "if (1) 2 else 3 + 4")
assert(templStatForceInfer("(if (1) 2 else 3) + 4").show[Syntax] === "(if (1) 2 else 3) + 4")
assert(templStatForceInfer("if (1) 2 else 3 match { case _ => }").show[Syntax] === s"if (1) 2 else 3 match {${EOL} case _ =>${EOL}}")
assert(templStatForceInfer("(if (1) 2 else 3) match { case _ => }").show[Syntax] === s"(if (1) 2 else 3) match {${EOL} case _ =>${EOL}}")
assert(templStatForceInfer("unit.toCheck += (() => body)").show[Syntax] === "unit.toCheck += (() => body)")
assert(templStatForceInfer("({ foo1; foo2 }).orElse(bar)").show[Syntax] === s"{${EOL} foo1${EOL} foo2${EOL}}.orElse(bar)")
assert(templStatForceInfer("(foo match { case _ => }).orElse(bar)").show[Syntax] === s"(foo match {${EOL} case _ =>${EOL}}).orElse(bar)")
assert(templStatForceInfer("foo || (if (cond) bar else baz)").show[Syntax] === "foo || (if (cond) bar else baz)")
assert(templStatForceInfer("foo && (bar match { case _ => })").show[Syntax] === s"foo && (bar match {${EOL} case _ =>${EOL}})")
assert(templStatForceInfer("\"foo \" + (if (cond) bar else baz)").show[Syntax] === "\"foo \" + (if (cond) bar else baz)")
assert(templStatForceInfer("foo match { case bar @ (_: T1 | _: T2) => }").show[Syntax] === s"foo match {${EOL} case bar @ (_: T1 | _: T2) =>${EOL}}")
assert(templStatForceInfer("foo match { case A + B / C => }").show[Syntax] === s"foo match {${EOL} case A + B / C =>${EOL}}")
assert(templStatForceInfer("foo match { case (A + B) / C => }").show[Syntax] === s"foo match {${EOL} case (A + B) / C =>${EOL}}")
assert(templStatForceInfer("foo match { case A + (B / C) => }").show[Syntax] === s"foo match {${EOL} case A + B / C =>${EOL}}")
assert(templStatForceInfer("foo match { case bar :: Nil :: Nil => }").show[Syntax] === s"foo match {${EOL} case bar :: Nil :: Nil =>${EOL}}")
assert(templStatForceInfer("foo match { case (bar :: Nil) :: Nil => }").show[Syntax] === s"foo match {${EOL} case (bar :: Nil) :: Nil =>${EOL}}")
assert(templStatForceInfer("@(foo @foo) class Bar").show[Syntax] === "@(foo @foo) class Bar")
assert(templStatForceInfer("(foo: Foo): @foo").show[Syntax] === "(foo: Foo): @foo")
assert(templStatForceInfer("type T = A + B / C").show[Syntax] === "type T = A + B / C")
assert(templStatForceInfer("type T = (A + B) / C").show[Syntax] === "type T = A + B / C")
assert(templStatForceInfer("type T = A + (B / C)").show[Syntax] === "type T = A + (B / C)")
assert(templStatForceInfer("type T = A :: B :: C").show[Syntax] === "type T = A :: B :: C")
assert(templStatForceInfer("type T = (A :: B) :: C").show[Syntax] === "type T = (A :: B) :: C")
assert(templStatForceInfer("foo match { case _: A | _: B => }").show[Syntax] === s"foo match {${EOL} case _: A | _: B =>${EOL}}")
assert(templStatForceInfer("foo match { case _: A | _: B | _: C => }").show[Syntax] === s"foo match {${EOL} case _: A | _: B | _: C =>${EOL}}")
}
  // Round-trip golden tests for the scala.meta syntax printer: parse a snippet,
  // force token inference, and assert the pretty-printed output byte-for-byte.
  test("more trickiness") {
    // Trailing-underscore identifiers need a space before ':' to stay parseable.
    assert(templStatForceInfer("def foo(bar_ : Int) = ???").show[Syntax] === "def foo(bar_ : Int) = ???")
    assert(templStatForceInfer("class C[T_ : Foo]").show[Syntax] === "class C[T_ : Foo]")
    assert(templStatForceInfer("val scala_ : NameType = ???").show[Syntax] === "val scala_ : NameType = ???")
  }
  test("class C extends (() => Int)") {
    assert(templStatForceInfer("class C extends (() => Int)").show[Syntax] === "class C extends (() => Int)")
  }
  test("class C(x: Int)(implicit y: String, z: Boolean)") {
    assert(templStatForceInfer("class C(x: Int)(implicit y: String, z: Boolean)").show[Syntax] === "class C(x: Int)(implicit y: String, z: Boolean)")
  }
  test("class C(var x: Int)") {
    assert(templStatForceInfer("class C(var x: Int)").show[Syntax] === "class C(var x: Int)")
  }
  // Qualified access modifiers (private[X] / protected[X]) must survive printing.
  test("private/protected within something") {
    assert(templStatForceInfer("""
      class C {
        private[this] val x = 1
        private[D] val y = 2
        protected[this] val z = 3
        protected[D] val w = 4
      }
    """).show[Syntax] === """
      |class C {
      |  private[this] val x = 1
      |  private[D] val y = 2
      |  protected[this] val z = 3
      |  protected[D] val w = 4
      |}
    """.stripMargin.trim)
  }
  test("case List(xs @ _*)") {
    val tree = pat("List(xs @ _*)")
    assert(tree.show[Structure] === "Pat.Extract(Term.Name(\"List\"), Nil, List(Pat.Bind(Pat.Var.Term(Term.Name(\"xs\")), Pat.Arg.SeqWildcard())))")
    assert(tree.show[Syntax] === "List(xs @ _*)")
  }
  test("package foo; class C; package baz { class D }") {
    val tree = source("package foo; class C; package baz { class D }")
    assert(tree.show[Structure] === "Source(List(Pkg(Term.Name(\"foo\"), List(Defn.Class(Nil, Type.Name(\"C\"), Nil, Ctor.Primary(Nil, Ctor.Ref.Name(\"this\"), Nil), Template(Nil, Nil, Term.Param(Nil, Name.Anonymous(), None, None), None)), Pkg(Term.Name(\"baz\"), List(Defn.Class(Nil, Type.Name(\"D\"), Nil, Ctor.Primary(Nil, Ctor.Ref.Name(\"this\"), Nil), Template(Nil, Nil, Term.Param(Nil, Name.Anonymous(), None, None), None))))))))")
    assert(forceInferAll(tree).show[Syntax] === "package foo\nclass C\npackage baz {\n  class D\n}")
  }
  // Backquoted identifiers stay quoted in patterns; capitalized names do not need quoting.
  test("case `x`") {
    val tree1 = pat("`x`")
    assert(tree1.show[Structure] === "Term.Name(\"x\")")
    val tree2 = pat("f(`x`)")
    assert(tree2.show[Structure] === "Pat.Extract(Term.Name(\"f\"), Nil, List(Term.Name(\"x\")))")
    assert(tree2.show[Syntax] === "f(`x`)")
    val tree3 = pat("X")
    assert(tree3.show[Structure] === "Term.Name(\"X\")")
    assert(tree3.show[Syntax] === "X")
    val tree4 = pat("f(X)")
    assert(tree4.show[Structure] === "Pat.Extract(Term.Name(\"f\"), Nil, List(Term.Name(\"X\")))")
    assert(tree4.show[Syntax] === "f(X)")
  }
  test("case _: Int") {
    assert(pat("_: Int").show[Syntax] === "_: Int")
  }
  test("case _: t") {
    assert(pat("_: t").show[Syntax] === "_: t")
  }
  test("case _: F[t]") {
    assert(pat("_: F[t]").show[Syntax] === "_: F[t]")
  }
  test("case _: F[_]") {
    assert(pat("_: F[_]").show[Syntax] === "_: F[_]")
  }
  test("case _: (t Map u)") {
    assert(pat("_: (t Map u)").show[Syntax] === "_: (t Map u)")
  }
  test("constructors") {
    import scala.meta.internal.ast._
    val tree @ Defn.Class(_, _, _, primary, Template(_, _, _, Some(secondary :: Nil))) = templStatForceInfer("class C(x: Int) { def this() = this(42) }")
    assert(forceInferAll(tree).show[Syntax] === "class C(x: Int) { def this() = this(42) }")
    assert(primary.show[Syntax] === "(x: Int)")
    assert(forceInferAll(secondary).show[Syntax] === "def this() = this(42)")
    assert(tree.toString === "class C(x: Int) { def this() = this(42) }")
    assert(primary.toString === "def this(x: Int)")
    assert(secondary.toString === "def this() = this(42)")
  }
  // TODO: commenting lazy printing, it is not yet supported by InferToken
  /*test("lazy printing") {
    import scala.meta.internal.ast._
    val emptyCtor = Ctor.Primary(Nil, Ctor.Name("this"), Nil)
    val lazyStats = templStatForceInfer("class C") #:: ??? #:: Stream.empty
    val lazyTemplate = Template(Nil, Nil, Term.Param(Nil, Name.Anonymous(), None, None), Some(lazyStats))
    val tree1 = Defn.Class(Nil, Type.Name("test"), Nil, emptyCtor, lazyTemplate)
    assert(tree1.toString === "class test { ... }")
    val tree2 = Defn.Trait(Nil, Type.Name("test"), Nil, emptyCtor, lazyTemplate)
    assert(tree2.toString === "trait test { ... }")
    val tree3 = Defn.Object(Nil, Term.Name("test"), emptyCtor, lazyTemplate)
    assert(tree3.toString === "object test { ... }")
    val tree4 = Pkg(Term.Name("test"), lazyStats)
    assert(tree4.toString === "package test { ... }")
    val tree5 = Pkg.Object(Nil, Term.Name("test"), emptyCtor, lazyTemplate)
    assert(tree5.toString === "package object test { ... }")
  }*/
  // Single-statement cases print on one line; multi-statement bodies get indentation.
  test("smart case printing - oneliner in one line") {
    import scala.meta.internal.ast._
    val Term.Match(_, case1 :: Nil) = templStatForceInfer("??? match { case x => x }")
    assert(case1.toString === "case x => x")
  }
  test("smart case printing - oneliner in multiple lines") {
    import scala.meta.internal.ast._
    val Term.Match(_, case1 :: case2 :: Nil) = templStatForceInfer("??? match { case x => x; case List(x, y) => println(x); println(y) }")
    assert(case1.toString === """
      |case x =>
      |  x
    """.trim.stripMargin)
    assert(case2.toString === """
      |case List(x, y) =>
      |  println(x)
      |  println(y)
    """.trim.stripMargin)
  }
  // XML literals are desugared to the xml"" string interpolator.
  test("xml literals") {
    val tree = term("<foo>{bar}</foo>")
    assert(tree.show[Structure] === """Term.Interpolate(Term.Name("xml"), List(Lit.String("<foo>{bar}</foo>")), Nil)""")
    assert(forceInferAll(tree).show[Syntax] === """ xml"<foo>{bar}</foo>" """.trim)
  }
} |
marcus-crane/junkcode | enspiral-dev-academy/JSListProject/js/script.js | <filename>enspiral-dev-academy/JSListProject/js/script.js<gh_stars>0
// Data model: each top-level key is a category name whose value is a list of
// item objects. Field names vary per category (Books have Author, Games have
// Publisher, and so on), so rendering enumerates each item's own keys.
var lists = {
    "Books": [{
        "Title": "The Lord of the Rings",
        "Author": "<NAME>"
    }, {
        "Title": "A Game of Thrones",
        "Author": "<NAME>"
    }],
    "Games": [{
        "Title": "Super Metroid",
        "Developer": "Nintendo, Intelligent Systems, Nintendo R&D 1",
        "Publisher": "Nintendo",
        "Release Date": "March 19th, 1994"
    }, {
        "Title": "Gravity Rush",
        "Developer": "Project Siren",
        "Publisher": "Sony Interactive Entertainment",
        "Release Date": "February 9th, 2012"
    }, {
        "Title": "Red Faction: Guerilla",
        "Developer": "Volition",
        "Publisher": "THQ",
        "Release Date": "June 2nd, 2009"
    }],
    "Movies": [{
        "Title": "Falling Down",
        "Duration": "113 minutes",
        "Release Year": "1993"
    }, {
        "Title": "Ghost in the Shell",
        "Duration": "82 minutes",
        "Release Year": "1995"
    }],
    "TVSeries": [{
        "Title": "Broadchurch",
        "Duration": "45 minutes",
        "Episodes": "16",
        "Year": "2016"
    }, {
        "Title": "Fargo",
        "Duration": "60 minutes",
        "Episodes": "20",
        "Year": "2016"
    }, {
        "Title": "Silicon Valley",
        "Duration": "30 minutes",
        "Episodes": "21",
        "Year": "2016"
    }]
}
// Aliases kept as globals because the inline onClick handlers below resolve
// showList (and it resolves modelKeys) from the global scope.
var model = lists;
var modelKeys = Object.keys(model);

// Renders one button per category into #button-holder.
function showButtons() {
    for (var i = 0; i < modelKeys.length; i++) {
        // BUG FIX: the category name must be quoted inside the inline handler.
        // The old markup rendered onClick=showList(Movies), which referenced an
        // undefined global variable "Movies" when clicked.
        $("#button-holder").append(`<button class="button-primary" onClick="showList('${modelKeys[i]}')">${modelKeys[i]}</button>`);
    }
}

// Renders every item of the given category (e.g. "Movies") into #wrapper.
function showList(param) {
    // Empty the container first so repeated clicks don't pile up appends.
    $("#wrapper").empty();
    // Category heading at the top of the page.
    $("#wrapper").prepend(`<section><h1>${param}</h1></section>`);
    $("#wrapper").append("<section><article>");
    // BUG FIX: iterate the items of the selected category, not the list of
    // category names. The old loop used modelKeys' indices (0..3) to index
    // lists[param], so categories with fewer items than there are categories
    // crashed on Object.keys(undefined).
    var items = lists[param] || [];
    for (var j = 0; j < items.length; j++) {
        var itemKeys = Object.keys(items[j]);
        for (var i = 0; i < itemKeys.length; i++) {
            // One "<field>: <value>" paragraph per attribute of the item.
            $("article").append(`<p><i>${itemKeys[i]}:</i>${items[j][itemKeys[i]]}</p>`);
        }
        // Horizontal rule between items.
        $("article").append("<hr>");
    }
    // NOTE(review): jQuery parses this as standalone elements rather than
    // closing the section opened above; kept for parity with the original.
    $("#wrapper").append("</article></section>");
}

showButtons()
MewX/contendo-viewer-v1.6.3 | jp/cssj/sakae/pdf/g/b/b.java | /* */ package jp.cssj.sakae.pdf.g.b;
/* */
/* */ import java.io.FilterOutputStream;
/* */ import java.io.IOException;
/* */ import java.io.OutputStream;
/* */
/* */
/* */
/* */
/* */
/* */ public class b
/* */   implements e
/* */ {
/* */   // NOTE(review): decompiled code (JD-Core); obfuscated names. The key
/* */   // schedule and the swap/XOR keystream below match the RC4 (ARCFOUR)
/* */   // stream cipher as used for classic PDF encryption — presumably; confirm
/* */   // against the surrounding package before relying on this description.
/* */   // Field 'a' is the 256-entry key-scheduled S-box state.
/* 14 */   private final int[] a = new int[256];
/* */
/* */   // Encrypting OutputStream wrapper: clones the S-box so each stream has
/* */   // independent cipher state, then XORs every written byte.
/* */   class a
/* */     extends FilterOutputStream {
/* */     private final int[] b;
/* 19 */     private final int[] c = new int[] { 0, 0 };
/* */
/* */     public a(b this$0, OutputStream out) {
/* 22 */       super(out);
/* 23 */       this.b = (int[])b.a(this$0).clone();
/* */     }
/* */
/* */     // NOTE(review): b.a(...) calls here are decompiler renderings of
/* */     // synthetic accessors for the outer class's private members; this
/* */     // source will not recompile as-is.
/* */     public void write(int x) throws IOException {
/* 27 */       this.out.write(b.a(this.a, this.b, this.c, (byte)x));
/* */     }
/* */
/* */     public void write(byte[] bytes) throws IOException {
/* 31 */       for (int i = 0; i < bytes.length; i++) {
/* 32 */         write(bytes[i]);
/* */       }
/* */     }
/* */
/* */     public void write(byte[] bytes, int off, int len) throws IOException {
/* 37 */       for (int i = 0; i < len; i++) {
/* 38 */         write(bytes[i + off]);
/* */       }
/* */     }
/* */   }
/* */
/* */   // RC4 key schedule (KSA): initialise S-box to identity, then mix in the
/* */   // key bytes. 'len' is the key length in bytes, cycled via modulo.
/* */   // NOTE(review): the guard allows len == 0 although the message says
/* */   // "limited to 1 to 32"; a zero length would divide by zero at '% len'.
/* */   // Possibly a decompilation artifact — verify against the original.
/* */   public b(byte[] key, int len) {
/* 44 */     if (len < 0 || len > 32) {
/* 45 */       throw new IllegalArgumentException("The key length is limited to 1 to 32.");
/* */     }
/* 47 */     for (int i = 0; i < this.a.length; i++) {
/* 48 */       this.a[i] = i;
/* */     }
/* */
/* 51 */     int keyIndex = 0;
/* 52 */     int saltIndex = 0;
/* 53 */     for (int j = 0; j < this.a.length; j++) {
/* 54 */       byte x = key[keyIndex];
/* */       // (x < 0) ? 256 + x : x converts the signed byte to 0..255.
/* 55 */       saltIndex = (((x < 0) ? (256 + x) : x) + this.a[j] + saltIndex) % 256;
/* 56 */       a(this.a, j, saltIndex);
/* 57 */       keyIndex = (keyIndex + 1) % len;
/* */     }
/* */   }
/* */
/* */   // In-place encrypt/decrypt of data[off..off+len) using a private copy of
/* */   // the S-box (RC4 is symmetric, so the same call decrypts).
/* */   public final void b(byte[] data, int off, int len) {
/* 62 */     int[] salt = (int[])this.a.clone();
/* 63 */     int[] bc = { 0, 0 };
/* 64 */     for (int i = 0; i < len; i++) {
/* 65 */       data[i + off] = a(salt, bc, data[i + off]);
/* */     }
/* */   }
/* */
/* */   // Swap two S-box entries.
/* */   private final void a(int[] salt, int firstIndex, int secondIndex) {
/* 70 */     int tmp = salt[firstIndex];
/* 71 */     salt[firstIndex] = salt[secondIndex];
/* 72 */     salt[secondIndex] = tmp;
/* */   }
/* */
/* */   // RC4 PRGA step: advance counters bc[0]/bc[1], swap, and XOR the input
/* */   // byte with the keystream byte.
/* */   private byte a(int[] salt, int[] bc, byte x) {
/* 76 */     bc[0] = (bc[0] + 1) % 256;
/* 77 */     bc[1] = (salt[bc[0]] + bc[1]) % 256;
/* 78 */     a(salt, bc[0], bc[1]);
/* 79 */     int saltIndex = (salt[bc[0]] + salt[bc[1]]) % 256;
/* 80 */     return (byte)(x ^ (byte)salt[saltIndex]);
/* */   }
/* */
/* */   // Wraps 'out' in an encrypting stream (inner class 'a' above).
/* */   public OutputStream a(OutputStream out) {
/* 84 */     return new a(this, out);
/* */   }
/* */
/* */   // Encrypts the whole array in place and returns it.
/* */   public byte[] a(byte[] data) {
/* 88 */     b(data, 0, data.length);
/* 89 */     return data;
/* */   }
/* */
/* */   public boolean a() {
/* 93 */     return false;
/* */   }
/* */
/* */   // Not supported by this cipher implementation.
/* */   public byte[] a(byte[] data, int off, int len) {
/* 97 */     throw new UnsupportedOperationException();
/* */   }
/* */ }
/* Location: /mnt/r/ConTenDoViewer.jar!/jp/cssj/sakae/pdf/g/b/b.class
 * Java compiler version: 8 (52.0)
 * JD-Core Version: 1.1.3
 */
chrisguikema/seL4_projects_libs | libsel4arm-vmm/src/plat/exynos5/devices/misc_devices.c | /*
* Copyright 2017, Data61
* Commonwealth Scientific and Industrial Research Organisation (CSIRO)
* ABN 41 687 119 230.
*
* This software may be distributed and modified according to the terms of
* the BSD 2-Clause license. Note that NO WARRANTY is provided.
* See "LICENSE_BSD2.txt" for details.
*
* @TAG(DATA61_BSD)
*/
#include <sel4arm-vmm/plat/device_map.h>
#include <sel4arm-vmm/devices.h>
/*
 * Static descriptors for miscellaneous Exynos5 platform devices exposed to a
 * guest VM. Every entry maps one 4KiB (0x1000) register page.
 * NOTE(review): handle_page_fault is NULL for all of these — presumably the
 * vmm applies a default policy (e.g. 1:1 passthrough) to such devices; confirm
 * against the device-install code before changing.
 */
/* I2C controllers */
const struct device dev_i2c1 = {
    .devid = DEV_CUSTOM,
    .name = "i2c1",
    .pstart = I2C1_PADDR,
    .size = 0x1000,
    .handle_page_fault = NULL,
    .priv = NULL
};
const struct device dev_i2c2 = {
    .devid = DEV_CUSTOM,
    .name = "i2c2",
    .pstart = I2C2_PADDR,
    .size = 0x1000,
    .handle_page_fault = NULL,
    .priv = NULL
};
const struct device dev_i2c4 = {
    .devid = DEV_CUSTOM,
    .name = "i2c4",
    .pstart = I2C4_PADDR,
    .size = 0x1000,
    .handle_page_fault = NULL,
    .priv = NULL
};
const struct device dev_i2chdmi = {
    .devid = DEV_CUSTOM,
    .name = "i2c_hdmi",
    .pstart = I2C_HDMI_PADDR,
    .size = 0x1000,
    .handle_page_fault = NULL,
    .priv = NULL
};
/* USB 2.0 host controller register banks */
const struct device dev_usb2_ohci = {
    .devid = DEV_CUSTOM,
    .name = "usb2.OHCI",
    .pstart = USB2_HOST_OHCI_PADDR,
    .size = 0x1000,
    .handle_page_fault = NULL,
    .priv = NULL
};
const struct device dev_usb2_ehci = {
    .devid = DEV_CUSTOM,
    .name = "usb2.EHCI",
    .pstart = USB2_HOST_EHCI_PADDR,
    .size = 0x1000,
    .handle_page_fault = NULL,
    .priv = NULL
};
const struct device dev_usb2_ctrl = {
    .devid = DEV_CUSTOM,
    .name = "usb2.ctrl",
    .pstart = USB2_HOST_CTRL_PADDR,
    .size = 0x1000,
    .handle_page_fault = NULL,
    .priv = NULL
};
/* SoC identification registers */
const struct device dev_ps_chip_id = {
    .devid = DEV_CUSTOM,
    .name = "chipid",
    .pstart = CHIP_ID_PADDR,
    .size = 0x1000,
    .handle_page_fault = NULL,
    .priv = NULL
};
/* Video */
const struct device dev_ps_tx_mixer = {
    .devid = DEV_CUSTOM,
    .name = "tv_mixer",
    .pstart = TV_MIXER_PADDR,
    .size = 0x1000,
    .handle_page_fault = NULL,
    .priv = NULL
};
/* HDMI register banks 0-6, one descriptor per 4KiB page */
const struct device dev_ps_hdmi0 = {
    .devid = DEV_CUSTOM,
    .name = "hdmi0",
    .pstart = HDMI_0_PADDR,
    .size = 0x1000,
    .handle_page_fault = NULL,
    .priv = NULL
};
const struct device dev_ps_hdmi1 = {
    .devid = DEV_CUSTOM,
    .name = "hdmi1",
    .pstart = HDMI_1_PADDR,
    .size = 0x1000,
    .handle_page_fault = NULL,
    .priv = NULL
};
const struct device dev_ps_hdmi2 = {
    .devid = DEV_CUSTOM,
    .name = "hdmi2",
    .pstart = HDMI_2_PADDR,
    .size = 0x1000,
    .handle_page_fault = NULL,
    .priv = NULL
};
const struct device dev_ps_hdmi3 = {
    .devid = DEV_CUSTOM,
    .name = "hdmi3",
    .pstart = HDMI_3_PADDR,
    .size = 0x1000,
    .handle_page_fault = NULL,
    .priv = NULL
};
const struct device dev_ps_hdmi4 = {
    .devid = DEV_CUSTOM,
    .name = "hdmi4",
    .pstart = HDMI_4_PADDR,
    .size = 0x1000,
    .handle_page_fault = NULL,
    .priv = NULL
};
const struct device dev_ps_hdmi5 = {
    .devid = DEV_CUSTOM,
    .name = "hdmi5",
    .pstart = HDMI_5_PADDR,
    .size = 0x1000,
    .handle_page_fault = NULL,
    .priv = NULL
};
const struct device dev_ps_hdmi6 = {
    .devid = DEV_CUSTOM,
    .name = "hdmi6",
    .pstart = HDMI_6_PADDR,
    .size = 0x1000,
    .handle_page_fault = NULL,
    .priv = NULL
};
/* DMA */
const struct device dev_ps_mdma0 = {
    .devid = DEV_CUSTOM,
    .name = "MDMA0",
    .pstart = NS_MDMA0_PADDR,
    .size = 0x1000,
    .handle_page_fault = NULL,
    .priv = NULL
};
const struct device dev_ps_mdma1 = {
    .devid = DEV_CUSTOM,
    .name = "MDMA1",
    .pstart = NS_MDMA1_PADDR,
    .size = 0x1000,
    .handle_page_fault = NULL,
    .priv = NULL
};
const struct device dev_ps_pdma0 = {
    .devid = DEV_CUSTOM,
    .name = "PDMA0",
    .pstart = PDMA0_PADDR,
    .size = 0x1000,
    .handle_page_fault = NULL,
    .priv = NULL
};
const struct device dev_ps_pdma1 = {
    .devid = DEV_CUSTOM,
    .name = "PDMA1",
    .pstart = PDMA1_PADDR,
    .size = 0x1000,
    .handle_page_fault = NULL,
    .priv = NULL
};
/* Timers */
/* Watchdog is the only entry with a dedicated devid (DEV_WDT_TIMER). */
const struct device dev_wdt_timer = {
    .devid = DEV_WDT_TIMER,
    .name = "wdt",
    .pstart = WDT_PADDR,
    .size = 0x1000,
    .handle_page_fault = NULL,
    .priv = NULL
};
const struct device dev_ps_pwm_timer = {
    .devid = DEV_CUSTOM,
    .name = "pwm",
    .pstart = PWM_PADDR,
    .size = 0x1000,
    .handle_page_fault = NULL,
    .priv = NULL
};
|
onezens/QQTweak | qqtw/qqheaders7.2/QQVIPFunctionComicNavigationController.h | //
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by <NAME>.
//
#import "QQNavigationController.h"
#import "UIGestureRecognizerDelegate.h"
#import "UINavigationControllerDelegate.h"
@class NSString, QQVIPFunctionComicPopNavigationTransition, QQVIPFunctionComicPushNavigationTransition, UIPercentDrivenInteractiveTransition, UIScreenEdgePanGestureRecognizer;
// Class-dump-generated interface (do not edit by hand): a QQNavigationController
// subclass that installs a custom push/pop transition pair driven by an
// interactive screen-edge pan gesture for the VIP comic feature.
@interface QQVIPFunctionComicNavigationController : QQNavigationController <UINavigationControllerDelegate, UIGestureRecognizerDelegate>
{
    // Drives the interactive pop while the edge-pan gesture is in progress.
    UIPercentDrivenInteractiveTransition *_customInteractiveTransition;
    QQVIPFunctionComicPopNavigationTransition *_customPopNavigationTransition;
    QQVIPFunctionComicPushNavigationTransition *_customPushNavigationTransition;
    UIScreenEdgePanGestureRecognizer *_customEdgePanGesture;
    _Bool _isInteractive;
    id <QQVIPFunctionComicNavigationControllerDelegate> _comicNavigationDelegate;
}

- (void).cxx_destruct;
@property(nonatomic) __weak id <QQVIPFunctionComicNavigationControllerDelegate> comicNavigationDelegate; // @synthesize comicNavigationDelegate=_comicNavigationDelegate;
// UIGestureRecognizerDelegate
- (_Bool)gestureRecognizer:(id)arg1 shouldBeRequiredToFailByGestureRecognizer:(id)arg2;
- (_Bool)gestureRecognizer:(id)arg1 shouldReceiveTouch:(id)arg2;
- (_Bool)gestureRecognizer:(id)arg1 shouldRecognizeSimultaneouslyWithGestureRecognizer:(id)arg2;
- (void)handleScreenEdgePanGesture:(id)arg1;
// UINavigationControllerDelegate — vends the custom animation/interaction controllers.
- (id)navigationController:(id)arg1 animationControllerForOperation:(long long)arg2 fromViewController:(id)arg3 toViewController:(id)arg4;
- (void)navigationController:(id)arg1 didShowViewController:(id)arg2 animated:(_Bool)arg3;
- (id)navigationController:(id)arg1 interactionControllerForAnimationController:(id)arg2;
// Navigation-stack overrides.
- (id)popToRootViewControllerAnimated:(_Bool)arg1;
- (id)popViewControllerAnimated:(_Bool)arg1;
- (void)pushViewController:(id)arg1 animated:(_Bool)arg2;
- (void)setViewControllers:(id)arg1 animated:(_Bool)arg2;
- (id)tabBarController;
- (void)viewDidLoad;
- (void)willPopToRootViewController:(id)arg1;
- (void)willPopViewController:(id)arg1;
- (void)willPopViewControllers:(id)arg1;

// Remaining properties
@property(readonly, copy) NSString *debugDescription;
@property(readonly, copy) NSString *description;
@property(readonly) unsigned long long hash;
@property(readonly) Class superclass;
@end
|
menty44/tutorials | orika/src/main/java/com/baeldung/orika/Personne.java | <filename>orika/src/main/java/com/baeldung/orika/Personne.java
package com.baeldung.orika;
/**
 * Simple mutable bean describing a person, used in the Orika mapping examples.
 * French field names: {@code nom} (name), {@code surnom} (nickname),
 * {@code age} (age in years).
 */
public class Personne {

    private String nom;
    private String surnom;
    private int age;

    /**
     * Creates a fully populated person.
     *
     * @param nom    the person's name
     * @param surnom the person's nickname
     * @param age    the person's age in years
     */
    public Personne(String nom, String surnom, int age) {
        this.nom = nom;
        this.surnom = surnom;
        this.age = age;
    }

    public String getNom() {
        return nom;
    }

    public void setNom(String nom) {
        this.nom = nom;
    }

    public String getSurnom() {
        return surnom;
    }

    public void setSurnom(String surnom) {
        this.surnom = surnom;
    }

    public int getAge() {
        return age;
    }

    public void setAge(int age) {
        this.age = age;
    }

    /** Returns a debug representation, e.g. {@code Personne [nom=x, surnom=y, age=1]}. */
    @Override
    public String toString() {
        return String.format("Personne [nom=%s, surnom=%s, age=%d]", nom, surnom, age);
    }
}
|
i-vyatkin/snappykit.constructor | src/components/Modal.js | import classNames from 'classnames';
import { get } from 'lodash';
import PropTypes from 'prop-types';
import * as React from 'react';
import { Portal } from 'react-portal';
import { connect } from 'react-redux';
import { CSSTransition } from 'react-transition-group';
import { compose, withHandlers } from 'recompose';
// Actions
import { closeModal } from 'services/modals';
import styles from './Modal.scss';
// Flow props type for the Modal component.
// NOTE(review): the name reads like a typo for "ModalType", and
// React.Element<node> looks like it was meant to be React.Node — confirm
// against the project's Flow configuration before renaming.
type ModelType = {
  children: React.Element<node>,
  className: string,
  classNames: {
    root: string,
    backdrop: string,
    container: string
  },
  handleClose: Function,
  isOpened: boolean
};

// Portal-rendered modal with a CSS enter/exit transition.
// `children` may be a render-prop function, which receives the remaining props
// plus an `isEntered` flag once the enter animation has finished.
const Modal = ({
  children,
  className,
  classNames: {
    root: rootClassName,
    backdrop: backdropClassName,
    container: containerClassName
  } = {},
  handleClose,
  isOpened,
  ...props
}: ModelType): React.Element<typeof CSSTransition> => {
  const rootClassNames: string = classNames(
    className,
    rootClassName,
    styles.Root
  );
  const backdropClassNames: string = classNames(
    backdropClassName,
    styles.Backdrop
  );
  const containerClassNames: string = classNames(
    containerClassName,
    styles.Container
  );

  return (
    <CSSTransition
      classNames={{
        enter: styles.RootAnimateEnter,
        enterActive: styles.RootAnimateEnterActive,
        enterDone: styles.RootAnimateEnterDone,
        exit: styles.RootAnimateExit,
        exitActive: styles.RootAnimateExitActive
      }}
      in={isOpened}
      timeout={200}
      unmountOnExit
    >
      {/* Clicking the backdrop (not the container) dismisses the modal. */}
      {(state: string): func => (
        <Portal>
          <div className={rootClassNames}>
            <div className={backdropClassNames} onClick={handleClose} />
            <div className={containerClassNames}>
              {typeof children === 'function'
                ? children({ ...props, isEntered: state === 'entered' })
                : children}
            </div>
          </div>
        </Portal>
      )}
    </CSSTransition>
  );
};

Modal.propTypes = {
  children: PropTypes.oneOfType([PropTypes.func, PropTypes.node]),
  handleClose: PropTypes.func,
  isOpened: PropTypes.bool
};

// A modal is considered open when the modals service has an entry for its id,
// or when the caller forces it open via the isOpened prop.
const mapStateToProps = ({ services }, { id, isOpened }) => {
  const modal = get(services, `modals.${id}`);

  return {
    ...modal,
    isOpened: !!modal || isOpened
  };
};

// handleClose dispatches closeModal(id) and then invokes the optional onClose
// callback supplied by the parent.
export default compose(
  connect(mapStateToProps, { closeModal }),
  withHandlers({
    handleClose: ({ closeModal, id, onClose }): Function => () => {
      closeModal(id);
      onClose && onClose();
    }
  })
)(Modal);
|
BrandonJohnGrenier/pattern-tokamak | tokamak-authorization/src/main/java/fm/pattern/tokamak/authorization/OAuth2AuthorizationContext.java | <gh_stars>1-10
/*
* Copyright 2012-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fm.pattern.tokamak.authorization;
import static org.springframework.security.core.context.SecurityContextHolder.getContext;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import java.util.stream.Collectors;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.oauth2.provider.OAuth2Authentication;
/**
 * {@link AuthorizationContextProvider} backed by the Spring Security context.
 * All accessors read the current thread's {@code SecurityContextHolder} and
 * degrade gracefully (empty set / null / false) when no OAuth2 authentication
 * is present or when the token is client-only.
 */
public class OAuth2AuthorizationContext implements AuthorizationContextProvider {

    public OAuth2AuthorizationContext() {

    }

    /** @return true when an OAuth2 authentication is present and authenticated. */
    public boolean isAuthenticated() {
        OAuth2Authentication oauth = oauth2Authentication();
        return oauth != null && oauth.isAuthenticated();
    }

    /** @return the granted OAuth2 scopes, or an empty set when unavailable. */
    public Set<String> getScopes() {
        OAuth2Authentication oauth = oauth2Authentication();
        if (oauth == null) {
            return new HashSet<>();
        }
        Set<String> scope = oauth.getOAuth2Request().getScope();
        return scope == null ? new HashSet<>() : scope;
    }

    /** @return the names of the token's granted authorities, or an empty set. */
    public Set<String> getAuthorities() {
        OAuth2Authentication oauth = oauth2Authentication();
        return oauth == null ? new HashSet<>() : toAuthorityNames(oauth.getAuthorities());
    }

    /**
     * @return the authenticated user's role names; empty when there is no
     *         authentication or the token is client-only (no user attached).
     */
    public Set<String> getRoles() {
        OAuth2Authentication oauth = oauth2Authentication();
        if (oauth == null || oauth.isClientOnly()) {
            return new HashSet<>();
        }
        return toAuthorityNames(oauth.getUserAuthentication().getAuthorities());
    }

    /**
     * @return the authenticated user's name, or null when there is no
     *         authentication or the token is client-only.
     */
    public String getUsername() {
        OAuth2Authentication oauth = oauth2Authentication();
        if (oauth == null || oauth.isClientOnly()) {
            return null;
        }
        return (String) oauth.getUserAuthentication().getPrincipal();
    }

    /** Maps granted authorities to their string names; null-safe. */
    private static Set<String> toAuthorityNames(Collection<? extends GrantedAuthority> authorities) {
        if (authorities == null) {
            return new HashSet<>();
        }
        return authorities.stream().map(GrantedAuthority::getAuthority).collect(Collectors.toSet());
    }

    /** @return the current OAuth2 authentication, or null for any other kind. */
    private static OAuth2Authentication oauth2Authentication() {
        Authentication authentication = getContext().getAuthentication();
        return authentication instanceof OAuth2Authentication ? (OAuth2Authentication) authentication : null;
    }

}
|
Sweetist/spree_ams | spec/dummy/db/migrate/20171207164342_add_execution_backtrace_to_spree_integration_actions.rb | class AddExecutionBacktraceToSpreeIntegrationActions < ActiveRecord::Migration
def change
add_column :spree_integration_actions, :execution_backtrace, :text
end
end
|
Syrupz-UO/VERDICT | tools/verdict-back-ends/verdict-bundle/verdict-attack-defense-collector/src/main/java/com/ge/verdict/attackdefensecollector/Triple.java | <filename>tools/verdict-back-ends/verdict-bundle/verdict-attack-defense-collector/src/main/java/com/ge/verdict/attackdefensecollector/Triple.java
package com.ge.verdict.attackdefensecollector;
import java.util.Objects;
/**
* A generic triple. Correctly implements equals() and hashCode(), so may be used as a key to a hash
* map. Note that the L, M and, R types must themselves correctly implement equals() and hashCode()
* for this to work correctly.
*
* @param <L> type of left value
* @param <M> type of middle value
* @param <R> type of right value
*/
public class Triple<L, M, R> {
public final L left;
public final M middle;
public final R right;
public Triple(L left, M middle, R right) {
this.left = left;
this.middle = middle;
this.right = right;
}
@Override
public boolean equals(Object other) {
if (other instanceof Pair<?, ?>) {
Triple<?, ?, ?> otherTriple = (Triple<?, ?, ?>) other;
return left.equals(otherTriple.left)
&& middle.equals(otherTriple.middle)
&& right.equals(otherTriple.right);
}
return false;
}
@Override
public int hashCode() {
return Objects.hash(left, middle, right);
}
}
|
GKotsovos/WebBanking-Front-End | src/routes/root/routes/Banking/routes/Orders/routes/NewTransferOrder/routes/NewTransferOrderForm/components/SelectPeriodicity/index.js | import SelectPeriodicity from './SelectPeriodicity'
export default SelectPeriodicity
|
mallikarjuna54/rdk-xconfserver | dataaccess-core/src/main/java/com/comcast/hesperius/dataaccess/core/dao/RowMappingPartialDAO.java | /*
* If not stated otherwise in this file or this component's Licenses.txt file the
* following copyright and licenses apply:
*
* Copyright 2018 RDK Management
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comcast.hesperius.dataaccess.core.dao;
import com.comcast.hesperius.data.annotation.CF;
import com.comcast.hesperius.dataaccess.core.ValidationException;
import com.comcast.hesperius.dataaccess.core.bindery.BindingFacility;
import com.comcast.hesperius.dataaccess.core.dao.mapper.CommonRowMapper;
import com.comcast.hesperius.dataaccess.core.dao.mapper.ISimpleMapper;
import com.comcast.hesperius.dataaccess.core.dao.util.CFPersistenceDefinition;
import com.comcast.hesperius.dataaccess.core.dao.util.ExecuteWithUncheckedException;
import com.comcast.hesperius.dataaccess.core.util.CoreUtil;
import com.comcast.hesperius.dataaccess.core.util.EntityValidationUtils;
import com.comcast.hesperius.dataaccess.core.util.bean.BeanUtils;
import com.comcast.hydra.astyanax.data.IPersistable;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.netflix.astyanax.Execution;
import com.netflix.astyanax.Serializer;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.model.Row;
import com.netflix.astyanax.model.Rows;
import com.netflix.astyanax.recipes.reader.AllRowsReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import java.util.concurrent.*;
/**
* base ADS dao
*
* @author PBura
*/
public class RowMappingPartialDAO<K, T extends IPersistable> extends SimpleDAO<K, T> implements IADSSimpleDAO<K, T> {
protected static final Logger log = LoggerFactory.getLogger(RowMappingPartialDAO.class);
    /**
     * Creates a DAO implementation for a value type whose column-family mapping
     * is declared via its {@link CF} annotation.
     */
    public static final <K, T extends IPersistable> IADSSimpleDAO<K, T> createImpl(Class<T> valueType) {
        return createImpl(CFPersistenceDefinition.fromAnnotation(valueType.getAnnotation(CF.class)), valueType);
    }

    /**
     * Creates a DAO implementation from an explicit persistence definition;
     * the key type and serializer are derived from {@code cfDef.keyType}.
     */
    public static final <K, T extends IPersistable> IADSSimpleDAO<K, T> createImpl(final CFPersistenceDefinition cfDef, Class<T> valueType) {
        return new RowMappingPartialDAO(cfDef, cfDef.keyType, BeanUtils.getSerializer(cfDef.keyType), valueType, null, false);
    }
    /**
     * Fluent builder for {@link RowMappingPartialDAO}. Either {@code cfDef} or
     * {@code keyType} must be supplied before {@link #build()}; whichever is
     * missing is derived from the entity type's {@link CF} annotation or the
     * supplied persistence definition.
     */
    public static class Builder<K, T extends IPersistable> implements IADSSimpleDAO.Builder<K, T>{
        private CFPersistenceDefinition cfDef;
        private Class<K> keyType;
        private Class<T> entityType;
        private ISimpleMapper<String, T> mapper;
        // When true, the bindery is told not to bind the entity to itself.
        private boolean avoidSelfBinding = false;

        public Builder setCfDef(CFPersistenceDefinition cfDef) {
            this.cfDef = cfDef;
            return this;
        }

        public Builder setKeyType(Class<K> keyType) {
            this.keyType = keyType;
            return this;
        }

        public Builder setEntityType(Class<T> entityType) {
            this.entityType = entityType;
            return this;
        }

        // Optional: when absent, build() falls back to a CommonRowMapper.
        public Builder setMapper(ISimpleMapper<String, T> mapper) {
            this.mapper = mapper;
            return this;
        }

        public Builder setAvoidSelfBinding(boolean avoidSelfBinding) {
            this.avoidSelfBinding = avoidSelfBinding;
            return this;
        }

        /**
         * Validates the configuration and constructs the DAO.
         * @throws IllegalArgumentException if entityType is null
         * @throws IllegalStateException if both cfDef and keyType are null
         */
        public RowMappingPartialDAO<K, T> build() {
            Preconditions.checkArgument(entityType != null, "null inapplicable as entityType");
            Preconditions.checkState(cfDef != null || keyType != null, "either cfDef or keyClass must be declared not null");
            // NOTE(review): when keyType is null the second/third arguments fall
            // back to cfDef.keyType; when cfDef is null they use keyType — the
            // ternaries below encode both fallbacks.
            return new RowMappingPartialDAO(
                    Optional.fromNullable(cfDef).or(CFPersistenceDefinition.fromAnnotation(entityType.getAnnotation(CF.class))),
                    Optional.fromNullable(keyType).or((Class<K>) (cfDef!=null?cfDef.keyType:keyType)),
                    BeanUtils.getSerializer(Optional.fromNullable(keyType).or((Class<K>) (cfDef!=null?cfDef.keyType:keyType))),
                    entityType,
                    mapper, avoidSelfBinding);
        }
    }
    /**
     * Private constructor — use {@link #createImpl} or {@link Builder}.
     * Falls back to a {@link CommonRowMapper} when no custom mapper is given.
     */
    private RowMappingPartialDAO(final CFPersistenceDefinition cfDef, final Class<K> keyType, final Serializer<K> keySerializer, final Class<T> entityType, final ISimpleMapper<String, T> mapper, final boolean avoidSelfBinding) {
        super(cfDef.cfName, keySerializer, entityType, Optional.fromNullable(mapper).or(new CommonRowMapper(entityType, cfDef)));
        this.valueClass = entityType;
        this.keyClass = keyType;
        this.cfdef = cfDef;
        this.avoidSelfBinding = avoidSelfBinding;
    }

    private final Class<T> valueClass;
    private final Class<K> keyClass;
    // When true, bindery notifications skip binding the entity to itself.
    private boolean avoidSelfBinding;
    private final CFPersistenceDefinition cfdef;

    protected final CFPersistenceDefinition getCfdef() {
        return cfdef;
    }

    /**
     * Identity derived from column family name plus key and value class names;
     * stable across JVM restarts for the same configuration.
     */
    @Override
    public final int id() {
        return getColumnFamilyName().concat(keyClass.getCanonicalName()).concat(valueClass.getCanonicalName()).hashCode();
    }

    @Override
    public final Class<K> getKeyClass() {
        return keyClass;
    }

    @Override
    public final Class<T> getValueClass() {
        return valueClass;
    }

    @Override
    public void truncateColumnFamily() {
        super.truncateColumnFamily();
        //safe since we don't care
    }
    /**
     * Bulk read; returns an empty list (not null) for a null or empty key set.
     */
    @Override
    public List<T> getAll(Set<K> keys) {
        if (keys == null || keys.isEmpty()) {
            return new ArrayList<T>();
        }
        return super.getAll(keys);
    }

    /**
     * Bulk read keyed by row key. Keys whose row has no columns map to
     * {@code Optional.absent()}, so callers can distinguish "missing" from
     * "present" per key.
     * @throws IllegalArgumentException for a null or empty key set
     */
    @Override
    public Map<K, Optional<T>> getAllAsMap(final Set<K> keys) {
        if (keys == null || keys.isEmpty()) {
            throw new IllegalArgumentException("Invalid set of keys");
        }
        final Map<K, Optional<T>> results = Maps.newHashMap();
        final Rows<K, String> queryResult = ExecuteWithUncheckedException.execute(getKeyspace().prepareQuery(columnFamily).getRowSlice((Iterable<K>) keys));
        for (final Row<K, String> row : queryResult) {
            // An empty column list is how Cassandra reports a missing/deleted row.
            if (row.getColumns().isEmpty()) {
                results.put(row.getKey(), Optional.<T>absent());
                continue;
            }
            results.put(row.getKey(), Optional.of(mapper.mapFromColumnList(row.getColumns(), factory.newObject())));
        }
        return results;
    }
    /**
     * Please note: this method may return null even though it seems not to.
     * This is because Cassandra tends to return dead (deleted) data for
     * queries, so we have to deal with it. The iterator will iterate over the
     * list of non-dead records and may return null if and only if the
     * query result contains a ghost as the very last item.
     *
     * @return Iterator<T> interface to the whole columnFamily managed by
     * this DAO
     */
    @Override
    public Iterator<T> getIteratedAll() {
        try {
            // Eagerly executes the full-CF query; empty (tombstoned) rows are
            // filtered out server-side via setIncludeEmptyRows(FALSE).
            return new Iterator<T>() {
                final Iterator<Row<K, String>> rowiterator = getKeyspace().prepareQuery(columnFamily)
                        .getAllRows()
                        .setIncludeEmptyRows(Boolean.FALSE)
                        .execute()
                        .getResult()
                        .iterator();

                @Override
                public boolean hasNext() {
                    return rowiterator.hasNext();
                }

                @Override
                public T next() {
                    T res = factory.newObject();
                    mapper.mapFromColumnList(rowiterator.next().getColumns(), res);
                    return res;
                }

                @Override
                public void remove() {
                    throw new UnsupportedOperationException("remove not supported here");
                }
            };
        } catch (ConnectionException ce) {
            log.error("Exception caught while attempting to scan entire CF for {}", getColumnFamilyName());
            throw new RuntimeException(ce);
        }
    }
    /**
     * Single non-cached method of this DAO, since record order differs
     * between Cassandra and the cache.
     *
     * @param pageStart page starting key; null starts from the beginning
     * @param pageSize size of page in records
     * @param reversed if true the query should be reversed
     *                 (NOTE(review): passed through to getRowsAsMap, which
     *                 currently ignores it — confirm intended behavior)
     * @return page contents as a list, in the order Cassandra returned the rows
     */
    @Override
    public List<T> getPage(K pageStart, final Integer pageSize, boolean reversed) {
        return Lists.newArrayList(getRowsAsMap(pageStart, pageSize, reversed).values());
    }
    /**
     * Fetches up to {@code size} rows as an insertion-ordered map.
     * A null {@code from} scans from the beginning of the column family;
     * otherwise a key-range query starting at {@code from} is issued.
     *
     * NOTE(review): the {@code reversed} parameter is never read in this
     * body — reversed paging appears unimplemented; confirm with callers.
     *
     * @param from starting row key, or null for the beginning
     * @param size maximum number of rows to return
     * @param reversed currently ignored (see note above)
     * @return LinkedHashMap of row key to mapped entity, at most {@code size} entries
     */
    @Override
    public Map<K, T> getRowsAsMap(final K from, final int size, final boolean reversed) {
        final Map<K, T> result = new LinkedHashMap<K, T>();
        final Execution<Rows<K, String>> query = (from == null) ?
                getKeyspace().prepareQuery(columnFamily).getAllRows().setIncludeEmptyRows(false).setRowLimit(size) :
                getKeyspace().prepareQuery(columnFamily).getKeyRange(from, null, null, null, size);
        final Rows<K, String> rows = ExecuteWithUncheckedException.execute(query);
        final Iterator<Row<K, String>> rowIterator = rows.iterator();
        // Stop early once the page is full; the query may return more rows.
        while (rowIterator.hasNext() && result.size() < size) {
            final Row<K, String> row = rowIterator.next();
            result.put(row.getKey(), mapper.mapFromColumnList(row.getColumns(), factory.newObject()));
        }
        return result;
    }
    /**
     * Fetches up to {@code maxResults} records, starting from the beginning
     * of the column family, in non-reversed order.
     */
    @Override
    public List<T> getAll(int maxResults) {
        return getPage(null, maxResults, false);
    }
    /**
     * Validates and stores the object under the given row key.
     * With PER_FIELD marshaling the existing row is deleted first —
     * presumably so columns for fields no longer present on the object
     * do not survive as stale data (TODO confirm against CF semantics).
     *
     * @throws ValidationException if the object fails save-time validation
     */
    @Override
    public T setOne(final K rowKey, final T obj) throws ValidationException {
        EntityValidationUtils.validateForSave(obj);
        if (cfdef.marshalingPolicy == CF.MarshalingPolicy.PER_FIELD) {
            super.deleteOne(rowKey);
        }
        return internalSetOne(rowKey, obj);
    }
    /**
     * Stores the object via the parent DAO, then fires the entity-created
     * binding hook with the stored object.
     */
    protected final T internalSetOne(final K rowKey, final T obj) throws ValidationException {
        T result = super.setOne(rowKey, obj);
        BindingFacility.entityCreated(rowKey, obj, avoidSelfBinding);
        return result;
    }
@Override
public void deleteOne(final K rowKey) {
final T boundObject = getOne(rowKey);
if (boundObject == null) {
log.debug("can not process boundEntityDeleted binding for key:".concat(rowKey.toString()));
return;
}
BindingFacility.entityDeleted(rowKey, boundObject, avoidSelfBinding);
super.deleteOne(rowKey);
}
    // Shared daemon thread pool used by getKeys() for the parallel all-rows
    // scan; daemon threads so it never blocks JVM shutdown.
    private static final ExecutorService keyfetcher = Executors.newFixedThreadPool(CoreUtil.getThreadsAvailable(),
            new ThreadFactoryBuilder().setDaemon(true)
                    .setNameFormat("RMPD-keyfetcher-%d")
                    .build());
@Override
public Iterable<K> getKeys() {
try {
final List<K> res = Lists.newLinkedList();
final List<K> syncList = Collections.synchronizedList(res);
new AllRowsReader.Builder<K, String>(getKeyspace(), columnFamily)
.withColumnRange(null, null, false, 0)
.withExecutor(keyfetcher)
.forEachRow(new Function<Row<K, String>, Boolean>() {
@Override
public Boolean apply(Row<K, String> input) {
syncList.add(input.getKey());
return true;
}
})
.build()
.call();
return res;
} catch (Exception e) {
log.error("{} exception thrown while trying to iterate over keys",getColumnFamilyName(),e);
}
return Collections.EMPTY_LIST;
}
}
|
SitdikovRustam/CatBoost | util/string/util.h | <reponame>SitdikovRustam/CatBoost
#pragma once
//THIS FILE IS A COMPAT STUB HEADER
#include <cstring>
#include <cstdarg>
#include <algorithm>
#include <util/system/defaults.h>
#include <util/generic/string.h>
#include <util/generic/strbuf.h>
/// @addtogroup Strings_Miscellaneous
/// @{
int a2i(const TString& s);
/// Removes the last character of the string if it is equal to c.
template <class T>
inline void RemoveIfLast(T& s, int c) {
    const size_t length = s.length();
    if (length && s[length - 1] == c)
        s.remove(length - 1);
}
/// Appends the lastCh symbol to the end of the string if it is not already there.
inline void addIfNotLast(TString& s, int lastCh) {
    size_t len = s.length();
    if (!len || s[len - 1] != lastCh) {
        s.append(char(lastCh));
    }
}
/// @details Finishes the string with lastCh1 if lastCh2 is not present in the string
/// and lastCh1 is not already at the end of the string.
/// Else, if the first occurrence of lastCh2 is not the final character, it finishes
/// the string with lastCh2.
/// @todo ?? Define, when to apply the function. Is in use several times for URLs parsing.
inline void addIfAbsent(TString& s, char lastCh1, char lastCh2) {
    size_t pos = s.find(lastCh2);
    if (pos == TString::npos) {
        // lastCh2 never occurs: terminate with lastCh1 instead.
        addIfNotLast(s, lastCh1);
    } else if (pos < s.length() - 1) {
        // lastCh2 occurs but not as the last character: terminate with it.
        addIfNotLast(s, lastCh2);
    }
}
/// @}
/*
* ------------------------------------------------------------------
*
* A fast implementation of glibc's functions;
* strspn, strcspn and strpbrk.
*
* ------------------------------------------------------------------
*/
// Pair of 256-entry byte-membership tables shared by str_spn.
// The second table is the complement used by the reverse (c*) lookups —
// presumably marking bytes outside the charset; confirm against init().
struct ui8_256 {
    // forward chars table
    ui8 chars_table[256];
    // reverse (for c* functions) chars table
    ui8 c_chars_table[256];
};
// Fast replacement for glibc's strspn(), strcspn() and strpbrk(),
// driven by the two precomputed 256-entry tables inherited from ui8_256.
class str_spn: public ui8_256 {
public:
    explicit str_spn(const char* charset, bool extended = false) {
        // extended: if true, treat charset string more like
        // interior of brackets [ ], e.g. "a-z0-9"
        init(charset, extended);
    }

    /// Return first character in table, like strpbrk()
    /// That is, skip all characters not in table
    /// [DIFFERENCE FOR NOT_FOUND CASE: Returns end of string, not NULL]
    const char* brk(const char* s) const {
        while (c_chars_table[(ui8)*s])
            ++s;
        return s;
    }

    /// Bounded variant: never advances past e.
    const char* brk(const char* s, const char* e) const {
        while (s < e && c_chars_table[(ui8)*s])
            ++s;
        return s;
    }

    /// Return first character not in table, like strpbrk() for inverted table.
    /// That is, skip all characters in table
    const char* cbrk(const char* s) const {
        while (chars_table[(ui8)*s])
            ++s;
        return s;
    }

    /// Bounded variant: never advances past e.
    const char* cbrk(const char* s, const char* e) const {
        while (s < e && chars_table[(ui8)*s])
            ++s;
        return s;
    }

    /// Offset of the first character not in table, like strspn().
    size_t spn(const char* s) const {
        return cbrk(s) - s;
    }

    size_t spn(const char* s, const char* e) const {
        return cbrk(s, e) - s;
    }

    /// Offset of the first character in table, like strcspn().
    size_t cspn(const char* s) const {
        return brk(s) - s;
    }

    size_t cspn(const char* s, const char* e) const {
        return brk(s, e) - s;
    }

    /// Mutable-pointer overloads of the scanners above.
    char* brk(char* s) const {
        return const_cast<char*>(brk((const char*)s));
    }

    char* cbrk(char* s) const {
        return const_cast<char*>(cbrk((const char*)s));
    }

    /// See strsep [BUT argument is *&, not **]
    char* sep(char*& s) const {
        char sep_char; // unused;
        return sep(s, sep_char);
    }

    /// strsep + remember character that was destroyed.
    /// On the last token, sep_char is set to 0 and s becomes nullptr.
    char* sep(char*& s, char& sep_char) const {
        if (!s)
            return nullptr;
        char* ret = s;
        char* next = brk(ret);
        if (*next) {
            sep_char = *next;
            *next = 0;
            s = next + 1;
        } else {
            sep_char = 0;
            s = nullptr;
        }
        return ret;
    }

protected:
    // Fills chars_table / c_chars_table; defined out of line.
    void init(const char* charset, bool extended);

    str_spn() {
    }
};
// An analogue of tr/$from/$to/: byte-wise character translation
// through a precomputed 256-entry map.
class Tr {
public:
    Tr(const char* from, const char* to);

    /// Translates a single character through the map.
    char ConvertChar(char ch) const {
        return Map[(ui8)ch];
    }

    /// Translates a NUL-terminated string in place.
    void Do(char* s) const {
        for (; *s; s++)
            *s = ConvertChar(*s);
    }

    /// Translates src into dst, NUL-terminating dst.
    /// dst must have room for strlen(src) + 1 bytes — TODO confirm callers.
    void Do(const char* src, char* dst) const {
        for (; *src; src++)
            *dst++ = ConvertChar(*src);
        *dst = 0;
    }

    /// Translates at most l characters in place, stopping early at a NUL.
    void Do(char* s, size_t l) const {
        for (size_t i = 0; i < l && s[i]; i++)
            s[i] = ConvertChar(s[i]);
    }

    void Do(TString& str) const;

private:
    char Map[256];

    size_t FindFirstChangePosition(const TString& str) const;
};
// Removes all occurrences of the given character from the string
// using the erase-remove idiom.
template <typename TStroka>
void RemoveAll(TStroka& str, typename TStroka::char_type ch) {
    size_t pos = str.find(ch); // 'find' to avoid cloning of string in 'TString.begin()'
    if (pos == TStroka::npos)
        return;

    // Compact everything from the first occurrence onward, then chop off
    // the tail left behind by std::remove.
    typename TStroka::iterator begin = str.begin();
    typename TStroka::iterator end = begin + str.length();
    typename TStroka::iterator it = std::remove(begin + pos, end, ch);
    str.erase(it, end);
}
|
Kiddinglife/J2EE-DEMO | OnlineShop/Assign2WAR/WEB-INF/classes/uts/edu/controller/AbstractController.java | package uts.edu.controller;
import java.util.Arrays;
import java.util.List;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import uts.edu.web.RequestMethod;
import com.bea.common.security.xacml.IOException;
/**
* @author Administrator
*
*/
/**
 * Skeleton {@link Controller} implementation that restricts which HTTP
 * request methods a concrete controller is willing to handle.
 */
public abstract class AbstractController implements Controller
{
    /** HTTP methods accepted by this controller; fixed at construction. */
    private final List<RequestMethod> allowedMethodList;

    public AbstractController(final RequestMethod... allowedMethods)
    {
        // Fixed-size list view over the varargs array; never mutated afterwards.
        this.allowedMethodList = Arrays.asList(allowedMethods);
    }

    /**
     * Reports whether the incoming request uses one of the allowed HTTP
     * methods, comparing case-insensitively against the enum constant names.
     */
    public boolean isAllowedMethod(HttpServletRequest request)
    {
        final String requestMethod = request.getMethod();
        boolean allowed = false;
        for (final RequestMethod candidate : allowedMethodList)
        {
            if (candidate.name().equalsIgnoreCase(requestMethod))
            {
                allowed = true;
                break;
            }
        }
        return allowed;
    }

    /**
     * Performs the controller's business logic and then forwards to the
     * view layer.
     */
    public abstract void process(HttpServletRequest request,
            HttpServletResponse response) throws ServletException,
            IOException, java.io.IOException;
}
|
utfpr-gp-tsi/SGE | lib/populate/users.rb | require './lib/faker/cpf'
module Populate
  # Seeds the users table with 75 randomly generated accounts.
  class Users
    def initialize
      # Base registration number; incremented by the loop index per user.
      @rn = 1_234_567
      # Pool used to pick random boolean flags.
      @bol = [true, false]
    end

    # Entry point: creates the whole batch of users.
    def populate
      create_users
    end

    private

    # Builds the users with fake personal data and pseudo-unique
    # registration numbers (base plus cumulative index).
    def create_users
      (0...75).each do |index|
        @rn += index
        User.create!(
          name: Faker::Name.name,
          username: Faker::Internet.unique.username,
          alternative_email: Faker::Internet.email,
          registration_number: @rn,
          cpf: Faker::CPF.numeric,
          admin: @bol.sample,
          active: @bol.sample,
          password: '<PASSWORD>',
          support: false
        )
      end
    end
  end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.