repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
AlexandreCrettet/john | core/src/com/eogames/john/ecs/systems/ActionSystem.java | <reponame>AlexandreCrettet/john
package com.eogames.john.ecs.systems;
import com.badlogic.ashley.core.ComponentMapper;
import com.badlogic.ashley.core.Entity;
import com.badlogic.ashley.core.Family;
import com.badlogic.ashley.systems.IteratingSystem;
import com.eogames.john.ecs.components.ActionComponent;
import com.eogames.john.ecs.components.StateComponent;
import com.eogames.john.ecs.components.VelocityComponent;
import com.eogames.john.level.UiStage;
import com.eogames.john.utils.StateUtils;
public class ActionSystem extends IteratingSystem {
  private ComponentMapper<VelocityComponent> velocityMapper =
      ComponentMapper.getFor(VelocityComponent.class);
  // Kept for parity with the original; not referenced in this class.
  private ComponentMapper<StateComponent> stateMapper =
      ComponentMapper.getFor(StateComponent.class);
  private UiStage uiStage;

  /** Translates the on-screen UI buttons into velocity changes for every
   *  entity carrying an ActionComponent. */
  public ActionSystem(UiStage uiStage) {
    super(Family.all(ActionComponent.class).get());
    this.uiStage = uiStage;
  }

  @Override
  public void processEntity(Entity entity, float deltaTime) {
    VelocityComponent velocity = velocityMapper.get(entity);
    boolean boosting = uiStage.getBButton().isPressed();

    if (uiStage.getRightButton().isPressed()) {
      steerRight(velocity, boosting);
    }
    else if (uiStage.getLeftButton().isPressed()) {
      // Full speed to the left, doubled while the boost (B) button is held.
      velocity.x = boosting ? -velocity.maxX * 2 : -velocity.maxX;
    }
    else {
      decelerate(velocity);
    }

    // Jump is only allowed while vertical velocity equals gravity —
    // this appears to be the "grounded" test; confirm against the
    // physics system that applies gravity.
    if (velocity.y == velocity.gravity && uiStage.getAButton().isPressed()) {
      velocity.y = velocity.maxY;
    }
  }

  /** Rightward movement: cap at maxX (or 2*maxX while boosting); when
   *  already above the cap and boost is released, bleed speed back down. */
  private void steerRight(VelocityComponent velocity, boolean boosting) {
    if (velocity.x > velocity.maxX) {
      if (!boosting) {
        velocity.x *= 0.9f;
        if (velocity.x < velocity.maxX) {
          velocity.x = velocity.maxX;
        }
      }
    }
    else {
      velocity.x = boosting ? velocity.maxX * 2 : velocity.maxX;
    }
  }

  /** Friction when no direction button is held; small speeds snap to zero
   *  so the entity does not drift forever. */
  private void decelerate(VelocityComponent velocity) {
    if (velocity.x < 5f && velocity.x > -5f) {
      velocity.x = 0;
    }
    else {
      velocity.x *= 0.9f;
    }
  }
}
|
joe4568/centreon-broker | tcp/src/factory.cc | <gh_stars>0
/*
** Copyright 2011-2013 Centreon
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
** http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
**
** For more information : <EMAIL>
*/
#include <memory>
#include <QString>
#include "com/centreon/broker/config/parser.hh"
#include "com/centreon/broker/exceptions/msg.hh"
#include "com/centreon/broker/tcp/acceptor.hh"
#include "com/centreon/broker/tcp/connector.hh"
#include "com/centreon/broker/tcp/factory.hh"
using namespace com::centreon::broker;
using namespace com::centreon::broker::tcp;
/**************************************
* *
* Public Methods *
* *
**************************************/
/**
* Default constructor.
*/
factory::factory() {}  // no members of its own to initialize
/**
* Copy constructor.
*
* @param[in] other Object to copy.
*/
// Copying delegates entirely to the io::factory base; this class is stateless.
factory::factory(factory const& other) : io::factory(other) {}
/**
* Destructor.
*/
factory::~factory() {}  // nothing to release
/**
* Assignment operator.
*
* @param[in] other Object to copy.
*
* @return This object.
*/
factory& factory::operator=(factory const& other) {
  // Only base-class state exists; self-assignment is harmless here.
  io::factory::operator=(other);
  return *this;
}
/**
* Clone the factory object.
*
* @return Clone of this factory.
*/
io::factory* factory::clone() const {
  // Duplicate through the copy constructor; caller owns the result.
  return new factory(*this);
}
/**
* Check if a configuration supports this protocol.
*
* @param[in] cfg Object configuration.
*
* @return True if the configuration has this protocol.
*/
bool factory::has_endpoint(config::endpoint& cfg) const {
  // Accepted protocol type names for the TCP module.
  if (cfg.type == "ip")
    return (true);
  if (cfg.type == "tcp")
    return (true);
  if (cfg.type == "ipv4")
    return (true);
  return (cfg.type == "ipv6");
}
/**
* Create a new endpoint from a configuration.
*
* @param[in] cfg Endpoint configuration.
* @param[out] is_acceptor Set to true if the endpoint is an acceptor.
* @param[in] cache Unused.
*
* @return Endpoint matching configuration.
*/
/**
 *  Create a new endpoint from a configuration.
 *
 *  @param[in]  cfg          Endpoint configuration.
 *  @param[out] is_acceptor  Set to true if the endpoint is an acceptor.
 *  @param[in]  cache        Unused.
 *
 *  @return Endpoint matching configuration.
 */
io::endpoint* factory::new_endpoint(
                         config::endpoint& cfg,
                         bool& is_acceptor,
                         misc::shared_ptr<persistent_cache> cache) const {
  (void)cache;

  // Find host (optional): when absent we listen instead of connecting.
  QString host;
  {
    QMap<QString, QString>::const_iterator it(cfg.params.find("host"));
    if (it != cfg.params.end())
      host = it.value();
  }

  // Find port (must exist).
  unsigned short port;
  {
    QMap<QString, QString>::const_iterator it(cfg.params.find("port"));
    if (it == cfg.params.end())
      throw (exceptions::msg() << "TCP: no 'port' defined for " \
               "endpoint '" << cfg.name << "'");
    port = it.value().toUShort();
  }

  // Find TCP socket timeout options (-1 means no timeout).
  // toInt() rather than toUInt(): the destination is a signed int, and
  // toUInt() would turn a configured "-1" into 0 instead of -1.
  int write_timeout(-1);
  {
    QMap<QString, QString>::const_iterator
      it(cfg.params.find("socket_write_timeout"));
    if (it != cfg.params.end())
      write_timeout = it.value().toInt();
  }
  int read_timeout(-1);
  {
    QMap<QString, QString>::const_iterator
      it(cfg.params.find("socket_read_timeout"));
    if (it != cfg.params.end())
      read_timeout = it.value().toInt();
  }

  // NOTE(review): std::auto_ptr is deprecated (removed in C++17); switch
  // to std::unique_ptr once this codebase builds as C++11 or later.
  // Acceptor.
  std::auto_ptr<io::endpoint> endp;
  if (host.isEmpty()) {
    is_acceptor = true;
    std::auto_ptr<tcp::acceptor> a(new tcp::acceptor);
    a->set_read_timeout(read_timeout);
    a->set_write_timeout(write_timeout);
    a->listen_on(port);
    endp.reset(a.release());
  }
  // Connector.
  else {
    is_acceptor = false;
    std::auto_ptr<tcp::connector> c(new tcp::connector);
    // Configure both timeouts before connecting so the connection can
    // honour them (the original set the write timeout only after
    // connect_to(), mirroring neither the acceptor branch nor intent).
    c->set_read_timeout(read_timeout);
    c->set_write_timeout(write_timeout);
    c->connect_to(host, port);
    endp.reset(c.release());
  }
  return (endp.release());
}
|
shaojiankui/iOS10-Runtime-Headers | PrivateFrameworks/OfficeImport.framework/EDStylesCollection.h | <gh_stars>10-100
/* Generated by RuntimeBrowser
Image: /System/Library/PrivateFrameworks/OfficeImport.framework/OfficeImport
*/
/* Collection of workbook styles that additionally tracks which element is
   the default workbook style, by index.  Header recovered by
   RuntimeBrowser, so parameter names are synthetic (arg1/arg2). */
@interface EDStylesCollection : EDCollection {
    unsigned long long mDefaultWorkbookStyleIndex;  // index of the default workbook style within this collection
}

- (unsigned long long)addObject:(id)arg1;
- (id)defaultWorkbookStyle;                  // presumably the object at mDefaultWorkbookStyleIndex — confirm in the implementation
- (unsigned long long)defaultWorkbookStyleIndex;
- (id)init;
- (void)removeAllObjects;
- (void)removeObjectAtIndex:(unsigned long long)arg1;
- (void)replaceObjectAtIndex:(unsigned long long)arg1 withObject:(id)arg2;
- (void)setDefaultWorkbookStyleIndex:(unsigned long long)arg1;

@end
|
rhpvorderman/galaxy | client/src/components/User/UserServices.js | import { getAppRoot } from "onload/loadConfig";
import axios from "axios";
import { getGalaxyInstance } from "app";
/** Fetch the current user's most recent invocations (up to 150),
 *  using the "collection" view of the invocations API. */
export function getRecentInvocations() {
    const Galaxy = getGalaxyInstance();
    const url = `${getAppRoot()}api/invocations`;
    const params = {
        user_id: Galaxy.user.id,
        limit: 150,
        view: "collection",
    };
    return axios.get(url, { params });
}
|
dtglidden/ITK | Code/Numerics/Statistics/itkScalarImageToGreyLevelCooccurrenceMatrixGenerator.h | <gh_stars>1-10
/*=========================================================================
Program: Insight Segmentation & Registration Toolkit
Module: itkScalarImageToGreyLevelCooccurrenceMatrixGenerator.h
Language: C++
Date: $Date$
Version: $Revision$
Copyright (c) Insight Software Consortium. All rights reserved.
See ITKCopyright.txt or http://www.itk.org/HTML/Copyright.htm for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notices for more information.
=========================================================================*/
#ifndef __itkScalarImageToGreyLevelCooccurrenceMatrixGenerator_h
#define __itkScalarImageToGreyLevelCooccurrenceMatrixGenerator_h
#include "itkImage.h"
#include "itkHistogram.h"
#include "itkDenseFrequencyContainer.h"
#include "itkVectorContainer.h"
#include "itkObject.h"
#include "itkNumericTraits.h"
#include "itkMacro.h"
namespace itk {
namespace Statistics {
/** \class ScalarImageToGreyLevelCooccurrenceMatrixGenerator
* \brief This class computes a grey-level co-occurence matrix (histogram) from
* a given image. GLCM's are used for image texture description.
*
* This generator creates a grey-level co-occurence matrix from a N-D scalar
* image. This is the first step in texture description a la Haralick. (See
* <NAME>., <NAME> and <NAME>. 1973. Textural Features for
* Image Classification. IEEE Transactions on Systems, Man and Cybernetics.
* SMC-3(6):610-620. See also <NAME>. 1979. Statistical and Structural
* Approaches to Texture. Proceedings of the IEEE, 67:786-804.)
*
* The basic idea is as follows:
* Given an image and an offset (e.g. (1, -1) for a 2-d image), grey-level
* co-occurences are pairs of intensity values for a specific pixel and the
* pixel at that offset from the specified pixel. These co-occurences can provide
* information about the visual texture of an image region -- for example, an
* eight-bit image of alternating pixel-wide white and black vertical lines
* would have a large number of (0, 255) and (255, 0) co-occurences for offset
* (1, 0).
*
* The offset (or offsets) along which the co-occurences are calculated can be
* set by the user. Traditionally, only one offset is used per histogram, and
* offset components in the range [-1, 1] are used. For rotation-invariant features,
* averages of features computed over several histograms with different offsets
* are generally used, instead of computing features from one histogram created
* with several offsets. Additionally, instead of using offsets of two or more
 * pixels in any direction, multi-resolution techniques (e.g. image pyramids)
* are generally used to deal with texture at different spatial resolutions.
*
* This class calculates a 2-d histogram of all the co-occurence pairs in the
* given image's requested region, for a given set of offsets. That is, if a given
* offset falls outside of the requested region at a particular point, that
* co-occurrence pair will not be added to the matrix.
*
* The number of histogram bins on each axis can be set (defaults to 256). Also,
* by default the histogram min and max corresponds to the largest and smallest
* possible pixel value of that pixel type. To customize the histogram bounds
* for a given image, the max and min pixel values that will be placed in the
* histogram can be set manually. NB: The min and max are INCLUSIVE.
*
* Further, the type of histogram frequency container used is an optional template
* parameter. By default, a dense container is used, but for images with little
* texture or in cases where the user wants more histogram bins, a sparse container
* can be used for the histogram instead.
*
* WARNING: This probably won't work for pixels of double or long-double type
* unless you set the histogram min and max manually. This is because the largest
* histogram bin by default has max value of the largest possible pixel value
* plus 1. For double and long-double types, whose "RealType" as defined by the
* NumericTraits class is the same, and thus cannot hold any larger values,
* this would cause a float overflow.
*
* \sa MaskedScalarImageToGreyLevelCooccurrenceMatrixGenerator
* \sa GreyLevelCooccurrenceMatrixTextureCoefficientsCalculator
* \sa ScalarImageTextureCalculator
*
* Authors: <NAME> and <NAME>
*/
template< class TImageType,
          class THistogramFrequencyContainer = DenseFrequencyContainer >
class ScalarImageToGreyLevelCooccurrenceMatrixGenerator : public Object
{
public:
  /** Standard typedefs */
  typedef ScalarImageToGreyLevelCooccurrenceMatrixGenerator Self;
  typedef Object Superclass;
  typedef SmartPointer<Self> Pointer;
  typedef SmartPointer<const Self> ConstPointer;

  /** Run-time type information (and related methods). */
  itkTypeMacro(ScalarImageToGreyLevelCooccurrenceMatrixGenerator, Object);

  /** standard New() method support */
  itkNewMacro(Self);

  /** Convenience typedefs lifted from the input image type. */
  typedef TImageType ImageType;
  typedef typename ImageType::Pointer ImagePointer;
  typedef typename ImageType::ConstPointer ImageConstPointer;
  typedef typename ImageType::PixelType PixelType;
  typedef typename ImageType::RegionType RegionType;
  typedef typename ImageType::SizeType RadiusType;
  typedef typename ImageType::OffsetType OffsetType;

  /** One or more offsets stored in an ITK container. */
  typedef VectorContainer<unsigned char, OffsetType> OffsetVector;
  typedef typename OffsetVector::Pointer OffsetVectorPointer;
  typedef typename OffsetVector::ConstPointer OffsetVectorConstPointer;

  /** Histogram measurements use the real-valued promotion of the pixel type. */
  typedef typename NumericTraits<PixelType>::RealType MeasurementType;

  /** The co-occurrence matrix itself is a 2-D histogram. */
  typedef Histogram< MeasurementType, 2, THistogramFrequencyContainer >
    HistogramType;
  typedef typename HistogramType::Pointer HistogramPointer;
  typedef typename HistogramType::ConstPointer HistogramConstPointer;
  typedef typename HistogramType::MeasurementVectorType MeasurementVectorType;

  /** Default number of bins along each of the two histogram axes. */
  itkStaticConstMacro(DefaultBinsPerAxis, unsigned int, 256);

  /** Triggers the Computation of the histogram */
  void Compute( void );

  /** Connects the input image for which the histogram is going to be computed */
  itkSetConstObjectMacro( Input, ImageType );
  itkGetConstObjectMacro( Input, ImageType );

  /** Set the offset or offsets over which the co-occurrence pairs
   * will be computed. Calling either of these methods clears the
   * previous offsets. */
  itkSetConstObjectMacro( Offsets, OffsetVector );
  itkGetConstObjectMacro( Offsets, OffsetVector );

  /** Convenience overload: replaces any previous offsets with the single
   * given offset. */
  void SetOffset( const OffsetType offset )
    {
    OffsetVectorPointer offsetVector = OffsetVector::New();
    offsetVector->push_back(offset);
    this->SetOffsets(offsetVector);
    }

  /** Return the histogram.
   \warning This output is only valid after the Compute() method has been invoked
   \sa Compute */
  itkGetObjectMacro( Output, HistogramType );

  /** Set number of histogram bins along each axis */
  itkSetMacro( NumberOfBinsPerAxis, unsigned int );
  itkGetMacro( NumberOfBinsPerAxis, unsigned int );

  /** Set the min and max (inclusive) pixel value that will be placed in the histogram */
  void SetPixelValueMinMax( PixelType min, PixelType max );
  itkGetMacro(Min, PixelType);
  itkGetMacro(Max, PixelType);

  /** Set the calculator to normalize the histogram (divide all bins by the
    total frequency). Normalization is off by default.*/
  itkSetMacro(Normalize, bool);
  itkGetMacro(Normalize, bool);
  itkBooleanMacro(Normalize);

protected:
  ScalarImageToGreyLevelCooccurrenceMatrixGenerator();
  virtual ~ScalarImageToGreyLevelCooccurrenceMatrixGenerator() {};
  void PrintSelf(std::ostream& os, Indent indent) const;

  /** Accumulates the co-occurrence pairs over the given region into the
   * output histogram (implementation in the .txx file). */
  virtual void FillHistogram( RadiusType radius, RegionType region );

private:
  ScalarImageToGreyLevelCooccurrenceMatrixGenerator(const Self&); //purposely not implemented
  void operator=(const Self&); //purposely not implemented

  /** Divides every bin by the total frequency; called when m_Normalize is on. */
  void NormalizeHistogram( void );

  ImageConstPointer m_Input;            // input image the matrix is computed from
  HistogramPointer m_Output;            // resulting co-occurrence histogram
  OffsetVectorConstPointer m_Offsets;   // offsets over which pairs are collected
  PixelType m_Min;                      // inclusive lower histogram bound
  PixelType m_Max;                      // inclusive upper histogram bound
  unsigned int m_NumberOfBinsPerAxis;
  MeasurementVectorType m_LowerBound;
  MeasurementVectorType m_UpperBound;
  bool m_Normalize;                     // whether Compute() normalizes the histogram
};
} // end of namespace Statistics
} // end of namespace itk
#ifndef ITK_MANUAL_INSTANTIATION
#include "itkScalarImageToGreyLevelCooccurrenceMatrixGenerator.txx"
#endif
#endif
|
DianwodaCompany/bassy | bassy-mapper/src/main/java/com/dianwoda/test/bassy/db/dao/ProgramTaskMapperExt.java | package com.dianwoda.test.bassy.db.dao;
import com.dianwoda.test.bassy.common.domain.dto.program.ProgramTaskPercentDTO;
import com.dianwoda.test.bassy.db.entity.ProgramTask;
import org.apache.ibatis.annotations.Param;
import java.util.List;
import java.util.Map;
public interface ProgramTaskMapperExt {

    // NOTE(review): "heap data" looks like chart aggregation rows for one
    // department within [sDate, eDate] — confirm against the mapper XML.
    List<Map<String, Object>> getHeapData(@Param("departId") Integer departId, @Param("sDate") String sDate, @Param("eDate") String eDate);

    // Free-form task search by keyword, time window, status, tester and process.
    List<ProgramTask> selectByCondition(@Param("searchKey") String searchKey, @Param("startTm") String startTm, @Param("endTm") String endTm, @Param("status") String status, @Param("tester") String tester, @Param("process") String process);

    /**
     * Fetch the task list shown in a program's daily report, by program id.
     * Tasks whose status is init or close are excluded;
     * all remaining tasks of the program are returned,
     * ordered by requirement id, program phase and task.
     *
     * @param programId program id
     * @return tasks for the daily report
     */
    List<ProgramTask> getProgramReportTask(Integer programId);

    /**
     * Get task progress of a program, grouped by requirement.
     * @param programId program id
     * @param requires requirement ids to report on
     * @return progress percentages per requirement
     */
    List<ProgramTaskPercentDTO> getProgramTaskProcessByrRequire(@Param("programId") Integer programId, @Param("requires") List requires);

    /**
     * Get all requirements of a program.
     * @param programId program id
     * @return requirement rows
     */
    List<Map<String,Object>> getPlanRequires(Integer programId);

    /**
     * Requirement view of a test plan.
     * @param programId program id
     * @param requireId requirement id
     * @return board rows for the given requirement
     */
    List<Map<String, Object>> getPlanRequireBoard(@Param("programId") Integer programId, @Param("requireId") Integer requireId);
}
|
philtherobot/quickc | charge_engine/include/charge/exception.hpp | <reponame>philtherobot/quickc<filename>charge_engine/include/charge/exception.hpp
#ifndef GUARD_310c5f06f04f4590a8d9bc3d57e5b43e
#define GUARD_310c5f06f04f4590a8d9bc3d57e5b43e
#include <boost/filesystem/path.hpp>
#include <exception>
#include <string>
namespace charge
{
/// Root of the charge exception hierarchy; carries a message via what().
class Exception : public std::runtime_error
{
public:
    using runtime_error::runtime_error;
};

/// A library referenced by the build has no entry in the configuration.
/// The offending library name is retained for error reporting.
class LibraryNotConfiguredError : public Exception
{
public:
    explicit LibraryNotConfiguredError(std::string const & lib);

    /// Name of the library that was not configured.
    std::string const & library() const;

private:
    std::string lib_;
};

/// Invalid or malformed command-line argument.
class CommandLineArgumentError : public Exception
{
public:
    explicit CommandLineArgumentError(std::string const & msg);
};

/// A "family" value that this tool does not support; the family string
/// is retained for reporting.
class UnsupportedFamilyError : public Exception
{
public:
    explicit UnsupportedFamilyError(std::string const & family);

    /// The unsupported family string.
    std::string const & family() const;

private:
    std::string family_;
};

/// Generic runtime failure within charge.
class RuntimeError : public Exception
{
public:
    using Exception::Exception;
};

/// Error involving an absolute path; msg() formats the exception text
/// from the offending path.
class AbsolutePathError : public Exception
{
public:
    explicit AbsolutePathError(boost::filesystem::path const & p);

private:
    static std::string msg(boost::filesystem::path const & p);
};

/// Raised when the compiler could not be detected.
class CompilerDetectionError : public Exception
{
public:
    using Exception::Exception;
};

/// Raised when compilation fails.
class CompilationFailureError : public Exception
{
public:
    using Exception::Exception;
};
} // charge
#endif
|
NajibAdan/kitsu-server | db/migrate/20170721022437_add_default_to_hidden_on_group_members_and_follows.rb | class AddDefaultToHiddenOnGroupMembersAndFollows < ActiveRecord::Migration
def change
change_column_default :group_members, :hidden, true
change_column_null :group_members, :hidden, false
change_column_default :follows, :hidden, true
change_column_null :follows, :hidden, false
end
end
|
Sowmya6597/java | HibernateDemo/src/com/cts/training/controller/CompanyController.java | package com.cts.training.controller;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
import org.hibernate.cfg.Configuration;
import com.cts.training.model.Company;
/**
 * Demo controller that persists a single {@link Company} row via Hibernate.
 *
 * Fixes over the original: the Session and SessionFactory are closed in
 * finally blocks so a mapping or JDBC failure can no longer leak
 * connections, and the transaction is rolled back when the save fails.
 */
public class CompanyController {
    public static void main(String args[])
    {
        Configuration cfg = new Configuration();
        cfg.configure(); // reads hibernate.cfg.xml from the classpath
        SessionFactory sessionFactory = cfg.buildSessionFactory();
        try {
            Session session = sessionFactory.openSession();
            try {
                Transaction transaction = session.beginTransaction();
                try {
                    Company company = new Company(100, "CTS", "digital Transformation", "Brain Hamphrac", 23899101.78);
                    session.save(company);
                    transaction.commit();
                } catch (RuntimeException e) {
                    // Undo the partial unit of work before propagating.
                    transaction.rollback();
                    throw e;
                }
            } finally {
                session.close();
            }
        } finally {
            sessionFactory.close();
        }
    }
}
|
p6-process/p6-process | src/main/java/org/lorislab/p6/process/deployment/AbstractTest.java | package org.lorislab.p6.process.deployment;
public abstract class AbstractTest {
    // Placeholder hook that only prints a marker to stdout.
    // NOTE(review): the name is clearly a stub — rename once real
    // deployment tests exist.
    public void xxxxxxxxxxxxx() {
        System.out.println("###");
    }
}
|
meteor-space/donations | packages/base/source/shared/value-objects/address.js | Space.domain.ValueObject.extend('Donations.Address', {
// EJSON serializable fields
fields() {
return {
country: Country,
zip: String,
city: String,
street: String
};
}
});
|
wangsikai/learn | yoo-master-dc2492330d5d46b48f1ceca891e0f9f7e1593fee/module/other/resource/src/main/java/com/lanking/uxb/rescon/question/convert/ResconQuestionConvert.java | <filename>yoo-master-dc2492330d5d46b48f1ceca891e0f9f7e1593fee/module/other/resource/src/main/java/com/lanking/uxb/rescon/question/convert/ResconQuestionConvert.java
package com.lanking.uxb.rescon.question.convert;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.lanking.cloud.domain.common.baseData.ExaminationPoint;
import com.lanking.cloud.domain.common.baseData.KnowledgePoint;
import com.lanking.cloud.domain.common.baseData.KnowledgeReview;
import com.lanking.cloud.domain.common.baseData.KnowledgeSync;
import com.lanking.cloud.domain.common.baseData.MetaKnowpoint;
import com.lanking.cloud.domain.common.baseData.QuestionCategory;
import com.lanking.cloud.domain.common.baseData.QuestionType;
import com.lanking.cloud.domain.common.resource.question.Answer;
import com.lanking.cloud.domain.common.resource.question.ChoiceFormat;
import com.lanking.cloud.domain.common.resource.question.Question;
import com.lanking.cloud.domain.common.resource.question.Question2Tag;
import com.lanking.cloud.domain.common.resource.question.QuestionCategoryType;
import com.lanking.cloud.domain.common.resource.question.QuestionSource;
import com.lanking.cloud.domain.type.AsciiStatus;
import com.lanking.cloud.sdk.bean.Converter;
import com.lanking.cloud.sdk.bean.ConverterAssembler;
import com.lanking.cloud.sdk.util.CollectionUtils;
import com.lanking.cloud.sdk.util.StringUtils;
import com.lanking.uxb.rescon.basedata.api.ResconQuestionExaminationPointService;
import com.lanking.uxb.rescon.basedata.api.ResconQuestionKnowledgeReviewService;
import com.lanking.uxb.rescon.basedata.api.ResconQuestionKnowledgeService;
import com.lanking.uxb.rescon.basedata.api.ResconQuestionKnowledgeSyncService;
import com.lanking.uxb.rescon.basedata.convert.ResconExaminationPointConvert;
import com.lanking.uxb.rescon.basedata.convert.ResconKnowledgePointConvert;
import com.lanking.uxb.rescon.basedata.convert.ResconKnowledgeReviewConvert;
import com.lanking.uxb.rescon.basedata.convert.ResconKnowledgeSyncConvert;
import com.lanking.uxb.rescon.basedata.convert.ResconMetaKnowpointConvert;
import com.lanking.uxb.rescon.basedata.value.VKnowledgePoint;
import com.lanking.uxb.rescon.basedata.value.VMetaKnowpoint;
import com.lanking.uxb.rescon.basedata.value.VResconExaminationPoint;
import com.lanking.uxb.rescon.basedata.value.VResconKnowledgeReview;
import com.lanking.uxb.rescon.basedata.value.VResconKnowledgeSync;
import com.lanking.uxb.rescon.question.api.ResconAnswerManage;
import com.lanking.uxb.rescon.question.api.ResconQuestionCategoryManage;
import com.lanking.uxb.rescon.question.api.ResconQuestionManage;
import com.lanking.uxb.rescon.question.api.ResconQuestionMetaKnowManage;
import com.lanking.uxb.rescon.question.api.ResconQuestionTagManage;
import com.lanking.uxb.rescon.question.api.ResconQuestionTypeManage;
import com.lanking.uxb.rescon.question.value.VQuestion;
import com.lanking.uxb.rescon.question.value.VQuestionCategory;
import com.lanking.uxb.rescon.question.value.VQuestionTag;
import com.lanking.uxb.service.code.api.PhaseService;
import com.lanking.uxb.service.code.api.SchoolService;
import com.lanking.uxb.service.code.api.SectionService;
import com.lanking.uxb.service.code.api.SubjectService;
import com.lanking.uxb.service.code.api.TextbookCategoryService;
import com.lanking.uxb.service.code.api.TextbookService;
import com.lanking.uxb.service.code.convert.PhaseConvert;
import com.lanking.uxb.service.code.convert.SchoolConvert;
import com.lanking.uxb.service.code.convert.SectionConvert;
import com.lanking.uxb.service.code.convert.SubjectConvert;
import com.lanking.uxb.service.code.convert.TextbookCategoryConvert;
import com.lanking.uxb.service.code.convert.TextbookConvert;
import com.lanking.uxb.service.code.value.VPhase;
import com.lanking.uxb.service.code.value.VSchool;
import com.lanking.uxb.service.code.value.VSection;
import com.lanking.uxb.service.code.value.VSubject;
import com.lanking.uxb.service.code.value.VTextbook;
import com.lanking.uxb.service.code.value.VTextbookCategory;
import com.lanking.uxb.service.file.util.FileUtil;
/**
* 资源管控平台使用的习题转换器.
*
* @author <a href="mailto:<EMAIL>">wanlong.che</a>
* @version 2015年8月15日
*/
@Component
public class ResconQuestionConvert extends Converter<VQuestion, Question, Long> {
@Autowired
private ResconQuestionTypeManage questionTypeService;
@Autowired
private ResconQuestionManage questionService;
@Autowired
private ResconQuestionMetaKnowManage questionMetaKnowService;
@Autowired
private ResconAnswerManage answerService;
@Autowired
private TextbookCategoryService tcService;
@Autowired
private TextbookCategoryConvert tcConvert;
@Autowired
private TextbookService tbService;
@Autowired
private TextbookConvert tbConvert;
@Autowired
private SectionService sectionService;
@Autowired
private SectionConvert sectionConvert;
@Autowired
private PhaseService phaseService;
@Autowired
private PhaseConvert phaseConvert;
@Autowired
private SubjectService subjectService;
@Autowired
private SubjectConvert subjectConvert;
@Autowired
private ResconMetaKnowpointConvert mkConvert;
@Autowired
private ResconQuestionTypeConvert questionTypeConvert;
@Autowired
private ResconAnswerConvert answerConvert;
@Autowired
private SchoolService schoolService;
@Autowired
private SchoolConvert schoolConvert;
@Autowired
private ResconKnowledgePointConvert knowledgePointConvert;
@Autowired
private ResconExaminationPointConvert examinationPointConvert;
@Autowired
private ResconQuestionKnowledgeService questionKnowledgeService;
@Autowired
private ResconQuestionExaminationPointService questionExaminationPointService;
@Autowired
private ResconQuestionTagManage questionTagManage;
@Autowired
private ResconQuestionTagConvert questionTagConvert;
@Autowired
private ResconQuestionCategoryManage questionCategoryManage;
@Autowired
private ResconQuestionCategoryConvert questionCategoryConvert;
@Autowired
private ResconQuestionKnowledgeReviewService questionKnowledgeReviewService;
@Autowired
private ResconQuestionKnowledgeSyncService questionKnowledgeSyncService;
@Autowired
private ResconKnowledgeReviewConvert knowledgeReviewConvert;
@Autowired
private ResconKnowledgeSyncConvert knowledgeSyncConvert;
/** Primary-key extractor required by the {@code Converter} base class. */
@Override
protected Long getId(Question s) {
    return s.getId();
}
/**
 * Strips a single wrapping {@code <p>...</p>} pair from rich-text content,
 * if present; any other string is returned unchanged.
 *
 * Bug fix: the old indexOf/lastIndexOf check accepted the 3-character
 * input {@code "<p>"} (lastIndexOf("</p>") == -1 == length() - 4) and
 * then threw StringIndexOutOfBoundsException in substring(3, -1).
 * startsWith/endsWith cannot match a bare prefix that way.
 *
 * @param str raw HTML fragment, may be null or blank
 * @return the fragment without its outer paragraph tags
 */
private String delPContent(String str) {
    if (StringUtils.isNotBlank(str) && str.startsWith("<p>") && str.endsWith("</p>")) {
        // Drop the leading "<p>" (3 chars) and trailing "</p>" (4 chars).
        str = str.substring(3, str.length() - 4);
    }
    return str;
}
/** Single-entity lookup, delegated to the question service. */
@Override
protected Question internalGet(Long id) {
    return questionService.get(id);
}
/** Batch lookup (id -> entity), delegated to the question service. */
@Override
protected Map<Long, Question> internalMGet(Collection<Long> ids) {
    return questionService.mget(ids);
}
/**
 * Copies the scalar fields of a {@link Question} entity onto an existing
 * {@link VQuestion} view.  Associated objects (textbook, knowledge points,
 * answers, ...) are attached separately by the converter assemblers.
 */
public void convert(Question s, VQuestion v) {
    v.setId(s.getId());
    v.setType(s.getType());
    // Outer <p>...</p> wrappers are stripped from all rich-text fields.
    v.setContent(this.delPContent(s.getContent()));
    v.setDifficulty(s.getDifficulty());
    v.setSource(StringUtils.defaultIfBlank(s.getSource()));
    v.setCode(StringUtils.defaultIfBlank(s.getCode()));
    v.setAnalysis(this.delPContent(validBlank(s.getAnalysis())));
    v.setHint(this.delPContent(validBlank(s.getHint())));
    v.setSubFlag(s.isSubFlag());
    v.setSequence(s.getSequence() == null ? 1 : s.getSequence());
    v.setParentId(s.getParentId() == null ? 0 : s.getParentId());
    // Choices: only non-blank options are kept, in A..F order.
    List<String> choices = Lists.newArrayList();
    if (StringUtils.isNotBlank(s.getChoiceA())) {
        choices.add(this.delPContent(s.getChoiceA()));
    }
    if (StringUtils.isNotBlank(s.getChoiceB())) {
        choices.add(this.delPContent(s.getChoiceB()));
    }
    if (StringUtils.isNotBlank(s.getChoiceC())) {
        choices.add(this.delPContent(s.getChoiceC()));
    }
    if (StringUtils.isNotBlank(s.getChoiceD())) {
        choices.add(this.delPContent(s.getChoiceD()));
    }
    if (StringUtils.isNotBlank(s.getChoiceE())) {
        choices.add(this.delPContent(s.getChoiceE()));
    }
    if (StringUtils.isNotBlank(s.getChoiceF())) {
        choices.add(this.delPContent(s.getChoiceF()));
    }
    v.setChoices(choices);
    v.setAnswerNumber(s.getAnswerNumber());
    v.setCreateId(s.getCreateId());
    v.setVerifyId(s.getVerifyId());
    v.setVerify2Id(s.getVerify2Id());
    v.setCreateAt(s.getCreateAt());
    v.setUpdateAt(s.getUpdateAt());
    v.setVerifyAt(s.getVerifyAt());
    v.setVerify2At(s.getVerify2At());
    v.setCheckStatus(s.getStatus());
    v.setAsciiStatus(s.getAsciiStatus() == null ? AsciiStatus.NOCHECK : s.getAsciiStatus());
    v.setNopassContent(s.getNopassContent());
    // Rejection attachments are stored as ";"-separated file ids; expose URLs.
    if (StringUtils.isNotBlank(s.getNopassFiles())) {
        String[] nopassImages = s.getNopassFiles().split(";");
        List<String> urls = new ArrayList<String>(nopassImages.length);
        for (String fileId : nopassImages) {
            urls.add(FileUtil.getUrl(Long.parseLong(fileId)));
        }
        v.setNopassImages(urls);
    }
    if (s.getType() == Question.Type.SINGLE_CHOICE || s.getType() == Question.Type.MULTIPLE_CHOICE) {
        if (s.getChoiceFormat() != null) {
            v.setChoiceFormat(s.getChoiceFormat());
        } else {
            // Default layout for legacy questions that predate choiceFormat.
            if (choices.size() < 4) {
                v.setChoiceFormat(ChoiceFormat.HORIZONTAL);
            } else if (choices.size() == 4) {
                v.setChoiceFormat(ChoiceFormat.ABREAST);
            } else {
                v.setChoiceFormat(ChoiceFormat.VERTICAL);
            }
        }
    }
    v.setQuestionSource(s.getQuestionSource() == null ? QuestionSource.VENDOR : s.getQuestionSource());
    v.setOpenAnswerFlag(s.getOpenAnswerFlag() == null ? false : s.getOpenAnswerFlag());
    if (CollectionUtils.isNotEmpty(s.getCategoryTypes())) {
        List<String> ctypes = new ArrayList<String>(s.getCategoryTypes().size());
        for (QuestionCategoryType qct : s.getCategoryTypes()) {
            ctypes.add(qct.getName());
        }
        v.setCtypes(ctypes);
    }
    v.setHasSimilar(s.isHasSimilar() == null ? false : s.isHasSimilar());
    v.setSameShow(s.getSameShow());
    v.setSameShowId(s.getSameShowId());
    v.setCheckRefund(s.isCheckRefund());
}
/** Allocates an empty view object and delegates to the field-copying overload. */
@Override
protected VQuestion convert(Question s) {
    final VQuestion view = new VQuestion();
    convert(s, view);
    return view;
}
@SuppressWarnings("rawtypes")
@Override
protected void initAssemblers(List<ConverterAssembler> assemblers) {
// 教材分类
assemblers.add(new ConverterAssembler<VQuestion, Question, Integer, VTextbookCategory>() {
@Override
public boolean accept(Question s) {
return true;
}
@Override
public boolean accept(Map<String, Object> hints) {
return true;
}
@Override
public Integer getKey(Question s, VQuestion d) {
return s.getTextbookCategoryCode();
}
@Override
public void setValue(Question s, VQuestion d, VTextbookCategory value) {
d.setTextbookCategory(value);
}
@Override
public VTextbookCategory getValue(Integer key) {
return key == null ? null : tcConvert.to(tcService.get(key));
}
@Override
public Map<Integer, VTextbookCategory> mgetValue(Collection<Integer> keys) {
return keys == null ? null : tcConvert.to(tcService.mget(keys));
}
});
// Textbook (version): resolve the question's textbook code to a VTextbook view.
assemblers.add(new ConverterAssembler<VQuestion, Question, Integer, VTextbook>() {
@Override
public boolean accept(Question s) {
return true;
}
@Override
public boolean accept(Map<String, Object> hints) {
return true;
}
@Override
public Integer getKey(Question s, VQuestion d) {
return s.getTextbookCode();
}
@Override
public void setValue(Question s, VQuestion d, VTextbook value) {
d.setTextbook(value);
}
@Override
public VTextbook getValue(Integer key) {
// Null key short-circuits; otherwise load and convert one textbook.
return key == null ? null : tbConvert.to(tbService.get(key));
}
@Override
public Map<Integer, VTextbook> mgetValue(Collection<Integer> keys) {
return keys == null ? null : tbConvert.to(tbService.mget(keys));
}
});
// Section (chapter): resolve the question's section code to a VSection view.
assemblers.add(new ConverterAssembler<VQuestion, Question, Long, VSection>() {
@Override
public boolean accept(Question s) {
return true;
}
@Override
public boolean accept(Map<String, Object> hints) {
return true;
}
@Override
public Long getKey(Question s, VQuestion d) {
return s.getSectionCode();
}
@Override
public void setValue(Question s, VQuestion d, VSection value) {
d.setSection(value);
}
@Override
public VSection getValue(Long key) {
return key == null ? null : sectionConvert.to(sectionService.get(key));
}
@Override
public Map<Long, VSection> mgetValue(Collection<Long> keys) {
return keys == null ? null : sectionConvert.to(sectionService.mget(keys));
}
});
// Meta knowledge points: keyed by the question id, loaded via the
// question-to-meta-knowpoint link service.
assemblers.add(new ConverterAssembler<VQuestion, Question, Long, List<VMetaKnowpoint>>() {
@Override
public boolean accept(Question s) {
return true;
}
@Override
public boolean accept(Map<String, Object> hints) {
return true;
}
@Override
public Long getKey(Question s, VQuestion d) {
return s.getId();
}
@Override
public void setValue(Question s, VQuestion d, List<VMetaKnowpoint> value) {
d.setMetaKnowpoints(value);
}
@Override
public List<VMetaKnowpoint> getValue(Long key) {
return mkConvert.to(questionMetaKnowService.listByQuestion(key));
}
@Override
public Map<Long, List<VMetaKnowpoint>> mgetValue(Collection<Long> keys) {
// Convert each question's meta-knowpoint list to its view form.
Map<Long, List<VMetaKnowpoint>> rmap = new HashMap<Long, List<VMetaKnowpoint>>();
Map<Long, List<MetaKnowpoint>> map = questionMetaKnowService.mListByQuestions(keys);
for (Entry<Long, List<MetaKnowpoint>> entry : map.entrySet()) {
rmap.put(entry.getKey(), mkConvert.to(entry.getValue()));
}
return rmap;
}
});
// New-style knowledge points: same pattern as meta knowpoints, keyed by
// question id.
assemblers.add(new ConverterAssembler<VQuestion, Question, Long, List<VKnowledgePoint>>() {
@Override
public boolean accept(Question s) {
return true;
}
@Override
public boolean accept(Map<String, Object> hints) {
return true;
}
@Override
public Long getKey(Question s, VQuestion d) {
return s.getId();
}
@Override
public void setValue(Question s, VQuestion d, List<VKnowledgePoint> value) {
d.setKnowledgePoints(value);
}
@Override
public List<VKnowledgePoint> getValue(Long key) {
return knowledgePointConvert.to(questionKnowledgeService.listByQuestion(key));
}
@Override
public Map<Long, List<VKnowledgePoint>> mgetValue(Collection<Long> keys) {
Map<Long, List<VKnowledgePoint>> rmap = new HashMap<Long, List<VKnowledgePoint>>();
Map<Long, List<KnowledgePoint>> map = questionKnowledgeService.mListByQuestions(keys);
for (Entry<Long, List<KnowledgePoint>> entry : map.entrySet()) {
rmap.put(entry.getKey(), knowledgePointConvert.to(entry.getValue()));
}
return rmap;
}
});
// Examination points: keyed by question id, loaded via the
// question-to-examination-point link service.
assemblers.add(new ConverterAssembler<VQuestion, Question, Long, List<VResconExaminationPoint>>() {
@Override
public boolean accept(Question s) {
return true;
}
@Override
public boolean accept(Map<String, Object> hints) {
return true;
}
@Override
public Long getKey(Question s, VQuestion d) {
return s.getId();
}
@Override
public void setValue(Question s, VQuestion d, List<VResconExaminationPoint> value) {
d.setExaminationPoints(value);
}
@Override
public List<VResconExaminationPoint> getValue(Long key) {
return examinationPointConvert.to(questionExaminationPointService.listByQuestion(key));
}
@Override
public Map<Long, List<VResconExaminationPoint>> mgetValue(Collection<Long> keys) {
Map<Long, List<VResconExaminationPoint>> rmap = new HashMap<Long, List<VResconExaminationPoint>>();
Map<Long, List<ExaminationPoint>> map = questionExaminationPointService.mListByQuestions(keys);
for (Entry<Long, List<ExaminationPoint>> entry : map.entrySet()) {
rmap.put(entry.getKey(), examinationPointConvert.to(entry.getValue()));
}
return rmap;
}
});
// Phase (school stage): resolve the question's phase code to a VPhase view.
assemblers.add(new ConverterAssembler<VQuestion, Question, Integer, VPhase>() {
@Override
public boolean accept(Question s) {
return true;
}
@Override
public boolean accept(Map<String, Object> hints) {
return true;
}
@Override
public Integer getKey(Question s, VQuestion d) {
return s.getPhaseCode();
}
@Override
public void setValue(Question s, VQuestion d, VPhase value) {
d.setPhase(value);
}
@Override
public VPhase getValue(Integer key) {
// NOTE(review): no null-key guard here, unlike the textbook/section
// assemblers — confirm phaseService.get tolerates a null key.
return phaseConvert.to(phaseService.get(key));
}
@Override
public Map<Integer, VPhase> mgetValue(Collection<Integer> keys) {
return phaseConvert.to(phaseService.mget(keys));
}
});
// Subject: resolve the question's subject code to a VSubject view.
assemblers.add(new ConverterAssembler<VQuestion, Question, Integer, VSubject>() {
@Override
public boolean accept(Question s) {
return true;
}
@Override
public boolean accept(Map<String, Object> hints) {
return true;
}
@Override
public Integer getKey(Question s, VQuestion d) {
return s.getSubjectCode();
}
@Override
public void setValue(Question s, VQuestion d, VSubject value) {
d.setSubject(value);
}
@Override
public VSubject getValue(Integer key) {
// NOTE(review): no null-key guard, same as the phase assembler above.
return subjectConvert.to(subjectService.get(key));
}
@Override
public Map<Integer, VSubject> mgetValue(Collection<Integer> keys) {
return subjectConvert.to(subjectService.mget(keys));
}
});
// Question type: loads the raw QuestionType by type code; converted to the
// view form only in setValue.
assemblers.add(new ConverterAssembler<VQuestion, Question, Integer, QuestionType>() {
@Override
public boolean accept(Question s) {
return true;
}
@Override
public boolean accept(Map<String, Object> hints) {
return true;
}
@Override
public Integer getKey(Question s, VQuestion d) {
return s.getTypeCode();
}
@Override
public void setValue(Question s, VQuestion d, QuestionType value) {
d.setQuestionType(questionTypeConvert.to(value));
}
@Override
public QuestionType getValue(Integer key) {
if (key == null) {
return null;
}
return questionTypeService.get(key);
}
@Override
public Map<Integer, QuestionType> mgetValue(Collection<Integer> keys) {
if (keys.isEmpty()) {
return Maps.newHashMap();
}
return questionTypeService.mget(keys);
}
});
// Sub-questions: only composite questions have children; the key is the
// parent id (null for a sub-question itself, so no lookup happens).
assemblers.add(new ConverterAssembler<VQuestion, Question, Long, List<Question>>() {
@Override
public boolean accept(Question s) {
return s.getType() == Question.Type.COMPOSITE;
}
@Override
public boolean accept(Map<String, Object> hints) {
return true;
}
@Override
public Long getKey(Question s, VQuestion d) {
if (s.isSubFlag()) {
return null;
}
return s.getId();
}
@Override
public void setValue(Question s, VQuestion d, List<Question> value) {
if (CollectionUtils.isNotEmpty(value)) {
// Propagate the parent's display flags and homework id to each child
// before converting the children recursively.
for (Question q : value) {
q.setInitSub(s.isInitSub());
q.setAnalysis(s.isAnalysis());
q.setAnswer(s.isAnswer());
q.setStudentHomeworkId(s.getStudentHomeworkId());
}
d.setChildren(to(value));
}
}
@Override
public List<Question> getValue(Long key) {
if (key == null) {
return null;
}
return questionService.getSubQuestions(key);
}
@Override
public Map<Long, List<Question>> mgetValue(Collection<Long> keys) {
if (CollectionUtils.isEmpty(keys)) {
return Collections.EMPTY_MAP;
}
Map<Long, List<Question>> map = questionService.mgetSubQuestions(keys);
return map;
}
});
// Answers: keyed by question id; converted to view form in setValue.
assemblers.add(new ConverterAssembler<VQuestion, Question, Long, List<Answer>>() {
@Override
public boolean accept(Question s) {
return true;
}
@Override
public boolean accept(Map<String, Object> hints) {
return true;
}
@Override
public Long getKey(Question s, VQuestion d) {
return s.getId();
}
@Override
public void setValue(Question s, VQuestion d, List<Answer> value) {
if (CollectionUtils.isNotEmpty(value)) {
d.setAnswers(answerConvert.to(value));
}
}
@Override
public List<Answer> getValue(Long key) {
if (key == null) {
return null;
}
return answerService.getQuestionAnswers(key);
}
@Override
public Map<Long, List<Answer>> mgetValue(Collection<Long> keys) {
if (CollectionUtils.isEmpty(keys)) {
return Maps.newHashMap();
}
return answerService.getQuestionAnswers(keys);
}
});
// School: only assembled for questions with a non-zero school id.
assemblers.add(new ConverterAssembler<VQuestion, Question, Long, VSchool>() {
@Override
public boolean accept(Question s) {
return s.getSchoolId() != 0;
}
@Override
public boolean accept(Map<String, Object> hints) {
return true;
}
@Override
public Long getKey(Question s, VQuestion d) {
return s.getSchoolId();
}
@Override
public void setValue(Question s, VQuestion d, VSchool value) {
d.setSchool(value);
}
@Override
public VSchool getValue(Long key) {
return key == null ? null : schoolConvert.to(schoolService.get(key));
}
@Override
public Map<Long, VSchool> mgetValue(Collection<Long> keys) {
if (CollectionUtils.isEmpty(keys)) {
return null;
}
return schoolConvert.to(schoolService.mget(keys));
}
});
// Question categories: keyed by question id, loaded via the category manage
// service and converted per entry.
assemblers.add(new ConverterAssembler<VQuestion, Question, Long, List<VQuestionCategory>>() {
@Override
public boolean accept(Question s) {
return true;
}
@Override
public boolean accept(Map<String, Object> hints) {
return true;
}
@Override
public Long getKey(Question s, VQuestion d) {
return s.getId();
}
@Override
public void setValue(Question s, VQuestion d, List<VQuestionCategory> questionCategorys) {
d.setQuestionCategorys(questionCategorys);
}
@Override
public List<VQuestionCategory> getValue(Long key) {
if (key == null) {
return null;
}
return questionCategoryConvert.to(questionCategoryManage.listByQuestion(key));
}
@Override
public Map<Long, List<VQuestionCategory>> mgetValue(Collection<Long> keys) {
if (keys.isEmpty()) {
return Maps.newHashMap();
}
Map<Long, List<QuestionCategory>> categoryMap = questionCategoryManage.mgetByQuestions(keys);
Map<Long, List<VQuestionCategory>> vCategoryMap = new HashMap<Long, List<VQuestionCategory>>(
categoryMap.size());
for (Entry<Long, List<QuestionCategory>> entry : categoryMap.entrySet()) {
vCategoryMap.put(entry.getKey(), questionCategoryConvert.to(entry.getValue()));
}
return vCategoryMap;
}
});
// Question tags: resolve the question's Question2Tag links to VQuestionTag
// views, carrying the per-link "system" flag onto each tag.
assemblers.add(new ConverterAssembler<VQuestion, Question, Long, List<VQuestionTag>>() {
@Override
public boolean accept(Question s) {
return true;
}
@Override
public boolean accept(Map<String, Object> hints) {
return true;
}
@Override
public Long getKey(Question s, VQuestion d) {
return s.getId();
}
@Override
public void setValue(Question s, VQuestion d, List<VQuestionTag> questionTags) {
d.setQuestionTags(questionTags);
}
@Override
public List<VQuestionTag> getValue(Long key) {
if (key == null) {
return null;
}
List<Question2Tag> questionTags = questionTagManage.listByQuestion(key);
if (CollectionUtils.isEmpty(questionTags)) {
return null;
}
Set<Long> tagCodes = new HashSet<Long>(questionTags.size());
for (Question2Tag question2Tag : questionTags) {
tagCodes.add(question2Tag.getTagCode());
}
Map<Long, VQuestionTag> questionTagMap = questionTagConvert.to(questionTagManage.mget(tagCodes));
List<VQuestionTag> result = new ArrayList<VQuestionTag>(questionTags.size());
for (Question2Tag question2Tag : questionTags) {
VQuestionTag vQuestionTag = questionTagMap.get(question2Tag.getTagCode());
// Skip dangling links: a tag code without a backing tag record
// previously triggered a NullPointerException here.
if (vQuestionTag == null) {
continue;
}
vQuestionTag.setSystem(question2Tag.getSystem());
result.add(vQuestionTag);
}
return result;
}
@Override
public Map<Long, List<VQuestionTag>> mgetValue(Collection<Long> keys) {
if (keys.isEmpty()) {
return Maps.newHashMap();
}
Map<Long, List<Question2Tag>> tag2Map = questionTagManage.mgetByQuestions(keys);
Set<Long> tagCodes = new HashSet<Long>();
for (Entry<Long, List<Question2Tag>> entry : tag2Map.entrySet()) {
if (entry.getValue() != null) {
for (Question2Tag question2Tag : entry.getValue()) {
tagCodes.add(question2Tag.getTagCode());
}
}
}
Map<Long, VQuestionTag> questionTagMap = questionTagConvert.to(questionTagManage.mget(tagCodes));
// Assemble the per-question tag lists.
// NOTE(review): questionTagMap shares one VQuestionTag instance per tag
// code across all questions, so setSystem() below mutates that shared
// instance — the last question processed wins when the same tag is linked
// with different "system" flags. Confirm whether a per-link copy is
// intended.
Map<Long, List<VQuestionTag>> result = new HashMap<Long, List<VQuestionTag>>(tag2Map.size());
for (Entry<Long, List<Question2Tag>> entry : tag2Map.entrySet()) {
if (entry.getValue() != null) {
List<VQuestionTag> vQuestionTags = new ArrayList<VQuestionTag>(entry.getValue().size());
for (Question2Tag question2Tag : entry.getValue()) {
VQuestionTag vQuestionTag = questionTagMap.get(question2Tag.getTagCode());
// Same dangling-link guard as in getValue().
if (vQuestionTag == null) {
continue;
}
vQuestionTag.setSystem(question2Tag.getSystem());
vQuestionTags.add(vQuestionTag);
}
result.put(entry.getKey(), vQuestionTags);
}
}
return result;
}
});
// Knowledge points V3 — sync knowledge points, keyed by question id.
assemblers.add(new ConverterAssembler<VQuestion, Question, Long, List<VResconKnowledgeSync>>() {
@Override
public boolean accept(Question s) {
return true;
}
@Override
public boolean accept(Map<String, Object> hints) {
return true;
}
@Override
public Long getKey(Question s, VQuestion d) {
return s.getId();
}
@Override
public void setValue(Question s, VQuestion d, List<VResconKnowledgeSync> value) {
d.setKnowledgeSyncs(value);
}
@Override
public List<VResconKnowledgeSync> getValue(Long key) {
return knowledgeSyncConvert.to(questionKnowledgeSyncService.listByQuestion(key));
}
@Override
public Map<Long, List<VResconKnowledgeSync>> mgetValue(Collection<Long> keys) {
Map<Long, List<VResconKnowledgeSync>> rmap = new HashMap<Long, List<VResconKnowledgeSync>>();
Map<Long, List<KnowledgeSync>> map = questionKnowledgeSyncService.mListByQuestions(keys);
for (Entry<Long, List<KnowledgeSync>> entry : map.entrySet()) {
rmap.put(entry.getKey(), knowledgeSyncConvert.to(entry.getValue()));
}
return rmap;
}
});
// Knowledge points V3 — review knowledge points, keyed by question id.
assemblers.add(new ConverterAssembler<VQuestion, Question, Long, List<VResconKnowledgeReview>>() {
@Override
public boolean accept(Question s) {
return true;
}
@Override
public boolean accept(Map<String, Object> hints) {
return true;
}
@Override
public Long getKey(Question s, VQuestion d) {
return s.getId();
}
@Override
public void setValue(Question s, VQuestion d, List<VResconKnowledgeReview> value) {
d.setKnowledgeReviews(value);
}
@Override
public List<VResconKnowledgeReview> getValue(Long key) {
return knowledgeReviewConvert.to(questionKnowledgeReviewService.listByQuestion(key));
}
@Override
public Map<Long, List<VResconKnowledgeReview>> mgetValue(Collection<Long> keys) {
Map<Long, List<VResconKnowledgeReview>> rmap = new HashMap<Long, List<VResconKnowledgeReview>>();
Map<Long, List<KnowledgeReview>> map = questionKnowledgeReviewService.mListByQuestions(keys);
for (Entry<Long, List<KnowledgeReview>> entry : map.entrySet()) {
rmap.put(entry.getKey(), knowledgeReviewConvert.to(entry.getValue()));
}
return rmap;
}
});
}
}
|
chapmj/SpaceGame | src/main/java/gamecontroller/command/GameUpdateStateChangeCommand.java | package gamecontroller.command;
import gamecontroller.gamesystem.GameSystemMgr;
import gamecontroller.gamesystem._GameSystemState;
import gamecontroller.gametime.GTLogger;
/**
 * Command that switches the main game system to a new update state and logs
 * the transition.
 */
public class GameUpdateStateChangeCommand implements _ICommand {

    /** State the main game system is switched to when this command executes; set once. */
    private final _GameSystemState nextGameSystemState;

    public GameUpdateStateChangeCommand(_GameSystemState nextGameSystemState) {
        this.nextGameSystemState = nextGameSystemState;
    }

    @Override
    public void execute() {
        // Look up the singleton MAIN_GAME system and swap in the requested state.
        var gameSystem = GameSystemMgr.getInstance().get(GameSystemMgr.GameSystemName.MAIN_GAME);
        gameSystem.setState(nextGameSystemState);
        GTLogger.log(nextGameSystemState.toString());
    }
}
|
davebarnes97/geode | geode-core/src/distributedTest/java/org/apache/geode/internal/cache/ha/HARQueueNewImplDUnitTest.java | <gh_stars>0
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.ha;
import static org.apache.geode.cache.Region.Entry;
import static org.apache.geode.cache.Region.SEPARATOR;
import static org.apache.geode.distributed.ConfigurationProperties.DELTA_PROPAGATION;
import static org.apache.geode.distributed.ConfigurationProperties.LOCATORS;
import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
import static org.apache.geode.internal.cache.CacheServerImpl.generateNameForClientMsgsRegion;
import static org.apache.geode.test.awaitility.GeodeAwaitility.await;
import static org.apache.geode.test.dunit.NetworkUtils.getServerHostName;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.apache.logging.log4j.Logger;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.GemFireException;
import org.apache.geode.cache.AttributesFactory;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.DataPolicy;
import org.apache.geode.cache.DiskStoreFactory;
import org.apache.geode.cache.EntryEvent;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionAttributes;
import org.apache.geode.cache.RegionFactory;
import org.apache.geode.cache.Scope;
import org.apache.geode.cache.server.CacheServer;
import org.apache.geode.cache.util.CacheListenerAdapter;
import org.apache.geode.cache30.ClientServerTestCase;
import org.apache.geode.distributed.DistributedSystem;
import org.apache.geode.internal.AvailablePort;
import org.apache.geode.internal.cache.CacheServerImpl;
import org.apache.geode.internal.cache.LocalRegion;
import org.apache.geode.internal.cache.tier.sockets.CacheServerTestUtil;
import org.apache.geode.internal.cache.tier.sockets.ClientUpdateMessage;
import org.apache.geode.internal.cache.tier.sockets.ConflationDUnitTestHelper;
import org.apache.geode.internal.cache.tier.sockets.HAEventWrapper;
import org.apache.geode.logging.internal.log4j.api.LogService;
import org.apache.geode.test.awaitility.GeodeAwaitility;
import org.apache.geode.test.dunit.SerializableRunnableIF;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.internal.JUnit4DistributedTestCase;
import org.apache.geode.test.dunit.rules.DistributedRule;
import org.apache.geode.test.junit.categories.ClientSubscriptionTest;
/**
* This DUnit contains various tests to ensure new implementation of ha region queues works as
* expected.
*
* @since GemFire 5.7
*/
@Category({ClientSubscriptionTest.class})
public class HARQueueNewImplDUnitTest extends JUnit4DistributedTestCase {
// Name of the replicated test region (same in server and client VMs).
private static final String regionName = HARQueueNewImplDUnitTest.class.getSimpleName();
// Scratch map cleared in setUp/tearDown; presumably holds expected queue
// contents filled by the updateMap* helpers — confirm against those helpers.
private static final Map<Object, Object> map = new HashMap<>();
// Per-VM statics: each DUnit VM has its own copy of these fields.
private static Cache cache = null;
private static VM serverVM0 = null;
private static VM serverVM1 = null;
private static VM clientVM1 = null;
private static VM clientVM2 = null;
private static final Logger logger = LogService.getLogger();
// Cache-listener event counters, reset in setUp() here and in clientVM1.
private static int numOfCreates = 0;
private static int numOfUpdates = 0;
private static int numOfInvalidates = 0;
private static Object[] deletedValues = null;
// Ports of the two cache servers started in setUp().
private int PORT1;
private int PORT2;
@Rule
public DistributedRule distributedRule = new DistributedRule();
/**
 * Sets up the test: starts two cache servers (one with MEMORY eviction, one
 * with ENTRY eviction for the client subscription queue) in VMs 0 and 1, and
 * resets the listener counters both in this VM and in the first client VM.
 */
@Before
public void setUp() {
map.clear();
// VMs 0/1 host cache servers; VMs 2/3 host clients.
serverVM0 = VM.getVM(0);
serverVM1 = VM.getVM(1);
clientVM1 = VM.getVM(2);
clientVM2 = VM.getVM(3);
PORT1 = serverVM0.invoke(
() -> HARQueueNewImplDUnitTest.createServerCache(HARegionQueue.HA_EVICTION_POLICY_MEMORY));
PORT2 = serverVM1.invoke(
() -> HARQueueNewImplDUnitTest.createServerCache(HARegionQueue.HA_EVICTION_POLICY_ENTRY));
// Counters are per-VM statics, so reset them locally and in clientVM1.
numOfCreates = 0;
numOfUpdates = 0;
numOfInvalidates = 0;
clientVM1.invoke(() -> {
numOfCreates = 0;
numOfUpdates = 0;
numOfInvalidates = 0;
});
}
/**
 * Tears down the test: closes the client caches first, clears the slow-start
 * flag on both servers, then closes the server caches and disconnects every
 * VM from the distributed system.
 */
@After
public void tearDown() {
map.clear();
closeCache();
clientVM1.invoke(HARQueueNewImplDUnitTest::closeCache);
clientVM2.invoke(HARQueueNewImplDUnitTest::closeCache);
// Unset the isSlowStartForTesting flag
serverVM0.invoke(ConflationDUnitTestHelper::unsetIsSlowStart);
serverVM1.invoke(ConflationDUnitTestHelper::unsetIsSlowStart);
// then close the servers
serverVM0.invoke(HARQueueNewImplDUnitTest::closeCache);
serverVM1.invoke(HARQueueNewImplDUnitTest::closeCache);
disconnectAllFromDS();
}
/**
 * Creates the cache for this VM with delta propagation disabled.
 * The getSystem/disconnect/getSystem sequence forces a brand-new
 * DistributedSystem connection with the given properties rather than
 * reusing a cached one — presumably intentional; confirm before changing.
 */
private void createCache(Properties props) throws Exception {
props.setProperty(DELTA_PROPAGATION, "false");
DistributedSystem ds = getSystem(props);
ds.disconnect();
ds = getSystem(props);
assertThat(ds).isNotNull();
cache = CacheFactory.create(ds);
assertThat(cache).isNotNull();
}
/** Creates a server cache with no client-subscription eviction policy. */
public static Integer createServerCache() throws Exception {
return createServerCache(null);
}
/** Creates a server cache with the given eviction policy and a capacity of 1. */
public static Integer createServerCache(String ePolicy) throws Exception {
return createServerCache(ePolicy, 1);
}
/**
 * Creates a cache with a replicated region and starts a cache server on a
 * random port. When an eviction policy is given, the server's client
 * subscription queue overflows to a dedicated disk store under a
 * per-port "bsi_overflow_&lt;port&gt;" directory.
 *
 * @param ePolicy client-subscription eviction policy, or null for none
 * @param cap client-subscription capacity used with the policy
 * @return the port the server actually listens on
 */
public static Integer createServerCache(String ePolicy, Integer cap) throws Exception {
new HARQueueNewImplDUnitTest().createCache(new Properties());
RegionFactory<Object, Object> factory = cache.createRegionFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
factory.setDataPolicy(DataPolicy.REPLICATE);
factory.create(regionName);
int port = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
CacheServer server1 = cache.addCacheServer();
server1.setPort(port);
if (ePolicy != null) {
File overflowDirectory = new File("bsi_overflow_" + port);
// NOTE(review): mkdir() result is ignored — fine if the dir already
// exists, but a real failure would only surface later in create("bsi").
overflowDirectory.mkdir();
DiskStoreFactory dsf = cache.createDiskStoreFactory();
File[] dirs1 = new File[] {overflowDirectory};
server1.getClientSubscriptionConfig().setEvictionPolicy(ePolicy);
server1.getClientSubscriptionConfig().setCapacity(cap);
// specify disk store for this server
server1.getClientSubscriptionConfig()
.setDiskStoreName(dsf.setDiskDirs(dirs1).create("bsi").getName());
}
server1.start();
return server1.getPort();
}
/**
 * Adds a second cache server (MEMORY eviction, default disk store) to the
 * cache already created in this VM and starts it on a random port.
 *
 * @param notifyBySubscription whether the server notifies clients by subscription
 * @return the port the new server listens on
 */
private static Integer createOneMoreBridgeServer(Boolean notifyBySubscription) throws Exception {
int port = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
CacheServer server1 = cache.addCacheServer();
server1.setPort(port);
server1.setNotifyBySubscription(notifyBySubscription);
server1.getClientSubscriptionConfig()
.setEvictionPolicy(HARegionQueue.HA_EVICTION_POLICY_MEMORY);
// let this server use the default disk store
server1.start();
return server1.getPort();
}
/**
 * Creates a client cache connected to the two given server ports with
 * redundancy level {@code rLevel}, and optionally installs a cache listener
 * that counts create/update/invalidate events in the per-VM static counters.
 */
public static void createClientCache(String host, Integer port1, Integer port2, String rLevel,
Boolean addListener) throws Exception {
CacheServerTestUtil.disableShufflingOfEndpoints();
Properties props = new Properties();
props.setProperty(MCAST_PORT, "0");
props.setProperty(LOCATORS, "");
new HARQueueNewImplDUnitTest().createCache(props);
AttributesFactory<Object, Object> factory = new AttributesFactory<>();
ClientServerTestCase
.configureConnectionPool(factory, host, port1, port2, true,
Integer.parseInt(rLevel),
2, null, 1000, 250,
-2/* lifetimeTimeout */);
factory.setScope(Scope.LOCAL);
if (addListener) {
// The listener only logs and bumps the static counters; tests read them later.
factory.addCacheListener(new CacheListenerAdapter<Object, Object>() {
@Override
public void afterInvalidate(EntryEvent event) {
logger.debug("Invalidate Event: <" + event.getKey() + ", " + event.getNewValue() + ">");
numOfInvalidates++;
}
@Override
public void afterCreate(EntryEvent event) {
logger.debug("Create Event: <" + event.getKey() + ", " + event.getNewValue() + ">");
numOfCreates++;
}
@Override
public void afterUpdate(EntryEvent event) {
logger.debug("Update Event: <" + event.getKey() + ", " + event.getNewValue() + ">");
numOfUpdates++;
}
});
}
RegionAttributes<Object, Object> attrs = factory.create();
cache.createRegion(regionName, attrs);
}
/** Creates a client cache without the event-counting listener. */
public static void createClientCache(String host, Integer port1, Integer port2, String rLevel)
throws Exception {
createClientCache(host, port1, port2, rLevel, Boolean.FALSE);
}
/** Registers interest in all keys of the test region for this client VM. */
private static void registerInterestListAll() {
try {
Region<Object, Object> testRegion = cache.getRegion("/" + regionName);
assertThat(testRegion).isNotNull();
testRegion.registerInterest("ALL_KEYS");
} catch (GemFireException e) {
fail("failed in registerInterestListAll", e);
}
}
/** Registers interest in keys k1, k3 and k5 for this client VM. */
private static void registerInterestList() {
try {
Region<Object, Object> testRegion = cache.getRegion("/" + regionName);
assertThat(testRegion).isNotNull();
// Same keys, same order as before: k1, k3, k5.
for (String key : new String[] {"k1", "k3", "k5"}) {
testRegion.registerInterest(key);
}
} catch (GemFireException e) {
fail("failed while registering keys", e);
}
}
/** Puts values pv1..pv5 into keys k1..k5, updating the created entries. */
private static void putEntries() {
try {
Region<Object, Object> testRegion = cache.getRegion("/" + regionName);
assertThat(testRegion).isNotNull();
// Same puts, same order as the original: ("k1","pv1") .. ("k5","pv5").
for (int i = 1; i <= 5; i++) {
testRegion.put("k" + i, "pv" + i);
}
} catch (GemFireException e) {
fail("failed in putEntries()", e);
}
}
/** Creates entries k1..k5 with values v1..v5 in the test region. */
public static void createEntries() {
try {
Region<Object, Object> testRegion = cache.getRegion("/" + regionName);
assertThat(testRegion).isNotNull();
// Same creates, same order as the original: ("k1","v1") .. ("k5","v5").
for (int i = 1; i <= 5; i++) {
testRegion.create("k" + i, "v" + i);
}
} catch (GemFireException e) {
fail("failed in createEntries()", e);
}
}
/** Creates {@code num} entries k0..k(num-1) with values v0..v(num-1). */
public static void createEntries(Long num) {
try {
Region<Object, Object> testRegion = cache.getRegion("/" + regionName);
assertThat(testRegion).isNotNull();
long index = 0;
while (index < num) {
testRegion.create("k" + index, "v" + index);
index++;
}
} catch (GemFireException e) {
fail("failed in createEntries(Long)", e);
}
}
/** Overwrites key k0 with a fresh 5 MB byte array, {@code num} times. */
private static void putHeavyEntries(Integer num) {
try {
Region<Object, Object> testRegion = cache.getRegion("/" + regionName);
assertThat(testRegion).isNotNull();
for (long i = 0; i < num; i++) {
// A new 5 MB payload is allocated on every iteration, as before.
testRegion.put("k0", new byte[1024 * 1024 * 5]);
}
} catch (GemFireException e) {
fail("failed in putHeavyEntries(Long)", e);
}
}
/**
 * This test verifies that the client-messages-region does not store duplicate
 * ClientUpdateMessageImpl instances, during a normal put path as well as the GII path.
 */
@Test
public void testClientMsgsRegionSize() throws Exception {
// slow start for dispatcher
serverVM0.invoke(() -> ConflationDUnitTestHelper.setIsSlowStart("30000"));
serverVM1.invoke(() -> ConflationDUnitTestHelper.setIsSlowStart("30000"));
createClientCache(getServerHostName(), PORT1, PORT2,
"1");
final String client1Host = getServerHostName();
clientVM1.invoke(() -> HARQueueNewImplDUnitTest.createClientCache(client1Host,
PORT1, PORT2, "1"));
final String client2Host = getServerHostName();
clientVM2.invoke(() -> HARQueueNewImplDUnitTest.createClientCache(client2Host,
PORT1, PORT2, "1"));
registerInterestListAll();
clientVM1.invoke(HARQueueNewImplDUnitTest::registerInterestList);
clientVM2.invoke(HARQueueNewImplDUnitTest::registerInterestList);
// Stop server 2, create entries on server 1, then restart server 2 so it
// receives the queue content via GII rather than the normal put path.
serverVM1.invoke(HARQueueNewImplDUnitTest::stopServer);
serverVM0.invoke((SerializableRunnableIF) HARQueueNewImplDUnitTest::createEntries);
serverVM1.invoke(HARQueueNewImplDUnitTest::startServer);
// Both servers should end up with 5 region entries and 5 queued messages.
serverVM0.invoke(() -> HARQueueNewImplDUnitTest.verifyRegionSize(5, 5));
serverVM1.invoke(() -> HARQueueNewImplDUnitTest.verifyRegionSize(5, 5));
}
/**
 * This test verifies that the ha-region-queues increment the reference count of their respective
 * HAEventWrapper instances in the client-messages-region correctly, during put as well as GII
 * path.
 */
@Test
public void testRefCountForNormalAndGIIPut() throws Exception {
// slow start for dispatcher — very long, so events stay queued for the test
serverVM0.invoke(() -> ConflationDUnitTestHelper.setIsSlowStart("240000"))<!---->;
serverVM1.invoke(() -> ConflationDUnitTestHelper.setIsSlowStart("240000"));
createClientCache(getServerHostName(), PORT1, PORT2,
"1");
final String client1Host = getServerHostName();
clientVM1.invoke(() -> HARQueueNewImplDUnitTest.createClientCache(client1Host,
PORT1, PORT2, "1"));
final String client2Host = getServerHostName();
clientVM2.invoke(() -> HARQueueNewImplDUnitTest.createClientCache(client2Host,
PORT1, PORT2, "1"));
registerInterestListAll();
clientVM1.invoke(HARQueueNewImplDUnitTest::registerInterestList);
clientVM2.invoke(HARQueueNewImplDUnitTest::registerInterestList);
// Server 2 is down during the puts, so it later fills its queue via GII.
serverVM1.invoke(HARQueueNewImplDUnitTest::stopServer);
serverVM0.invoke((SerializableRunnableIF) HARQueueNewImplDUnitTest::createEntries);
serverVM1.invoke(HARQueueNewImplDUnitTest::startServer);
serverVM1.invoke(() -> ValidateRegionSizes(PORT2));
serverVM0.invoke(() -> ValidateRegionSizes(PORT1));
// Load the expected queue data and compare it per server.
serverVM0.invoke(HARQueueNewImplDUnitTest::updateMapForVM0);
serverVM1.invoke(HARQueueNewImplDUnitTest::updateMapForVM1);
serverVM0.invoke(() -> HARQueueNewImplDUnitTest.verifyQueueData(
PORT1));
serverVM1.invoke(() -> HARQueueNewImplDUnitTest.verifyQueueData(
PORT2));
}
/**
 * Polls until both the data region and the given server's
 * client-messages-region settle at the expected 5 entries. Separate
 * assertions make it obvious which region is wrong when the await times out
 * (the original conflated both sizes into a single boolean with one message).
 */
private void ValidateRegionSizes(int port) {
await().untilAsserted(() -> {
Region<Object, Object> region = cache.getRegion("/" + regionName);
Region<Object, Object> msgsRegion =
cache.getRegion(CacheServerImpl.generateNameForClientMsgsRegion(port));
assertThat(region.size()).describedAs("data region size").isEqualTo(5);
assertThat(msgsRegion.size()).describedAs("client msg region size").isEqualTo(5);
});
}
/**
 * This test verifies that the ha-region-queues decrement the reference count of their respective
 * HAEventWrapper instances in the client-messages-region correctly, after the events have been
 * peeked and removed from the queue.
 */
@Test
public void testRefCountForPeekAndRemove() throws Exception {
// Hold the dispatcher so all 5 events stay queued initially.
serverVM0.invoke(() -> ConflationDUnitTestHelper.setIsSlowStart("30000"));
createClientCache(getServerHostName(), PORT1, PORT2,
"1");
final String client1Host = getServerHostName();
clientVM1.invoke(() -> HARQueueNewImplDUnitTest.createClientCache(client1Host,
PORT1, PORT2, "1"));
final String client2Host = getServerHostName();
clientVM2.invoke(() -> HARQueueNewImplDUnitTest.createClientCache(client2Host,
PORT1, PORT2, "1"));
registerInterestListAll();
clientVM1.invoke(HARQueueNewImplDUnitTest::registerInterestList);
clientVM2.invoke(HARQueueNewImplDUnitTest::registerInterestList);
serverVM0.invoke((SerializableRunnableIF) HARQueueNewImplDUnitTest::createEntries);
// All 5 messages queued while the dispatcher is held back.
serverVM0.invoke(() -> HARQueueNewImplDUnitTest.verifyRegionSize(5, 5));
// Release the dispatcher; once the messages are dispatched the
// client-messages-region should drain to 0 while the data region keeps 5.
serverVM0.invoke(ConflationDUnitTestHelper::unsetIsSlowStart);
serverVM0.invoke(() -> HARQueueNewImplDUnitTest
.waitTillMessagesAreDispatched(PORT1));
serverVM0.invoke(() -> HARQueueNewImplDUnitTest.verifyRegionSize(5, 0));
}
/**
 * This test verifies that the processing of the QRM messages results in decrementing the
 * reference count of corresponding HAEventWrapper instances, correctly.
 */
@Test
public void testRefCountForQRM() throws Exception {
// Hold only server 0's dispatcher; server 1 dispatches normally.
serverVM0.invoke(() -> ConflationDUnitTestHelper.setIsSlowStart("30000"));
createClientCache(getServerHostName(), PORT1, PORT2,
"1");
final String client1Host = getServerHostName();
clientVM1.invoke(() -> HARQueueNewImplDUnitTest.createClientCache(client1Host,
PORT1, PORT2, "1"));
final String client2Host = getServerHostName();
clientVM2.invoke(() -> HARQueueNewImplDUnitTest.createClientCache(client2Host,
PORT1, PORT2, "1"));
registerInterestListAll();
clientVM1.invoke(HARQueueNewImplDUnitTest::registerInterestList);
clientVM2.invoke(HARQueueNewImplDUnitTest::registerInterestList);
// Create entries while server 2 is down, then restart it (GII path).
serverVM1.invoke(HARQueueNewImplDUnitTest::stopServer);
serverVM0.invoke((SerializableRunnableIF) HARQueueNewImplDUnitTest::createEntries);
serverVM1.invoke(HARQueueNewImplDUnitTest::startServer);
serverVM1.invoke(() -> HARQueueNewImplDUnitTest.verifyRegionSize(5, 5));
// Letting server 0 dispatch triggers QRM processing on server 1, which
// should drain its client-messages-region to 0.
serverVM0.invoke(ConflationDUnitTestHelper::unsetIsSlowStart);
serverVM1.invoke(() -> HARQueueNewImplDUnitTest.verifyRegionSize(5, 0));
}
/**
 * This test verifies that the destruction of a ha-region (caused by proxy/client disconnect),
 * causes the reference count of all HAEventWrapper instances belonging to the ha-region-queue to
 * be decremented by one, and makes it visible to the client-messages-region.
 */
@Test
public void testRefCountForDestroy() throws Exception {
// slow start for dispatcher
serverVM0.invoke(() -> ConflationDUnitTestHelper.setIsSlowStart("30000"));
serverVM1.invoke(() -> ConflationDUnitTestHelper.setIsSlowStart("30000"));
createClientCache(getServerHostName(), PORT1, PORT2,
"1");
final String client1Host = getServerHostName();
clientVM1.invoke(() -> HARQueueNewImplDUnitTest.createClientCache(client1Host,
PORT1, PORT2, "1"));
final String client2Host = getServerHostName();
clientVM2.invoke(() -> HARQueueNewImplDUnitTest.createClientCache(client2Host,
PORT1, PORT2, "1"));
registerInterestListAll();
clientVM1.invoke(HARQueueNewImplDUnitTest::registerInterestList);
clientVM2.invoke(HARQueueNewImplDUnitTest::registerInterestList);
// 1. stop the second server
serverVM1.invoke(HARQueueNewImplDUnitTest::stopServer);
serverVM0.invoke((SerializableRunnableIF) HARQueueNewImplDUnitTest::createEntries);
// 3. start the second server.
serverVM1.invoke(HARQueueNewImplDUnitTest::startServer);
// NOTE(review): the fixed Thread.sleep() calls below make this test
// timing-sensitive; consider replacing with await()-style polling.
Thread.sleep(3000);
// Closing client 1 destroys its ha-region-queue; expected ref counts after
// that are loaded by updateMap1 and compared per server.
clientVM1.invoke(HARQueueNewImplDUnitTest::closeCache);
Thread.sleep(1000);
serverVM0.invoke(HARQueueNewImplDUnitTest::updateMap1);
serverVM1.invoke(HARQueueNewImplDUnitTest::updateMap1);
serverVM0.invoke(() -> HARQueueNewImplDUnitTest.verifyQueueData(
PORT1));
serverVM1.invoke(() -> HARQueueNewImplDUnitTest.verifyQueueData(
PORT2));
// Closing client 2 destroys the remaining queue; updateMap2 holds the
// expected state after both clients are gone.
clientVM2.invoke(HARQueueNewImplDUnitTest::closeCache);
serverVM0.invoke(HARQueueNewImplDUnitTest::updateMap2);
serverVM1.invoke(HARQueueNewImplDUnitTest::updateMap2);
Thread.sleep(1000);
serverVM0.invoke(() -> HARQueueNewImplDUnitTest.verifyQueueData(
PORT1));
serverVM1.invoke(() -> HARQueueNewImplDUnitTest.verifyQueueData(
PORT2));
}
/**
 * Addresses the bug 39179. If a clientUpdateMessage is dispatched to the client while its GII was
 * under way, then it should not be put into the HARegionQueue of a client at receiving server
 * side.
 */
@Test
public void testConcurrentGIIAndDispatch() throws Exception {
  // Keep both dispatchers very slow so messages remain queued during the GII.
  serverVM0.invoke(() -> ConflationDUnitTestHelper.setIsSlowStart("40000"));
  serverVM1.invoke(() -> ConflationDUnitTestHelper.setIsSlowStart("40000"));
  createClientCache(getServerHostName(), PORT1, PORT2,
      "1");
  final String client1Host = getServerHostName();
  clientVM1.invoke(() -> HARQueueNewImplDUnitTest.createClientCache(client1Host,
      PORT1, PORT2, "1"));
  final String client2Host = getServerHostName();
  clientVM2.invoke(() -> HARQueueNewImplDUnitTest.createClientCache(client2Host,
      PORT1, PORT2, "1"));
  registerInterestListAll();
  clientVM1.invoke(HARQueueNewImplDUnitTest::registerInterestListAll);
  clientVM2.invoke(HARQueueNewImplDUnitTest::registerInterestListAll);
  // 1. stop the second server
  serverVM1.invoke(HARQueueNewImplDUnitTest::stopServer);
  serverVM0.invoke((SerializableRunnableIF) HARQueueNewImplDUnitTest::createEntries);
  // Simulate "already dispatched during GII" by removing some values at the GII source.
  serverVM0.invoke(() -> HARQueueNewImplDUnitTest
      .makeValuesOfSomeKeysNullInClientMsgsRegion(PORT1, new String[] {"k1", "k3"}));
  // 3. start the second server.
  serverVM1.invoke(HARQueueNewImplDUnitTest::startServer);
  // Receiving side must not hold the removed keys after its GII completes.
  serverVM0.invoke(() -> HARQueueNewImplDUnitTest.verifyRegionSize(5, 3));
  serverVM1.invoke(() -> HARQueueNewImplDUnitTest.verifyNullValuesInCMR(
      PORT2, new String[] {"k1", "k3"}));
  serverVM1.invoke(() -> HARQueueNewImplDUnitTest.verifyRegionSize(5, 3));
  serverVM0.invoke(() -> HARQueueNewImplDUnitTest
      .populateValuesOfSomeKeysInClientMsgsRegion(PORT1, new String[] {"k1", "k3"}));
  serverVM0.invoke(ConflationDUnitTestHelper::unsetIsSlowStart);
  serverVM1.invoke(ConflationDUnitTestHelper::unsetIsSlowStart);
}
/**
 * This test verifies that when two BridgeServerImpl instances are created in a single VM, they do
 * share the client-messages-region.
 */
@Test
public void testTwoBridgeServersInOneVMDoShareCMR() throws Exception {
  // slow start for dispatcher
  serverVM0.invoke(() -> ConflationDUnitTestHelper.setIsSlowStart("30000"));
  // Second bridge server in the same VM, created so that it shares the CMR.
  Integer port3 = serverVM0
      .invoke(() -> HARQueueNewImplDUnitTest.createOneMoreBridgeServer(Boolean.TRUE));
  createClientCache(getServerHostName(), PORT1, port3, "0");
  final String client1Host = getServerHostName();
  clientVM1.invoke(() -> HARQueueNewImplDUnitTest.createClientCache(client1Host,
      PORT1, PORT2, "1"));
  final String client2Host = getServerHostName();
  clientVM2.invoke(() -> HARQueueNewImplDUnitTest.createClientCache(client2Host,
      PORT1, PORT2, "1"));
  registerInterestListAll();
  clientVM1.invoke(HARQueueNewImplDUnitTest::registerInterestList);
  clientVM2.invoke(HARQueueNewImplDUnitTest::registerInterestList);
  serverVM0.invoke((SerializableRunnableIF) HARQueueNewImplDUnitTest::createEntries);
  serverVM0.invoke(() -> HARQueueNewImplDUnitTest.verifyRegionSize(5, 5));
  // NOTE(review): this repeats the identical assertion; presumably the second
  // call was meant to verify via the other server/port -- confirm against history.
  serverVM0.invoke(
      () -> HARQueueNewImplDUnitTest.verifyRegionSize(5, 5));
}
/**
 * This test verifies that two clients, connected to two cache servers with different
 * notifyBySubscription values, on a single VM, receive updates/invalidates depending upon their
 * notifyBySubscription value.
 */
@Test
public void testUpdatesWithTwoBridgeServersInOneVM() throws Exception {
  // Second server with notifyBySubscription=false: its client should see invalidates.
  Integer port3 = serverVM0
      .invoke(() -> HARQueueNewImplDUnitTest.createOneMoreBridgeServer(Boolean.FALSE));
  createClientCache(getServerHostName(), PORT1, PORT2,
      "1", Boolean.TRUE);
  final String client1Host = getServerHostName();
  clientVM1.invoke(() -> HARQueueNewImplDUnitTest.createClientCache(client1Host, port3,
      PORT2, "1", Boolean.TRUE));
  registerInterestListAll();
  clientVM1.invoke(HARQueueNewImplDUnitTest::registerInterestListAll);
  clientVM1.invoke((SerializableRunnableIF) HARQueueNewImplDUnitTest::createEntries);
  serverVM0.invoke(HARQueueNewImplDUnitTest::putEntries);
  serverVM0.invoke(() -> HARQueueNewImplDUnitTest.waitTillMessagesAreDispatched(PORT1));
  serverVM0.invoke(() -> HARQueueNewImplDUnitTest.waitTillMessagesAreDispatched(port3));
  // expect updates
  verifyUpdatesReceived(true);
  // expect invalidates
  clientVM1.invoke(() -> verifyUpdatesReceived(false));
}
/**
 * This test verifies that the HAEventWrapper instances present in the client-messages-region give
 * up the references to their respective ClientUpdateMessageImpl instances.
 */
@Test
public void testHAEventWrapperDoesNotHoldCUMOnceInsideCMR() throws Exception {
  // slow start for dispatcher
  serverVM0.invoke(() -> ConflationDUnitTestHelper.setIsSlowStart("30000"));
  createClientCache(getServerHostName(), PORT1, PORT2,
      "1");
  final String client1Host = getServerHostName();
  clientVM1.invoke(() -> HARQueueNewImplDUnitTest.createClientCache(client1Host,
      PORT1, PORT2, "1"));
  final String client2Host = getServerHostName();
  clientVM2.invoke(() -> HARQueueNewImplDUnitTest.createClientCache(client2Host,
      PORT1, PORT2, "1"));
  registerInterestListAll();
  clientVM1.invoke(HARQueueNewImplDUnitTest::registerInterestList);
  clientVM2.invoke(HARQueueNewImplDUnitTest::registerInterestList);
  // Feed while the second server is down so its CMR is rebuilt via GII on restart.
  serverVM1.invoke(HARQueueNewImplDUnitTest::stopServer);
  serverVM0.invoke(() -> HARQueueNewImplDUnitTest.createEntries(1000L));
  serverVM1.invoke(HARQueueNewImplDUnitTest::startServer);
  Thread.sleep(2000);
  // Once inside the CMR, each wrapper's CUM reference must be null on both servers.
  serverVM0.invoke(() -> HARQueueNewImplDUnitTest.verifyNullCUMReference(PORT1));
  serverVM1.invoke(() -> HARQueueNewImplDUnitTest.verifyNullCUMReference(PORT2));
}
/**
 * This test verifies that client-messages-regions are not created for the cache servers who have
 * eviction policy as 'none'. Instead, such cache servers will have simple HashMap structures.
 * Also, it verifies that such a structure (referred to as haContainer, in general) is destroyed
 * when its cache server is stopped.
 */
@Test
public void testCMRNotCreatedForNoneEvictionPolicy() throws Exception {
  // Recreate both servers with eviction policy 'none'.
  serverVM0.invoke(HARQueueNewImplDUnitTest::closeCache);
  serverVM1.invoke(HARQueueNewImplDUnitTest::closeCache);
  Thread.sleep(2000);
  PORT1 = serverVM0.invoke(
      () -> HARQueueNewImplDUnitTest.createServerCache(HARegionQueue.HA_EVICTION_POLICY_NONE));
  PORT2 = serverVM1.invoke(
      () -> HARQueueNewImplDUnitTest.createServerCache(HARegionQueue.HA_EVICTION_POLICY_NONE));
  // 'none' policy => the ha container must be a plain map, not a region.
  Boolean isRegion = Boolean.FALSE;
  // slow start for dispatcher
  serverVM0.invoke(() -> ConflationDUnitTestHelper.setIsSlowStart("30000"));
  createClientCache(getServerHostName(), PORT1, PORT2,
      "1");
  final String client1Host = getServerHostName();
  clientVM1.invoke(() -> HARQueueNewImplDUnitTest.createClientCache(client1Host,
      PORT1, PORT2, "1"));
  final String client2Host = getServerHostName();
  clientVM2.invoke(() -> HARQueueNewImplDUnitTest.createClientCache(client2Host,
      PORT1, PORT2, "1"));
  registerInterestListAll();
  clientVM1.invoke(HARQueueNewImplDUnitTest::registerInterestList);
  clientVM2.invoke(HARQueueNewImplDUnitTest::registerInterestList);
  serverVM0
      .invoke(() -> HARQueueNewImplDUnitTest.verifyHaContainerType(isRegion, PORT1));
  serverVM1
      .invoke(() -> HARQueueNewImplDUnitTest.verifyHaContainerType(isRegion, PORT2));
  // Stopping a server must also destroy its ha container.
  serverVM0.invoke(() -> HARQueueNewImplDUnitTest.stopOneBridgeServer(PORT1));
  serverVM1.invoke(() -> HARQueueNewImplDUnitTest.stopOneBridgeServer(PORT2));
  serverVM0.invoke(
      () -> HARQueueNewImplDUnitTest.verifyHaContainerDestroyed(isRegion, PORT1));
  serverVM1.invoke(
      () -> HARQueueNewImplDUnitTest.verifyHaContainerDestroyed(isRegion, PORT2));
}
/**
 * This test verifies that client-messages-regions are created for the cache servers who have
 * eviction policy either as 'mem' or as 'entry'. Also, it verifies that such a
 * client-messages-region is destroyed when its cache server is stopped.
 */
@Test
public void testCMRCreatedForMemOrEntryEvictionPolicy() throws Exception {
  // Servers were created by setUp with an overflow policy => container is a region.
  Boolean isRegion = Boolean.TRUE;
  // slow start for dispatcher
  serverVM0.invoke(() -> ConflationDUnitTestHelper.setIsSlowStart("30000"));
  createClientCache(getServerHostName(), PORT1, PORT2,
      "1");
  final String client1Host = getServerHostName();
  clientVM1.invoke(() -> HARQueueNewImplDUnitTest.createClientCache(client1Host,
      PORT1, PORT2, "1"));
  final String client2Host = getServerHostName();
  clientVM2.invoke(() -> HARQueueNewImplDUnitTest.createClientCache(client2Host,
      PORT1, PORT2, "1"));
  registerInterestListAll();
  clientVM1.invoke(HARQueueNewImplDUnitTest::registerInterestList);
  clientVM2.invoke(HARQueueNewImplDUnitTest::registerInterestList);
  serverVM0
      .invoke(() -> HARQueueNewImplDUnitTest.verifyHaContainerType(isRegion, PORT1));
  serverVM1
      .invoke(() -> HARQueueNewImplDUnitTest.verifyHaContainerType(isRegion, PORT2));
  // Stopping the servers must destroy their client-messages-regions.
  serverVM0.invoke(() -> HARQueueNewImplDUnitTest.stopOneBridgeServer(PORT1));
  serverVM1.invoke(() -> HARQueueNewImplDUnitTest.stopOneBridgeServer(PORT2));
  serverVM0.invoke(
      () -> HARQueueNewImplDUnitTest.verifyHaContainerDestroyed(isRegion, PORT1));
  serverVM1.invoke(
      () -> HARQueueNewImplDUnitTest.verifyHaContainerDestroyed(isRegion, PORT2));
}
/**
 * This test verifies that the Cache.rootRegions() method does not return the
 * client-messages-region of any of the cache's attached cache servers.
 */
@Test
public void testCMRNotReturnedByRootRegionsMethod() throws Exception {
  createClientCache(getServerHostName(), PORT1, PORT2,
      "1");
  final String client1Host = getServerHostName();
  clientVM1.invoke(() -> HARQueueNewImplDUnitTest.createClientCache(client1Host,
      PORT1, PORT2, "1"));
  final String client2Host = getServerHostName();
  clientVM2.invoke(() -> HARQueueNewImplDUnitTest.createClientCache(client2Host,
      PORT1, PORT2, "1"));
  registerInterestListAll();
  clientVM1.invoke(HARQueueNewImplDUnitTest::registerInterestList);
  clientVM2.invoke(HARQueueNewImplDUnitTest::registerInterestList);
  // Feed some entries so the CMR is populated before inspecting rootRegions().
  serverVM0.invoke((SerializableRunnableIF) HARQueueNewImplDUnitTest::createEntries);
  serverVM0.invoke(
      () -> HARQueueNewImplDUnitTest.verifyRootRegionsDoesNotReturnCMR(PORT1));
  serverVM1.invoke(
      () -> HARQueueNewImplDUnitTest.verifyRootRegionsDoesNotReturnCMR(PORT2));
}
/**
 * This test verifies that the memory footprint of the ha region queues is less when ha-overflow
 * is enabled (with an appropriate value of haCapacity) compared to when it is disabled, for the
 * same amount of data feed.
 */
@Ignore("TODO")
@Test
public void testMemoryFootprintOfHARegionQueuesWithAndWithoutOverflow() throws Exception {
  // Recreate server 0 with memory-bounded overflow and server 1 without overflow.
  serverVM0.invoke(HARQueueNewImplDUnitTest::closeCache);
  serverVM1.invoke(HARQueueNewImplDUnitTest::closeCache);
  Thread.sleep(2000);
  Integer numOfEntries = 30;
  PORT1 = serverVM0.invoke(() -> HARQueueNewImplDUnitTest
      .createServerCache(HARegionQueue.HA_EVICTION_POLICY_MEMORY, 30));
  PORT2 = serverVM1.invoke(
      () -> HARQueueNewImplDUnitTest.createServerCache(HARegionQueue.HA_EVICTION_POLICY_NONE));
  serverVM0.invoke(() -> ConflationDUnitTestHelper.setIsSlowStart("60000"));
  serverVM1.invoke(() -> ConflationDUnitTestHelper.setIsSlowStart("60000"));
  createClientCache(getServerHostName(), PORT1, PORT2,
      "1");
  final String client1Host = getServerHostName();
  clientVM1.invoke(() -> HARQueueNewImplDUnitTest.createClientCache(client1Host,
      PORT1, PORT2, "1"));
  final String client2Host = getServerHostName();
  clientVM2.invoke(() -> HARQueueNewImplDUnitTest.createClientCache(client2Host,
      PORT1, PORT2, "1"));
  registerInterestListAll();
  clientVM1.invoke(HARQueueNewImplDUnitTest::registerInterestListAll);
  clientVM2.invoke(HARQueueNewImplDUnitTest::registerInterestListAll);
  // Feed large values, then sample heap usage on each server while queues are full.
  serverVM0.invoke(() -> HARQueueNewImplDUnitTest.putHeavyEntries(numOfEntries));
  Long usedMemInVM0 = serverVM0.invoke(() -> HARQueueNewImplDUnitTest
      .getUsedMemoryAndVerifyRegionSize(numOfEntries, PORT1));
  Long usedMemInVM1 = serverVM1.invoke(() -> HARQueueNewImplDUnitTest
      .getUsedMemoryAndVerifyRegionSize(numOfEntries, -1));
  serverVM0.invoke(ConflationDUnitTestHelper::unsetIsSlowStart);
  serverVM1.invoke(ConflationDUnitTestHelper::unsetIsSlowStart);
  logger.debug("Used Mem: " + usedMemInVM1 + "(without overflow), "
      + usedMemInVM0 + "(with overflow)");
  // Overflow-enabled server must use less heap than the non-overflow one.
  assertThat(usedMemInVM0 < usedMemInVM1).isTrue();
}
/**
 * Asserts that every HAEventWrapper key in the client-messages-region of the
 * cache server on the given port has released its ClientUpdateMessage.
 */
private static void verifyNullCUMReference(Integer port) {
  Region<Object, Object> msgsRegion =
      cache.getRegion("/" + CacheServerImpl.generateNameForClientMsgsRegion(port));
  assertThat(msgsRegion).isNotNull();
  for (Object wrapper : msgsRegion.keySet().toArray()) {
    assertThat(((HAEventWrapper) wrapper).getClientUpdateMessage()).isNull();
  }
}
// Asserts that the ha container of the (stopped) cache server on the given port
// is gone: a region-based container must be destroyed, a map-based one empty.
private static void verifyHaContainerDestroyed(Boolean isRegion, Integer port) {
  Map region = cache.getRegion("/" + CacheServerImpl.generateNameForClientMsgsRegion(port));
  if (isRegion) {
    if (region != null) {
      assertThat(((Region) region).isDestroyed()).isTrue();
    }
  } else {
    // Eviction policy 'none': the container is a plain map held by the notifier.
    region = ((CacheServerImpl) cache.getCacheServers().toArray()[0]).getAcceptor()
        .getCacheClientNotifier().getHaContainer();
    if (region != null) {
      assertThat(region.isEmpty()).isTrue();
    }
  }
}
// Returns the heap currently in use in this VM, after first asserting the
// expected region / ha-container sizes. A port of -1 means "no overflow
// server here": only the ha-container size is checked.
private static Long getUsedMemoryAndVerifyRegionSize(Integer haContainerSize,
    Integer port) {
  Long retVal = null;
  try {
    retVal = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
    if (port != -1) {
      verifyRegionSize(1, haContainerSize);
    } else {
      verifyRegionSize(haContainerSize);
    }
  } catch (GemFireException e) {
    fail("failed in getUsedMemory()" + e);
  }
  return retVal;
}
/**
 * Stops the first cache server of this VM's cache, but only if it is
 * listening on the given port.
 */
private static void stopOneBridgeServer(Integer port) {
  try {
    Iterator serverIterator = cache.getCacheServers().iterator();
    if (serverIterator.hasNext()) {
      CacheServer candidate = (CacheServer) serverIterator.next();
      if (candidate.getPort() == port) {
        candidate.stop();
      }
    }
  } catch (GemFireException e) {
    fail("failed in stopOneBridgeServer()" + e);
  }
}
/** Stops the first cache server attached to this VM's cache, if any. */
public static void stopServer() {
  try {
    Iterator serverIterator = cache.getCacheServers().iterator();
    if (serverIterator.hasNext()) {
      ((CacheServer) serverIterator.next()).stop();
    }
  } catch (GemFireException e) {
    fail("failed in stopServer()" + e);
  }
}
/** Records the expected HAEventWrapper reference counts after the initial feed. */
private static void updateMapForVM0() {
  try {
    final String[] keys = {"k1", "k2", "k3", "k4", "k5"};
    final long[] expectedCounts = {3L, 1L, 3L, 1L, 3L};
    for (int i = 0; i < keys.length; i++) {
      map.put(keys[i], expectedCounts[i]);
    }
  } catch (GemFireException e) {
    fail("failed in updateMapForVM0()" + e);
  }
}
/** Records the expected reference counts after the first client disconnects. */
private static void updateMap1() {
  try {
    final String[] keys = {"k1", "k2", "k3", "k4", "k5"};
    final long[] expectedCounts = {2L, 1L, 2L, 1L, 2L};
    for (int i = 0; i < keys.length; i++) {
      map.put(keys[i], expectedCounts[i]);
    }
  } catch (GemFireException e) {
    fail("failed in updateMap1()" + e);
  }
}
/** Records the expected reference counts after the second client disconnects. */
private static void updateMap2() {
  try {
    final String[] keys = {"k1", "k2", "k3", "k4", "k5"};
    for (String key : keys) {
      map.put(key, 1L);
    }
  } catch (GemFireException e) {
    fail("failed in updateMap2()" + e);
  }
}
// Same expected counts as VM0; kept as a separate entry point for symmetry in
// the per-VM invoke calls.
private static void updateMapForVM1() {
  try {
    updateMapForVM0();
  } catch (GemFireException e) {
    fail("failed in updateMapForVM1()" + e);
  }
}
/**
 * Waits until the client-messages-region of the server on the given port has
 * shrunk to 3 entries, then asserts that none of the remaining messages
 * conflate on any of the given keys.
 *
 * @param port port of the cache server whose client-messages-region is checked
 * @param keys keys that must no longer appear among the conflation keys
 */
private static void verifyNullValuesInCMR(final Integer port,
    String[] keys) {
  final Region<Object, Object> msgsRegion =
      cache.getRegion(generateNameForClientMsgsRegion(port));
  GeodeAwaitility.await().until(() -> msgsRegion.size() == 3);
  // Idiom: enhanced for-loop replaces the raw-iterator C-style loop, and
  // isNotEqualTo replaces the negated-equals-wrapped-in-isTrue assertion,
  // which also yields a readable failure message.
  for (Object o : msgsRegion.entrySet()) {
    Entry entry = (Entry) o;
    ClientUpdateMessage cum = (ClientUpdateMessage) entry.getValue();
    for (String key : keys) {
      logger.debug("cum.key: " + cum.getKeyToConflate());
      // assert that the keys are not present in entries set
      assertThat(cum.getKeyToConflate()).isNotEqualTo(key);
    }
  }
}
// Removes the entries conflating on the given keys from the server's
// client-messages-region, stashing the removed values in 'deletedValues' so
// populateValuesOfSomeKeysInClientMsgsRegion() can restore them later.
private static void makeValuesOfSomeKeysNullInClientMsgsRegion(Integer port, String[] keys) {
  Region<Object, Object> msgsRegion =
      cache.getRegion(CacheServerImpl.generateNameForClientMsgsRegion(port));
  assertThat(msgsRegion).isNotNull();
  Set entries = msgsRegion.entrySet();
  Iterator iterator = entries.iterator();
  // deletedValues[i] corresponds to keys[i].
  deletedValues = new Object[keys.length];
  while (iterator.hasNext()) {
    Region.Entry entry = (Region.Entry) iterator.next();
    ClientUpdateMessage cum = (ClientUpdateMessage) entry.getValue();
    for (int i = 0; i < keys.length; i++) {
      if (keys[i].equals(cum.getKeyToConflate())) {
        logger.debug("HARQueueNewImplDUnit: Removing " + cum.getKeyOfInterest());
        deletedValues[i] = msgsRegion.remove(entry.getKey());
      }
    }
  }
}
/**
 * Restores the values previously removed by
 * makeValuesOfSomeKeysNullInClientMsgsRegion() into the client-messages-region
 * of the cache server on the given port.
 *
 * @param port port of the cache server whose client-messages-region is restored
 * @param keys keys whose saved values (in deletedValues, same index) are put back
 */
private static void populateValuesOfSomeKeysInClientMsgsRegion(Integer port, String[] keys) {
  Region<Object, Object> msgsRegion =
      cache.getRegion(CacheServerImpl.generateNameForClientMsgsRegion(port));
  assertThat(msgsRegion).isNotNull();
  for (int i = 0; i < keys.length; i++) {
    logger.debug("HARQueueNewImplDUnit: populating " + deletedValues[i]);
    // Bug fix: restore each value under its own key. The previous code wrote
    // every value to keys[1], overwriting one entry and never restoring the rest.
    msgsRegion.put(keys[i], deletedValues[i]);
  }
}
/** Starts the first cache server attached to this VM's cache, if any. */
public static void startServer() {
  try {
    Iterator serverIterator = cache.getCacheServers().iterator();
    if (serverIterator.hasNext()) {
      ((CacheServer) serverIterator.next()).start();
    }
  } catch (GemFireException | IOException e) {
    fail("failed in startServer()" + e);
  }
}
// Checks that both the server region and the client-messages-region hold 5
// entries and that every HAEventWrapper's reference count matches the expected
// count recorded in 'map' for its key.
private static void verifyQueueData(Integer port) {
  try {
    // Get the clientMessagesRegion and check the size.
    Region<Object, Object> msgsRegion =
        cache.getRegion(CacheServerImpl.generateNameForClientMsgsRegion(port));
    Region region = cache.getRegion("/" + regionName);
    logger.debug(
        "size<serverRegion, clientMsgsRegion>: " + region.size() + ", " + msgsRegion.size());
    assertThat(region.size()).isEqualTo(((Integer) 5).intValue());
    assertThat(msgsRegion.size()).isEqualTo(((Integer) 5).intValue());
    for (Object o : msgsRegion.entrySet()) {
      // NOTE(review): 'o' is captured before the await, so retries re-check the
      // same entry object rather than re-reading the region -- confirm intended.
      await().untilAsserted(() -> {
        Entry entry = (Entry) o;
        HAEventWrapper wrapper = (HAEventWrapper) entry.getKey();
        ClientUpdateMessage cum = (ClientUpdateMessage) entry.getValue();
        Object key = cum.getKeyOfInterest();
        logger.debug("key<feedCount, regionCount>: " + key + "<"
            + map.get(key) + ", " + wrapper.getReferenceCount() + ">");
        assertThat(wrapper.getReferenceCount()).isEqualTo(((Long) map.get(key)).longValue());
      });
    }
  } catch (GemFireException e) {
    fail("failed in verifyQueueData()" + e);
  }
}
// Waits until the server region reaches 'regionSize' entries and the first
// cache server's ha container reaches 'msgsRegionSize' entries.
private static void verifyRegionSize(final Integer regionSize, final Integer msgsRegionSize) {
  GeodeAwaitility.await().until(() -> {
    try {
      // Get the clientMessagesRegion and check the size.
      Region<Object, Object> region = cache.getRegion("/" + regionName);
      int sz = region.size();
      if (regionSize != sz) {
        return false;
      }
      Iterator iterator = cache.getCacheServers().iterator();
      if (iterator.hasNext()) {
        CacheServerImpl server = (CacheServerImpl) iterator.next();
        Map msgsRegion = server.getAcceptor().getCacheClientNotifier().getHaContainer();
        sz = msgsRegion.size();
        return msgsRegionSize == sz;
      }
      // No cache server attached: only the data region size matters.
      return true;
    } catch (GemFireException e) {
      // Keep polling until the timeout; transient failures are expected here.
      return false;
    }
  });
}
// Waits until the server region holds exactly one entry and the first cache
// server's ha container reaches 'msgsRegionSize' entries.
private static void verifyRegionSize(final Integer msgsRegionSize) {
  GeodeAwaitility.await().until(() -> {
    try {
      // Get the clientMessagesRegion and check the size.
      Region<Object, Object> region = cache.getRegion("/" + regionName);
      int sz = region.size();
      if (sz != 1) {
        return false;
      }
      Iterator iterator = cache.getCacheServers().iterator();
      if (!iterator.hasNext()) {
        // No cache server attached: only the data region size matters.
        return true;
      }
      CacheServerImpl server = (CacheServerImpl) iterator.next();
      sz = server.getAcceptor().getCacheClientNotifier().getHaContainer().size();
      return sz == msgsRegionSize;
    } catch (Exception e) {
      // Keep polling until the timeout; transient failures are expected here.
      return false;
    }
  });
}
// Asserts the concrete type of the ha container: with an overflow eviction
// policy it must be a region (HAContainerRegion), with policy 'none' there is
// no CMR region and the notifier holds a plain HAContainerMap.
private static void verifyHaContainerType(Boolean isRegion, Integer port) {
  try {
    Map<Object, Object> haMap =
        cache.getRegion(CacheServerImpl.generateNameForClientMsgsRegion(port));
    if (isRegion) {
      assertThat(haMap).isNotNull();
      assertThat(haMap instanceof LocalRegion).isTrue();
      haMap = (Map<Object, Object>) ((CacheServerImpl) cache.getCacheServers().toArray()[0])
          .getAcceptor()
          .getCacheClientNotifier().getHaContainer();
      assertThat(haMap).isNotNull();
      assertThat(haMap instanceof HAContainerRegion).isTrue();
    } else {
      // No client-messages-region should exist for eviction policy 'none'.
      assertThat(haMap).isNull();
      haMap = (Map<Object, Object>) ((CacheServerImpl) cache.getCacheServers().toArray()[0])
          .getAcceptor()
          .getCacheClientNotifier().getHaContainer();
      assertThat(haMap).isNotNull();
      assertThat(haMap instanceof HAContainerMap).isTrue();
    }
    logger.debug("haContainer: " + haMap);
  } catch (GemFireException e) {
    fail("failed in verifyHaContainerType()" + e);
  }
}
// Asserts that while the client-messages-region is reachable via getRegion(),
// Cache.rootRegions() does not expose it.
private static void verifyRootRegionsDoesNotReturnCMR(Integer port) {
  try {
    String cmrName = CacheServerImpl.generateNameForClientMsgsRegion(port);
    Map<Object, Object> haMap = cache.getRegion(cmrName);
    assertThat(haMap).isNotNull();
    String rName;
    for (Region<?, ?> region : cache.rootRegions()) {
      rName = region.getName();
      if (cmrName.equals(rName)) {
        throw new AssertionError(
            "Cache.rootRegions() method should not return the client_messages_region.");
      }
      logger.debug("Region name returned from cache.rootRegions(): " + rName);
    }
  } catch (GemFireException e) {
    fail("failed in verifyRootRegionsDoesNotReturnCMR()" + e);
  }
}
/**
 * Waits until this client has received exactly 5 events of the expected kind.
 *
 * @param isUpdates true to wait for update events, false for invalidate events
 */
private static void verifyUpdatesReceived(Boolean isUpdates) {
  try {
    // Bug fix: branch on the caller-supplied flag instead of the constant
    // 'true', which made the invalidate branch unreachable even though
    // testUpdatesWithTwoBridgeServersInOneVM invokes this with 'false'.
    if (isUpdates) {
      GeodeAwaitility.await().until(() -> {
        logger.info("MLH number of updates = " + numOfUpdates);
        return 5 == numOfUpdates;
      });
    } else {
      GeodeAwaitility.await().until(() -> {
        logger.info("MLH number of invalidates = " + numOfInvalidates);
        return 5 == numOfInvalidates;
      });
    }
  } catch (GemFireException e) {
    fail("failed in verifyUpdatesReceived()" + e);
  }
}
// Blocks until the ha container of the cache server on the given port drains
// to zero. Falls back to the notifier's container when no CMR region exists
// (eviction policy 'none' / notifyBySubscription=false servers).
private static void waitTillMessagesAreDispatched(Integer port) {
  try {
    Map haContainer;
    haContainer = cache.getRegion(
        SEPARATOR + generateNameForClientMsgsRegion(port));
    if (haContainer == null) {
      // No region-based container: locate the server by port and use its map.
      Object[] servers = cache.getCacheServers().toArray();
      for (Object server : servers) {
        if (port == ((CacheServerImpl) server).getPort()) {
          haContainer = ((CacheServerImpl) server).getAcceptor().getCacheClientNotifier()
              .getHaContainer();
          break;
        }
      }
    }
    final Map m = haContainer;
    GeodeAwaitility.await().until(() -> m.size() == 0);
  } catch (GemFireException e) {
    fail("failed in waitTillMessagesAreDispatched()" + e);
  }
}
// Closes this VM's cache if it is open.
public static void closeCache() {
  if (cache != null && !cache.isClosed()) {
    cache.close();
    // NOTE(review): getter chain invoked after close with its result discarded;
    // looks like leftover debugging -- confirm before removing.
    cache.getDistributedSystem().getDistributedMember();
  }
}
}
|
raymondchen625/grafana | pkg/cmd/grafana-cli/runner/wireexts_oss.go | <reponame>raymondchen625/grafana<filename>pkg/cmd/grafana-cli/runner/wireexts_oss.go
//go:build wireinject && oss
// +build wireinject,oss
package runner
import (
"github.com/google/wire"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/registry"
"github.com/grafana/grafana/pkg/services/encryption"
"github.com/grafana/grafana/pkg/services/encryption/ossencryption"
"github.com/grafana/grafana/pkg/services/kmsproviders"
"github.com/grafana/grafana/pkg/services/kmsproviders/osskmsproviders"
"github.com/grafana/grafana/pkg/services/licensing"
"github.com/grafana/grafana/pkg/services/sqlstore/migrations"
"github.com/grafana/grafana/pkg/setting"
)
// wireExtsSet extends the base wireSet with the OSS implementations of the
// pluggable services that enterprise builds replace: licensing, database
// migrations, the settings provider, KMS providers and encryption.
var wireExtsSet = wire.NewSet(
	wireSet,
	migrations.ProvideOSSMigrations,
	licensing.ProvideService,
	wire.Bind(new(models.Licensing), new(*licensing.OSSLicensingService)),
	wire.Bind(new(registry.DatabaseMigrator), new(*migrations.OSSMigrations)),
	setting.ProvideProvider,
	wire.Bind(new(setting.Provider), new(*setting.OSSImpl)),
	osskmsproviders.ProvideService,
	wire.Bind(new(kmsproviders.Service), new(osskmsproviders.Service)),
	ossencryption.ProvideService,
	wire.Bind(new(encryption.Internal), new(*ossencryption.Service)),
)
|
FluffyQuack/ja2-stracciatella | src/game/Tactical/Interface.h | <gh_stars>100-1000
#ifndef _INTERFACE_H
#define _INTERFACE_H
#include "Handle_UI.h"
#include "MouseSystem.h"
#include <string_theory/string>
#define MAX_UICOMPOSITES 4
// FLASH PORTRAIT CODES
#define FLASH_PORTRAIT_STOP 0
#define FLASH_PORTRAIT_START 1
#define FLASH_PORTRAIT_WAITING 2
#define FLASH_PORTRAIT_DELAY 150
// FLASH PORTRAIT PALETTE IDS
#define FLASH_PORTRAIT_NOSHADE 0
#define FLASH_PORTRAIT_STARTSHADE 1
#define FLASH_PORTRAIT_ENDSHADE 2
#define FLASH_PORTRAIT_DARKSHADE 3
#define FLASH_PORTRAIT_GRAYSHADE 4
#define FLASH_PORTRAIT_LITESHADE 5
// GLOBAL DEFINES FOR SOME UI FLAGS
// Bit flags selecting which movement arrows are hidden/shown and where they
// are positioned relative to the cursor.
#define ARROWS_HIDE_UP 0x00000002
#define ARROWS_HIDE_DOWN 0x00000004
#define ARROWS_SHOW_UP_BESIDE 0x00000008
#define ARROWS_SHOW_DOWN_BESIDE 0x00000020
#define ARROWS_SHOW_UP_ABOVE_Y 0x00000040
#define ARROWS_SHOW_DOWN_BELOW_Y 0x00000080
#define ARROWS_SHOW_DOWN_BELOW_G 0x00000200
#define ARROWS_SHOW_DOWN_BELOW_YG 0x00000400
#define ARROWS_SHOW_DOWN_BELOW_GG 0x00000800
#define ARROWS_SHOW_UP_ABOVE_G 0x00002000
#define ARROWS_SHOW_UP_ABOVE_YG 0x00004000
#define ARROWS_SHOW_UP_ABOVE_GG 0x00008000
#define ARROWS_SHOW_UP_ABOVE_YY 0x00020000
#define ARROWS_SHOW_DOWN_BELOW_YY 0x00040000
#define ARROWS_SHOW_UP_ABOVE_CLIMB 0x00080000
#define ARROWS_SHOW_UP_ABOVE_CLIMB2 0x00400000
#define ARROWS_SHOW_UP_ABOVE_CLIMB3 0x00800000
#define ARROWS_SHOW_DOWN_CLIMB 0x02000000
#define ROOF_LEVEL_HEIGHT 50
// Pixel padding applied to the scroll-trigger area at the screen edges.
#define SCROLL_LEFT_PADDING -30
#define SCROLL_RIGHT_PADDING 10
#define SCROLL_TOP_PADDING -50
#define SCROLL_BOTTOM_PADDING -20
// Interface level enums
enum
{
	I_GROUND_LEVEL,
	I_ROOF_LEVEL,
	I_NUMLEVELS
};
// Which tactical bottom panel is active: single-merc inventory or team overview.
enum InterfacePanelKind
{
	SM_PANEL,
	TEAM_PANEL,
	NUM_UI_PANELS
};
extern BOOLEAN gfUIStanceDifferent;
extern InterfacePanelKind gsCurInterfacePanel;
extern SGPVObject* guiDEAD;
extern SGPVObject* guiHATCH;
extern SGPVObject* guiRADIO;
extern MOUSE_REGION gViewportRegion;
extern MOUSE_REGION gRadarRegion;
// Item ids of the popup movement menu entries.
#define MOVEMENT_MENU_LOOK 1
#define MOVEMENT_MENU_ACTIONC 2
#define MOVEMENT_MENU_HAND 3
#define MOVEMENT_MENU_TALK 4
#define MOVEMENT_MENU_RUN 5
#define MOVEMENT_MENU_WALK 6
#define MOVEMENT_MENU_SWAT 7
#define MOVEMENT_MENU_PRONE 8
// How much of the interface needs redrawing (0 = nothing, 2 = everything).
enum DirtyLevel
{
	DIRTYLEVEL0 = 0,
	DIRTYLEVEL1 = 1,
	DIRTYLEVEL2 = 2
};
void InitializeTacticalInterface(void);
extern DirtyLevel fInterfacePanelDirty;
extern BOOLEAN gfPausedTacticalRenderFlags;
extern DirtyLevel gfPausedTacticalRenderInterfaceFlags;
extern INT16 gsInterfaceLevel;
extern BOOLEAN gfInMovementMenu;
// Popup movement menu lifecycle.
void PopupMovementMenu( UI_EVENT *pUIEvent );
void PopDownMovementMenu(void);
void RenderMovementMenu(void);
void CancelMovementMenu(void);
// Open-door menu lifecycle.
void PopDownOpenDoorMenu(void);
void RenderOpenDoorMenu(void);
void InitDoorOpenMenu(SOLDIERTYPE* pSoldier, BOOLEAN fClosingDoor);
BOOLEAN HandleOpenDoorMenu(void);
void CancelOpenDoorMenu(void);
void HandleInterfaceBackgrounds(void);
void DrawSelectedUIAboveGuy(SOLDIERTYPE&);
// Bottom-panel (single merc / team) management.
void CreateCurrentTacticalPanelButtons(void);
void RemoveCurrentTacticalPanelButtons(void);
void SetCurrentTacticalPanelCurrentMerc(SOLDIERTYPE* s);
void SetCurrentInterfacePanel(InterfacePanelKind);
BOOLEAN IsMercPortraitVisible(const SOLDIERTYPE* s);
void InitializeCurrentPanel(void);
void ShutdownCurrentPanel(void);
void ClearInterface(void);
void RestoreInterface(void);
void RenderArrows(void);
void EraseRenderArrows(void);
void DirtyMercPanelInterface(SOLDIERTYPE const*, DirtyLevel);
// Transient on-screen UI messages.
void EndUIMessage(void);
void BeginUIMessage(BOOLEAN fUseSkullIcon, const ST::string& text);
// map screen version, for centering over the map area
void BeginMapUIMessage(INT16 delta_y, const ST::string& text);
extern VIDEO_OVERLAY* g_ui_message_overlay;
extern UINT32 guiUIMessageTime;
// Banner shown at the top of the screen during turn transitions.
enum MESSAGE_TYPES
{
	NO_MESSAGE,
	COMPUTER_TURN_MESSAGE,
	COMPUTER_INTERRUPT_MESSAGE,
	PLAYER_INTERRUPT_MESSAGE,
	MILITIA_INTERRUPT_MESSAGE,
	AIR_RAID_TURN_MESSAGE,
	PLAYER_TURN_MESSAGE
};
void HandleTopMessages(void);
void AddTopMessage(MESSAGE_TYPES ubType);
void EndTopMessage(void);
void InitEnemyUIBar( UINT8 ubNumEnemies, UINT8 ubDoneEnemies );
ST::string GetSoldierHealthString(const SOLDIERTYPE* s);
// Thrown-object trajectory preview.
void ResetPhysicsTrajectoryUI(void);
void SetupPhysicsTrajectoryUI(void);
void EndPhysicsTrajectoryUI(void);
void BeginPhysicsTrajectoryUI( INT16 sGridNo, INT8 bLevel, BOOLEAN fBadCTGT );
void InitPlayerUIBar( BOOLEAN fInterrupt );
void ToggleTacticalPanels(void);
void DirtyTopMessage(void);
void BeginMultiPurposeLocator(INT16 sGridNo, INT8 bLevel);
void HandleMultiPurposeLocator(void);
void RenderTopmostMultiPurposeLocator(void);
void GetSoldierAboveGuyPositions(const SOLDIERTYPE* s, INT16* psX, INT16* psY, BOOLEAN fRadio);
void UpdateEnemyUIBar(void);
extern BOOLEAN gfInOpenDoorMenu;
extern UINT32 guiUIMessageTimeDelay;
extern BOOLEAN gfTopMessageDirty;
#endif
|
DanIverson/OpenVnmrJ | src/vnmrj/src/vnmr/jgl/CGLJNI.java | /*
* Copyright (C) 2015 University of Oregon
*
* You may distribute under the terms of either the GNU General Public
* License or the Apache License, as specified in the LICENSE file.
*
* For more information, see the LICENSE file.
*/
package vnmr.jgl;
/**
* OpenGL using Java swing and Java JNI
*/
/**
 * OpenGL renderer bridge for Java Swing backed by JNI. Every call is
 * delegated to a native C3DRenderer instance owned through {@link #native_obj}.
 */
public class CGLJNI implements GLRendererIF{
    public long native_obj=0; // opaque pointer to the native C3DRenderer
    static public boolean libary_loaded=false; // set once the "cgl" JNI library has loaded
    private native long CCcreate();
    private native void CCdestroy(long cobj);
    private native void CCinit(int f);
    private native void CCrender(int f);
    private native void CCsetOptions(int indx, int i);
    private native void CCresize(int w,int h);
    private native void CCsetPhase(double r, double l);
    private native void CCsetDataScale(double mn, double mx);
    private native void CCsetDataPars(int n, int t, int s, int dtype);
    private native void CCsetDataMap(String mapfile);
    private native void CCsetDataPtr(float[] data);
    private native void CCreleaseDataPtr(float d[]);
    private native void CCsetColorArray(int id, float[] data, int n);
    private native void CCsetScale(double a, double x, double y, double z);
    private native void CCsetSpan(double x, double y, double z);
    private native void CCsetOffset(double x, double y, double z);
    private native void CCsetRotation3D(double x, double y, double z);
    private native void CCsetRotation2D(double x, double y);
    private native void CCsetObjectRotation(double x, double y, double z);
    private native void CCsetTrace(int i,int max, int num);
    private native void CCsetSlice(int i, int max, int num);
    private native void CCsetStep(int i);
    private native void CCsetSlant(double x,double y);
    private native void CCsetIntensity(double x);
    private native void CCsetBias(double x);
    private native void CCsetContrast(double x);
    private native void CCsetThreshold(double x);
    private native void CCsetContours(double x);
    private native void CCsetLimit(double x);
    private native void CCsetTransparency(double x);
    private native void CCsetAlphaScale(double x);
    private native void CCsetSliceVector(double x, double y, double z, double w);
    private native void CCrender2DPoint(int pt, int trc, int dtype);

    /**
     * Loads the "cgl" native library (once per JVM) and creates the native
     * renderer. On load failure the instance is left with native_obj == 0.
     */
    public CGLJNI(){
        if(!libary_loaded){
            try {
                System.loadLibrary("cgl");
                libary_loaded=true;
            }
            // Bug fix: a missing/unlinkable library is reported via
            // UnsatisfiedLinkError (an Error, not an Exception), so the old
            // catch(Exception) never guarded against the common failure mode
            // and the error escaped the constructor.
            catch (UnsatisfiedLinkError | Exception e){
                return;
            }
        }
        native_obj=CCcreate();
    }
    /** @return true if the "cgl" native library has been loaded. */
    public boolean libraryLoaded(){
        return libary_loaded;
    }
    /** Destroys the native renderer object. */
    public void destroy() {
        CCdestroy(native_obj);
    }
    public void init(int f) {
        CCinit(f);
    }
    public void setOptions(int indx, int i) {
        CCsetOptions(indx, i);
    }
    public void render(int f) {
        CCrender(f);
    }
    public void resize(int w, int h) {
        CCresize(w,h);
    }
    public void setPhase(double r, double l) {
        CCsetPhase(r,l);
    }
    public void setDataMap(String path){
        CCsetDataMap(path);
    }
    public void setDataPtr(float[] data){
        CCsetDataPtr(data);
    }
    public void setDataPars(int n, int t, int s, int dtype){
        CCsetDataPars(n,t,s,dtype);
    }
    public void releaseDataPtr(float[] data){
        CCreleaseDataPtr(data);
    }
    // Element count is derived from the array length (4 floats per color).
    public void setColorArray(int id, float[] data){
        CCsetColorArray(id, data, data.length/4);
    }
    public void setDataScale(double mn, double mx){
        CCsetDataScale(mn,mx);
    }
    public void setScale(double a,double x, double y, double z){
        CCsetScale(a,x,y,z);
    }
    public void setSpan(double x, double y, double z){
        CCsetSpan(x,y,z);
    }
    public void setOffset(double x, double y, double z){
        CCsetOffset(x,y,z);
    }
    public void setRotation3D(double x, double y, double z){
        CCsetRotation3D(x,y,z);
    }
    public void setRotation2D(double x, double y){
        CCsetRotation2D(x,y);
    }
    public void setObjectRotation(double x, double y, double z){
        CCsetObjectRotation(x,y,z);
    }
    public void setTrace(int i,int max, int num){
        CCsetTrace(i,max,num);
    }
    public void setSlice(int i, int max, int num){
        CCsetSlice(i,max,num);
    }
    public void setStep(int i){
        CCsetStep(i);
    }
    public void setSlant(double x,double y){
        CCsetSlant(x,y);
    }
    public void setIntensity(double x){
        CCsetIntensity(x);
    }
    public void setBias(double x){
        CCsetBias(x);
    }
    public void setContrast(double x){
        CCsetContrast(x);
    }
    public void setThreshold(double x){
        CCsetThreshold(x);
    }
    public void setContours(double x){
        CCsetContours(x);
    }
    public void setLimit(double x){
        CCsetLimit(x);
    }
    public void setTransparency(double x){
        CCsetTransparency(x);
    }
    public void setAlphaScale(double x){
        CCsetAlphaScale(x);
    }
    public void setSliceVector(double x, double y, double z, double w){
        CCsetSliceVector(x,y,z,w);
    }
    public void render2DPoint(int pt, int trc, int dtype){
        CCrender2DPoint(pt,trc,dtype);
    }
}
|
netarchivesuite/webarchive-commons | src/main/java/org/archive/hadoop/func/TupleFunc.java | package org.archive.hadoop.func;
import java.io.IOException;
import java.util.ArrayList;
import org.apache.pig.EvalFunc;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
/**
 * Pig UDF that splits a string into a tuple of its fragments.
 *
 * Expects a two-field input tuple: the string to split and the regular
 * expression to split on. Returns null for malformed input.
 */
public class TupleFunc extends EvalFunc<Tuple> {
    protected TupleFactory mTupleFactory = TupleFactory.getInstance();
    // Scratch list reused across calls; cleared after each tuple is built.
    private ArrayList<Object> mProtoTuple = null;

    public TupleFunc() {
        mProtoTuple = new ArrayList<Object>();
    }

    @Override
    public Tuple exec(Tuple tup) throws IOException {
        // Exactly two fields are required: the input string and the split regex.
        if (tup == null || tup.size() != 2) {
            return null;
        }
        String input = tup.get(0).toString();
        String pattern = tup.get(1).toString();
        return makeTuple(input.split(pattern));
    }

    /** Wraps the given fragments in a new Tuple; null input yields null. */
    private Tuple makeTuple(String[] fragments) {
        if (fragments == null) {
            return null;
        }
        for (int i = 0; i < fragments.length; i++) {
            mProtoTuple.add(fragments[i]);
        }
        Tuple result = mTupleFactory.newTuple(mProtoTuple);
        mProtoTuple.clear();
        return result;
    }
}
|
civrot/ranger-clubhouse-web | app/models/access-document.js | <filename>app/models/access-document.js
import Model, { attr } from '@ember-data/model'
import { computed } from '@ember/object';
import { ticketTypeLabel } from 'clubhouse/constants/ticket-types';
import moment from 'moment';
export default class AccessDocumentModel extends Model {
@attr('number') person_id;
@attr('string') type;
@attr('string') status;
@attr('number') source_year;
@attr('string') access_date;
@attr('boolean') access_any_time;
@attr('string') name;
@attr('string', { readOnly: true }) comments;
// write-only, backend will appeand to comments
@attr('string') additional_comments;
@attr('string') expiry_date;
@attr('string', { readOnly: true }) create_date;
@attr('string', { readOnly: true }) modified_date;
// Only returned when requesting items available for delivery
@attr('boolean', { readOnly: true }) has_staff_credential;
@computed('type')
get isTicket() {
return (this.type === 'staff_credential'
|| this.type === 'reduced_price_ticket'
|| this.type === 'gift_ticket');
}
@computed('type')
get isStaffCredential() {
return this.type === 'staff_credential';
}
@computed('type')
get isReducedPriceTicket() {
return this.type === 'reduced_price_ticket';
}
@computed('type')
get isGiftTicket() {
return this.type === 'gift_ticket';
}
@computed('type')
get hasAccessDate() {
return (this.type === 'staff_credential' || this.type === 'work_access_pass' || this.type === 'work_acess_pass_so');
}
@computed('status')
get isQualified() {
return this.status === 'qualified';
}
@computed('status')
get isClaimed() {
return this.status === 'claimed';
}
@computed('status')
get isBanked() {
return this.status === 'banked';
}
@computed('status')
get isSubmitted() {
return this.status === 'submitted';
}
@computed('status')
get isUsed() {
return this.status == 'used';
}
@computed('status')
get isCancelled() {
return this.status === 'cancelled';
}
@computed('status')
get isExpired() {
return this.status === 'expired';
}
@computed('type')
get typeHuman() {
return ticketTypeLabel[this.type];
}
@computed('expiry_date')
get expiryYear() {
return moment(this.expiry_date).format('YYYY');
}
@computed('access_date')
get accessDateFormatted() {
return moment(this.access_date).format('dddd MMMM Do, YYYY');
}
get admission_date() {
if (this.access_any_time) {
return 'any';
} else {
if (this.access_date) {
return moment(this.access_date).format('YYYY-MM-DD');
} else {
return null;
}
}
}
set admission_date(value) {
if (value === 'any') {
this.set('access_any_time', true);
this.set('access_date', null);
} else {
this.set('access_any_time', false);
this.set('access_date', value);
}
}
get expiry_year() {
return moment(this.expiry_date).format('YYYY');
}
set expiry_year(year) {
this.set('expiry_date', `${year}-09-15`);
}
}
|
Robbbert/messui | src/mame/includes/pcw.h | <gh_stars>10-100
// license:GPL-2.0+
// copyright-holders:<NAME>
/*****************************************************************************
*
* includes/pcw.h
*
****************************************************************************/
#ifndef MAME_INCLUDES_PCW_H
#define MAME_INCLUDES_PCW_H
#pragma once
#include "cpu/mcs48/mcs48.h"
#include "imagedev/floppy.h"
#include "machine/upd765.h"
#include "machine/ram.h"
#include "machine/timer.h"
#include "sound/beep.h"
#include "emupal.h"
#include "screen.h"
#define PCW_BORDER_HEIGHT 8
#define PCW_BORDER_WIDTH 8
#define PCW_NUM_COLOURS 2
#define PCW_DISPLAY_WIDTH 720
#define PCW_DISPLAY_HEIGHT 256
#define PCW_SCREEN_WIDTH (PCW_DISPLAY_WIDTH + (PCW_BORDER_WIDTH<<1))
#define PCW_SCREEN_HEIGHT (PCW_DISPLAY_HEIGHT + (PCW_BORDER_HEIGHT<<1))
#define PCW_PRINTER_WIDTH (80*16)
#define PCW_PRINTER_HEIGHT (20*16)
// Driver state for the Amstrad PCW family (8256/8512/9256/9512/9512+/PCW10).
// Bundles the main Z80 CPU, the printer and keyboard MCUs, the uPD765 FDC,
// banked RAM, and all video/printer/keyboard emulation state.
class pcw_state : public driver_device
{
public:
    pcw_state(const machine_config &mconfig, device_type type, const char *tag)
        : driver_device(mconfig, type, tag)
        , m_maincpu(*this, "maincpu")
        , m_printer_mcu(*this, "printer_mcu")
        , m_keyboard_mcu(*this, "keyboard_mcu")
        , m_fdc(*this, "upd765")
        , m_floppy(*this, "upd765:%u", 0U)
        , m_ram(*this, RAM_TAG)
        , m_beeper(*this, "beeper")
        , m_screen(*this, "screen")
        , m_palette(*this, "palette")
        , m_ppalette(*this, "ppalette")
        , m_rdbanks(*this, "bank%u", 1U)
        , m_wrbanks(*this, "bank%u", 5U)
        , m_iptlines(*this, "LINE%u", 0U)
    { }

    // Machine configurations for the individual PCW variants.
    void pcw(machine_config &config);
    void pcw8256(machine_config &config);
    void pcw8512(machine_config &config);
    void pcw9512(machine_config &config);
    void pcw9256(machine_config &config);
    void pcw9512p(machine_config &config);
    void pcw10(machine_config &config);

    void init_pcw();

protected:
    virtual void machine_start() override;
    virtual void machine_reset() override;
    virtual void video_start() override;

private:
    // --- general machine state ---
    int m_boot;
    int m_system_status;
    int m_fdc_interrupt_code;
    int m_interrupt_counter;
    // --- memory banking state ---
    uint8_t m_banks[4];
    unsigned char m_bank_force;
    uint8_t m_timer_irq_flag;
    uint8_t m_nmi_flag;
    int16_t m_printer_headpos;
    // --- keyboard MCU state ---
    uint16_t m_kb_scan_row;
    uint8_t m_mcu_keyboard_data[16];
    uint8_t m_mcu_transmit_reset_seq;
    uint8_t m_mcu_transmit_count;
    uint8_t m_mcu_selected;
    uint8_t m_mcu_buffer;
    uint8_t m_mcu_prev;
    // --- video state ---
    unsigned int m_roller_ram_addr;
    unsigned short m_roller_ram_offset;
    unsigned char m_vdu_video_control_register;
    // --- printer MCU / mechanism state ---
    uint8_t m_printer_serial; // value if shift/store data pin
    uint8_t m_printer_shift; // state of shift register
    uint8_t m_printer_shift_output; // output presented to the paper feed motor and print head motor
    uint8_t m_head_motor_state;
    uint8_t m_linefeed_motor_state;
    uint16_t m_printer_pins;
    uint8_t m_printer_p2; // MCU port P2 state
    uint32_t m_paper_feed; // amount of paper fed through printer, by n/360 inches. One line feed is 61/360in (from the linefeed command in CP/M;s ptr menu)
    std::unique_ptr<bitmap_ind16> m_prn_output;
    uint8_t m_printer_p2_prev;
    emu_timer *m_prn_stepper;
    emu_timer *m_prn_pins;
    emu_timer *m_pulse_timer;
    emu_timer *m_beep_setup_timer;

    // --- Z80-visible I/O handlers ---
    uint8_t pcw_keyboard_r(offs_t offset);
    uint8_t pcw_keyboard_data_r(offs_t offset);
    uint8_t pcw_interrupt_counter_r();
    void pcw_bank_select_w(offs_t offset, uint8_t data);
    void pcw_bank_force_selection_w(uint8_t data);
    void pcw_roller_ram_addr_w(uint8_t data);
    void pcw_pointer_table_top_scan_w(uint8_t data);
    void pcw_vdu_video_control_register_w(uint8_t data);
    void pcw_system_control_w(uint8_t data);
    uint8_t pcw_system_status_r();
    uint8_t pcw_expansion_r(offs_t offset);
    void pcw_expansion_w(offs_t offset, uint8_t data);
    // --- printer MCU port handlers ---
    uint8_t mcu_printer_p1_r();
    void mcu_printer_p1_w(uint8_t data);
    uint8_t mcu_printer_p2_r();
    void mcu_printer_p2_w(uint8_t data);
    DECLARE_READ_LINE_MEMBER(mcu_printer_t1_r);
    DECLARE_READ_LINE_MEMBER(mcu_printer_t0_r);
    // --- keyboard MCU port handlers ---
    uint8_t mcu_kb_scan_r();
    void mcu_kb_scan_w(uint8_t data);
    uint8_t mcu_kb_scan_high_r();
    void mcu_kb_scan_high_w(uint8_t data);
    uint8_t mcu_kb_data_r();
    DECLARE_READ_LINE_MEMBER(mcu_kb_t1_r);
    DECLARE_READ_LINE_MEMBER(mcu_kb_t0_r);
    // PCW9512 parallel port (9512 has a parallel printer instead of the MCU-driven one).
    uint8_t pcw9512_parallel_r(offs_t offset);
    void pcw9512_parallel_w(offs_t offset, uint8_t data);
    void mcu_transmit_serial(uint8_t bit);
    // --- palettes and rendering ---
    void set_8xxx_palette(palette_device &palette) const;
    void set_9xxx_palette(palette_device &palette) const;
    void set_printer_palette(palette_device &palette) const;
    uint32_t screen_update_pcw(screen_device &screen, bitmap_ind16 &bitmap, const rectangle &cliprect);
    uint32_t screen_update_pcw_printer(screen_device &screen, bitmap_ind16 &bitmap, const rectangle &cliprect);
    // --- timer callbacks ---
    TIMER_CALLBACK_MEMBER(pcw_timer_pulse);
    TIMER_CALLBACK_MEMBER(pcw_stepper_callback);
    TIMER_CALLBACK_MEMBER(pcw_pins_callback);
    TIMER_CALLBACK_MEMBER(setup_beep);
    TIMER_DEVICE_CALLBACK_MEMBER(pcw_timer_interrupt);
    static void floppy_formats(format_registration &fr);
    DECLARE_WRITE_LINE_MEMBER( pcw_fdc_interrupt );
    // --- required devices / I/O ports ---
    required_device<cpu_device> m_maincpu;
    required_device<upi41_cpu_device> m_printer_mcu;
    required_device<i8048_device> m_keyboard_mcu;
    required_device<upd765a_device> m_fdc;
    required_device_array<floppy_connector, 2> m_floppy;
    required_device<ram_device> m_ram;
    required_device<beep_device> m_beeper;
    required_device<screen_device> m_screen;
    required_device<palette_device> m_palette;
    required_device<palette_device> m_ppalette;
    required_memory_bank_array<4> m_rdbanks, m_wrbanks;
    required_ioport_array<16> m_iptlines;

    // --- internal helpers ---
    inline void pcw_plot_pixel(bitmap_ind16 &bitmap, int x, int y, uint32_t color);
    void pcw_update_interrupt_counter();
    void pcw_update_irqs();
    void pcw_update_read_memory_block(int block, int bank);
    void pcw_update_write_memory_block(int block, int bank);
    void pcw_update_mem(int block, int data);
    int pcw_get_sys_status();
    void pcw_printer_fire_pins(uint16_t pins);
    // --- address maps ---
    void pcw9512_io(address_map &map);
    void pcw_io(address_map &map);
    void pcw_map(address_map &map);
};
#endif // MAME_INCLUDES_PCW_H
|
oyvindberg/bloop | frontend/src/test/resources/custom-test-framework/build.sbt | bloopConfigDir in Global := baseDirectory.value / "bloop-config"
// Module that implements a custom sbt test framework against the
// sbt test-interface SPI.
lazy val framework = project
  .in(file("framework"))
  .settings(
    libraryDependencies += "org.scala-sbt" % "test-interface" % "1.0"
  )

// Test module wired to run its tests through the custom framework above
// (foo.Framework must be the framework's entry-point class).
lazy val test = project
  .in(file("test"))
  .dependsOn(framework)
  .settings(
    testFrameworks += TestFramework("foo.Framework")
  )
|
NandoCruz/Aulas-do-3-semestre | ADS - 5o semestre - noite/MOBILE/Projetos/P08_API_OpenWeather_ADS_Noite/app/src/main/java/br/clima/AsyncTaskDelegate.java | package br.clima;
/**
 * Callback contract for delivering the result of a background task back to
 * the component that started it.
 */
public interface AsyncTaskDelegate {
    /**
     * Invoked when the background task completes.
     *
     * @param output the task's result; its concrete type depends on the
     *               task — confirm with each caller (may be null)
     */
    void processFinish(Object output);
}
|
sarboc/mymove | pkg/testdatagen/make_service_agent.go | <reponame>sarboc/mymove
package testdatagen
import (
"github.com/gobuffalo/pop"
"github.com/transcom/mymove/pkg/models"
)
// MakeServiceAgent finds or makes a single service_agent record.
// Missing fields in the assertions fall back to defaults: a freshly created
// shipment, the "ACME Movers" company and the origin role. Any remaining
// asserted field values are merged over the defaults before the record is
// persisted.
func MakeServiceAgent(db *pop.Connection, assertions Assertions) models.ServiceAgent {
	// Create a shipment if one wasn't already created
	shipment := assertions.ServiceAgent.Shipment
	if shipment == nil {
		s := MakeDefaultShipment(db)
		shipment = &s
	}
	// Default company name when none was asserted.
	company := assertions.ServiceAgent.Company
	if company == "" {
		company = "ACME Movers"
	}
	// Default to the origin role when none was asserted.
	role := assertions.ServiceAgent.Role
	if role == "" {
		role = models.RoleORIGIN
	}
	serviceAgent := models.ServiceAgent{
		ShipmentID:  shipment.ID,
		Shipment:    shipment,
		Role:        role,
		Company:     company,
		PhoneNumber: stringPointer("303-867-5309"),
		Email:       stringPointer("<EMAIL>"),
	}
	// Overlay caller-supplied overrides, then persist (panics on failure).
	mergeModels(&serviceAgent, assertions.ServiceAgent)
	mustCreate(db, &serviceAgent)
	return serviceAgent
}
// MakeDefaultServiceAgent makes a Service Agent with default values
// (no assertion overrides; see MakeServiceAgent for the defaults used).
func MakeDefaultServiceAgent(db *pop.Connection) models.ServiceAgent {
	return MakeServiceAgent(db, Assertions{})
}
|
catalinboja/cts-2020 | 1091/Seminar2/src/ro/ase/csie/cts/g1091/seminar2/AccountTypes.java | package ro.ase.csie.cts.g1091.seminar2;
public enum AccountTypes {
CURRENT, SAVINGS;
private double interestRate = 0;
public String toString() {
return "The account type is " + this.name();
}
}
|
giannis20012001/JGeneralRepo | src/main/java/org/lumi/chapterfourandfive/controlstatements/partone/Factorial.java | <filename>src/main/java/org/lumi/chapterfourandfive/controlstatements/partone/Factorial.java
package org.lumi.chapterfourandfive.controlstatements.partone;
import java.util.Scanner;
/**
* Created by <NAME>
* (i [dot] tsantilis [at] yahoo [dot] com A.K.A lumi) on 4/3/2017.
*/
public class Factorial {
    // Exercises on factorials and Maclaurin-series approximations of e and e^x.

    /**
     * Returns number! for number >= 0 (0! == 1! == 1).
     * For negative input an error message is printed and 1 is returned,
     * matching the original error-handling convention of this exercise.
     */
    public static int findFactorial(int number) {
        int factorial = 1;
        if (number < 0) {
            System.out.println("You have entered a negative number!");
            return factorial;
        }
        // 0 and 1 fall through naturally: the loop multiplies by nothing / by 1.
        for (int counter = 0; counter < number; counter++) {
            factorial = factorial * (number - counter);
        }
        return factorial;
    }

    /**
     * Approximates Euler's number e by the partial sum
     * 1 + 1/1! + 1/2! + ... + 1/terms!.
     * terms == 0 yields 1 (just the leading 1/0! term); negative input prints
     * an error and returns 1.
     *
     * BUG FIX: the original hard-coded special cases returned 1 for
     * terms == 1 and 0 for terms == 0, contradicting the series its own loop
     * computes (terms == 1 must give 1 + 1/1! = 2).
     */
    public static double findEulerNumber(int terms) {
        double eulerNumber = 1; // the 1/0! term
        if (terms < 0) {
            System.out.println("You have entered a negative number!");
            return eulerNumber;
        }
        for (int counter = 1; counter <= terms; counter++) {
            eulerNumber = eulerNumber + (1 / (double) findFactorial(counter));
        }
        return eulerNumber;
    }

    /**
     * Approximates e^x by the partial sum 1 + x/1! + x^2/2! + ... + x^x/x!
     * (by the exercise's design, the number of series terms equals x).
     * Negative input prints an error and returns 1.
     *
     * BUG FIX: the original returned 1 for x == 1 (series gives 2) and 0 for
     * x == 0, although e^0 is 1.
     */
    public static double findExponentialFunction(int x) {
        double e = 1; // the x^0/0! term
        if (x < 0) {
            System.out.println("You have entered a negative number!");
            return e;
        }
        for (int counter = 1; counter <= x; counter++) {
            e = e + (findPower(x, counter) / (double) findFactorial(counter));
        }
        return e;
    }

    /**
     * Returns base raised to exponent using repeated multiplication.
     * Any exponent <= 0 yields 1 (no overflow checking is performed).
     */
    public static int findPower(int base, int exponent) {
        int number = 1;
        for (int counter = 1; counter <= exponent; counter++) {
            number = number * base;
        }
        return number;
    }
}
|
zachlatta/chromium | chrome/test/mini_installer_test/test.cc | <reponame>zachlatta/chromium
// Copyright (c) 2006-2009 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/platform_thread.h"
#include "base/win_util.h"
#include "chrome/installer/util/install_util.h"
#include "chrome/test/mini_installer_test/mini_installer_test_constants.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "chrome_mini_installer.h"
namespace {
// Fixture that guarantees a Chrome-free machine around each installer test:
// both the user-level and the system-level dev-channel installs are removed
// before and after every test.
class MiniInstallTest : public testing::Test {
 protected:
  // Uninstalls the user-level and system-level dev-channel Chrome builds.
  void CleanTheSystem() {
    ChromeMiniInstaller userinstall(mini_installer_constants::kUserInstall,
        mini_installer_constants::kDevChannelBuild);
    userinstall.UnInstall();
    ChromeMiniInstaller systeminstall(
        mini_installer_constants::kSystemInstall,
        mini_installer_constants::kDevChannelBuild);
    systeminstall.UnInstall();
  }
  virtual void SetUp() {
    // NOTE(review): exit(0) terminates the whole test binary on Vista,
    // silently skipping every remaining test — confirm this is intentional.
    if (win_util::GetWinVersion() < win_util::WINVERSION_VISTA) {
      CleanTheSystem();
    } else {
      printf("These tests don't run on Vista\n");
      exit(0);
    }
  }
  virtual void TearDown() {
    if (win_util::GetWinVersion() < win_util::WINVERSION_VISTA) {
      // Waits 2s before cleanup — presumably to let the installer release
      // files/registry handles; confirm the reason.
      PlatformThread::Sleep(2000);
      CleanTheSystem();
    } else {
      printf("These tests don't run on Vista\n");
      exit(0);
    }
  }
};
};
// TODO(nsylvain): Change this for GOOGLE_CHROME_BUILD when we have the
// previous installers accessible from our Google Chrome continuous buildbot.
#if defined(OFFICIAL_BUILD)
TEST_F(MiniInstallTest, InstallLatestDevFullInstallerTest) {
ChromeMiniInstaller installer(mini_installer_constants::kUserInstall,
mini_installer_constants::kDevChannelBuild);
installer.InstallFullInstaller(false);
}
TEST_F(MiniInstallTest, InstallLatestDevFullInstallerTestSystemLevel) {
ChromeMiniInstaller installer(mini_installer_constants::kSystemInstall,
mini_installer_constants::kDevChannelBuild);
installer.InstallFullInstaller(false);
}
TEST_F(MiniInstallTest, InstallLatestStableFullInstallerTest) {
ChromeMiniInstaller installer(mini_installer_constants::kUserInstall,
mini_installer_constants::kStableChannelBuild);
installer.InstallFullInstaller(false);
}
TEST_F(MiniInstallTest, InstallLatestStableFullInstallerTestSystemLevel) {
ChromeMiniInstaller installer(mini_installer_constants::kSystemInstall,
mini_installer_constants::kStableChannelBuild);
installer.InstallFullInstaller(false);
}
TEST_F(MiniInstallTest,
InstallLatestDevFullInstallerOverPreviousFullDevInstaller) {
ChromeMiniInstaller installer(mini_installer_constants::kUserInstall,
mini_installer_constants::kDevChannelBuild);
installer.OverInstallOnFullInstaller(mini_installer_constants::kFullInstall);
}
TEST_F(MiniInstallTest,
InstallLatestDevFullInstallerOverPreviousFullDevInstallerSystemLevel) {
ChromeMiniInstaller installer(mini_installer_constants::kSystemInstall,
mini_installer_constants::kDevChannelBuild);
installer.OverInstallOnFullInstaller(mini_installer_constants::kFullInstall);
}
TEST_F(MiniInstallTest,
InstallLatestDevDiffInstallerOverPreviousFullDevInstaller) {
ChromeMiniInstaller installer(mini_installer_constants::kUserInstall,
mini_installer_constants::kDevChannelBuild);
installer.OverInstallOnFullInstaller(mini_installer_constants::kDiffInstall);
}
TEST_F(MiniInstallTest,
InstallLatestDevDiffInstallerOverPreviousFullDevInstallerSystemLevel) {
ChromeMiniInstaller installer(mini_installer_constants::kSystemInstall,
mini_installer_constants::kDevChannelBuild);
installer.OverInstallOnFullInstaller(mini_installer_constants::kDiffInstall);
}
TEST_F(MiniInstallTest,
InstallLatestFullStableInstallerOverPreviousFullStableInstaller) {
ChromeMiniInstaller installer(mini_installer_constants::kUserInstall,
mini_installer_constants::kStableChannelBuild);
installer.OverInstallOnFullInstaller(mini_installer_constants::kFullInstall);
}
TEST_F(MiniInstallTest,
InstallLatestFullStableInstallerOverPreviousFullStableInstallerSystemLevel) {
ChromeMiniInstaller installer(mini_installer_constants::kSystemInstall,
mini_installer_constants::kStableChannelBuild);
installer.OverInstallOnFullInstaller(mini_installer_constants::kFullInstall);
}
TEST_F(MiniInstallTest,
InstallLatestDiffStableInstallerOverPreviousFullStableInstaller) {
ChromeMiniInstaller installer(mini_installer_constants::kUserInstall,
mini_installer_constants::kStableChannelBuild);
installer.OverInstallOnFullInstaller(mini_installer_constants::kDiffInstall);
}
TEST_F(MiniInstallTest,
InstallLatestDiffStableInstallerOverPreviousFullStableInstallerSystemLevel) {
ChromeMiniInstaller installer(mini_installer_constants::kSystemInstall,
mini_installer_constants::kStableChannelBuild);
installer.OverInstallOnFullInstaller(mini_installer_constants::kDiffInstall);
}
TEST_F(MiniInstallTest, StandaloneInstallerTest) {
ChromeMiniInstaller installer(mini_installer_constants::kUserInstall,
mini_installer_constants::kStableChannelBuild);
installer.InstallStandaloneInstaller();
}
// This test doesn't make sense. Disabling for now.
TEST_F(MiniInstallTest, DISABLED_MiniInstallerOverChromeMetaInstallerTest) {
ChromeMiniInstaller installer(mini_installer_constants::kUserInstall,
mini_installer_constants::kDevChannelBuild);
installer.OverInstall();
}
// Encountering issue 9593. Disabling temporarily.
TEST_F(MiniInstallTest,
DISABLED_InstallLatestStableFullInstallerOverChromeMetaInstaller) {
ChromeMiniInstaller installer(mini_installer_constants::kUserInstall,
mini_installer_constants::kStableChannelBuild);
installer.OverInstall();
}
// Encountering issue 9593. Disabling temporarily.
TEST_F(MiniInstallTest,
DISABLED_InstallLatestDevFullInstallerOverChromeMetaInstallerTest) {
ChromeMiniInstaller installer(mini_installer_constants::kUserInstall,
mini_installer_constants::kDevChannelBuild);
installer.OverInstall();
}
// Repair testcases
TEST_F(MiniInstallTest, RepairFolderTestOnLatestDevFullInstaller) {
ChromeMiniInstaller installer(mini_installer_constants::kUserInstall,
mini_installer_constants::kDevChannelBuild);
installer.Repair(ChromeMiniInstaller::VERSION_FOLDER);
}
TEST_F(MiniInstallTest, RepairFolderTestOnLatestDevFullInstallerSystemLevel) {
ChromeMiniInstaller installer(mini_installer_constants::kSystemInstall,
mini_installer_constants::kDevChannelBuild);
installer.Repair(ChromeMiniInstaller::VERSION_FOLDER);
}
TEST_F(MiniInstallTest, RepairRegistryTestOnLatestDevFullInstaller) {
ChromeMiniInstaller installer(mini_installer_constants::kUserInstall,
mini_installer_constants::kDevChannelBuild);
installer.Repair(ChromeMiniInstaller::REGISTRY);
}
TEST_F(MiniInstallTest, RepairRegistryTestOnLatestDevFullInstallerSystemLevel) {
ChromeMiniInstaller installer(mini_installer_constants::kSystemInstall,
mini_installer_constants::kDevChannelBuild);
installer.Repair(ChromeMiniInstaller::REGISTRY);
}
#endif
TEST_F(MiniInstallTest, InstallLatestMiniInstallerAtSystemLevel) {
ChromeMiniInstaller installer(mini_installer_constants::kSystemInstall,
mini_installer_constants::kDevChannelBuild);
installer.Install();
}
TEST_F(MiniInstallTest, InstallLatestMiniInstallerAtUserLevel) {
ChromeMiniInstaller installer(mini_installer_constants::kUserInstall,
mini_installer_constants::kDevChannelBuild);
installer.Install();
}
TEST(InstallUtilTests, MiniInstallTestValidWindowsVersion) {
// We run the tests on all supported OSes.
// Make sure the code agrees.
EXPECT_TRUE(InstallUtil::IsOSSupported());
}
|
damb/seiscomp3 | src/trunk/libs/3rd-party/spread/daemon/message.h | /*
* The Spread Toolkit.
*
* The contents of this file are subject to the Spread Open-Source
* License, Version 1.0 (the ``License''); you may not use
* this file except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.spread.org/license/
*
* or in the file ``license.txt'' found in this distribution.
*
* Software distributed under the License is distributed on an AS IS basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Creators of Spread are:
* <NAME>, <NAME>, <NAME>, <NAME>.
*
* Copyright (C) 1993-2014 Spread Concepts LLC <<EMAIL>>
*
* All Rights Reserved.
*
* Major Contributor(s):
* ---------------
* <NAME> <EMAIL> - accelerated ring protocol.
* <NAME> <EMAIL> - contributions to process groups.
* <NAME> <EMAIL> - scalable wide area support.
* <NAME> <EMAIL> - group communication security.
* <NAME> <EMAIL> - Perl, autoconf, old skiplist.
* <NAME> <EMAIL> - Java interface.
*
*/
#ifndef INC_MESSAGE
#define INC_MESSAGE
#include "arch.h"
#include "prot_objs.h"
#include "scatter.h"
#include "session.h"
int32 Obj_Inc_Refcount(void *obj);
void Message_populate_with_buffers(message_obj *msg);
message_header *Message_get_message_header(message_obj *msg);
int Message_get_header_size();
int Message_get_data_fragment_len();
scatter *Message_get_data_scatter(message_obj *msg);
int Message_get_packet_type(int mess_type);
char *Message_get_first_data_ptr(message_obj *msg);
int Message_get_first_data_len(message_obj *msg);
char *Message_get_first_group(message_obj *msg);
char *Message_get_groups_array(message_obj *msg);
int Message_get_data_header_size(void);
int Message_get_non_body_header_size();
void Message_calculate_current_location(message_obj *msg, int len_sent, struct partial_message_info *cur_msg);
void Message_reset_current_location(struct partial_message_info *cur_msg);
void Message_set_location_begin_body(struct partial_message_info *cur_msg);
void Message_add_scat_element(message_obj *msg);
void Message_remove_scat_element(message_obj *msg);
message_obj *Message_dup_and_reset_old_message(message_obj *msg, int len);
message_obj *Message_copy_message(message_obj *msg);
message_obj *Message_new_message(void);
message_obj *Message_create_message(int mess_type, char *sender_name);
void Message_endian_correct_message_header(message_obj *msg);
int Message_kill_mess_fixup(message_obj *msg, int orig_len, int mbox);
void Message_element_len_fixup(message_obj *msg);
void Message_Set_Fragment_Fields(message_obj *msg);
void Message_Buffer_to_Message_Fragments( message_obj *msg, char buf[], int num_bytes );
void Message_add_oldtype_to_reject( message_obj *msg, int32u old_type );
void Message_dispose_message(message_obj *msg);
void Message_Dec_Refcount(message_obj *msg);
#endif /* INC_MESSAGE */
|
GeertArien/low-level-programming | chapter_11/higher_order/list.h | #ifndef LIST_H_
#define LIST_H_

#include <stdlib.h>
#include <stdbool.h>

/* Node of a singly linked list of ints. */
struct list_t {
    int value;
    struct list_t* next;
};

/* The per-function semantics below are inferred from the names — confirm
 * against the implementation file. */
struct list_t* list_create(const int val);                 /* new one-node list holding val */
struct list_t* list_back(struct list_t* list);             /* last node of the list */
void list_add_front(const int value, struct list_t** list);
void list_add_back(const int value, struct list_t** list);
struct list_t* list_node_at(struct list_t* list, const size_t index);  /* index-th node */
int list_get(struct list_t* list, const size_t index);                 /* value at index */
void list_free(struct list_t* list);                       /* releases every node */
size_t list_length(struct list_t* list);
int list_sum(struct list_t* list);

/* Persistence helpers; presumably return false on I/O failure. */
bool save(struct list_t* lst, const char* filename);
bool load(struct list_t** lst, const char* filename);
bool serialize(struct list_t* lst, const char* filename);
bool deserialize(struct list_t** lst, const char* filename);

#endif 
mikehelmick/CascadeLMS | app/models/rubric.rb | <reponame>mikehelmick/CascadeLMS
# A grading rubric attached to an assignment: a primary trait with criteria
# and point values for no/partial/full (and above-full) credit. Rubrics can
# also be mapped onto course outcomes for outcome-based reporting.
class Rubric < ActiveRecord::Base
  validates_presence_of :primary_trait, :no_credit_criteria, :part_credit_criteria, :full_credit_criteria
  validates_numericality_of :no_credit_points, :part_credit_points, :full_credit_points

  belongs_to :assignment
  acts_as_list :scope => :assignment
  belongs_to :course
  has_and_belongs_to_many :course_outcomes
  has_many :rubric_entries, :dependent => :destroy

  # Point values are normalized (see normalize_points) on every save.
  before_save :normalize_points

  # Rebuilds rubric -> course-outcome mappings from form parameters keyed as
  # "rubric_<rubric_id>_co_<outcome_id>". Both ids are validated to belong to
  # +course+ before a mapping row is created (all inside one transaction).
  # Returns an array of human-readable error strings (empty on full success).
  #
  # NOTE(review): an id that fails validation is never cached, so it is
  # re-validated — and its error re-appended — for every parameter that
  # references it; confirm whether duplicate error messages are intended.
  def self.process_full_mapping(params, course)
    errors = Array.new
    loaded_rubrics = Hash.new    # rubric ids already validated OK
    loaded_outcomes = Hash.new   # outcome ids already validated OK
    CourseOutcomesRubrics.transaction do
      params.keys.each do |key|
        if key[0..6].eql?('rubric_') && !key.index('_co_').nil?
          parts = key.split('_')
          rubric_id = parts[1].to_i
          coutcome_id = parts[3].to_i
          valid_mapping = true
          # Validate rubric
          if loaded_rubrics[rubric_id].nil?
            rubric = Rubric.find(rubric_id) rescue rubric = nil
            if (!rubric.nil? && rubric.course_id == course.id)
              loaded_rubrics[rubric_id] = true
            else
              errors << "Rubric id #{rubric_id} is not in this course, invalid request."
              valid_mapping = false
            end
          end
          # Validate course outcome
          if loaded_outcomes[coutcome_id].nil?
            outcome = CourseOutcome.find(coutcome_id) rescue outcome = nil
            if (!outcome.nil? && outcome.course_id == course.id)
              loaded_outcomes[coutcome_id] = true
            else
              errors << "Course outcome id #{coutcome_id} is not in this course, invalid request."
              valid_mapping = false
            end
          end
          # Save mapping
          if valid_mapping
            newMapping = CourseOutcomesRubrics.new
            newMapping.rubric_id = rubric_id
            newMapping.course_outcome_id = coutcome_id
            newMapping.save
          end
        end
      end
    end
    return errors
  end

  # True when this rubric is already mapped to the given course outcome id.
  def mapped_to_course_outcome?( outcome_id )
    self.course_outcomes.each do |i|
      return true if i.id == outcome_id
    end
    return false
  end

  # Duplicates this rubric onto another assignment, carrying the course
  # outcome mappings over to the copy. Returns the new rubric.
  def copy_to(assignment)
    newCopy = self.clone()
    newCopy.assignment_id = assignment.id
    newCopy.save
    # Copy the course outcome mappings
    self.course_outcomes.each do |co|
      newCopy.course_outcomes << co
    end
    newCopy.save
    return newCopy
  end

  # Duplicates this rubric into another course as an unattached copy
  # (assignment_id 0). Outcome mappings are not carried over — presumably
  # because outcomes are course-specific; confirm intent.
  def copy_to_course(course)
    newCopy = self.clone()
    newCopy.assignment_id = 0
    newCopy.course_id = course.id
    newCopy.save
    return newCopy
  end

  # Normalize all point values to at most two decimal places (the original
  # comment said "1 decimal point", but %.2f below uses two).
  def normalize_points
    self.no_credit_points = normalize_point_value( self.no_credit_points )
    self.part_credit_points = normalize_point_value( self.part_credit_points )
    self.full_credit_points = normalize_point_value( self.full_credit_points )
    self.above_credit_points = normalize_point_value( self.above_credit_points )
  end

  # Returns an Integer when the value has no fractional part after rounding
  # to two decimals, otherwise a Float with two decimals. nil is treated as 0.
  #
  # NOTE(review): when sprintf rounds UP to a whole number (e.g. 5.996 ->
  # "6.00"), value.to_i truncates back down to 5 — likely a rounding bug;
  # confirm before changing since stored grades depend on it.
  def normalize_point_value( value )
    value = 0 if value.nil?
    as_s = sprintf("%.2f", value)
    if as_s[-2..-1].eql?("00")
      return value.to_i
    else
      return as_s.to_f
    end
  end
end
|
yingzhuo/spring-turbo | src/main/java/spring/turbo/util/crypto/CipherUtils.java | <gh_stars>1-10
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* ____ _ _____ _
* / ___| _ __ _ __(_)_ __ __ |_ _| _ _ __| |__ ___
* \___ \| '_ \| '__| | '_ \ / _` || || | | | '__| '_ \ / _ \
* ___) | |_) | | | | | | | (_| || || |_| | | | |_) | (_) |
* |____/| .__/|_| |_|_| |_|\__, ||_| \__,_|_| |_.__/ \___/
* |_| |___/ https://github.com/yingzhuo/spring-turbo
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
package spring.turbo.util.crypto;
import spring.turbo.util.Asserts;
import javax.crypto.Cipher;
import java.security.Key;
/**
* @author 应卓
* @see spring.turbo.util.jks.CertificateUtils
* @see spring.turbo.util.jks.KeyStoreUtils
* @since 1.0.15
*/
public final class CipherUtils {

    /**
     * Private constructor — static utility class, not meant to be instantiated.
     */
    private CipherUtils() {
        super();
    }

    /**
     * Encrypts {@code data} with the given key, using the cipher algorithm
     * named by {@link Key#getAlgorithm()}.
     *
     * @param data plaintext bytes (must not be null)
     * @param key  key whose algorithm selects the cipher (must not be null)
     * @return the ciphertext
     * @throws IllegalArgumentException wrapping any underlying crypto failure
     */
    public static byte[] encrypt(byte[] data, Key key) {
        return transform(Cipher.ENCRYPT_MODE, data, key);
    }

    /**
     * Decrypts {@code data} with the given key, using the cipher algorithm
     * named by {@link Key#getAlgorithm()}.
     *
     * @param data ciphertext bytes (must not be null)
     * @param key  key whose algorithm selects the cipher (must not be null)
     * @return the plaintext
     * @throws IllegalArgumentException wrapping any underlying crypto failure
     */
    public static byte[] decrypt(byte[] data, Key key) {
        return transform(Cipher.DECRYPT_MODE, data, key);
    }

    /**
     * Shared implementation of encrypt/decrypt — the two public methods were
     * copy-paste duplicates differing only in the cipher mode.
     */
    private static byte[] transform(int mode, byte[] data, Key key) {
        Asserts.notNull(data);
        Asserts.notNull(key);
        try {
            Cipher cipher = Cipher.getInstance(key.getAlgorithm());
            cipher.init(mode, key);
            return cipher.doFinal(data);
        } catch (Exception e) {
            throw new IllegalArgumentException(e.getMessage(), e);
        }
    }
}
|
filedos9ig1d/kendzi0 | kendzi3d-collada-exporter/src/test/java/kendzi/util/index/SimplifyIndexTest.java | package kendzi.util.index;
import java.util.ArrayList;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
/**
 * Unit test for SimplifyIndex: verifies that simple() keeps only the indexed
 * elements of the data list and rewrites the index to point into the reduced
 * list. (Removed: commented-out debug prints and the unused private print
 * helper they called — dead code.)
 */
public class SimplifyIndexTest {

    @Test
    public void test() {
        // Data [1..5] with an index selecting positions 2 and 4 (values 3 and 5).
        List<Integer> data = new ArrayList<Integer>();
        data.add(1);
        data.add(2);
        data.add(3);
        data.add(4);
        data.add(5);
        List<Integer> index = new ArrayList<Integer>();
        index.add(2);
        index.add(4);

        SimplifyIndex<Integer> si = new SimplifyIndex<Integer>(data, index);
        si.simple();

        // Only the two referenced values survive, in their original order...
        Assert.assertEquals(2, si.getSdata().size());
        Assert.assertEquals(2, si.getSindex().size());
        Assert.assertEquals(3, (int) si.getSdata().get(0));
        Assert.assertEquals(5, (int) si.getSdata().get(1));
        // ...and the index is remapped to the new positions 0 and 1.
        Assert.assertEquals(0, (int) si.getSindex().get(0));
        Assert.assertEquals(1, (int) si.getSindex().get(1));
    }
}
|
sultan1k/vk_air | vk_air/client.py | <gh_stars>1-10
"""
MIT License
Copyright (c) 2021 sultan1k
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import asyncio
import sys
from asyncio.coroutines import iscoroutinefunction
from typing import List, Optional
from cryptography.fernet import Fernet
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
import random
import base64
import string
from .longpoll import LongPoll
from .objects.group import Group
from .enums import EVENTS
from .bot_api import BotApi
class Client:
    """The main bot object.

    Parameters
    ----------
    * prefix: :class:`str` - the bot's command prefix.
    * debug: :class:`bool` - whether to run the bot in
      debug mode. By default debug mode is disabled.
    """
    def __init__(self, *, prefix: str = '!', debug: bool = False) -> None:
        self.prefix = prefix
        self.debug = debug
        # Maps command name (and every alias) to its handler metadata dict.
        self.commands = {}
        # Maps event name to a list of registered handler metadata dicts.
        self.events = EVENTS

    def event(
        self,
        *,
        name: Optional[str] = None,
        users: Optional[List[int]] = None,
        cooldown: Optional[int] = None
    ) -> object:
        """Register an event handler.

        Before starting the bot, remember to enable LongPoll in the VK
        group settings and turn on delivery of the events you want the
        bot to handle.

        Parameters
        ----------
        * name: :class:`str` - the event name (defaults to the decorated
          function's name).
        * users: :class:`List[int]` - list of users the bot will react
          to (optional).
        * cooldown: :class:`int` - time (in seconds) per which a single
          user may trigger the event (optional).
        """
        def wrap(func):
            if not iscoroutinefunction(func):
                print(f"\033[1m[\033[91mОШИБКА\033[97m] Функция {func.__name__} должна быть асинхронной\033[0m")
                sys.exit(1)
            _name = func.__name__ if not name else name
            if _name not in self.events:
                print(f"\033[1m[\033[91mОШИБКА\033[97m] События с названием \"{_name}\" не существует\033[0m")
                sys.exit(1)
            # Fix: the attribute is co_kwonlyargcount; the previous
            # co_kwonlycount does not exist and raised AttributeError for
            # handlers without positional arguments.
            if func.__code__.co_argcount == 0 and func.__code__.co_kwonlyargcount == 0:
                print(f"\033[1m[\033[91mОШИБКА\033[97m] Функция {func.__name__} должна иметь как минимум один аргумент\033[0m")
                sys.exit(1)
            data = {'function': func}
            if users:
                data['users'] = users
            if cooldown:
                data['cooldown'] = cooldown
            self.events[_name].append(data)
            if self.debug:
                print(f"\033[1m[\033[92mУСПЕШНО\033[97m] Функция {func.__name__}, отвечающая за ивент {_name}, успешно зарегистрирована.\033[0m")
            # Fix: return the wrapped coroutine so the decorated name keeps
            # pointing at the function instead of becoming None.
            return func
        return wrap

    def command(
        self,
        *,
        name: str = None,
        aliases: Optional[List[str]] = None,
        users: Optional[List[int]] = None,
        cooldown: Optional[int] = None
    ) -> object:
        """Register a command.

        Before starting the bot, remember to enable LongPoll in the VK
        group settings and turn on delivery of the 'message_new' event.

        Parameters
        ----------
        * name: :class:`str` - the command name (defaults to the
          decorated function's name).
        * aliases: :class:`List[str]` - alternative names the command
          will also respond to (optional).
        * users: :class:`List[int]` - list of users allowed to use the
          command (optional).
        * cooldown: :class:`int` - time (in seconds) per which a single
          user may invoke the command (optional).
        """
        def wrap(func):
            if not iscoroutinefunction(func):
                print(f"\033[1m[\033[91mОШИБКА\033[97m] Функция {func.__name__} должна быть асинхронной\033[0m")
                sys.exit(1)
            _name = name if name else func.__name__
            if _name in self.commands:
                print(f"\033[1m[\033[91mОШИБКА\033[97m] Команда с именем {_name} уже существует\033[0m")
                sys.exit(1)
            data = {
                'function': func
            }
            if users:
                data['users'] = users
            if cooldown:
                data['cooldown'] = cooldown
            self.commands[_name] = data
            # Fix: a duplicated nested "if self.debug:" check was collapsed
            # into a single one.
            if self.debug:
                print(f"\033[1m[\033[92mУСПЕШНО\033[97m] Функция {func.__name__}, отвечающая за команду {_name}, успешно зарегистрирована.\033[0m")
            # Fix: avoid a mutable default argument for aliases.
            for i in (aliases or []):
                if i in self.commands:
                    print(
                        f"\033[1m[\033[91mОШИБКА\033[97m] Псевдоним команды с именем {i}, который вызывает оригинальную команду {func.__name__}, уже принадлежит другой функции.\033[0m")
                    sys.exit(1)
                self.commands[i] = data
            # Fix: return the wrapped coroutine so the decorated name keeps
            # pointing at the function instead of becoming None.
            return func
        return wrap

    def login(self, *, token: Optional[str] = None) -> None:
        """Start the bot.

        This function launches the bot. It must be called only at the
        very end of your code.

        Parameters
        ----------
        * token: :class:`str` - the bot token obtained in the group
          settings.
        """
        if not token:
            print("\033[1m[\033[91mОШИБКА\033[97m] Для запуска бота необходимо ввести токен\033[0m")
            sys.exit(1)
        kdf = PBKDF2HMAC(
            algorithm=hashes.SHA512(),
            length=32,
            salt=b'VKSALT',
            iterations=100000,
            backend=default_backend()
        )
        # Derive a throwaway random key; the token is kept in memory only in
        # encrypted form and decrypted by BotApi when needed.
        key_to_encrypt = base64.urlsafe_b64encode(kdf.derive(''.join(random.choices(string.ascii_uppercase + string.digits, k=10)).encode()))
        dangerous_f = Fernet(key_to_encrypt)
        encrypt_key = dangerous_f.encrypt(token.encode())
        self.api = BotApi(dangerous_f=dangerous_f, encrypt_key=encrypt_key, debug=self.debug)
        loop = asyncio.get_event_loop()
        # Fix: check for an empty result BEFORE indexing [0]; previously an
        # invalid token raised IndexError instead of printing the error below.
        groups = loop.run_until_complete(self.api.groupsGetById())
        if not groups:
            print("\033[1m[\033[91mОШИБКА\033[97m] Введён неверный токен бота.\033[0m")
            sys.exit(1)
        self.group: Group = groups[0]
        if self.debug:
            print("\033[1m[\033[92mУСПЕШНО\033[97m] Бот успешно запущен.\033[0m")
        self.api.group_id = self.group.id
        self.longpoll: LongPoll = LongPoll(api=self.api, events=self.events, commands=self.commands, prefix=self.prefix)
        self.longpoll.connect()
VersiraSec/epsilon-cfw | apps/sequence/graph/curve_view_range.cpp | #include "curve_view_range.h"
#include <cmath>
#include <ion.h>
#include <poincare/preferences.h>
#include <algorithm>
using namespace Shared;
using namespace Poincare;
namespace Sequence {

// Constructor: after default initialization, pull xMin slightly left of the
// origin (a fixed fraction of the current xMax) so the Y axis is not drawn
// flush against the left screen edge.
CurveViewRange::CurveViewRange(InteractiveCurveViewRangeDelegate * delegate) :
  InteractiveCurveViewRange(delegate)
{
  MemoizedCurveViewRange::protectedSetXMin(-k_displayLeftMarginRatio * xMax(), k_lowerMaxFloat, k_upperMaxFloat);
}

// Normalize the range like the base class, then re-anchor the X window so it
// starts just left of the delegate's interesting X minimum, preserving the
// normalized window width.
void CurveViewRange::protectedNormalize(bool canChangeX, bool canChangeY, bool canShrink) {
  Shared::InteractiveCurveViewRange::protectedNormalize(canChangeX, canChangeY, canShrink);
  /* The X axis is not supposed to go into the negatives, save for a small
   * margin. However, after normalizing, it could be the case. We thus shift
   * the X range rightward to the origin. */
  float interestingXMin = m_delegate->interestingXMin();
  float xRange = xMax() - xMin();
  m_xRange.setMin(interestingXMin - k_displayLeftMarginRatio * xRange);
  MemoizedCurveViewRange::protectedSetXMax(xMin() + xRange, k_lowerMaxFloat, k_upperMaxFloat);
}

}
|
SilensAngelusNex/CS308_SLogo | src/parser/ParserUtils.java | package parser;
/**
* @author <NAME>
*/
public class ParserUtils {
	// Resource bundle describing the language-independent SLogo token syntax.
	public static final String SYNTAX_FILE_PATH = "resources/languages/Syntax";
	// Properties file mapping each command to its expected parameter count.
	public static final String NUM_PARAMS_PATH = "resources/CommandNumParams";
	// Sentinel returned when a token matches no known syntax rule.
	public static final String ERROR_CODE = "NO MATCH";
	// Sentinel returned when a token parses but names no known command.
	public static final String UNKNOWN_COMMAND_CODE = "UNKNOWN COMMAND";
}
|
ScalablyTyped/SlinkyTyped | i/instagram-private-api/src/main/scala/typingsSlinky/instagramPrivateApi/newsRepositoryInboxResponseMod/NewsRepositoryInboxResponseOldStoriesItem.scala | package typingsSlinky.instagramPrivateApi.newsRepositoryInboxResponseMod
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
/** Scala.js facade (ScalablyTyped-generated) for a single "old stories" item
  * of the Instagram private-API news inbox response. Do not edit by hand.
  */
@js.native
trait NewsRepositoryInboxResponseOldStoriesItem extends StObject {
  var args: NewsRepositoryInboxResponseArgs = js.native
  var counts: NewsRepositoryInboxResponseCounts = js.native
  var pk: js.UndefOr[String] = js.native
  var story_type: Double = js.native
  var `type`: Double = js.native
}
/** Generated companion: factory building the underlying JS object literal and
  * a mutable-builder syntax for setting optional fields. Do not edit by hand.
  */
object NewsRepositoryInboxResponseOldStoriesItem {
  @scala.inline
  def apply(
    args: NewsRepositoryInboxResponseArgs,
    counts: NewsRepositoryInboxResponseCounts,
    story_type: Double,
    `type`: Double
  ): NewsRepositoryInboxResponseOldStoriesItem = {
    val __obj = js.Dynamic.literal(args = args.asInstanceOf[js.Any], counts = counts.asInstanceOf[js.Any], story_type = story_type.asInstanceOf[js.Any])
    __obj.updateDynamic("type")(`type`.asInstanceOf[js.Any])
    __obj.asInstanceOf[NewsRepositoryInboxResponseOldStoriesItem]
  }
  @scala.inline
  implicit class NewsRepositoryInboxResponseOldStoriesItemMutableBuilder[Self <: NewsRepositoryInboxResponseOldStoriesItem] (val x: Self) extends AnyVal {
    @scala.inline
    def setArgs(value: NewsRepositoryInboxResponseArgs): Self = StObject.set(x, "args", value.asInstanceOf[js.Any])
    @scala.inline
    def setCounts(value: NewsRepositoryInboxResponseCounts): Self = StObject.set(x, "counts", value.asInstanceOf[js.Any])
    @scala.inline
    def setPk(value: String): Self = StObject.set(x, "pk", value.asInstanceOf[js.Any])
    @scala.inline
    def setPkUndefined: Self = StObject.set(x, "pk", js.undefined)
    @scala.inline
    def setStory_type(value: Double): Self = StObject.set(x, "story_type", value.asInstanceOf[js.Any])
    @scala.inline
    def setType(value: Double): Self = StObject.set(x, "type", value.asInstanceOf[js.Any])
  }
}
|
Kisensum/dnp3 | cpp/libs/src/opendnp3/master/CommandSetOps.h | /*
* Licensed to Green Energy Corp (www.greenenergycorp.com) under one or
* more contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright ownership.
* Green Energy Corp licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This project was forked on 01/01/2013 by Automatak, LLC and modifications
* may have been made to this file. Automatak, LLC licenses these modifications
* to you under the terms of the License.
*/
#ifndef OPENDNP3_COMMAND_SET_OPS_H
#define OPENDNP3_COMMAND_SET_OPS_H
#include "opendnp3/master/CommandSet.h"
#include "opendnp3/master/CommandCallbackT.h"
#include "opendnp3/app/HeaderWriter.h"
#include "opendnp3/app/parsing/IAPDUHandler.h"
#include <openpal/logging/Logger.h>
namespace opendnp3
{
/**
*
* Has private access to CommandSet
*
* Used to reduce the public API surface exposed in includes to users
*/
class CommandSetOps final : public IAPDUHandler, private openpal::Uncopyable
{
	// Whether parsed response headers are being applied as a SELECT result
	// or an OPERATE result.
	enum class Mode : uint8_t
	{
	    Select,
	    Operate
	};

	// Private: instances are only created internally by the static
	// ProcessSelectResponse/ProcessOperateResponse entry points.
	CommandSetOps(Mode mode, CommandSet& commands_);

	Mode mode;

public:

	enum class OperateResult : uint8_t
	{
	    OK,
	    FAIL_PARSE
	};

	enum class SelectResult : uint8_t
	{
	    OK,
	    FAIL_PARSE,
	    FAIL_SELECT
	};

	/// Write the headers to an ASDU
	static bool Write(const CommandSet& set, HeaderWriter& writer);

	/// Invoke the callback for a response
	static void InvokeCallback(const CommandSet& set, TaskCompletion result, CommandCallbackT& callback);

	/**
	* parses a response to a select, applying each received header to the command set
	*
	* @return true if every object in every header was correctly selected, false otherwise
	*/
	static SelectResult ProcessSelectResponse(CommandSet& set, const openpal::RSlice& headers, openpal::Logger* logger);

	/**
	* parses a response to an operate (or DO), applying each received header to the command set
	*
	* @return true if parsing was successful, the results are left in the set
	*/
	static OperateResult ProcessOperateResponse(CommandSet& set, const openpal::RSlice& headers, openpal::Logger* logger);

private:

	// IAPDUHandler: restrict which group/variation + qualifier combinations
	// this handler will accept while parsing.
	virtual bool IsAllowed(uint32_t headerCount, GroupVariation gv, QualifierCode qc) override;

	// IAPDUHandler: apply each received command header type to the set.
	virtual IINField ProcessHeader(const PrefixHeader& header, const ICollection<Indexed<ControlRelayOutputBlock>>& values) override;
	virtual IINField ProcessHeader(const PrefixHeader& header, const ICollection<Indexed<AnalogOutputInt16>>& values) override;
	virtual IINField ProcessHeader(const PrefixHeader& header, const ICollection<Indexed<AnalogOutputInt32>>& values) override;
	virtual IINField ProcessHeader(const PrefixHeader& header, const ICollection<Indexed<AnalogOutputFloat32>>& values) override;
	virtual IINField ProcessHeader(const PrefixHeader& header, const ICollection<Indexed<AnalogOutputDouble64>>& values) override;

	// Shared implementation for all ProcessHeader overloads.
	template <class T>
	IINField ProcessAny(const PrefixHeader& header, const ICollection<Indexed<T>>& values);

	// Non-owning; the command set being selected/operated upon.
	CommandSet* commands;
};
}
#endif
|
OL1V1A/springMVC | src/main/java/com/lwj/designPattern/factory/Shape.java | <gh_stars>1-10
package com.lwj.designPattern.factory;
/**
* @Auth: lwj
* @Date: 2019/7/3 10:15
*/
public interface Shape {
   /** Renders this shape; each concrete factory product supplies its own drawing. */
   void draw();
}
|
DavidHe1127/birds-supply-portal | src/Environment.js | <reponame>DavidHe1127/birds-supply-portal<gh_stars>1-10
import {Environment, Network, RecordSource, Store} from 'relay-runtime';
import auth from './auth';
// Relay store backed by an in-memory record source.
const store = new Store(new RecordSource());

// Relay network layer: POSTs each GraphQL operation to the local server with
// the current access token attached. On a 401 the browser is redirected to the
// server-provided Location before the request is failed. Any error is logged
// and the request resolves to undefined (matching the original best-effort
// behavior).
const network = Network.create(async (operation, variables) => {
  try {
    const response = await fetch('http://127.0.0.1:4000/graphql', {
      method: 'POST',
      headers: {
        Accept: 'application/json',
        'Content-Type': 'application/json',
        Authorization: `Bearer ${auth.get('accessToken').jwtToken}`,
      },
      body: JSON.stringify({
        query: operation.text,
        variables,
      }),
    });

    if (response.status === 401) {
      document.location = response.headers.get('Location');
      throw new Error(response.statusText);
    }

    const json = await response.json();
    if (json.errors) {
      // Surface GraphQL errors alongside the (possibly partial) data.
      json.data = Object.assign({}, json.data, {errors: json.errors});
    }
    return json;
  } catch (err) {
    console.error(err);
  }
});

const environment = new Environment({
  network,
  store,
});

export default environment;
|
resjbc/pharmacy-backend | dist/models/entitys/receipt.entity.js | <filename>dist/models/entitys/receipt.entity.js
"use strict";
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
Object.defineProperty(exports, "__esModule", { value: true });
const typeorm_1 = require("typeorm");
const class_validator_1 = require("class-validator");
const person_entity_1 = require("./person.entity");
let EReceipt = class EReceipt {
};
__decorate([
typeorm_1.PrimaryGeneratedColumn(),
__metadata("design:type", Object)
], EReceipt.prototype, "id_receipt", void 0);
__decorate([
typeorm_1.Column({ name: 'id_person', nullable: true }),
class_validator_1.IsNotEmpty(),
__metadata("design:type", Number)
], EReceipt.prototype, "id_person", void 0);
__decorate([
typeorm_1.Column({ name: 'id_member_create', nullable: true }),
class_validator_1.IsNotEmpty(),
__metadata("design:type", Number)
], EReceipt.prototype, "id_member_create", void 0);
__decorate([
typeorm_1.Column({ nullable: true }),
__metadata("design:type", String)
], EReceipt.prototype, "place", void 0);
__decorate([
typeorm_1.Column({ nullable: true }),
__metadata("design:type", String)
], EReceipt.prototype, "place_address", void 0);
__decorate([
typeorm_1.Column(),
class_validator_1.IsNotEmpty(),
__metadata("design:type", Date)
], EReceipt.prototype, "date_created", void 0);
__decorate([
typeorm_1.Column(),
class_validator_1.IsNotEmpty(),
__metadata("design:type", Date)
], EReceipt.prototype, "date_updated", void 0);
__decorate([
typeorm_1.Column({ nullable: true }),
__metadata("design:type", String)
], EReceipt.prototype, "id_reference", void 0);
__decorate([
typeorm_1.Column({ nullable: true }),
__metadata("design:type", String)
], EReceipt.prototype, "id_receipt_cash", void 0);
__decorate([
typeorm_1.Column({ nullable: true }),
__metadata("design:type", String)
], EReceipt.prototype, "id_receipt_cash_number", void 0);
__decorate([
typeorm_1.Column({ name: 'id_member_cash', nullable: true }),
__metadata("design:type", Number)
], EReceipt.prototype, "id_member_cash", void 0);
__decorate([
typeorm_1.OneToMany(type => EReceiptDetail, receiptDetail => receiptDetail.receipt),
__metadata("design:type", Array)
], EReceipt.prototype, "receiptDetails", void 0);
__decorate([
typeorm_1.ManyToOne(type => person_entity_1.EPerson, person => person.receipts, {
nullable: true,
onDelete: "SET NULL"
}),
typeorm_1.JoinColumn({ name: 'id_person' }),
__metadata("design:type", person_entity_1.EPerson)
], EReceipt.prototype, "person", void 0);
__decorate([
typeorm_1.ManyToOne(type => person_entity_1.EPerson, person => person.receipts, {
nullable: true,
onDelete: "SET NULL"
}),
typeorm_1.JoinColumn({ name: 'id_member_create' }),
__metadata("design:type", person_entity_1.EPerson)
], EReceipt.prototype, "member_create", void 0);
__decorate([
typeorm_1.ManyToOne(type => person_entity_1.EPerson, person => person.receipts, {
nullable: true,
onDelete: "SET NULL"
}),
typeorm_1.JoinColumn({ name: 'id_member_cash' }),
__metadata("design:type", person_entity_1.EPerson)
], EReceipt.prototype, "member_cash", void 0);
EReceipt = __decorate([
typeorm_1.Entity('Receipt')
], EReceipt);
exports.EReceipt = EReceipt;
let EReceiptDetail = class EReceiptDetail {
};
__decorate([
typeorm_1.PrimaryGeneratedColumn(),
__metadata("design:type", Object)
], EReceiptDetail.prototype, "id_receipt_detail", void 0);
__decorate([
typeorm_1.Column('id_receipt'),
class_validator_1.IsNotEmpty(),
__metadata("design:type", Number)
], EReceiptDetail.prototype, "id_receipt", void 0);
__decorate([
typeorm_1.Column(),
class_validator_1.IsNotEmpty(),
__metadata("design:type", Number)
], EReceiptDetail.prototype, "id_list", void 0);
__decorate([
typeorm_1.Column('text'),
class_validator_1.IsNotEmpty(),
__metadata("design:type", String)
], EReceiptDetail.prototype, "description", void 0);
__decorate([
typeorm_1.Column('text'),
class_validator_1.IsNotEmpty(),
__metadata("design:type", String)
], EReceiptDetail.prototype, "type", void 0);
__decorate([
typeorm_1.Column({ nullable: true }),
__metadata("design:type", Number)
], EReceiptDetail.prototype, "qty", void 0);
__decorate([
typeorm_1.Column(),
class_validator_1.IsNotEmpty(),
__metadata("design:type", Number)
], EReceiptDetail.prototype, "price", void 0);
__decorate([
typeorm_1.ManyToOne(type => EReceipt, receipt => receipt.receiptDetails, {
onUpdate: "CASCADE",
onDelete: "CASCADE"
}),
typeorm_1.JoinColumn({ name: 'id_receipt' }),
__metadata("design:type", EReceipt)
], EReceiptDetail.prototype, "receipt", void 0);
EReceiptDetail = __decorate([
typeorm_1.Entity('ReceiptDetail')
], EReceiptDetail);
exports.EReceiptDetail = EReceiptDetail;
class ParamReceipt {
}
__decorate([
class_validator_1.IsNotEmpty(),
class_validator_1.IsNumberString(),
__metadata("design:type", Object)
], ParamReceipt.prototype, "id_reference", void 0);
exports.ParamReceipt = ParamReceipt;
class ParamReceiptPerson {
}
__decorate([
class_validator_1.IsNotEmpty(),
class_validator_1.IsNumberString(),
__metadata("design:type", Object)
], ParamReceiptPerson.prototype, "cid", void 0);
exports.ParamReceiptPerson = ParamReceiptPerson;
class QueryReceipt {
}
__decorate([
class_validator_1.IsNotEmpty(),
__metadata("design:type", Object)
], QueryReceipt.prototype, "myDateStart", void 0);
__decorate([
class_validator_1.IsNotEmpty(),
__metadata("design:type", Object)
], QueryReceipt.prototype, "myDateEnd", void 0);
exports.QueryReceipt = QueryReceipt;
class QueryReceiptCash {
}
exports.QueryReceiptCash = QueryReceiptCash;
class ParamReceiptDetail {
}
__decorate([
class_validator_1.IsNotEmpty(),
__metadata("design:type", Number)
], ParamReceiptDetail.prototype, "id_list", void 0);
__decorate([
class_validator_1.IsNotEmpty(),
__metadata("design:type", String)
], ParamReceiptDetail.prototype, "description", void 0);
__decorate([
class_validator_1.IsNotEmpty(),
__metadata("design:type", Number)
], ParamReceiptDetail.prototype, "price", void 0);
__decorate([
class_validator_1.IsNotEmpty(),
__metadata("design:type", String)
], ParamReceiptDetail.prototype, "type", void 0);
exports.ParamReceiptDetail = ParamReceiptDetail;
class ParamInsertReceipt {
}
__decorate([
class_validator_1.IsNotEmpty(),
__metadata("design:type", Object)
], ParamInsertReceipt.prototype, "id_person", void 0);
__decorate([
class_validator_1.IsNotEmpty(),
__metadata("design:type", Object)
], ParamInsertReceipt.prototype, "id_member_create", void 0);
__decorate([
class_validator_1.IsNotEmpty(),
__metadata("design:type", Date)
], ParamInsertReceipt.prototype, "date_created", void 0);
__decorate([
class_validator_1.IsNotEmpty(),
__metadata("design:type", Date)
], ParamInsertReceipt.prototype, "date_updated", void 0);
__decorate([
class_validator_1.IsNotEmpty(),
__metadata("design:type", Array)
], ParamInsertReceipt.prototype, "receiptDetails", void 0);
exports.ParamInsertReceipt = ParamInsertReceipt;
class ParamUpdateCashReceipt {
}
__decorate([
class_validator_1.IsNotEmpty(),
__metadata("design:type", Object)
], ParamUpdateCashReceipt.prototype, "id_receipt", void 0);
__decorate([
class_validator_1.IsNotEmpty(),
__metadata("design:type", Object)
], ParamUpdateCashReceipt.prototype, "id_receipt_cash", void 0);
__decorate([
class_validator_1.IsNotEmpty(),
__metadata("design:type", Object)
], ParamUpdateCashReceipt.prototype, "id_receipt_cash_number", void 0);
__decorate([
class_validator_1.IsNotEmpty(),
__metadata("design:type", Object)
], ParamUpdateCashReceipt.prototype, "id_member_cash", void 0);
exports.ParamUpdateCashReceipt = ParamUpdateCashReceipt;
class ParamDeleteReceiptDetail {
}
__decorate([
class_validator_1.IsNotEmpty(),
class_validator_1.IsNumberString(),
__metadata("design:type", Object)
], ParamDeleteReceiptDetail.prototype, "id_receipt_detail", void 0);
exports.ParamDeleteReceiptDetail = ParamDeleteReceiptDetail;
//# sourceMappingURL=receipt.entity.js.map |
jingcao80/Elastos | Sources/Elastos/Frameworks/Droid/Base/Core/inc/elastos/droid/hardware/soundtrigger/CSoundTriggerKeyphraseSoundModel.h | <reponame>jingcao80/Elastos
//=========================================================================
// Copyright (C) 2012 The Elastos Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//=========================================================================
#ifndef __ELASTOS_DROID_HARDWARE_SOUNDTRIGGER_CSOUNDTRIGGERKEYPHRASESOUNDMODEL_H__
#define __ELASTOS_DROID_HARDWARE_SOUNDTRIGGER_CSOUNDTRIGGERKEYPHRASESOUNDMODEL_H__
#include "_Elastos_Droid_Hardware_Soundtrigger_CSoundTriggerKeyphraseSoundModel.h"
#include "elastos/droid/hardware/soundtrigger/SoundTriggerSoundModel.h"
#include <elastos/core/Object.h>
namespace Elastos {
namespace Droid {
namespace Hardware {
namespace Soundtrigger {
// Parcelable sound model carrying the keyword phrases used by the sound
// trigger hardware, in addition to the base SoundTriggerSoundModel fields.
CarClass(CSoundTriggerKeyphraseSoundModel)
    , public SoundTriggerSoundModel
    , public ISoundTriggerKeyphraseSoundModel
{
public:
    CAR_INTERFACE_DECL()

    CAR_OBJECT_DECL()

    CSoundTriggerKeyphraseSoundModel();

    // Default constructor used by the CAR object factory.
    CARAPI constructor();

    // Full constructor: model UUID, vendor UUID, opaque model data, and the
    // keyword phrases recognized by this model.
    CARAPI constructor(
        /* [in] */ IUUID* uuid,
        /* [in] */ IUUID* vendorUuid,
        /* [in] */ ArrayOf<Byte>* data,
        /* [in] */ ArrayOf<ISoundTriggerKeyphrase*>* keyphrases);

    //@Override
    CARAPI WriteToParcel(
        /* [in] */ IParcel* dest);

    //@Override
    CARAPI ReadFromParcel(
        /* [in] */ IParcel* source);

    //@Override
    CARAPI ToString(
        /* [out] */ String* str);

    // public static final Parcelable.Creator<KeyphraseSoundModel> CREATOR
    //         = new Parcelable.Creator<KeyphraseSoundModel>() {
    //     public KeyphraseSoundModel createFromParcel(Parcel in) {
    //         return KeyphraseSoundModel.fromParcel(in);
    //     }

    //     public KeyphraseSoundModel[] newArray(int size) {
    //         return new KeyphraseSoundModel[size];
    //     }
    // };

    // Returns a copy of the keyphrase array carried by this model.
    CARAPI GetKeyphrases(
        /* [out, callee] */ ArrayOf<ISoundTriggerKeyphrase*>** keyphrases);

private:
    // Deserialization helper used by the (commented-out) CREATOR pattern.
    static CARAPI FromParcel(
        /* [in] */ IParcel* source,
        /* [out] */ ISoundTriggerKeyphraseSoundModel** result);

public:
    /** Key phrases in this sound model */
    AutoPtr<ArrayOf<ISoundTriggerKeyphrase*> > mKeyphrases; // keyword phrases in model
};
} //Soundtrigger
} //Hardware
} //Droid
} //Elastos
#endif //__ELASTOS_DROID_HARDWARE_SOUNDTRIGGER_CSOUNDTRIGGERKEYPHRASESOUNDMODEL_H__
|
sarakborges/insta-clone | src/Styles/Themes/Dark.js | <gh_stars>0
// Dark UI theme: identifying slug, thumbnail swatch color, and the color
// palette consumed by the app's styled components.
const darkTheme = {
  slug: "dark",
  thumb: "#16141c",
  colors: {
    main: "#6447bd",
    mainVariation: "#8f72e3",
    bg: "#16141c",
    bgContrast: "#1b1924",
    bgInverted: "#ababab",
  },
};

export default darkTheme;
|
Boice-Technology/StackBasedScriptingLanguage | Packages/src/op_codes_stack/OP_DROP.go | <filename>Packages/src/op_codes_stack/OP_DROP.go
// Remove the top stack element.
package op_codes_stack
import "stack"
func OP_DROP(mainStack *stack.Stack) bool {
var verdict bool
_, verdict = mainStack.Pop()
return verdict
} |
jhayn94/Chess | src/main/java/chess/model/piece/Bishop.java | <gh_stars>0
package chess.model.piece;
import chess.model.ChessBoardModel;
import chess.model.Color;
import chess.model.Move;
import java.util.ArrayList;
import java.util.List;
public class Bishop extends ChessPiece {

    public Bishop(final Color color, final ChessBoardModel board) {
        super(color, board, PieceType.BISHOP);
    }

    /**
     * Collects every diagonal move from the given square (all four diagonal
     * directions), optionally removing destinations occupied by friendly
     * pieces.
     */
    @Override
    public List<Move> getMoves(final int sourceRow, final int sourceCol, final boolean filterFriendlyPieces) {
        final List<Move> candidateMoves = new ArrayList<>();
        this.getLinearMovesUpRight(sourceRow, sourceCol, candidateMoves);
        this.getLinearMovesDownRight(sourceRow, sourceCol, candidateMoves);
        this.getLinearMovesUpLeft(sourceRow, sourceCol, candidateMoves);
        this.getLinearMovesDownLeft(sourceRow, sourceCol, candidateMoves);
        return filterFriendlyPieces ? this.filterFriendlyPieces(candidateMoves) : candidateMoves;
    }
}
|
nathanrpage97/libfort | src/wcwidth.h | <reponame>nathanrpage97/libfort<gh_stars>100-1000
#ifndef WCWIDTH_H
#define WCWIDTH_H
#include "fort_utils.h"
#ifdef FT_HAVE_WCHAR
#include <wchar.h>
/* Computes the number of terminal display columns occupied by the first n
 * wide characters of pwcs (Markus Kuhn's wcswidth); returns -1 if a
 * non-printable character is encountered. */
FT_INTERNAL
int mk_wcswidth(const wchar_t *pwcs, size_t n);
#endif /* FT_HAVE_WCHAR */
#endif /* WCWIDTH_H */
|
Matt-Crow/JavaUtilities | SimpleChatApp/src/main/java/net/ServerMessage.java | package net;
/**
 * Placeholder for a message originating from the chat server; no fields or
 * behavior have been defined yet.
 *
 * @author Matt
 */
public class ServerMessage {
    
}
Rai220/Telephoto | app/src/main/java/com/rai220/securityalarmbot/commands/AbstractCommand.java | <gh_stars>10-100
package com.rai220.securityalarmbot.commands;
import com.pengrad.telegrambot.model.Message;
import com.rai220.securityalarmbot.BotService;
import com.rai220.securityalarmbot.telegram.TelegramService;
import java.util.Collection;
/**
*
*/
/**
 * Base class for bot commands. Holds the owning {@link BotService} and its
 * {@link TelegramService}, tracks enabled/hidden flags, and defines command
 * identity (equals/hashCode) purely by command name.
 */
public abstract class AbstractCommand implements ICommand {

    protected final BotService botService;
    protected final TelegramService telegramService;

    private boolean isEnable = true;
    private boolean isHide = false;

    public AbstractCommand(BotService service) {
        this.botService = service;
        this.telegramService = service.getTelegramService();
    }

    @Override
    public boolean isEnable() {
        return isEnable;
    }

    @Override
    public boolean isHide() {
        return isHide;
    }

    public void setEnable(boolean enable) {
        this.isEnable = enable;
    }

    public void setHide(boolean hide) {
        this.isHide = hide;
    }

    /** Default no-op execution; subclasses override to return follow-up commands. */
    @Override
    public Collection<ICommand> execute(Message message) {
        return null;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        return this.getName().equals(((ICommand) o).getName());
    }

    @Override
    public int hashCode() {
        return getName().hashCode();
    }
}
|
chiragnagpal/probflow | tests/unit/pytorch/distributions/test_one_hot_categorical.py | import numpy as np
import pytest
import torch
from probflow.distributions import OneHotCategorical
tod = torch.distributions
def is_close(a, b, tol=1e-3):
    """Return whether ``a`` and ``b`` differ by less than ``tol`` (absolute)."""
    difference = np.abs(a - b)
    return difference < tol
def test_OneHotCategorical():
    """Tests OneHotCategorical distribution"""

    # Create the distribution
    dist = OneHotCategorical(probs=torch.tensor([0.1, 0.2, 0.7]))

    # Check default params: only probs was given, so logits stays unset
    assert dist.logits is None
    assert isinstance(dist.probs, torch.Tensor)

    # Call should return backend obj
    assert isinstance(dist(), tod.one_hot_categorical.OneHotCategorical)

    # Test methods: prob of each one-hot vector matches its category prob
    assert is_close(dist.prob(torch.tensor([1.0, 0, 0])).numpy(), 0.1)
    assert is_close(dist.prob(torch.tensor([0, 1.0, 0])).numpy(), 0.2)
    assert is_close(dist.prob(torch.tensor([0, 0, 1.0])).numpy(), 0.7)

    """
    # Mean should return the mode!
    mean = dist.mean().numpy()
    assert mean.ndim == 1
    assert mean.shape[0] == 3
    """

    # Test sampling: a single sample is one one-hot vector of length 3;
    # sample(10) adds a leading batch dimension
    samples = dist.sample()
    assert isinstance(samples, torch.Tensor)
    assert samples.ndim == 1
    assert samples.shape[0] == 3
    samples = dist.sample(10)
    assert isinstance(samples, torch.Tensor)
    assert samples.ndim == 2
    assert samples.shape[0] == 10
    assert samples.shape[1] == 3

    # Should be able to set params
    dist = OneHotCategorical(logits=torch.tensor([1, 7, 2]))
    assert isinstance(dist.logits, torch.Tensor)
    assert dist.probs is None

    # But only with Tensor-like objs
    with pytest.raises(TypeError):
        dist = OneHotCategorical("lalala")

    # Multi-dim: 4 independent categorical distributions over 3 categories
    dist = OneHotCategorical(
        probs=torch.tensor(
            [
                [0.1, 0.7, 0.2],
                [0.8, 0.1, 0.1],
                [0.01, 0.01, 0.98],
                [0.3, 0.3, 0.4],
            ]
        )
    )
    # Each row's prob is read off the matching row of the probs matrix
    probs = dist.prob(
        torch.tensor(
            [
                [0.0, 1.0, 0.0],
                [1.0, 0.0, 0.0],
                [1.0, 0.0, 0.0],
                [0.0, 0.0, 1.0],
            ]
        )
    )
    assert is_close(probs[0], 0.7)
    assert is_close(probs[1], 0.8)
    assert is_close(probs[2], 0.01)
    assert is_close(probs[3], 0.4)

    # And ensure sample dims are correct: (4, 3) per draw, (10, 4, 3) batched
    samples = dist.sample()
    assert isinstance(samples, torch.Tensor)
    assert samples.ndim == 2
    assert samples.shape[0] == 4
    assert samples.shape[1] == 3
    samples = dist.sample(10)
    assert isinstance(samples, torch.Tensor)
    assert samples.ndim == 3
    assert samples.shape[0] == 10
    assert samples.shape[1] == 4
    assert samples.shape[2] == 3
|
aviolette/foodtrucklocator | main/src/test/java/foodtruck/model/TrackingDeviceTest.java | package foodtruck.model;
import com.google.common.testing.EqualsTester;
import org.joda.time.DateTime;
import org.junit.Test;
/**
* @author aviolette
* @since 11/23/16
*/
public class TrackingDeviceTest {

  /**
   * Verifies TrackingDevice's equals/hashCode with Guava's EqualsTester:
   * each addEqualityGroup pair differs from the previous groups by exactly
   * one builder field, so every field must participate in equality.
   */
  @Test
  public void equals() {
    TrackingDevice.Builder builder = TrackingDevice.builder()
        .label("label")
        .deviceNumber("device number");
    DateTime dt = new DateTime(2016, 11, 22, 0, 1);
    Location location = Location.builder()
        .name("FOO")
        .lat(12)
        .lng(-13)
        .build();
    new EqualsTester().addEqualityGroup(builder.build(), builder.build())
        .addEqualityGroup(builder.degreesFromNorth(125)
            .build(), builder.degreesFromNorth(125)
            .build())
        .addEqualityGroup(builder.atBlacklistedLocation(true)
            .build(), builder.atBlacklistedLocation(true)
            .build())
        .addEqualityGroup(builder.fuelLevel("123")
            .build(), builder.fuelLevel("123")
            .build())
        .addEqualityGroup(builder.batteryCharge("12.3v")
            .build(), builder.batteryCharge("12.3v")
            .build())
        .addEqualityGroup(builder.key(123)
            .build(), builder.key(123)
            .build())
        .addEqualityGroup(builder.lastBroadcast(dt)
            .build(), builder.lastBroadcast(dt)
            .build())
        .addEqualityGroup(builder.lastLocation(location)
            .build(), builder.lastLocation(location)
            .build())
        .addEqualityGroup(builder.lastModified(dt)
            .build(), builder.lastModified(dt)
            .build())
        .addEqualityGroup(builder.truckOwnerId("foo")
            .build(), builder.truckOwnerId("foo")
            .build())
        .addEqualityGroup(builder.lastActualLocation(location).build(),
            builder.lastActualLocation(location).build())
        .testEquals();
  }
}
7kbird/chrome | mojo/tools/run_mojo_python_tests.py | <gh_stars>0
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import json
import os
import sys
import time
import unittest
def main():
  """Parse arguments, run the mojo pylib unit tests, report the result.

  Returns 0 if all tests pass, 1 on test failure, 2 on malformed --metadata.
  NOTE: this file is Python 2 (`print >> sys.stderr` / print statements).
  """
  parser = argparse.ArgumentParser()
  parser.usage = 'run_mojo_python_tests.py [options] [tests...]'
  parser.add_argument('-v', '--verbose', action='count', default=0)
  parser.add_argument('--metadata', action='append', default=[],
                      help=('optional key=value metadata that will be stored '
                            'in the results files (can be used for revision '
                            'numbers, etc.)'))
  parser.add_argument('--write-full-results-to', metavar='FILENAME',
                      action='store',
                      help='path to write the list of full results to.')
  parser.add_argument('tests', nargs='*')
  args = parser.parse_args()
  # Validate metadata up front: every entry must look like key=value.
  bad_metadata = False
  for val in args.metadata:
    if '=' not in val:
      print >> sys.stderr, ('Error: malformed metadata "%s"' % val)
      bad_metadata = True
  if bad_metadata:
    print >> sys.stderr
    parser.print_help()
    return 2
  # This script lives two levels below the Chromium src root (mojo/tools).
  chromium_src_dir = os.path.join(os.path.dirname(__file__),
                                  os.pardir,
                                  os.pardir)
  loader = unittest.loader.TestLoader()
  print "Running Python unit tests under mojo/public/tools/bindings/pylib ..."
  pylib_dir = os.path.join(chromium_src_dir, 'mojo', 'public',
                           'tools', 'bindings', 'pylib')
  # With explicit test names: make pylib importable and load just those tests;
  # otherwise discover every *_unittest.py under pylib.
  if args.tests:
    if pylib_dir not in sys.path:
      sys.path.append(pylib_dir)
    suite = unittest.TestSuite()
    for test_name in args.tests:
      suite.addTests(loader.loadTestsFromName(test_name))
  else:
    suite = loader.discover(pylib_dir, pattern='*_unittest.py')
  runner = unittest.runner.TextTestRunner(verbosity=(args.verbose + 1))
  result = runner.run(suite)
  full_results = _FullResults(suite, result, args.metadata)
  if args.write_full_results_to:
    with open(args.write_full_results_to, 'w') as fp:
      json.dump(full_results, fp, indent=2)
      fp.write("\n")
  return 0 if result.wasSuccessful() else 1
TEST_SEPARATOR = '.'
def _FullResults(suite, result, metadata):
  """Convert the unittest results to the Chromium JSON test result format.

  This matches run-webkit-tests (the layout tests) and the flakiness dashboard.
  """
  full = dict(
      interrupted=False,
      path_delimiter=TEST_SEPARATOR,
      version=3,
      seconds_since_epoch=time.time(),
  )
  # Caller-supplied key=value metadata is stored at the top level.
  for entry in metadata:
    key, value = entry.split('=', 1)
    full[key] = value
  test_names = _AllTestNames(suite)
  failures = _FailedTestNames(result)
  full['num_failures_by_type'] = {
      'FAIL': len(failures),
      'PASS': len(test_names) - len(failures),
  }
  full['tests'] = {}
  for name in test_names:
    outcome = {'expected': 'PASS'}
    if name in failures:
      outcome['actual'] = 'FAIL'
      outcome['is_unexpected'] = True
    else:
      outcome['actual'] = 'PASS'
    _AddPathToTrie(full['tests'], name, outcome)
  return full
def _AllTestNames(suite):
test_names = []
# _tests is protected pylint: disable=W0212
for test in suite._tests:
if isinstance(test, unittest.suite.TestSuite):
test_names.extend(_AllTestNames(test))
else:
test_names.append(test.id())
return test_names
def _FailedTestNames(result):
return set(test.id() for test, _ in result.failures + result.errors)
def _AddPathToTrie(trie, path, value):
  """Insert `value` into the nested dict `trie`, one level per path component.

  `path` is split on TEST_SEPARATOR; intermediate dicts are created as needed
  and the final component maps to `value`.
  """
  if TEST_SEPARATOR not in path:
    trie[path] = value
    return
  head, rest = path.split(TEST_SEPARATOR, 1)
  subtrie = trie.setdefault(head, {})
  _AddPathToTrie(subtrie, rest, value)
if __name__ == '__main__':
sys.exit(main())
|
Limoentaart/PXL_IT | PXL_DIGITAL_JAAR_1/Programming Basics/Oplossingen Oefeningen/H6/oefening8/Oefening8.java | package oefening8;
import java.awt.Point;
import java.awt.Rectangle;
public class Oefening8 {
    /**
     * Exercises java.awt.Rectangle and Point: containment checks,
     * translation, resizing and growing. Console output is in Dutch.
     */
    public static void main(String[] args) {
        Rectangle rect = new Rectangle(1, 2, 20, 30);
        Point punt = new Point(5, 5);
        if (rect.contains(punt)) {
            System.out.println("punt ligt in rect.");
        } else {
            System.out.println("punt ligt niet in rect.");
        }
        punt.translate(40, 50);
        Rectangle rect2 = new Rectangle(punt);
        rect2.setSize(100, 110);
        System.out.println("X: " + rect2.x + " en Y: " + rect2.y);
        // contains(x, y, w, h): does the rectangle (1,2,100,110) lie inside rect2?
        if (rect2.contains(1, 2, 100, 110)) {
            System.out.println("rect ligt in rect2.");
        } else {
            System.out.println("rect ligt niet in rect2.");
        }
        rect.grow(20, 40); // grow(h, v) widens by h on the left AND right, and by v on the top AND bottom.
        if (rect.getHeight() == rect2.getHeight()) {
            System.out.println("rect heeft dezelfde hoogte als rect2.");
        } else {
            System.out.println("rect heeft niet dezelfde hoogte als rect2.");
        }
    }
}
|
MirageEarl/activiti | modules/activiti-engine/src/main/java/org/activiti/engine/impl/juel/ExpressionNode.java | <filename>modules/activiti-engine/src/main/java/org/activiti/engine/impl/juel/ExpressionNode.java
/*
* Based on JUEL 2.2.1 code, 2006-2009 Odysseus Software GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.engine.impl.juel;
import org.activiti.engine.impl.javax.el.ELContext;
import org.activiti.engine.impl.javax.el.MethodInfo;
import org.activiti.engine.impl.javax.el.ValueReference;
/**
* Expression node interface. This interface provides all the methods needed for value expressions
* and method expressions.
*
* @see org.activiti.engine.impl.juel.Tree
* @author <NAME>
*/
public interface ExpressionNode extends Node {
/**
* @return <code>true</code> if this node represents literal text
*/
public boolean isLiteralText();
/**
* @return <code>true</code> if the subtree rooted at this node could be used as an lvalue
* expression (identifier or property sequence with non-literal prefix).
*/
public boolean isLeftValue();
/**
* @return <code>true</code> if the subtree rooted at this node is a method invocation.
*/
public boolean isMethodInvocation();
/**
* Evaluate node.
*
* @param bindings
* bindings containing variables and functions
* @param context
* evaluation context
* @param expectedType
* result type
* @return evaluated node, coerced to the expected type
*/
public Object getValue(Bindings bindings, ELContext context, Class<?> expectedType);
/**
* Get value reference.
*
* @param bindings
* @param context
* @return value reference
*/
public ValueReference getValueReference(Bindings bindings, ELContext context);
/**
* Get the value type accepted in {@link #setValue(Bindings, ELContext, Object)}.
*
* @param bindings
* bindings containing variables and functions
* @param context
* evaluation context
* @return accepted type or <code>null</code> for non-lvalue nodes
*/
public Class<?> getType(Bindings bindings, ELContext context);
/**
* Determine whether {@link #setValue(Bindings, ELContext, Object)} will throw a
* {@link org.activiti.engine.impl.javax.el.PropertyNotWritableException}.
*
* @param bindings
* bindings containing variables and functions
* @param context
* evaluation context
* @return <code>true</code> if this a read-only expression node
*/
public boolean isReadOnly(Bindings bindings, ELContext context);
/**
* Assign value.
*
* @param bindings
* bindings containing variables and functions
* @param context
* evaluation context
* @param value
* value to set
*/
public void setValue(Bindings bindings, ELContext context, Object value);
/**
* Get method information. If this is a non-lvalue node, answer <code>null</code>.
*
* @param bindings
* bindings containing variables and functions
* @param context
* evaluation context
* @param returnType
* expected method return type (may be <code>null</code> meaning don't care)
* @param paramTypes
* expected method argument types
* @return method information or <code>null</code>
*/
public MethodInfo getMethodInfo(Bindings bindings, ELContext context, Class<?> returnType, Class<?>[] paramTypes);
/**
* Invoke method.
*
* @param bindings
* bindings containing variables and functions
* @param context
* evaluation context
* @param returnType
* expected method return type (may be <code>null</code> meaning don't care)
* @param paramTypes
* expected method argument types
* @param paramValues
* parameter values
* @return result of the method invocation
*/
public Object invoke(Bindings bindings, ELContext context, Class<?> returnType, Class<?>[] paramTypes, Object[] paramValues);
  /**
   * Get the canonical expression string for this node. Variable and function names will be
   * replaced in a way such that two expression nodes that have the same node structure and
   * bindings will also answer the same value here.
   * <p/>
   * For example, <code>"${foo:bar()+2*foobar}"</code> may lead to
   * <code>"${<fn>() + 2 * <var>}"</code> if <code>foobar</code> is a bound variable.
   * Otherwise, the structural id would be <code>"${<fn>() + 2 * foobar}"</code>.
   * <p/>
   * If <code>bindings</code> is <code>null</code>, the full canonical subexpression is returned.
   */
public String getStructuralId(Bindings bindings);
}
|
ahenson/flatworm | flatworm-core/src/main/java/com/blackbear/flatworm/BeanMappingStrategy.java | <reponame>ahenson/flatworm
/*
* Flatworm - A Java Flat File Importer/Exporter Copyright (C) 2004 <NAME>.
* Extended by <NAME>rence 2005
* Extended by <NAME> in 2011 and 2012
* Extended by <NAME> in 2016
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.blackbear.flatworm;
import com.blackbear.flatworm.config.ConversionOptionBO;
import com.blackbear.flatworm.errors.FlatwormParserException;
import java.util.Map;
/**
* Provides the ability to specify how the raw data parsed from the input file is to be mapped into the beans.
*
* @author <NAME>
*/
public interface BeanMappingStrategy {
    /**
     * Maps {@code value} onto the given {@code property} of {@code bean}.
     *
     * @param bean     the target bean instance to populate
     * @param beanName the logical name of the bean (presumably the name used in
     *                 the flatworm mapping configuration -- confirm against callers)
     * @param property the name of the bean property to set
     * @param value    the parsed value to assign
     * @param conv     conversion options keyed by option name; semantics are
     *                 defined by the implementation
     * @throws FlatwormParserException if the value cannot be mapped onto the bean
     */
    void mapBean(Object bean, String beanName, String property, Object value,
            Map<String, ConversionOptionBO> conv) throws FlatwormParserException;
}
|
bethyd/Peace-Of-Mind | src/renderer/ChunkRenderer.cpp | <gh_stars>0
#include "ChunkRenderer.hpp"
ChunkRenderer::ChunkRenderer()
{
    // Cache the shared block-texture atlas. NOTE(review): m_texture appears to
    // be a non-owning handle managed by TextureManager -- confirm its lifetime.
    m_texture = TextureManager::get().getTexture("atlas.png");
}
/// Queue the chunk section's non-empty meshes for the next render pass.
/// Empty meshes (zero indices) are skipped entirely.
void ChunkRenderer::addChunk(const ChunkSection& chunk)
{
    const auto& chunkMeshes = chunk.getMeshes();

    if (chunkMeshes.solid.getIndicesCount() > 0)
    {
        m_solidMeshes.push_back(&chunkMeshes.solid);
    }
    if (chunkMeshes.liquid.getIndicesCount() > 0)
    {
        m_liquidMeshes.push_back(&chunkMeshes.liquid);
    }
}
/// Draw every queued solid mesh with the block atlas bound, then drop the queue.
void ChunkRenderer::renderSolid()
{
    m_texture->bind();

    for (auto* meshPtr : m_solidMeshes)
    {
        meshPtr->bind();
        glDrawElements(GL_TRIANGLES, meshPtr->getIndicesCount(), GL_UNSIGNED_INT, 0);
        glBindVertexArray(0);
    }

    m_solidMeshes.clear();
}
/// Draw every queued liquid mesh with blending on and back-face culling off,
/// restoring both GL states afterwards, then drop the queue.
void ChunkRenderer::renderLiquid()
{
    glDisable(GL_CULL_FACE);
    glEnable(GL_BLEND);

    for (auto* meshPtr : m_liquidMeshes)
    {
        meshPtr->bind();
        glDrawElements(GL_TRIANGLES, meshPtr->getIndicesCount(), GL_UNSIGNED_INT, 0);
        glBindVertexArray(0);
    }

    m_liquidMeshes.clear();

    glDisable(GL_BLEND);
    glEnable(GL_CULL_FACE);
}
|
garmin/Wildcard_Plugin | Godeps/_workspace/src/github.com/cloudfoundry/cli/cf/app_files/fakes/fake_app_files.go | <gh_stars>10-100
// This file was generated by counterfeiter
package fakes
import (
. "github.com/cloudfoundry/cli/cf/app_files"
"github.com/cloudfoundry/cli/cf/models"
"sync"
)
type FakeAppFiles struct {
AppFilesInDirStub func(dir string) (appFiles []models.AppFileFields, err error)
appFilesInDirMutex sync.RWMutex
appFilesInDirArgsForCall []struct {
dir string
}
appFilesInDirReturns struct {
result1 []models.AppFileFields
result2 error
}
CopyFilesStub func(appFiles []models.AppFileFields, fromDir, toDir string) (err error)
copyFilesMutex sync.RWMutex
copyFilesArgsForCall []struct {
appFiles []models.AppFileFields
fromDir string
toDir string
}
copyFilesReturns struct {
result1 error
}
CountFilesStub func(directory string) int64
countFilesMutex sync.RWMutex
countFilesArgsForCall []struct {
directory string
}
countFilesReturns struct {
result1 int64
}
WalkAppFilesStub func(dir string, onEachFile func(string, string) error) (err error)
walkAppFilesMutex sync.RWMutex
walkAppFilesArgsForCall []struct {
dir string
onEachFile func(string, string) error
}
walkAppFilesReturns struct {
result1 error
}
}
// AppFilesInDir records the call, then delegates to AppFilesInDirStub when one
// is configured; otherwise it returns the canned appFilesInDirReturns values.
func (fake *FakeAppFiles) AppFilesInDir(dir string) (appFiles []models.AppFileFields, err error) {
	fake.appFilesInDirMutex.Lock()
	defer fake.appFilesInDirMutex.Unlock()
	fake.appFilesInDirArgsForCall = append(fake.appFilesInDirArgsForCall, struct {
		dir string
	}{dir})
	if fake.AppFilesInDirStub == nil {
		return fake.appFilesInDirReturns.result1, fake.appFilesInDirReturns.result2
	}
	return fake.AppFilesInDirStub(dir)
}
func (fake *FakeAppFiles) AppFilesInDirCallCount() int {
fake.appFilesInDirMutex.RLock()
defer fake.appFilesInDirMutex.RUnlock()
return len(fake.appFilesInDirArgsForCall)
}
func (fake *FakeAppFiles) AppFilesInDirArgsForCall(i int) string {
fake.appFilesInDirMutex.RLock()
defer fake.appFilesInDirMutex.RUnlock()
return fake.appFilesInDirArgsForCall[i].dir
}
func (fake *FakeAppFiles) AppFilesInDirReturns(result1 []models.AppFileFields, result2 error) {
fake.appFilesInDirReturns = struct {
result1 []models.AppFileFields
result2 error
}{result1, result2}
}
// CopyFiles records the call, then delegates to CopyFilesStub when one is
// configured; otherwise it returns the canned copyFilesReturns value.
func (fake *FakeAppFiles) CopyFiles(appFiles []models.AppFileFields, fromDir string, toDir string) (err error) {
	fake.copyFilesMutex.Lock()
	defer fake.copyFilesMutex.Unlock()
	fake.copyFilesArgsForCall = append(fake.copyFilesArgsForCall, struct {
		appFiles []models.AppFileFields
		fromDir  string
		toDir    string
	}{appFiles, fromDir, toDir})
	if fake.CopyFilesStub == nil {
		return fake.copyFilesReturns.result1
	}
	return fake.CopyFilesStub(appFiles, fromDir, toDir)
}
func (fake *FakeAppFiles) CopyFilesCallCount() int {
fake.copyFilesMutex.RLock()
defer fake.copyFilesMutex.RUnlock()
return len(fake.copyFilesArgsForCall)
}
func (fake *FakeAppFiles) CopyFilesArgsForCall(i int) ([]models.AppFileFields, string, string) {
fake.copyFilesMutex.RLock()
defer fake.copyFilesMutex.RUnlock()
return fake.copyFilesArgsForCall[i].appFiles, fake.copyFilesArgsForCall[i].fromDir, fake.copyFilesArgsForCall[i].toDir
}
func (fake *FakeAppFiles) CopyFilesReturns(result1 error) {
fake.copyFilesReturns = struct {
result1 error
}{result1}
}
// CountFiles records the call, then delegates to CountFilesStub when one is
// configured; otherwise it returns the canned countFilesReturns value.
func (fake *FakeAppFiles) CountFiles(directory string) int64 {
	fake.countFilesMutex.Lock()
	defer fake.countFilesMutex.Unlock()
	fake.countFilesArgsForCall = append(fake.countFilesArgsForCall, struct {
		directory string
	}{directory})
	if fake.CountFilesStub == nil {
		return fake.countFilesReturns.result1
	}
	return fake.CountFilesStub(directory)
}
func (fake *FakeAppFiles) CountFilesCallCount() int {
fake.countFilesMutex.RLock()
defer fake.countFilesMutex.RUnlock()
return len(fake.countFilesArgsForCall)
}
func (fake *FakeAppFiles) CountFilesArgsForCall(i int) string {
fake.countFilesMutex.RLock()
defer fake.countFilesMutex.RUnlock()
return fake.countFilesArgsForCall[i].directory
}
func (fake *FakeAppFiles) CountFilesReturns(result1 int64) {
fake.countFilesReturns = struct {
result1 int64
}{result1}
}
// WalkAppFiles records the call, then delegates to WalkAppFilesStub when one
// is configured; otherwise it returns the canned walkAppFilesReturns value.
func (fake *FakeAppFiles) WalkAppFiles(dir string, onEachFile func(string, string) error) (err error) {
	fake.walkAppFilesMutex.Lock()
	defer fake.walkAppFilesMutex.Unlock()
	fake.walkAppFilesArgsForCall = append(fake.walkAppFilesArgsForCall, struct {
		dir        string
		onEachFile func(string, string) error
	}{dir, onEachFile})
	if fake.WalkAppFilesStub == nil {
		return fake.walkAppFilesReturns.result1
	}
	return fake.WalkAppFilesStub(dir, onEachFile)
}
func (fake *FakeAppFiles) WalkAppFilesCallCount() int {
fake.walkAppFilesMutex.RLock()
defer fake.walkAppFilesMutex.RUnlock()
return len(fake.walkAppFilesArgsForCall)
}
func (fake *FakeAppFiles) WalkAppFilesArgsForCall(i int) (string, func(string, string) error) {
fake.walkAppFilesMutex.RLock()
defer fake.walkAppFilesMutex.RUnlock()
return fake.walkAppFilesArgsForCall[i].dir, fake.walkAppFilesArgsForCall[i].onEachFile
}
func (fake *FakeAppFiles) WalkAppFilesReturns(result1 error) {
fake.walkAppFilesReturns = struct {
result1 error
}{result1}
}
var _ AppFiles = new(FakeAppFiles)
|
DreamLi1314/shared | java/v3/pushweather/src/main/java/org/javafamily/model/Weather.java | <reponame>DreamLi1314/shared<gh_stars>0
package org.javafamily.model;
import java.io.Serializable;
public class Weather implements Serializable {
    // Forecast entries for this report; null until setWeather() is called.
    private WeatherItem[] weather;
    /** @return the forecast entries (may be null if not yet set). */
    public WeatherItem[] getWeather() {
        return weather;
    }
    /** @param weather the forecast entries to store. */
    public void setWeather(WeatherItem[] weather) {
        this.weather = weather;
    }
}
|
Nuvoloso/gui_open_source | slog/src/reducers/accounts.js | <gh_stars>0
// Copyright 2019 <NAME>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { getErrorMsg } from '../components/utils';
import * as types from '../actions/types';
/**
* Account shape is
* accounts: Array of Account objects as defined by the API
* error: Current error message
* loading: boolean to indicate if operation is in progress
* settingDomain: boolean indicating if operation is in progress
* domainSetId: name of the domain id that the acccount was set to
*/
export const initialState = {
accounts: [],
domainSetId: '',
error: null,
loading: false,
settingDomain: false,
settingPolicy: false,
snapshotCatalogDomainId: '',
};
/**
* Normalize account data returned from the REST API
* "description" will be set to "" if not returned
* "disabled" will be set if false or not returned
* @param { array } accounts - Accounts configured in system
* @returns { array } - Normalized array of accounts
*/
export function normalize(accounts) {
const normalized = [];
if (!accounts) {
return normalized;
}
accounts.forEach(account => {
const {
description = '',
disabled = false,
meta,
name,
protectionDomains,
snapshotCatalogPolicy,
tags,
tenantAccountId,
userRoles = {},
} = account || {};
normalized.push({
description,
disabled,
meta,
name,
protectionDomains,
snapshotCatalogPolicy,
tags,
tenantAccountId,
userRoles,
});
});
return normalized;
}
/**
 * Reducer for account CRUD, protection-domain and snapshot-policy actions.
 * Every branch returns a fresh state object; `state` is never mutated.
 */
export default function(state = initialState, action) {
    switch (action.type) {
        case types.GET_ACCOUNTS_REQUEST:
            return {
                ...state,
                error: null,
                loading: true,
            };
        case types.GET_ACCOUNTS_FAILURE:
            return {
                ...state,
                accounts: [],
                error: getErrorMsg(action.error),
                loading: false,
            };
        case types.GET_ACCOUNTS_SUCCESS:
            return {
                ...state,
                accounts: normalize(action.payload),
                error: null,
                loading: false,
            };
        case types.POST_ACCOUNTS_REQUEST:
            return {
                ...state,
                loading: true,
                error: null,
            };
        case types.POST_ACCOUNTS_FAILURE:
            return {
                ...state,
                error: getErrorMsg(action.error),
                loading: false,
            };
        case types.POST_ACCOUNTS_SUCCESS:
            return {
                ...state,
                accounts: [...state.accounts, action.payload],
                error: null,
                loading: false,
            };
        case types.DELETE_ACCOUNTS_REQUEST:
            return {
                ...state,
                loading: true,
                error: null,
            };
        case types.DELETE_ACCOUNTS_FAILURE: {
            // Build one message listing every account that failed to delete:
            // '"name": message' entries, comma-separated.
            const errorMessage = `Error deleting ${
                action.payload.failedDeletes.length === 1
                    ? 'Account'
                    : action.payload.failedDeletes.length + ' Accounts'
            }`;
            const failedDeletes = action.payload.failedDeletes;
            const allFailures = failedDeletes.reduce(
                (failedMessages, failure, idx) =>
                    failedMessages +
                    ` "${failure.name}": ${failure.message} ${idx !== failedDeletes.length - 1 ? ',' : ''} `,
                ''
            );
            return {
                ...state,
                error: errorMessage.concat(allFailures),
                loading: false,
            };
        }
        case types.DELETE_ACCOUNTS_SUCCESS:
            // Drop every account whose id appears in the deleted-ids payload.
            return {
                ...state,
                accounts: state.accounts.filter(a => !action.payload.ids.includes(a.meta.id)),
                error: null,
                loading: false,
            };
        case types.UPDATE_ACCOUNTS_REQUEST:
            return {
                ...state,
                loading: true,
                error: null,
            };
        case types.UPDATE_ACCOUNTS_FAILURE:
            return {
                ...state,
                error: getErrorMsg(action.error),
                loading: false,
            };
        case types.UPDATE_ACCOUNTS_SUCCESS:
            // Replace the matching account in place; others pass through.
            return {
                ...state,
                accounts: state.accounts.map(acct => {
                    if (acct.meta.id === action.payload.meta.id) {
                        return action.payload;
                    } else {
                        return acct;
                    }
                }),
                error: null,
                loading: false,
            };
        case types.POST_ACCOUNT_PROTECTION_DOMAIN_SET_REQUEST:
            return {
                ...state,
                settingDomain: true,
                error: null,
                domainSetId: '',
            };
        case types.POST_ACCOUNT_PROTECTION_DOMAIN_SET_FAILURE:
            return {
                ...state,
                settingDomain: false,
                error: getErrorMsg(action.error),
                domainSetId: '',
            };
        case types.POST_ACCOUNT_PROTECTION_DOMAIN_SET_SUCCESS: {
            // Guarded destructuring: domainSetId becomes undefined if the
            // action carries no protectionDomain/meta.
            const { protectionDomain } = action;
            const { meta } = protectionDomain || {};
            const { id } = meta || {};
            return {
                ...state,
                error: null,
                settingDomain: false,
                domainSetId: id,
            };
        }
        case types.POST_ACCOUNT_SNAPSHOT_POLICY_SET_REQUEST:
            return {
                ...state,
                settingPolicy: true,
                error: null,
            };
        // NOTE(review): the REQUEST constant uses "_SET_" but FAILURE/SUCCESS
        // do not -- confirm these names match actions/types.js.
        case types.POST_ACCOUNT_SNAPSHOT_POLICY_FAILURE:
            return {
                ...state,
                settingPolicy: false,
                error: getErrorMsg(action.error),
            };
        case types.POST_ACCOUNT_SNAPSHOT_POLICY_SUCCESS: {
            return {
                ...state,
                accounts: state.accounts.map(acct => {
                    if (acct.meta.id === action.payload.meta.id) {
                        return action.payload;
                    } else {
                        return acct;
                    }
                }),
                error: null,
                settingPolicy: false,
            };
        }
        default:
            return state;
    }
}
|
lizhifuabc/spring-boot-learn | spring-boot-design/design-adapter/src/main/java/com/design/adapter/classtype/Test.java | <reponame>lizhifuabc/spring-boot-learn
package com.design.adapter.classtype;
/**
 * Demonstrates the class adapter pattern: the adapter's main job is to make
 * the target role's API (DC5V) callable in terms of the source role's API,
 * acting purely as a relay in the middle without affecting the original
 * behavior of either the source or the target role.
 * Summary: the Adapter class extends the source class and implements the
 * target interface, completing the source -> target adaptation.
 * @author lizhifu
 * @date 2020/12/28
 */
public class Test {
    public static void main(String[] args) {
        DC5V dc5V = new Adapter();
        // dc5v() returns the adapted voltage (see Adapter).
        int dc5 = dc5V.dc5v();
        System.out.println("转换后的电压为:" + dc5 + " 伏...");
    }
}
|
alaasamameer/micro-workflow-dir | module-mwf/src/org/cvpv/KafkaaProducer.java | <filename>module-mwf/src/org/cvpv/KafkaaProducer.java<gh_stars>0
/*
*
* First version in 2017.
 * version 5-light (2020).
* '$Author: <NAME> $'
* <EMAIL>
*
*/
package org.ameer.kafka;
import ptolemy.actor.Director;
import ptolemy.actor.TypedIOPort;
import ptolemy.actor.lib.LimitedFiringSource;
import ptolemy.actor.parameters.PortParameter;
import ptolemy.data.RecordToken;
import ptolemy.data.StringToken;
import ptolemy.data.type.BaseType;
import ptolemy.kernel.CompositeEntity;
import ptolemy.kernel.util.IllegalActionException;
import ptolemy.kernel.util.NameDuplicationException;
import io.confluent.kafka.serializers.KafkaAvroSerializer;
import java.util.Properties;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.generic.GenericRecordBuilder;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.StringSerializer;
/**
* The KafkaaProducer actor receives data as Record of tokens from input port,
* serializes the data as an Avro message and publishes it into the Kafka message queue
* Also the schema registry server provided by Confluent must be used.
* Also it able to check if the related environment parameters in the host are set to values,
* if yes, so set them to the variable in Actor,
* or take the values from the GUI.
* the environment parameters as follow:
* OUT_KAFKA_SERVER
* OUT_SCHEMA_REG_SERVER
* OUT_TOPIC
* OUT_AVRO_SCHEMA
* OUT_PARMS_TO_READ // it can be sets of parameters separated by spaces
*
* @author <NAME>
*/
public class KafkaaProducer extends LimitedFiringSource {
///////////////////////////////////////////////////////////////////
//// public methods ////
    /**
     * Constructs the actor: creates the record-typed input port and the
     * string-mode configuration parameters (Kafka broker address, schema
     * registry URL, topic, Avro schema, and the space-separated list of
     * record fields to publish), each seeded with example drop-down choices.
     *
     * @param container the containing composite entity
     * @param name the name of this actor
     * @throws NameDuplicationException if the container already holds an entity with this name
     * @throws IllegalActionException if the actor cannot be constructed
     */
    public KafkaaProducer(CompositeEntity container, String name)
            throws NameDuplicationException, IllegalActionException {
        super(container, name);
        // Force configuration resolution on the first fire() (see callProp()).
        P_checkCallProp = true;
        input = new TypedIOPort(this, "input", true, false);
        input.setTypeEquals(BaseType.RECORD);
        P_KafkaAddressInputParam = new PortParameter(this, "Kafka Address and Port ");
        P_SchemaRegistryInputParam = new PortParameter(this, "Schema Registry Address and Port ");
        P_TopicParam = new PortParameter(this, "Topic To Subscribe ");
        P_SchemaParam = new PortParameter(this, "Schema ");
        P_ParametersToReadParam = new PortParameter(this, "Parameters To Read ");
        P_KafkaAddressInputParam.setStringMode(true);
        P_KafkaAddressInputParam.setTypeEquals(BaseType.STRING);
        P_KafkaAddressInputParam.getPort().setTypeEquals(BaseType.STRING);
        P_KafkaAddressInputParam.addChoice("192.168.1.2:9092");
        P_KafkaAddressInputParam.addChoice("localhost:9092");
        P_SchemaRegistryInputParam.setStringMode(true);
        P_SchemaRegistryInputParam.setTypeEquals(BaseType.STRING);
        P_SchemaRegistryInputParam.getPort().setTypeEquals(BaseType.STRING);
        P_SchemaRegistryInputParam.addChoice("http://192.168.1.2:8081");
        P_SchemaRegistryInputParam.addChoice("http://localhost:8081");
        P_TopicParam.setStringMode(true);
        P_TopicParam.setTypeEquals(BaseType.STRING);
        P_TopicParam.getPort().setTypeEquals(BaseType.STRING);
        P_TopicParam.addChoice("Kepler-customer-avro");
        P_TopicParam.addChoice("test-avro");
        P_SchemaParam.setStringMode(true);
        P_SchemaParam.setTypeEquals(BaseType.STRING);
        P_SchemaParam.getPort().setTypeEquals(BaseType.STRING);
        P_SchemaParam.addChoice("none");
        P_ParametersToReadParam.setStringMode(true);
        P_ParametersToReadParam.setTypeEquals(BaseType.STRING);
        P_ParametersToReadParam.getPort().setTypeEquals(BaseType.STRING);
        P_ParametersToReadParam.addChoice("first_name last_name age height weight automated_email");
    }
@Override
public void fire() throws IllegalActionException {
super.fire();
Director director = getDirector();
if (director == null) {
throw new IllegalActionException(this, "No director!");
}
if (P_checkCallProp)
callProp();
String [] getVaribleFromString = _p_parametersToRead.trim().split("\\s+");
String myTopic=_p_topic;
if (input.hasToken(0)) {
RecordToken in_record = (RecordToken) input.get(0);
GenericRecordBuilder sendRecordBuilder = new GenericRecordBuilder(_p_schema);
for (int i =0 ; i < getVaribleFromString.length;i++)
{
if (in_record.get(getVaribleFromString[i])==null)
{
System.out.println("No such field in record: "+getVaribleFromString[i]);
}else {
String check=nameOf(in_record.get(getVaribleFromString[i]));
switch (check) {
case "StringToken":
{
sendRecordBuilder.set(getVaribleFromString[i], in_record.get(getVaribleFromString[i]).toString());
}
break;
case "IntToken":
{
Integer value = Integer.parseInt( in_record.get(getVaribleFromString[i]).toString());
sendRecordBuilder.set(getVaribleFromString[i], value );
}
break;
case "LongToken":
{
Long value = Long.valueOf( in_record.get(getVaribleFromString[i]).toString().replace("L", ""));
sendRecordBuilder.set(getVaribleFromString[i], value );
}
break;
case "DoubleToken":
{
Double value = Double.parseDouble( in_record.get(getVaribleFromString[i]).toString());
sendRecordBuilder.set(getVaribleFromString[i], value );
}
break;
case "FloatToken":
{
Float value = Float.parseFloat( in_record.get(getVaribleFromString[i]).toString());
sendRecordBuilder.set(getVaribleFromString[i], value );
}
break;
case "BooleanToken":
{
Boolean value = Boolean.parseBoolean( in_record.get(getVaribleFromString[i]).toString());
sendRecordBuilder.set(getVaribleFromString[i], value );
}
break;
default:
{
}
break;
}
}
}
// Appending the sending timestamp
sendRecordBuilder.set("k_send_ts", System.currentTimeMillis());
GenericData.Record sendRecord = sendRecordBuilder.build();
ProducerRecord<String, GenericRecord> producerRecord = new ProducerRecord<String, GenericRecord>(
myTopic,sendRecord
);
_p_kafkaProducer.send(producerRecord, new Callback() {
@Override
public void onCompletion(RecordMetadata recordMetadata, Exception e) {
if (e == null) {
System.out.println("Success!");
} else {
e.printStackTrace();
}
}
});
_p_kafkaProducer.flush();
}
}
///////////////////////////////////////////////////////////////////
//// ports and parameters ////
/** Ip and port for Kafka server. */
private static PortParameter P_KafkaAddressInputParam;
/** Url and port for Schema registry server. */
private static PortParameter P_SchemaRegistryInputParam;
/** Kafka topic to publishes the data. */
private static PortParameter P_TopicParam;
/** The schema */
private static PortParameter P_SchemaParam;
/** A set of parameters which reflects the fields names required to read from each incoming record */
private static PortParameter P_ParametersToReadParam;
TypedIOPort input;
private static boolean P_checkCallProp;
private static String _p_kafkaUrlAndPort = "";
private static String _p_schemaRegistryUrlAndPort = "";
private static String _p_topic = "";
private static String _p_schemaStr = "";
private static String _p_parametersToRead = "";
private static Schema.Parser _p_parser;
private static Schema _p_schema;
private static Properties _p_properties = new Properties();
private static KafkaProducer<String, GenericRecord> _p_kafkaProducer;
///////////////////////////////////////////////////////////////////
//// private methods ////
    // Returns the simple runtime class name of o (e.g. "IntToken"); used by
    // fire() to dispatch on the Ptolemy token type.
    private static String nameOf(Object o) {
        return o.getClass().getSimpleName();
    }
// Set variable value and Kafka properties
private static void callProp () throws IllegalActionException {
/*
* This section, check if the related environment parameters in the host are set to values, if yes,
* so set them to the variable in Actor, or take the values from the GUI.
*/
//---------------------------------------------
if (System.getenv("OUT_KAFKA_SERVER")!=null)
{
if (!System.getenv("OUT_KAFKA_SERVER").isEmpty())
{
_p_kafkaUrlAndPort = System.getenv("OUT_KAFKA_SERVER").toString();
}
else
{
_p_kafkaUrlAndPort = ((StringToken)P_KafkaAddressInputParam.getToken()).stringValue();
}
}else {
_p_kafkaUrlAndPort = ((StringToken)P_KafkaAddressInputParam.getToken()).stringValue();
}
if (System.getenv("OUT_SCHEMA_REG_SERVER")!=null)
{
if (!System.getenv("OUT_SCHEMA_REG_SERVER").isEmpty())
{
_p_schemaRegistryUrlAndPort = System.getenv("OUT_SCHEMA_REG_SERVER").toString();
}
else
{
_p_schemaRegistryUrlAndPort = ((StringToken)P_SchemaRegistryInputParam.getToken()).stringValue();
}
}else {
_p_schemaRegistryUrlAndPort = ((StringToken)P_SchemaRegistryInputParam.getToken()).stringValue();
}
if (System.getenv("OUT_TOPIC")!=null)
{
if (!System.getenv("OUT_TOPIC").isEmpty())
{
_p_topic = System.getenv("OUT_TOPIC").toString();
}
else
{
_p_topic = ((StringToken)P_TopicParam.getToken()).stringValue();
}
}else {
_p_topic = ((StringToken)P_TopicParam.getToken()).stringValue();
}
if (System.getenv("OUT_AVRO_SCHEMA")!=null)
{
if (!System.getenv("OUT_AVRO_SCHEMA").isEmpty())
{
_p_schemaStr = System.getenv("OUT_AVRO_SCHEMA").toString();
}
else
{
_p_schemaStr = ((StringToken)P_SchemaParam.getToken()).stringValue();
}
}else {
_p_schemaStr = ((StringToken)P_SchemaParam.getToken()).stringValue();
}
if (System.getenv("OUT_PARMS_TO_READ")!=null)
{
if (!System.getenv("OUT_PARMS_TO_READ").isEmpty())
{
_p_parametersToRead = System.getenv("OUT_PARMS_TO_READ").toString();
}
else
{
_p_parametersToRead = ((StringToken)P_ParametersToReadParam.getToken()).stringValue();
}
}else {
_p_parametersToRead = ((StringToken)P_ParametersToReadParam.getToken()).stringValue();
}
_p_properties.setProperty("bootstrap.servers",_p_kafkaUrlAndPort);
_p_properties.setProperty("acks","all");
_p_properties.setProperty("retries","0");
_p_properties.setProperty("key.serializer", StringSerializer.class.getName());
_p_properties.setProperty("value.serializer", KafkaAvroSerializer.class.getName());
_p_properties.setProperty("schema.registry.url",_p_schemaRegistryUrlAndPort);
_p_parser = new Schema.Parser();
_p_schema = _p_parser.parse(_p_schemaStr);
_p_kafkaProducer=new KafkaProducer<String, GenericRecord>(_p_properties);
P_checkCallProp = false;
}
} |
blitz3d-ng/blitz3d-ng | src/modules/bb/audio/commands.h | <gh_stars>10-100
#ifndef BB_AUDIO_COMMANDS_H
#define BB_AUDIO_COMMANDS_H
#include <bb/blitz/commands.h>
#ifdef __cplusplus
extern "C" {
#else
/* C translation units see only opaque handles; the concrete types are
   presumably defined on the C++ side — confirm against the implementation. */
typedef void BBSound;
typedef void BBChannel;
#endif
/* --- Sound resources: load a sample (optionally 3D-positional) and free it. */
BBSound * BBCALL bbLoadSound( BBStr *file );
BBSound * BBCALL bbLoad3DSound( BBStr *file );
void BBCALL bbFreeSound( BBSound *sound );
/* --- Playback: bbPlaySound returns the channel the sound started on. */
BBChannel * BBCALL bbPlaySound( BBSound *sound );
void BBCALL bbLoopSound( BBSound *sound );
/* --- Per-sound defaults applied to subsequent playbacks. */
void BBCALL bbSoundPitch( BBSound *sound,int pitch );
void BBCALL bbSoundVolume( BBSound *sound,float volume );
void BBCALL bbSoundPan( BBSound *sound,float pan );
/* --- Streams: music file playback and CD track playback. */
BBChannel * BBCALL bbPlayMusic( BBStr *s );
BBChannel * BBCALL bbPlayCDTrack( int track,int mode );
/* --- Channel control: stop/pause/resume plus live parameter tweaks. */
void BBCALL bbStopChannel( BBChannel *channel );
void BBCALL bbPauseChannel( BBChannel *channel );
void BBCALL bbResumeChannel( BBChannel *channel );
void BBCALL bbChannelPitch( BBChannel *channel,int pitch );
void BBCALL bbChannelVolume( BBChannel *channel,float volume );
void BBCALL bbChannelPan( BBChannel *channel,float pan );
/* Non-zero while the channel is still playing. */
int BBCALL bbChannelPlaying( BBChannel *channel );
#ifdef __cplusplus
}
#endif
#endif
|
sjro/enact | packages/ui/internal/IdProvider/IdProvider.js | <gh_stars>0
import hoc from '@enact/core/hoc';
import React from 'react';
import useId from './useId';
/**
* Default config for {@link ui/IdProvider.IdProvider}
*
* @hocconfig
* @memberof ui/IdProvider.IdProvider
*/
const defaultConfig = {
	/**
	 * Prop to pass the identifier generation function
	 *
	 * @type {String}
	 * @default generateId
	 * @memberof ui/IdProvider.IdProvider.defaultConfig
	 */
	generateProp: 'generateId',
	/**
	 * Prop to pass the identifier
	 *
	 * @type {String}
	 * @default id
	 * @memberof ui/IdProvider.IdProvider.defaultConfig
	 */
	idProp: 'id',
	/**
	 * Optional prefix for the identifier
	 *
	 * @type {String}
	 * @default 'c_'
	 * @memberof ui/IdProvider.IdProvider.defaultConfig
	 */
	prefix: 'c_'
};
/**
 * A higher-order component that generates globally-unique identifiers
 *
 * @class IdProvider
 * @hoc
 * @private
 * @memberof ui/IdProvider
 */
const IdProvider = hoc(defaultConfig, (config, Wrapped) => {
	const {generateProp, idProp, prefix} = config;
	// eslint-disable-next-line no-shadow
	function IdProvider (props) {
		const updated = {...props};
		const {generateId} = useId({prefix});
		// Expose the generator itself so the wrapped component can mint more ids.
		if (generateProp) {
			updated[generateProp] = generateId;
		}
		// Only supply an id when the caller did not provide one explicitly.
		if (idProp && !updated[idProp]) {
			updated[idProp] = generateId();
		}
		return (
			<Wrapped {...updated} />
		);
	}
	return IdProvider;
});
export default IdProvider;
export {
IdProvider,
useId
};
|
chlorm-forks/gyp | test/linux/gyptest-ldflags-from-environment.py | <reponame>chlorm-forks/gyp
#!/usr/bin/env python
# Copyright (c) 2017 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies the use of linker flags in environment variables.
In this test, gyp and build both run in same local environment.
"""
import TestGyp
import re
import subprocess
import sys
FORMATS = ('make', 'ninja')
if sys.platform.startswith('linux'):
  test = TestGyp.TestGyp(formats=FORMATS)
  CHDIR = 'ldflags-from-environment'
  # gyp must run while LDFLAGS/LDFLAGS_host are in the environment so they are
  # baked into the generated build files; GYP_CROSSCOMPILE forces separate
  # host and target toolchains so both flavors of LDFLAGS are exercised.
  with TestGyp.LocalEnv({'LDFLAGS': '-Wl,--dynamic-linker=/target',
                         'LDFLAGS_host': '-Wl,--dynamic-linker=/host',
                         'GYP_CROSSCOMPILE': '1'}):
    test.run_gyp('test.gyp', chdir=CHDIR)
  test.build('test.gyp', chdir=CHDIR)
  def GetDynamicLinker(p):
    # Returns the program interpreter (dynamic linker) recorded in the ELF
    # program headers of built binary `p`, as reported by `readelf -l`.
    p = test.built_file_path(p, chdir=CHDIR)
    r = re.compile(r'\[Requesting program interpreter: ([^\]]+)\]')
    proc = subprocess.Popen(['readelf', '-l', p], stdout=subprocess.PIPE)
    o = proc.communicate()[0].decode('utf-8')
    assert not proc.returncode
    return r.search(o).group(1)
  # Each binary must carry the dynamic linker chosen by its corresponding
  # environment variable (target flags vs. host flags).
  if GetDynamicLinker('ldflags') != '/target':
    test.fail_test()
  if GetDynamicLinker('ldflags_host') != '/host':
    test.fail_test()
  test.pass_test()
|
mycolab/ncbi-blast | blast/src/serial/datatool/xsdparser.hpp | <reponame>mycolab/ncbi-blast
#ifndef XSDPARSER_HPP
#define XSDPARSER_HPP
/* $Id: xsdparser.hpp 547822 2017-10-04 15:45:27Z gouriano $
* ===========================================================================
*
* PUBLIC DOMAIN NOTICE
* National Center for Biotechnology Information
*
* This software/database is a "United States Government Work" under the
* terms of the United States Copyright Act. It was written as part of
* the author's official duties as a United States Government employee and
* thus cannot be copyrighted. This software/database is freely available
* to the public for use. The National Library of Medicine and the U.S.
* Government have not placed any restriction on its use or reproduction.
*
* Although all reasonable efforts have been taken to ensure the accuracy
* and reliability of the software and data, the NLM and the U.S.
* Government do not and cannot warrant the performance or results that
* may be obtained by using this software or data. The NLM and the U.S.
* Government disclaim all warranties, express or implied, including
* warranties of performance, merchantability or fitness for any particular
* purpose.
*
* Please cite the author in any work or product based on this material.
*
* ===========================================================================
*
* Author: <NAME>
*
* File Description:
* XML Schema parser
*
* ===========================================================================
*/
#include <corelib/ncbiutil.hpp>
#include "dtdparser.hpp"
#include "xsdlexer.hpp"
BEGIN_NCBI_SCOPE
/////////////////////////////////////////////////////////////////////////////
// XSDParser
// XML Schema (XSD) parser; specializes the DTD parser to build the same
// datatool document tree from schema documents.
class XSDParser : public DTDParser
{
public:
    XSDParser( XSDLexer& lexer);
    virtual ~XSDParser(void);
    // Namespace family an element's prefix resolves to.
    enum EElementNamespace {
        eUnknownNamespace,
        eSchemaNamespace,
        eWsdlNamespace,
        eSoapNamespace
    };
protected:
    // DTDParser overrides: entry points for building the document tree.
    virtual void BeginDocumentTree(void) override;
    virtual void BuildDocumentTree(CDataTypeModule& module) override;
    void Reset(void);
    // Tokenizer helpers: current token plus accessors for the element,
    // attribute, and value captured into the m_* members below.
    TToken GetNextToken(void);
    EElementNamespace GetElementNamespace(const string& prefix);
    bool IsAttribute(const char* att) const;
    bool IsValue(const char* value) const;
    bool DefineElementType(DTDElement& node);
    bool DefineAttributeType(DTDAttribute& att);
    // Top-level schema constructs.
    void ParseHeader(void);
    void ParseInclude(void);
    void ParseImport(void);
    void ParseAnnotation(void);
    void ParseDocumentation(void);
    void ParseAppInfo(void);
    TToken GetRawAttributeSet(void);
    bool GetAttribute(const string& att);
    void SkipContent();
    // Occurrence constraints (minOccurs/maxOccurs).
    DTDElement::EOccurrence ParseMinOccurs( DTDElement& node, DTDElement::EOccurrence occNow);
    DTDElement::EOccurrence ParseMaxOccurs( DTDElement& node, DTDElement::EOccurrence occNow);
    // Element-level constructs: groups, content models, complex/simple types.
    string ParseElementContent(DTDElement* owner, int emb);
    string ParseGroup(DTDElement* owner, int emb);
    void ParseGroupRef(DTDElement& node);
    bool ParseContent(DTDElement& node, bool extended=false);
    void ParseContainer(DTDElement& node);
    void ParseComplexType(DTDElement& node);
    void ParseSimpleType(DTDElement& node);
    void ParseSimpleContent(DTDElement& node);
    void ParseExtension(DTDElement& node);
    void ParseRestriction(DTDElement& node);
    void ParseFacet(DTDElement& node, TToken tok);
    void ParseEnumeration(DTDElement& node);
    void ParseAttribute(DTDElement& node);
    void ParseAttributeGroup(DTDElement& node);
    void ParseAttributeGroupRef(DTDElement& node);
    void ParseAny(DTDElement& node);
    void ParseUnion(DTDElement& node);
    void ParseList(DTDElement& node);
    // Attribute-level constructs (mirror of the element-level parsers).
    string ParseAttributeContent(void);
    void ParseContent(DTDAttribute& att);
    void ParseExtension(DTDAttribute& att);
    void ParseRestriction(DTDAttribute& att);
    void ParseEnumeration(DTDAttribute& att);
    void ParseUnion(DTDAttribute& att);
    void ParseList(DTDAttribute& att);
    // Named type definitions and the entity machinery used to resolve them.
    string CreateTmpEmbeddedName(const string& name, int emb);
    string CreateEntityId( const string& name,DTDEntity::EType type,
                           const string* prefix=NULL);
    void CreateTypeDefinition(DTDEntity::EType type);
    void ParseTypeDefinition(DTDEntity& ent);
    void ProcessNamedTypes(void);
    // Scope management: pushes/pops the namespace maps and form defaults
    // (saved in the private stacks below) when entering included schemas.
    void BeginScope(DTDEntity* ent);
    void EndScope(void);
    virtual DTDEntity* PushEntityLexer(const string& name) override;
    virtual bool PopEntityLexer(void) override;
    virtual AbstractLexer* CreateEntityLexer(
        CNcbiIstream& in, const string& name, bool autoDelete=true) override;
#if defined(NCBI_DTDPARSER_TRACE)
    virtual void PrintDocumentTree(void);
#endif
protected:
    // Scratch state for the token most recently read by GetNextToken().
    string m_Raw;
    string m_Element;
    string m_ElementPrefix;
    string m_Attribute;
    string m_AttributePrefix;
    string m_Value;
    string m_ValuePrefix;
    map<string, pair< string,string > > m_RawAttributes;
    // Current namespace bindings and schema-wide defaults.
    map<string,string> m_PrefixToNamespace;
    map<string,string> m_NamespaceToPrefix;
    map<string,DTDAttribute> m_MapAttribute;
    string m_TargetNamespace;
    bool m_ElementFormDefault;
    bool m_AttributeFormDefault;
private:
    // Saved copies of the protected state above, one frame per nested scope.
    stack< map<string,string> > m_StackPrefixToNamespace;
    stack< map<string,string> > m_StackNamespaceToPrefix;
    stack<string> m_StackTargetNamespace;
    stack<bool> m_StackElementFormDefault;
    stack<bool> m_StackAttributeFormDefault;
    set<string> m_EmbeddedNames;
    bool m_ResolveTypes;
    bool m_EnableNamespaceRedefinition;
};
END_NCBI_SCOPE
#endif // XSDPARSER_HPP
|
eldarion-client/scaife-viewer | sv_pdl/stats/__init__.py | import os
import json
from django.core.cache import cache
from django.contrib.humanize.templatetags.humanize import intword, intcomma
# Absolute path to the JSON stats file produced by the `write_library_stats`
# management command.
LIBRARY_STATS_PATH = os.path.join(
    os.path.dirname(os.path.realpath(__file__)),
    "library_stats.json"
)


def get_library_stats():
    """
    Loads library stats as computed via `write_library_stats` management command.

    In the future, these stats may be served up directly from `scaife-cts-api`
    at the conclusion of indexing. For now, `scaife-viewer` seems to be the most
    sensible place to compute the stats since it has the connection info for both
    Nautilus and ElasticSearch.

    Returns a dict of human-formatted counts, cached indefinitely under the
    "library-stats" cache key.
    """
    key = "library-stats"
    library_stats = cache.get(key, None)
    if library_stats is None:
        # Use a context manager so the stats file is closed promptly
        # (the previous version leaked the file handle).
        with open(LIBRARY_STATS_PATH) as stats_file:
            data = json.load(stats_file)
        library_stats = {
            "works_count": intcomma(data["works_count"]),
            "text_counts_total": intcomma(data["text_counts"]["total"]),
            "text_counts_greek": intcomma(data["text_counts"]["grc"]),
            "text_counts_latin": intcomma(data["text_counts"]["lat"]),
            # intword() yields e.g. "1.2 million"; keep only the numeric part.
            "word_counts_total": intword(data["word_counts"]["total"]).split(" ")[0],
            "word_counts_greek": intword(data["word_counts"]["grc"]).split(" ")[0],
            "word_counts_latin": intword(data["word_counts"]["lat"]).split(" ")[0]
        }
        # timeout=None -> cache forever (until the key is evicted or flushed).
        cache.set(key, library_stats, None)
    return library_stats
|
CPSC-410-Future-Gadget-Laboratory/dsl-project | src/main/java/cpsc/dlsproject/ast/statements/Var.java | <reponame>CPSC-410-Future-Gadget-Laboratory/dsl-project<filename>src/main/java/cpsc/dlsproject/ast/statements/Var.java
package cpsc.dlsproject.ast.statements;
import cpsc.dlsproject.ast.BaseAST;
import cpsc.dlsproject.ast.expressions.BinaryOperation;
import cpsc.dlsproject.ast.expressions.Expression;
import cpsc.dlsproject.ast.expressions.VarAccess;
import cpsc.dlsproject.ast.expressions.values.BooleanValue;
import cpsc.dlsproject.ast.expressions.values.NumberValue;
import cpsc.dlsproject.ast.expressions.values.StringValue;
public class Var extends BaseAST {
    // Variable name as written in the source program.
    public String name;
    // Parsed initializer expression (literal, variable access, or binary operation).
    public Expression expression;

    // Folds the freshly parsed operand into the running binary operation: on the
    // first call (operation == null) the next token is consumed as the operator
    // and the operand becomes the left-hand side; on later calls the operand is
    // stored as the right-hand side.
    // NOTE(review): chains of three or more operands do not extend the tree —
    // a later call just overwrites rhs, and expressionHandler would mis-parse
    // the operator between the 2nd and 3rd operands as a VarAccess. Confirm the
    // grammar is intended to allow exactly two operands.
    private BinaryOperation handleOper(BinaryOperation operation, Expression expression) {
        BinaryOperation oper = operation;
        if (oper == null) {
            oper = new BinaryOperation(tokenizer.getNext());
            oper.lhs = expression;
        } else {
            oper.rhs = expression;
        }
        return oper;
    }

    // Consumes a quoted string literal: opening quote, contents, closing quote.
    private StringValue handleString() {
        tokenizer.getAndCheckNext("\"");
        StringValue stringValue = new StringValue(tokenizer.getNext());
        tokenizer.getAndCheckNext("\"");
        return stringValue;
    }

    // Parses a multi-token initializer up to (but not including) ";", classifying
    // each operand token as string, boolean, number (bare or parenthesized), or
    // variable access, and combining them via handleOper.
    private void expressionHandler() {
        BinaryOperation operation = null;
        Expression expression;
        while (!tokenizer.checkNext().equals(";")) {
            String token = tokenizer.checkNext();
            if (token.matches("\"")) {
                expression = handleString();
            } else if (token.equals("true") || token.equals("false")) {
                expression = new BooleanValue(Boolean.parseBoolean(tokenizer.getNext()));
            } else if (token.matches("^\\((?=.)([+-]?([0-9]*)(\\.([0-9]+))?)\\)$") || token.matches("^[-+]?\\d+$")) {
                expression = new NumberValue(Double.parseDouble(tokenizer.getNext()));
            } else {
                // Anything unrecognized is treated as a variable reference.
                expression = new VarAccess(tokenizer.getNext());
            }
            operation = handleOper(operation, expression);
        }
        if (operation == null) {
            // Empty initializer: fail hard rather than build a malformed AST.
            System.out.println("Error in expression creation in AST. EXIT");
            System.exit(0);
        } else {
            this.expression = operation;
        }
    }

    // Parses "<name> <Type> <initializer> ;". Single-literal initializers are
    // parsed directly; anything longer goes through expressionHandler.
    public void parse() {
        name = tokenizer.getNext();
        if (tokenizer.checkNext().equals("String")) {
            tokenizer.getNext();
            // Lookahead 3: quote, contents, quote, then ";" means a lone literal.
            if (tokenizer.checkNext().matches("\"") && tokenizer.checkAheadOfNext(3).equals(";")) {
                expression = handleString();
            } else {
                expressionHandler();
            }
        } else if (tokenizer.checkNext().equals("Number")) {
            tokenizer.getNext();
            if (tokenizer.checkAheadOfNext(1).equals(";")) {
                expression = new NumberValue(Double.parseDouble(tokenizer.getNext()));
            } else {
                expressionHandler();
            }
        } else if (tokenizer.checkNext().equals("Boolean")) {
            tokenizer.getNext();
            if (tokenizer.checkAheadOfNext(1).equals(";")) {
                expression = new BooleanValue(Boolean.parseBoolean(tokenizer.getNext()));
            } else {
                expressionHandler();
            }
        }
        tokenizer.getAndCheckNext(";");
    }
}
|
disclearing/javaee-plugins | dUHC/src/main/java/me/javaee/uhc/command/commands/ScenarioCommand.java | <gh_stars>1-10
package me.javaee.uhc.command.commands;
import me.javaee.uhc.UHC;
import me.javaee.uhc.command.BaseCommand;
import me.javaee.uhc.menu.menu.ScenariosMenu;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import java.util.Arrays;
/*
* Copyright (c) 2017, <NAME>. All rights reserved.
*
* Do not redistribute without permission from the author.
*/
/**
 * Opens the scenarios menu for the executing player.
 */
public class ScenarioCommand extends BaseCommand {
    public ScenarioCommand() {
        // Command "scenarios" with English and Spanish aliases. The two boolean
        // flags mirror BaseCommand's constructor — presumably console-allowed
        // and player-only switches; confirm against BaseCommand.
        super("scenarios", Arrays.asList("scen", "scenario", "escenario", "escenarios"), false, true);
    }

    @Override
    public void execute(CommandSender sender, Command command, String label, String[] args) {
        // Safe cast: BaseCommand is configured above to only accept players.
        Player player = (Player) sender;
        new ScenariosMenu(player).open(player);
    }

    @Override
    public String getDescription() {
        return "Lists you the scenarios";
    }
}
|
motomototv/telebot | cmd/stat/main.go | <filename>cmd/stat/main.go
package main
import (
"flag"
"fmt"
"os"
"os/signal"
"syscall"
"github.com/godcong/telebot/config"
"github.com/godcong/telebot/internal/client"
"github.com/godcong/telebot/log"
)
// Command-line flags: configuration file location and the chat to inspect.
var path = flag.String("path", "bot.cfg", "default property path")
var chatid = flag.Int64("chatid", 0, "chat id")

// main loads the bot configuration, starts the Telegram client in the
// background, lists the members of the chat given by -chatid (resolving each
// member's user record), then blocks until the process is interrupted.
func main() {
	flag.Parse()
	cfg, err := config.LoadConfig(*path)
	if err != nil {
		panic(err)
	}
	log.Debug = cfg.Debug
	c, err := client.NewClient("user1", cfg)
	if err != nil {
		panic(err)
	}
	go c.Run()
	fmt.Println("Bot is running")
	members, err := c.SearchChatMembersByID(*chatid)
	if err == nil {
		fmt.Println("Group members:", len(members.Members), members.TotalCount)
		for i := range members.Members {
			fmt.Println("User:", members.Members[i].MemberID)
			request, err := c.GetUserByID(members.Members[i].MemberID)
			if err == nil {
				// NOTE(review): the chat id below is hard-coded and looks
				// stale — confirm it should be *chatid instead.
				fmt.Println("User:", request.Username, "joined chat:", -1102281440)
			}
		}
	} else {
		fmt.Println("GetChatMembers error:", err)
	}
	handleInterrupt()
	fmt.Println("end")
}
// handleInterrupt blocks until the process receives SIGINT or SIGTERM,
// prints a short notice, and returns nil. The error return is kept for
// compatibility with callers; no error path currently exists.
func handleInterrupt() error {
	sigCh := make(chan os.Signal, 1)
	signal.Notify(sigCh, os.Interrupt, syscall.SIGTERM)
	if _, open := <-sigCh; open {
		fmt.Println("interrupt exit")
	}
	return nil
}
|
Rhunter1/xively-client-c | src/tests/itests/xi_itest_tls_error.h | /* Copyright (c) 2003-2018, Xively All rights reserved.
*
* This is part of the Xively C Client library,
* it is licensed under the BSD 3-Clause license.
*/
#ifndef __XI_ITEST_TLS_ERROR_H__
#define __XI_ITEST_TLS_ERROR_H__
/* Fixture hooks shared by every case in this suite. */
extern int xi_itest_tls_error_setup( void** state );
extern int xi_itest_tls_error_teardown( void** state );
/* Test cases. Each name encodes <suite>__<fault injected>__<expected outcome>. */
extern void xi_itest_tls_error__connection_flow__basic_checks( void** state );
extern void
xi_itest_tls_error__tls_init_and_connect_errors__graceful_error_handling( void** state );
extern void
xi_itest_tls_error__tls_push_CONNECT_errors__graceful_error_handling( void** state );
extern void
xi_itest_tls_error__tls_push_infinite_SUBSCRIBE_errors__reSUBSCRIBE_occurs_once_in_a_second(
    void** state );
extern void
xi_itest_tls_error__tls_push_SUBSCRIBE_errors__graceful_error_handling( void** state );
extern void
xi_itest_tls_error__tls_push_PUBLISH_errors__graceful_error_handling( void** state );
extern void
xi_itest_tls_error__tls_pull_CONNACK_errors__graceful_error_handling( void** state );
extern void
xi_itest_tls_error__tls_pull_SUBACK_errors__graceful_error_handling( void** state );
extern void
xi_itest_tls_error__tls_pull_PUBACK_errors__graceful_error_handling( void** state );
#ifdef XI_MOCK_TEST_PREPROCESSOR_RUN
/* cmocka registration table: every case above paired with the shared
   setup/teardown hooks. Only compiled during the mock-test preprocessor run. */
struct CMUnitTest xi_itests_tls_error[] = {
    cmocka_unit_test_setup_teardown( xi_itest_tls_error__connection_flow__basic_checks,
                                     xi_itest_tls_error_setup,
                                     xi_itest_tls_error_teardown ),
    cmocka_unit_test_setup_teardown(
        xi_itest_tls_error__tls_init_and_connect_errors__graceful_error_handling,
        xi_itest_tls_error_setup,
        xi_itest_tls_error_teardown ),
    cmocka_unit_test_setup_teardown(
        xi_itest_tls_error__tls_push_CONNECT_errors__graceful_error_handling,
        xi_itest_tls_error_setup,
        xi_itest_tls_error_teardown ),
    cmocka_unit_test_setup_teardown(
        xi_itest_tls_error__tls_push_infinite_SUBSCRIBE_errors__reSUBSCRIBE_occurs_once_in_a_second,
        xi_itest_tls_error_setup,
        xi_itest_tls_error_teardown ),
    cmocka_unit_test_setup_teardown(
        xi_itest_tls_error__tls_push_SUBSCRIBE_errors__graceful_error_handling,
        xi_itest_tls_error_setup,
        xi_itest_tls_error_teardown ),
    cmocka_unit_test_setup_teardown(
        xi_itest_tls_error__tls_push_PUBLISH_errors__graceful_error_handling,
        xi_itest_tls_error_setup,
        xi_itest_tls_error_teardown ),
    cmocka_unit_test_setup_teardown(
        xi_itest_tls_error__tls_pull_CONNACK_errors__graceful_error_handling,
        xi_itest_tls_error_setup,
        xi_itest_tls_error_teardown ),
    cmocka_unit_test_setup_teardown(
        xi_itest_tls_error__tls_pull_SUBACK_errors__graceful_error_handling,
        xi_itest_tls_error_setup,
        xi_itest_tls_error_teardown ),
    cmocka_unit_test_setup_teardown(
        xi_itest_tls_error__tls_pull_PUBACK_errors__graceful_error_handling,
        xi_itest_tls_error_setup,
        xi_itest_tls_error_teardown )};
#endif
#endif /* __XI_ITEST_TLS_ERROR_H__ */
|
5starsmedia/offline-map-admin | app/base/filters/translate.js | //stUserAgentIcon
export default
/*@ngInject*/
function ($rootScope) {
return function (input) {
if (angular.isObject(input)) {
return input[$rootScope.currentLocale] || input['en-US'];
}
var translates = $rootScope.translates || {};
return translates[input] || input;
};
}; |
iwt-axelzimmermann/java-library | src/main/java/com/urbanairship/api/push/model/notification/ios/MediaAttachment.java | /*
* Copyright (c) 2013-2016. Urban Airship and Contributors
*/
package com.urbanairship.api.push.model.notification.ios;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.urbanairship.api.push.model.PushModelObject;
/**
* MediaAttachment for iOS specific push messages.
*/
public final class MediaAttachment extends PushModelObject {
private final String url;
private final Optional<IOSMediaContent> content;
private final Optional<IOSMediaOptions> options;
private MediaAttachment(String url, Optional<IOSMediaContent> content, Optional<IOSMediaOptions> options) {
this.url = url;
this.content = content;
this.options = options;
}
/**
* Get a MediaAttachment builder
* @return Builder
*/
public static Builder newBuilder() {
return new Builder();
}
/**
* Get the url used for the iOS media
* @return String representation of the url
*/
public String getUrl() {
return url;
}
/**
* Get the Content object that describes portions of the notification that should be modified if the media attachment succeeds
* @return Optional Content object
*/
public Optional<IOSMediaContent> getContent() {
return content;
}
/**
* Get the IOSMediaOptions that describes how to display the resource at the URL
* @return Optional IOSMediaOptions object
*/
public Optional<IOSMediaOptions> getOptions() {
return options;
}
@Override
public boolean equals(Object o){
if(this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
MediaAttachment that = (MediaAttachment)o;
if (url != null ? !url.equals(that.url) : that.url != null) {
return false;
}
if (content != null ? !content.equals(that.content) : that.content != null) {
return false;
}
if (options != null ? !options.equals(that.options) : that.options != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = (url != null ? url.hashCode() : 0);
result = 31 * result + (content != null ? content.hashCode() : 0);
result = 31 * result + (options != null ? options.hashCode() : 0);
return result;
}
@Override
public String toString(){
return "MediaAttachment{" +
"content=" + content +
", options=" + options;
}
public static class Builder{
private String url = null;
private IOSMediaContent content = null;
private IOSMediaOptions options = null;
private Builder() { }
/**
* Set the url string for iOS media.
* @param url String url
* @return Builder
*/
public Builder setUrl(String url) {
this.url = url;
return this;
}
/**
* Set the Content object that describes portions of the notification that should be modified if the media attachment succeeds.
* @param content Content
* @return Builder
*/
public Builder setContent(IOSMediaContent content) {
this.content = content;
return this;
}
/**
* Set the IOSMediaOptions object that describes how to display the resource at the URL specified.
* @param options IOSMediaOptions
* @return Builder
*/
public Builder setOptions(IOSMediaOptions options) {
this.options = options;
return this;
}
/**
* Build MediaAttachment
* @return MediaAttachment
*/
public MediaAttachment build() {
Preconditions.checkNotNull(url, "'url' must be set");
return new MediaAttachment(url,
Optional.fromNullable(content),
Optional.fromNullable(options));
}
}
} |
ronaldomg/Valim_EFS_REDIS | src/static/himportaboletocepremessa.js | /**@preserve GeneXus Java 10_3_12-110051 on December 12, 2020 12:49:12.35
*/
// NOTE(review): machine-generated by GeneXus (see the @preserve header above).
// Do not hand-edit; regenerate from the GeneXus model instead.
gx.evt.autoSkip = false;
gx.define('himportaboletocepremessa', false, function () {
   this.ServerClass = "himportaboletocepremessa" ;
   this.PackageName = "" ;
   this.setObjectType("web");
   this.setOnAjaxSessionTimeout("Warn");
   this.hasEnterEvent = true;
   this.skipOnEnter = false;
   // Key bindings: F12 -> FECHAR/CANCEL, F5 -> REFRESH, F1 -> HELP.
   this.addKeyListener("12", "'FECHAR'");
   this.addKeyListener("5", "REFRESH");
   this.addKeyListener("12", "CANCEL");
   this.addKeyListener("1", "HELP");
   this.SetStandaloneVars=function()
   {
   };
   this.e11l42_client=function()
   {
      this.executeServerEvent("'FECHAR'", false, null, false, false);
   };
   this.e13l42_client=function()
   {
      this.executeServerEvent("ENTER", true, null, false, false);
   };
   this.e15l42_client=function()
   {
      this.executeServerEvent("CANCEL", true, null, false, false);
   };
   this.GXValidFnc = [];
   var GXValidFnc = this.GXValidFnc ;
   this.GXCtrlIds=[3,6,9,12,14,24];
   this.GXLastCtrlId =24;
   GXValidFnc[3]={fld:"TABLE2",grid:0};
   GXValidFnc[6]={fld:"TABLE7",grid:0};
   GXValidFnc[9]={fld:"TABLE3",grid:0};
   GXValidFnc[12]={fld:"TEXTBLOCK1", format:0,grid:0};
   GXValidFnc[14]={lvl:0,type:"char",len:150,dec:0,sign:false,ro:0,grid:0,gxgrid:null,fnc:null,isvalid:null,rgrid:[],fld:"vARQUIVO",gxz:"ZV14Arquivo",gxold:"OV14Arquivo",gxvar:"AV14Arquivo",ucs:[],op:[],ip:[],nacdep:[],ctrltype:"edit",v2v:function(Value){gx.O.AV14Arquivo=Value},v2z:function(Value){gx.O.ZV14Arquivo=Value},v2c:function(){gx.fn.setControlValue("vARQUIVO",gx.O.AV14Arquivo,0)},c2v:function(){gx.O.AV14Arquivo=this.val()},val:function(){return gx.fn.getControlValue("vARQUIVO")},nac:gx.falseFn};
   GXValidFnc[24]={fld:"BTNHELP",grid:0};
   this.AV14Arquivo = "" ;
   this.ZV14Arquivo = "" ;
   this.OV14Arquivo = "" ;
   this.AV14Arquivo = "" ;
   this.Events = {"e11l42_client": ["'FECHAR'", true] ,"e13l42_client": ["ENTER", true] ,"e15l42_client": ["CANCEL", true]};
   this.EvtParms["REFRESH"] = [[],[]];
   this.EvtParms["'FECHAR'"] = [[],[]];
   this.EvtParms["ENTER"] = [[{av:'AV14Arquivo',fld:'vARQUIVO'}],[{av:'AV14Arquivo',fld:'vARQUIVO'}]];
   this.EnterCtrl = ["BUTTON1"];
   this.InitStandaloneVars( );
});
gx.setParentObj(new himportaboletocepremessa());
|
MeridianExplorer/ocs-ci | tests/manage/z_cluster/test_delete_osd_deployment.py | import logging
import pytest
from ocs_ci.framework.testlib import (
ManageTest,
tier4c,
skipif_ocs_version,
)
from ocs_ci.framework import config
from ocs_ci.ocs.resources.pod import get_osd_deployments
from ocs_ci.ocs.resources.storage_cluster import osd_encryption_verification
from ocs_ci.ocs.utils import get_pod_name_by_pattern
from ocs_ci.ocs import constants
from ocs_ci.ocs.exceptions import CommandFailed
from ocs_ci.ocs.ocp import OCP
from ocs_ci.utility.utils import ceph_health_check
logger = logging.getLogger(__name__)
@tier4c
@skipif_ocs_version("<4.10")
@pytest.mark.polarion_id("OCS-3731")
@pytest.mark.bugzilla("2032656")
class TestDeleteOSDDeployment(ManageTest):
    """
    This test case deletes all the OSD deployments one after the other.
    The expected result is that once the OSD deployment is deleted, a new OSD
    deployment and pod should be created in its place.
    """

    def test_delete_rook_ceph_osd_deployment(self):
        """
        For each rook-ceph OSD deployment: record the current OSD pod,
        delete the deployment, then verify the operator recreates the
        deployment with a *new* Running pod. Finally check encryption
        (when enabled) and overall Ceph health.
        """
        osd_deployments = get_osd_deployments()
        deployment_obj = OCP(
            kind=constants.DEPLOYMENT, namespace=constants.OPENSHIFT_STORAGE_NAMESPACE
        )
        pod_obj = OCP(
            kind=constants.POD, namespace=constants.OPENSHIFT_STORAGE_NAMESPACE
        )
        for osd_deployment in osd_deployments:
            # Get rook-ceph-osd pod name associated with the deployment
            osd_deployment_name = osd_deployment.name
            old_osd_pod = get_pod_name_by_pattern(
                pattern=osd_deployment_name,
                namespace=constants.OPENSHIFT_STORAGE_NAMESPACE,
            )[0]
            logger.info(f"Deleting OSD deployment: {osd_deployment_name}")
            try:
                deployment_obj.delete(resource_name=osd_deployment_name)
                deployment_obj.wait_for_resource(
                    condition="0/1", resource_name=osd_deployment_name, column="READY"
                )
            except CommandFailed as err:
                # "NotFound" means the operator already recreated/removed it —
                # that is the expected race, anything else is a real failure.
                if "NotFound" not in str(err):
                    raise
            # Wait for new OSD deployment to be Ready
            deployment_obj.wait_for_resource(
                condition="1/1", resource_name=osd_deployment_name, column="READY"
            )
            # Check if a new OSD pod is created
            new_osd_pod = get_pod_name_by_pattern(
                pattern=osd_deployment_name,
                namespace=constants.OPENSHIFT_STORAGE_NAMESPACE,
            )[0]
            assert old_osd_pod != new_osd_pod, "New OSD pod not created"
            # Check if new OSD pod is up and running
            logger.info(
                "Waiting for a new OSD pod to get created and reach Running state"
            )
            assert pod_obj.wait_for_resource(
                condition=constants.STATUS_RUNNING,
                resource_name=new_osd_pod,
                column="STATUS",
            ), f"New OSD pod {new_osd_pod} is not in {constants.STATUS_RUNNING} state"

        # If clusterwide encryption is enabled, verify that the new OSDs are encrypted
        if config.ENV_DATA.get("encryption_at_rest"):
            osd_encryption_verification()

        assert ceph_health_check(delay=120, tries=50), "Ceph health check failed"
|
dxflqm/blog | pzblog-system/pzblog-common/src/main/java/org/panzhi/blog/common/result/BuildResponseMsg.java | <reponame>dxflqm/blog<gh_stars>1-10
package org.panzhi.blog.common.result;
import org.apache.commons.lang3.StringUtils;
import org.panzhi.blog.common.error.CommonErrorMsg;
import org.panzhi.blog.common.error.ErrorMsg;
import com.alibaba.fastjson.JSON;
/**
 * Helpers that wrap payloads in a {@link ResultMsg} envelope and serialize
 * the result to a JSON string.
 */
public class BuildResponseMsg {

    /**
     * Serializes arbitrary data directly to JSON, without the ResultMsg envelope.
     * (Method name kept for backward compatibility; "Custome" is a historical typo.)
     */
    public static String buildCustomeMsg(Object data){
        return JSON.toJSONString(data);
    }

    /**
     * Builds a ResultMsg with the given code/message/payload and serializes it.
     * Shared by all public builders below to avoid the previously duplicated
     * envelope-construction code.
     */
    private static String build(int code, String msg, Object data) {
        ResultMsg<Object> responseMsg = new ResultMsg<Object>();
        responseMsg.setCode(code);
        responseMsg.setMsg(msg);
        responseMsg.setData(data);
        return JSON.toJSONString(responseMsg);
    }

    /**
     * Success (code 200) with no payload.
     */
    public static String buildSuccessMsgNoData(){
        return build(200, "", null);
    }

    /**
     * Success (code 200) carrying a payload.
     */
    public static String buildSuccessMsgAndData(Object data){
        return build(200, "", data);
    }

    /**
     * Failure described by the given error, with no payload.
     */
    public static String buildFailMsgNoData(ErrorMsg errorMsg){
        return build(errorMsg.getCode(), errorMsg.getMessage(), null);
    }

    /**
     * Failure using the shared CUSTOM_ERROR code with a caller-supplied message.
     * NOTE(review): this mutates the CUSTOM_ERROR singleton, which is not
     * thread-safe and leaks the last message into later calls that pass an
     * empty message — behavior preserved for backward compatibility.
     */
    public static String buildCommonFailMsg(String msg){
        if(StringUtils.isNotEmpty(msg)) {
            CommonErrorMsg.CUSTOM_ERROR.setMessage(msg);
        }
        return build(CommonErrorMsg.CUSTOM_ERROR.getCode(),
                CommonErrorMsg.CUSTOM_ERROR.getMessage(), null);
    }

    /**
     * Failure described by the given error, carrying a payload.
     */
    public static String buildFailMsgAndData(ErrorMsg errorMsg, Object object){
        return build(errorMsg.getCode(), errorMsg.getMessage(), object);
    }
}
|
fangjinuo/avoid996 | agileway-sshclient/src/main/java/com/jn/agileway/ssh/client/supports/command/executor/SshCommandLineExecutor.java | package com.jn.agileway.ssh.client.supports.command.executor;
import com.jn.agileway.ssh.client.SshConnection;
import com.jn.langx.commandline.DefaultCommandLineExecutor;
import com.jn.langx.util.Objs;
import java.io.File;
/**
 * Command-line executor that launches commands over an SSH connection.
 */
public class SshCommandLineExecutor extends DefaultCommandLineExecutor {

    /**
     * Creates an executor whose working directory defaults to "~".
     */
    public SshCommandLineExecutor(SshConnection connection) {
        this(null, connection);
    }

    /**
     * @param workingDirectory working directory for launched commands; when
     *                         null, falls back to "~".
     *                         NOTE(review): Java does not expand "~" — whether
     *                         it resolves to the remote home directory depends
     *                         on how the SSH launcher interprets it; confirm.
     * @param connection       the SSH connection used to launch commands
     */
    public SshCommandLineExecutor(File workingDirectory, SshConnection connection) {
        setWorkingDirectory(Objs.useValueIfNull(workingDirectory, new File("~")));
        setLauncher(new SshCommandLineLauncher(connection));
    }
}
|
RiceKab/project-geowars | projects/parametergame/core/src/be/howest/twentytwo/parametergame/model/event/pickup/BasePickupCallback.java | <reponame>RiceKab/project-geowars<filename>projects/parametergame/core/src/be/howest/twentytwo/parametergame/model/event/pickup/BasePickupCallback.java
package be.howest.twentytwo.parametergame.model.event.pickup;
import com.badlogic.ashley.core.Entity;
/**
 * Callback invoked when an entity interacts with a pickup; subclasses
 * implement the pickup's effect in {@link #handle(Entity)}.
 */
public abstract class BasePickupCallback {
    /**
     * Applies this pickup's effect to the given entity.
     *
     * @param entity the entity that collected the pickup
     */
    public abstract void handle(Entity entity);
}
|
lindsaygelle/animalcrossing | villager/horse/winnie/doc.go | // Package winnie exports the Animal Crossing villager Winnie.
package winnie
|
gyan42/interview-preparation-qns | python/tree/travesal.py | <filename>python/tree/travesal.py
from collections import defaultdict
class TreeNode:
    """Node of a binary search tree.

    Stores a comparable ``data`` value plus optional ``left`` and ``right``
    children. Values smaller than the node descend left; values greater than
    or equal to it descend right (so duplicates end up on the right).
    """

    def __init__(self, data, left=None, right=None):
        self.data = data    # payload value used for ordering
        self.left = left    # left child (values < data) or None
        self.right = right  # right child (values >= data) or None

    def __repr__(self):
        return str(self.data)

    def insert(self, data):
        """Insert ``data`` into the subtree rooted at this node.

        BST rule: values smaller than ``self.data`` go left, everything else
        goes right, recursing until an empty slot is found.

        Fix: the previous guard ``if data:`` silently rejected any falsy
        value (e.g. 0); only ``None`` is ignored now.
        """
        if data is None:
            return
        if data < self.data:
            if self.left is None:
                self.left = TreeNode(data=data)
            else:
                self.left.insert(data=data)
        else:
            if self.right is None:
                self.right = TreeNode(data=data)
            else:
                self.right.insert(data=data)
# Build the sample BST exercised by the traversal demos below.
t = TreeNode(25)
for value in (15, 50, 10, 22, 35, 70, 4, 12, 18, 24, 31, 44, 66, 90):
    t.insert(value)
# -----------------------------------------------------------------------------------------------------------------------
"""
The most important points is, BFS starts visiting nodes from root while DFS starts visiting nodes from leaves.
So if our problem is to search something that is more likely to closer to root, we would prefer BFS.
And if the target node is close to a leaf, we would prefer DFS.
"""
# Breadth First Search, a.k.a. level-order traversal (note: NOT inorder)
# https://www.geeksforgeeks.org/level-order-tree-traversal/
def bfs_recursive(root, frontier=None):
    """Print the tree in breadth-first (level) order, one level per call.

    Fixes two defects in the previous version:
    * a mutable default argument (a ``defaultdict`` shared across calls, so
      a second traversal printed nothing), and
    * an output order that interleaved levels instead of being a true
      level-order traversal.

    ``frontier`` is internal: the list of nodes forming the current level.
    """
    if frontier is None:
        frontier = [root] if root else []
    if not frontier:
        return
    next_level = []
    for node in frontier:
        print(node, end=" ")
        if node.left:
            next_level.append(node.left)
        if node.right:
            next_level.append(node.right)
    bfs_recursive(root, next_level)
print("bfs_recursive")
bfs_recursive(t)
print()
def bfs_queue(root):
    """Iterative breadth-first (level-order) traversal.

    Prints one value per node, space separated.  Uses ``collections.deque``
    so each dequeue is O(1) — the previous ``list.pop(0)`` was O(n) per
    node.  A ``None`` root is a no-op instead of raising AttributeError.
    """
    from collections import deque

    if root is None:
        return
    queue = deque([root])  # FIFO
    while queue:
        current_node = queue.popleft()
        print(current_node, end=" ")
        if current_node.left:
            queue.append(current_node.left)
        if current_node.right:
            queue.append(current_node.right)
# Demo: queue-based level-order traversal of the sample tree.
print("bfs_queue")
bfs_queue(t)
print()
# -----------------------------------------------------------------------------------------------------------------------
# Depth First Search
def inorder(root: TreeNode):
    """In-order (left, node, right) traversal; prints values space-separated.

    For a binary search tree this yields the values in sorted order.
    """
    if root is None:
        return
    inorder(root.left)
    print(root, end=" ")
    inorder(root.right)
def inorder_stack(root):
    """Iterative in-order traversal using an explicit stack.

    Walks as far left as possible, then emits nodes on the way back up
    before switching to each node's right subtree.
    """
    pending = []
    node = root
    while pending or node is not None:
        while node is not None:
            pending.append(node)
            node = node.left
        node = pending.pop()
        print(node.data, end=" ")
        node = node.right
# Demo: recursive and stack-based in-order traversals (same output).
print("inorder")
inorder(t)
print("\ninorder_stack")
inorder_stack(t)
# -----------------------------------------------------------------------------------------------------------------------
def preorder(root: TreeNode):
    """Pre-order (node, left, right) traversal; prints values space-separated."""
    if root is None:
        return
    print(root, end=" ")
    preorder(root.left)
    preorder(root.right)
def iterative_preorder(root: TreeNode):
    """Iterative pre-order traversal using an explicit stack."""
    pending = [root]
    while pending:
        node = pending.pop()
        print(node.data, end=" ")
        # Push the right child first so the left subtree is processed first.
        for child in (node.right, node.left):
            if child:
                pending.append(child)
def postorder(root: TreeNode):
    """Post-order (left, right, node) traversal; prints values space-separated."""
    if root is None:
        return
    postorder(root.left)
    postorder(root.right)
    print(root, end=" ")
# Demo: recursive and iterative pre-order traversals (same output).
print("\npreorder")
preorder(t)
print("\niterative_preorder")
iterative_preorder(t)
print("\npostorder"); postorder(t) |
code-dot-org/code-dot-org | apps/src/applab/dropletConfig.js | /* global dashboard */
import $ from 'jquery';
import * as api from './api';
import dontMarshalApi from '../dontMarshalApi';
import {dropletStringBlocks, dropletArrayBlocks} from '../dropletUtils';
import consoleApi from '../consoleApi';
import * as audioApi from '@cdo/apps/lib/util/audioApi';
import audioApiDropletConfig from '@cdo/apps/lib/util/audioApiDropletConfig';
import * as timeoutApi from '@cdo/apps/lib/util/timeoutApi';
import * as makerApi from '@cdo/apps/lib/kits/maker/api';
import color from '../util/color';
import getAssetDropdown from '../assetManagement/getAssetDropdown';
import {getTables, getColumns} from '@cdo/apps/storage/getColumnDropdown';
import ChartApi from './ChartApi';
import * as elementUtils from './designElements/elementUtils';
import {
setPropertyDropdown,
setPropertyValueSelector
} from './setPropertyDropdown';
import {getStore} from '../redux';
import * as applabConstants from './constants';
// Default design-mode app dimensions, kept as strings because droplet block
// params are source-text snippets.  Height excludes the footer area.
var DEFAULT_WIDTH = applabConstants.APP_WIDTH.toString();
var DEFAULT_HEIGHT = (
  applabConstants.APP_HEIGHT - applabConstants.FOOTER_HEIGHT
).toString();
// `typeFilter` comes first (flipped from showAssetManager's order) so
// callers can bind it, e.g. chooseAsset.bind(null, 'image').
function chooseAsset(typeFilter, callback) {
  const options = {
    showUnderageWarning: !getStore().getState().pageConstants.is13Plus
  };
  dashboard.assets.showAssetManager(callback, typeFilter, null, options);
}
// Configure shared APIs for App Lab.
// Wrapped in a function because window.Applab does not exist yet when this
// module is evaluated; the lookup happens at call time instead.
function applabExecuteCmd(...args) {
  return Applab.executeCmd.apply(Applab, args);
}
[audioApi, timeoutApi, makerApi].forEach(apiModule =>
  apiModule.injectExecuteCmd(applabExecuteCmd)
);
/**
 * Generate the list of screen ids (as quoted source snippets) for the
 * setScreen block's dropdown.
 * @returns {string[]} e.g. ['"screen1"', '"screen2"']
 */
function getScreenIds() {
  const ids = [];
  // jQuery-style iteration: `this` is the DOM element for each screen.
  elementUtils.getScreens().each(function() {
    ids.push('"' + elementUtils.getId(this) + '"');
  });
  return ids;
}
/**
* @param {string?} selector Filters to ids on elements that match selector, or
* all elements if undefined
* @returns {function} Dropdown function that returns a list of ids for the selector
*/
function idDropdownWithSelector(selector) {
  // Return a thunk so the id list reflects the app's elements at the time
  // the dropdown is requested, not at module load.
  return () => Applab.getIdDropdown(selector);
}
// Basic dropdown that shows ids for all DOM elements in the applab app.
// Keyed by parameter index (0 = the block's first argument); the value is a
// function, presumably invoked when the dropdown is shown — see the block
// format notes at the top of dropletUtils.js.
var ID_DROPDOWN_PARAM_0 = {
  0: idDropdownWithSelector()
};
// NOTE: the format of each entry in `blocks` below is documented at the top
// of apps/src/dropletUtils.js.
export var blocks = [
{
func: 'onEvent',
parent: api,
category: 'UI controls',
paletteParams: ['id', 'type', 'callback'],
params: ['"id"', '"click"', 'function( ) {\n \n}'],
allowFunctionDrop: {2: true},
dropdown: {
0: idDropdownWithSelector(),
1: [
'"click"',
'"change"',
'"keyup"',
'"keydown"',
'"keypress"',
'"mousemove"',
'"mousedown"',
'"mouseup"',
'"mouseover"',
'"mouseout"',
'"input"'
]
}
},
{
func: 'button',
parent: api,
category: 'UI controls',
paletteParams: ['id', 'text'],
params: ['"id"', '"text"']
},
{
func: 'textInput',
parent: api,
category: 'UI controls',
paletteParams: ['id', 'text'],
params: ['"id"', '"text"']
},
{
func: 'textLabel',
parent: api,
category: 'UI controls',
paletteParams: ['id', 'text'],
params: ['"id"', '"text"']
},
{
func: 'dropdown',
parent: api,
category: 'UI controls',
paramButtons: {minArgs: 1},
paletteParams: ['id', 'option1', 'etc'],
params: ['"id"', '"option1"', '"etc"']
},
{
func: 'getText',
parent: api,
category: 'UI controls',
paletteParams: ['id'],
params: ['"id"'],
dropdown: ID_DROPDOWN_PARAM_0,
type: 'value'
},
{
func: 'setText',
parent: api,
category: 'UI controls',
paletteParams: ['id', 'text'],
params: ['"id"', '"text"'],
dropdown: ID_DROPDOWN_PARAM_0
},
{
func: 'getNumber',
parent: api,
category: 'UI controls',
paletteParams: ['id'],
params: ['"id"'],
dropdown: ID_DROPDOWN_PARAM_0,
type: 'value'
},
{
func: 'setNumber',
parent: api,
category: 'UI controls',
paletteParams: ['id', 'number'],
params: ['"id"', '0'],
dropdown: ID_DROPDOWN_PARAM_0
},
{
func: 'checkbox',
parent: api,
category: 'UI controls',
paletteParams: ['id', 'checked'],
params: ['"id"', 'false'],
dropdown: {1: ['true', 'false']}
},
{
func: 'radioButton',
parent: api,
category: 'UI controls',
paramButtons: {minArgs: 2, maxArgs: 3},
paletteParams: ['id', 'checked'],
params: ['"id"', 'false', '"group"'],
dropdown: {1: ['true', 'false']}
},
{
func: 'getChecked',
parent: api,
category: 'UI controls',
paletteParams: ['id'],
params: ['"id"'],
type: 'value'
},
{
func: 'setChecked',
parent: api,
category: 'UI controls',
paletteParams: ['id', 'checked'],
params: ['"id"', 'true'],
dropdown: {1: ['true', 'false']}
},
{
func: 'image',
parent: api,
category: 'UI controls',
paletteParams: ['id', 'url'],
params: ['"id"', '"https://code.org/images/logo.png"'],
dropdown: {
1: function() {
return getAssetDropdown('image');
}
},
assetTooltip: {1: chooseAsset.bind(null, 'image')}
},
{
func: 'getImageURL',
parent: api,
category: 'UI controls',
paletteParams: ['id'],
params: ['"id"'],
dropdown: {
0: function() {
return [
...idDropdownWithSelector('img')(),
...idDropdownWithSelector('.img-upload')()
];
}
},
type: 'value'
},
{
func: 'setImageURL',
parent: api,
category: 'UI controls',
paletteParams: ['id', 'url'],
params: ['"id"', '"https://code.org/images/logo.png"'],
dropdown: {
0: idDropdownWithSelector('img'),
1: () => {
return getAssetDropdown('image');
}
},
assetTooltip: {1: chooseAsset.bind(null, 'image')}
},
{...audioApiDropletConfig.playSound, category: 'UI controls'},
{...audioApiDropletConfig.stopSound, category: 'UI controls'},
{...audioApiDropletConfig.playSpeech, category: 'UI controls'},
{
func: 'showElement',
parent: api,
category: 'UI controls',
paletteParams: ['id'],
params: ['"id"'],
dropdown: ID_DROPDOWN_PARAM_0
},
{
func: 'hideElement',
parent: api,
category: 'UI controls',
paletteParams: ['id'],
params: ['"id"'],
dropdown: ID_DROPDOWN_PARAM_0
},
{
func: 'deleteElement',
parent: api,
category: 'UI controls',
paletteParams: ['id'],
params: ['"id"'],
dropdown: ID_DROPDOWN_PARAM_0
},
{
func: 'setPosition',
parent: api,
category: 'UI controls',
paramButtons: {minArgs: 3, maxArgs: 5},
paletteParams: ['id', 'x', 'y', 'width', 'height'],
params: ['"id"', '0', '0', '100', '100'],
dropdown: ID_DROPDOWN_PARAM_0
},
{
func: 'setSize',
parent: api,
category: 'UI controls',
paletteParams: ['id', 'width', 'height'],
params: ['"id"', '100', '100'],
dropdown: ID_DROPDOWN_PARAM_0
},
{
func: 'setProperty',
parent: api,
category: 'UI controls',
paletteParams: ['id', 'property', 'value'],
params: ['"id"', '"width"', '100'],
dropdown: {
0: idDropdownWithSelector(),
1: setPropertyDropdown(true),
2: setPropertyValueSelector()
}
},
{
func: 'getProperty',
parent: api,
category: 'UI controls',
paletteParams: ['id', 'property'],
params: ['"id"', '"width"'],
dropdown: {0: idDropdownWithSelector(), 1: setPropertyDropdown(false)},
type: 'value'
},
{
func: 'write',
parent: api,
category: 'UI controls',
paletteParams: ['text'],
params: ['"text"']
},
{
func: 'getXPosition',
parent: api,
category: 'UI controls',
paletteParams: ['id'],
params: ['"id"'],
dropdown: ID_DROPDOWN_PARAM_0,
type: 'value'
},
{
func: 'getYPosition',
parent: api,
category: 'UI controls',
paletteParams: ['id'],
params: ['"id"'],
dropdown: ID_DROPDOWN_PARAM_0,
type: 'value'
},
{
func: 'setScreen',
parent: api,
category: 'UI controls',
paletteParams: ['screenId'],
params: ['"screen1"'],
dropdown: {0: getScreenIds}
},
{
func: 'rgb',
parent: api,
category: 'UI controls',
paramButtons: {minArgs: 3, maxArgs: 4},
paletteParams: ['r', 'g', 'b', 'a'],
params: ['250', '0', '75', '0.5'],
type: 'value'
},
{
func: 'open',
parent: api,
category: 'UI controls',
paletteParams: ['url'],
params: ['"https://code.org"']
},
{
func: 'createCanvas',
parent: api,
category: 'Canvas',
paramButtons: {minArgs: 1, maxArgs: 3},
paletteParams: ['id', 'width', 'height'],
params: ['"id"', DEFAULT_WIDTH, DEFAULT_HEIGHT]
},
{
func: 'setActiveCanvas',
parent: api,
category: 'Canvas',
paletteParams: ['id'],
params: ['"id"'],
dropdown: {0: idDropdownWithSelector('canvas')}
},
{
func: 'line',
parent: api,
category: 'Canvas',
paletteParams: ['x1', 'y1', 'x2', 'y2'],
params: ['0', '0', DEFAULT_WIDTH / 2, DEFAULT_HEIGHT / 2]
},
{
func: 'circle',
parent: api,
category: 'Canvas',
paletteParams: ['x', 'y', 'radius'],
params: [DEFAULT_WIDTH / 2, DEFAULT_HEIGHT / 2, '100']
},
{
func: 'rect',
parent: api,
category: 'Canvas',
paletteParams: ['x', 'y', 'width', 'height'],
params: ['80', '120', DEFAULT_WIDTH / 2, DEFAULT_HEIGHT / 2]
},
{
func: 'setStrokeWidth',
parent: api,
category: 'Canvas',
paletteParams: ['width'],
params: ['3']
},
{
func: 'setStrokeColor',
parent: api,
category: 'Canvas',
paletteParams: ['color'],
params: ['"red"'],
dropdown: {0: ['"red"', 'rgb(255,0,0)', 'rgb(255,0,0,0.5)', '"#FF0000"']}
},
{
func: 'setFillColor',
parent: api,
category: 'Canvas',
paletteParams: ['color'],
params: ['"yellow"'],
dropdown: {
0: ['"yellow"', 'rgb(255,255,0)', 'rgb(255,255,0,0.5)', '"#FFFF00"']
}
},
// drawImage has been deprecated in favor of drawImageURL
{
func: 'drawImage',
parent: api,
category: 'Canvas',
paletteParams: ['id', 'x', 'y'],
params: ['"id"', '0', '0'],
dropdown: {0: idDropdownWithSelector('img')},
noAutocomplete: true
},
{
func: 'drawImageURL',
parent: api,
category: 'Canvas',
paramButtons: {minArgs: 1, maxArgs: 6},
paletteParams: ['url'],
params: ['"https://code.org/images/logo.png"'],
allowFunctionDrop: {1: true, 5: true}
},
{
func: 'getImageData',
parent: api,
category: 'Canvas',
paletteParams: ['x', 'y', 'width', 'height'],
params: ['0', '0', DEFAULT_WIDTH, DEFAULT_HEIGHT],
type: 'value'
},
{
func: 'putImageData',
parent: api,
category: 'Canvas',
paletteParams: ['imgData', 'x', 'y'],
params: ['imgData', '0', '0']
},
{func: 'clearCanvas', parent: api, category: 'Canvas'},
{
func: 'getRed',
parent: dontMarshalApi,
category: 'Canvas',
paletteParams: ['imgData', 'x', 'y'],
params: ['imgData', '0', '0'],
type: 'value',
dontMarshal: true
},
{
func: 'getGreen',
parent: dontMarshalApi,
category: 'Canvas',
paletteParams: ['imgData', 'x', 'y'],
params: ['imgData', '0', '0'],
type: 'value',
dontMarshal: true
},
{
func: 'getBlue',
parent: dontMarshalApi,
category: 'Canvas',
paletteParams: ['imgData', 'x', 'y'],
params: ['imgData', '0', '0'],
type: 'value',
dontMarshal: true
},
{
func: 'getAlpha',
parent: dontMarshalApi,
category: 'Canvas',
paletteParams: ['imgData', 'x', 'y'],
params: ['imgData', '0', '0'],
type: 'value',
dontMarshal: true
},
{
func: 'setRed',
parent: dontMarshalApi,
category: 'Canvas',
paletteParams: ['imgData', 'x', 'y', 'r'],
params: ['imgData', '0', '0', '255'],
dontMarshal: true
},
{
func: 'setGreen',
parent: dontMarshalApi,
category: 'Canvas',
paletteParams: ['imgData', 'x', 'y', 'g'],
params: ['imgData', '0', '0', '255'],
dontMarshal: true
},
{
func: 'setBlue',
parent: dontMarshalApi,
category: 'Canvas',
paletteParams: ['imgData', 'x', 'y', 'b'],
params: ['imgData', '0', '0', '255'],
dontMarshal: true
},
{
func: 'setAlpha',
parent: dontMarshalApi,
category: 'Canvas',
paletteParams: ['imgData', 'x', 'y', 'a'],
params: ['imgData', '0', '0', '255'],
dontMarshal: true
},
{
func: 'setRGB',
parent: dontMarshalApi,
category: 'Canvas',
paramButtons: {minArgs: 6, maxArgs: 7},
paletteParams: ['imgData', 'x', 'y', 'r', 'g', 'b'],
params: ['imgData', '0', '0', '255', '255', '255'],
dontMarshal: true
},
{
func: 'getColumn',
parent: api,
category: 'Data',
paletteParams: ['table', 'column'],
params: ['"mytable"', '"mycolumn"'],
nativeIsAsync: true,
type: 'value',
dropdown: {
0: getTables(),
1: getColumns()
}
},
{
func: 'startWebRequest',
parent: api,
category: 'Data',
paletteParams: ['url', 'callback'],
params: [
'"https://en.wikipedia.org/w/api.php?origin=*&action=parse&format=json&prop=text&page=computer§ion=1&disablelimitreport=true"',
'function(status, type, content) {\n \n}'
],
allowFunctionDrop: {1: true}
},
{
func: 'startWebRequestSync',
parent: api,
category: 'Data',
paletteParams: ['url'],
params: [
'"https://en.wikipedia.org/w/api.php?origin=*&action=parse&format=json&prop=text&page=computer§ion=1&disablelimitreport=true"'
],
nativeIsAsync: true,
noAutocomplete: true
},
{
func: 'setKeyValue',
parent: api,
category: 'Data',
paletteParams: ['key', 'value', 'callback'],
params: ['"key"', '"value"', 'function () {\n \n}'],
allowFunctionDrop: {2: true, 3: true}
},
{
func: 'setKeyValueSync',
parent: api,
category: 'Data',
paletteParams: ['key', 'value'],
params: ['"key"', '"value"'],
nativeIsAsync: true,
noAutocomplete: true
},
{
func: 'getKeyValue',
parent: api,
category: 'Data',
paletteParams: ['key', 'callback'],
params: ['"key"', 'function (value) {\n \n}'],
allowFunctionDrop: {1: true, 2: true}
},
{
func: 'getKeyValueSync',
parent: api,
category: 'Data',
paletteParams: ['key'],
params: ['"key"'],
type: 'value',
nativeIsAsync: true,
noAutocomplete: true
},
{
func: 'createRecord',
parent: api,
category: 'Data',
paletteParams: ['table', 'record', 'callback'],
params: ['"mytable"', "{name:'Alice'}", 'function(record) {\n \n}'],
allowFunctionDrop: {2: true, 3: true}
},
{
func: 'createRecordSync',
parent: api,
category: 'Data',
paletteParams: ['table', 'record'],
params: ['"mytable"', "{name:'Alice'}"],
allowFunctionDrop: {2: true},
nativeIsAsync: true,
type: 'either'
},
{
func: 'readRecords',
parent: api,
category: 'Data',
paletteParams: ['table', 'terms', 'callback'],
params: [
'"mytable"',
'{}',
"function(records) {\n for (var i =0; i < records.length; i++) {\n console.log(records[i].id + ': ' + records[i].name);\n }\n}"
],
allowFunctionDrop: {2: true, 3: true}
},
{
func: 'readRecordsSync',
parent: api,
category: 'Data',
paletteParams: ['table'],
params: ['"mytable"'],
nativeIsAsync: true,
type: 'either'
},
{
func: 'updateRecord',
parent: api,
category: 'Data',
paletteParams: ['table', 'record', 'callback'],
params: [
'"mytable"',
"{id:1, name:'Bob'}",
'function(record, success) {\n \n}'
],
allowFunctionDrop: {2: true, 3: true}
},
{
func: 'updateRecordSync',
parent: api,
category: 'Data',
paletteParams: ['table', 'record'],
params: ['"mytable"', "{id:1, name:'Bob'}"],
allowFunctionDrop: {2: true},
nativeIsAsync: true,
type: 'either'
},
{
func: 'deleteRecord',
parent: api,
category: 'Data',
paletteParams: ['table', 'record', 'callback'],
params: ['"mytable"', '{id:1}', 'function(success) {\n \n}'],
allowFunctionDrop: {2: true, 3: true}
},
{
func: 'deleteRecordSync',
parent: api,
category: 'Data',
paletteParams: ['table', 'record'],
params: ['"mytable"', '{id:1}'],
allowFunctionDrop: {2: true},
nativeIsAsync: true,
type: 'either'
},
{
func: 'onRecordEvent',
parent: api,
category: 'Data',
paletteParams: ['table', 'callback'],
params: [
'"mytable"',
"function(record, eventType) {\n if (eventType === 'create') {\n textLabel('id', 'record with id ' + record.id + ' was created');\n } \n}"
],
allowFunctionDrop: {1: true}
},
{func: 'getUserId', parent: api, category: 'Data', type: 'value'},
{
func: 'drawChart',
parent: api,
category: 'Data',
paramButtons: {minArgs: 3, maxArgs: 5},
paletteParams: ['chartId', 'chartType', 'chartData'],
params: [
'"chartId"',
'"bar"',
'[\n\t{ label: "Row 1", value: 1 },\n\t{ label: "Row 2", value: 2 }\n]'
],
allowFunctionDrop: {4: true},
dropdown: {
0: idDropdownWithSelector('.chart'),
1: ChartApi.getChartTypeDropdown
}
},
{
func: 'drawChartFromRecords',
parent: api,
category: 'Data',
paramButtons: {minArgs: 4, maxArgs: 6},
paletteParams: ['chartId', 'chartType', 'tableName', 'columns'],
params: ['"chartId"', '"bar"', '"mytable"', '["columnOne", "columnTwo"]'],
allowFunctionDrop: {5: true},
dropdown: {
0: idDropdownWithSelector('.chart'),
1: ChartApi.getChartTypeDropdown
}
},
{
func: 'moveForward',
parent: api,
category: 'Turtle',
paletteParams: ['pixels'],
params: ['25'],
dropdown: {0: ['25', '50', '100', '200']}
},
{
func: 'moveBackward',
parent: api,
category: 'Turtle',
paletteParams: ['pixels'],
params: ['25'],
dropdown: {0: ['25', '50', '100', '200']}
},
{
func: 'move',
parent: api,
category: 'Turtle',
paletteParams: ['x', 'y'],
params: ['25', '25'],
dropdown: {0: ['25', '50', '100', '200'], 1: ['25', '50', '100', '200']}
},
{
func: 'moveTo',
parent: api,
category: 'Turtle',
paletteParams: ['x', 'y'],
params: ['0', '0']
},
{
func: 'dot',
parent: api,
category: 'Turtle',
paletteParams: ['radius'],
params: ['5'],
dropdown: {0: ['1', '5', '10']}
},
{
func: 'turnRight',
parent: api,
category: 'Turtle',
paramButtons: {minArgs: 0, maxArgs: 1},
paletteParams: ['angle'],
params: ['90'],
dropdown: {0: ['30', '45', '60', '90']}
},
{
func: 'turnLeft',
parent: api,
category: 'Turtle',
paramButtons: {minArgs: 0, maxArgs: 1},
paletteParams: ['angle'],
params: ['90'],
dropdown: {0: ['30', '45', '60', '90']}
},
{
func: 'turnTo',
parent: api,
category: 'Turtle',
paletteParams: ['angle'],
params: ['0'],
dropdown: {0: ['0', '90', '180', '270']}
},
{
func: 'arcRight',
parent: api,
category: 'Turtle',
paletteParams: ['angle', 'radius'],
params: ['90', '25'],
dropdown: {0: ['30', '45', '60', '90'], 1: ['25', '50', '100', '200']}
},
{
func: 'arcLeft',
parent: api,
category: 'Turtle',
paletteParams: ['angle', 'radius'],
params: ['90', '25'],
dropdown: {0: ['30', '45', '60', '90'], 1: ['25', '50', '100', '200']}
},
{func: 'getX', parent: api, category: 'Turtle', type: 'value'},
{func: 'getY', parent: api, category: 'Turtle', type: 'value'},
{func: 'getDirection', parent: api, category: 'Turtle', type: 'value'},
{func: 'penUp', parent: api, category: 'Turtle'},
{func: 'penDown', parent: api, category: 'Turtle'},
{
func: 'penWidth',
parent: api,
category: 'Turtle',
paletteParams: ['width'],
params: ['3'],
dropdown: {0: ['1', '3', '5']}
},
{
func: 'penColor',
parent: api,
category: 'Turtle',
paletteParams: ['color'],
params: ['"red"'],
dropdown: {0: ['"red"', 'rgb(255,0,0)', 'rgb(255,0,0,0.5)', '"#FF0000"']}
},
{
func: 'penRGB',
parent: api,
category: 'Turtle',
paramButtons: {minArgs: 3, maxArgs: 4},
paletteParams: ['r', 'g', 'b'],
params: ['120', '180', '200']
},
{func: 'show', parent: api, category: 'Turtle'},
{func: 'hide', parent: api, category: 'Turtle'},
{
func: 'speed',
parent: api,
category: 'Turtle',
paletteParams: ['value'],
params: ['50'],
dropdown: {0: ['25', '50', '75', '100']}
},
{...timeoutApi.dropletConfig.setTimeout},
{...timeoutApi.dropletConfig.clearTimeout},
{...timeoutApi.dropletConfig.setInterval},
{...timeoutApi.dropletConfig.clearInterval},
{...timeoutApi.dropletConfig.timedLoop},
{...timeoutApi.dropletConfig.stopTimedLoop},
{
func: 'console.log',
parent: consoleApi,
category: 'Variables',
paletteParams: ['message'],
params: ['"message"']
},
...dropletStringBlocks,
...dropletArrayBlocks,
{
func: 'imageUploadButton',
parent: api,
category: 'Advanced',
params: ['"id"', '"text"']
},
{
func: 'container',
parent: api,
category: 'Advanced',
params: ['"id"', '"html"']
},
{
func: 'innerHTML',
parent: api,
category: 'Advanced',
params: ['"id"', '"html"']
},
{
func: 'setParent',
parent: api,
category: 'Advanced',
params: ['"id"', '"parentId"']
},
{
func: 'setStyle',
parent: api,
category: 'Advanced',
params: ['"id"', '"color:red;"']
},
{
func: 'getAttribute',
parent: api,
category: 'Advanced',
params: ['"id"', '"scrollHeight"'],
type: 'value',
noAutocomplete: true
},
{
func: 'setAttribute',
parent: api,
category: 'Advanced',
params: ['"id"', '"scrollHeight"', '200'],
noAutocomplete: true
},
{
func: 'setSelectionRange',
parent: api,
category: 'Advanced',
paletteParams: ['id', 'start', 'end'],
params: ['"id"', '0', '0'],
paramButtons: {minArgs: 3, maxArgs: 4}
},
{
func: 'comment_Goals_1',
block: '// Goal 1',
docFunc: 'comment',
category: 'Goals',
noAutocomplete: true
},
{
func: 'comment_Goals_2',
block: '// Goal 2',
docFunc: 'comment',
category: 'Goals',
noAutocomplete: true
},
{
func: 'comment_Goals_3',
block: '// Goal 3',
docFunc: 'comment',
category: 'Goals',
noAutocomplete: true
},
{
func: 'comment_Goals_4',
block: '// Goal 4',
docFunc: 'comment',
category: 'Goals',
noAutocomplete: true
},
{
func: 'comment_Goals_5',
block: '// Goal 5',
docFunc: 'comment',
category: 'Goals',
noAutocomplete: true
},
{
func: 'comment_Goals_6',
block: '// Goal 6',
docFunc: 'comment',
category: 'Goals',
noAutocomplete: true
},
{
func: 'comment_Goals_7',
block: '// Goal 7',
docFunc: 'comment',
category: 'Goals',
noAutocomplete: true
},
{
func: 'comment_Goals_8',
block: '// Goal 8',
docFunc: 'comment',
category: 'Goals',
noAutocomplete: true
},
{
func: 'comment_Goals_9',
block: '// Goal 9',
docFunc: 'comment',
category: 'Goals',
noAutocomplete: true
},
{
func: 'comment_Goals_10',
block: '// Goal 10',
docFunc: 'comment',
category: 'Goals',
noAutocomplete: true
},
{
func: 'comment_Goals_11',
block: '// Goal 11',
docFunc: 'comment',
category: 'Goals',
noAutocomplete: true
},
{
func: 'comment_Goals_12',
block: '// Goal 12',
docFunc: 'comment',
category: 'Goals',
noAutocomplete: true
},
{
func: 'comment_Goals_13',
block: '// Goal 13',
docFunc: 'comment',
category: 'Goals',
noAutocomplete: true
},
{
func: 'comment_Goals_14',
block: '// Goal 14',
docFunc: 'comment',
category: 'Goals',
noAutocomplete: true
},
{
func: 'comment_Goals_15',
block: '// Goal 15',
docFunc: 'comment',
category: 'Goals',
noAutocomplete: true
},
{
func: 'comment_Goals_16',
block: '// Goal 16',
docFunc: 'comment',
category: 'Goals',
noAutocomplete: true
},
{
func: 'comment_Goals_17',
block: '// Goal 17',
docFunc: 'comment',
category: 'Goals',
noAutocomplete: true
},
{
func: 'comment_Goals_18',
block: '// Goal 18',
docFunc: 'comment',
category: 'Goals',
noAutocomplete: true
},
{
func: 'comment_Goals_19',
block: '// Goal 19',
docFunc: 'comment',
category: 'Goals',
noAutocomplete: true
},
{
func: 'comment_Goals_20',
block: '// Goal 20',
docFunc: 'comment',
category: 'Goals',
noAutocomplete: true
},
{
func: 'getPrediction',
parent: api,
category: 'Data',
paletteParams: ['name', 'id', 'data', 'callback'],
params: ['"name"', '"id"', 'data', 'function (value) {\n \n}']
},
{
func: 'declareAssign_object',
block: `var object = {"key": "value"};`,
category: 'Variables',
noAutocomplete: true
},
{
func: 'getValue',
parent: dontMarshalApi,
category: 'Variables',
paletteParams: ['object', '"key"'],
params: ['{"key": "value"}', '"key"'],
dontMarshal: true
},
{
func: 'addPair',
parent: dontMarshalApi,
category: 'Variables',
paletteParams: ['object', '"key"', '"value"'],
params: ['object', '"key"', '"value"'],
dontMarshal: true
}
];
// Droplet palette categories, keyed by display name.  Each entry's `blocks`
// array is populated elsewhere from the `blocks` export above; `rgb`
// supplies the palette swatch color.
// NOTE(review): the Goals category has no `rgb` entry — presumably it falls
// back to a default; confirm in the palette renderer.
export const categories = {
  'UI controls': {
    id: 'uicontrols',
    color: 'yellow',
    rgb: color.droplet_yellow,
    blocks: []
  },
  Canvas: {
    id: 'canvas',
    color: 'red',
    rgb: color.droplet_red,
    blocks: []
  },
  Data: {
    id: 'data',
    color: 'lightgreen',
    rgb: color.droplet_light_green,
    blocks: []
  },
  Turtle: {
    id: 'turtle',
    color: 'cyan',
    rgb: color.droplet_cyan,
    blocks: []
  },
  Advanced: {
    id: 'advanced',
    color: 'blue',
    rgb: color.droplet_bright_blue,
    blocks: []
  },
  Goals: {
    id: 'goals',
    color: 'deeppurple',
    blocks: []
  }
};
/*
 * Show an 'Examples' link in droplet tooltips that opens the relevant
 * documentation in a lightbox.
 */
export var showExamplesLink = true;
/*
 * Show ace autocomplete dropdowns for each parameter, driven by the
 * `dropdown` properties declared on the blocks above.
 */
export var showParamDropdowns = true;
|
poiuyqwert/SCMS | File Handlers/CHK/Sections/CHKSectionIVER.cpp | <filename>File Handlers/CHK/Sections/CHKSectionIVER.cpp
//
// CHKSectionIVER.cpp
// SCMS
//
// Created by <NAME> on 2016-01-04.
//
//
#include "CHKSectionIVER.h"

#include <cstring>

#include "CHK.h"
#include "CHKSectionVER.h"
// IVER has no specific map-version or game-mode requirement.
const CHKRequirements CHKSectionIVER::Requirements = {CHKVer::None, CHKGameMode::None};
// Defaults to the Release IVER value; if the owning CHK already carries a
// VER section marking the map as Beta, mirror that here.
CHKSectionIVER::CHKSectionIVER(CHK *chk)
    : CHKSection(chk)
{
    this->version = CHKSectionIVER::Release;
    if (this->chk) {
        // Non-throwing lookup (false, false): may return nullptr.
        CHKSectionVER *verSect = this->chk->get_section<CHKSectionVER>(false, false);
        if (verSect != nullptr && verSect->get_version() == CHKSectionVER::Beta) {
            this->version = CHKSectionIVER::Beta;
        }
    }
}
// Parses the 16-bit IVER value from the raw section payload.
// Uses memcpy instead of the old `*(u16 *)data` cast, which was undefined
// behavior on unaligned data (and a strict-aliasing violation), and guards
// against undersized sections instead of reading past the buffer.
void CHKSectionIVER::load_data(const u8 *data, u32 size) {
    if (data == nullptr || size < sizeof(u16))
        return; // Malformed/truncated section; keep the constructor default.
    std::memcpy(&this->version, data, sizeof(u16));
}
// Serializes the 16-bit IVER value into a newly allocated buffer.
// Allocates with `new u8[...]` so freeing the returned u8* with delete[]
// is well-defined — previously this was `new u16` cast to u8*, which is UB
// to delete as a u8 array.  (Assumes the caller frees with delete[] —
// TODO confirm against CHKSection's ownership contract.)
u8* CHKSectionIVER::save_data(u32 &size) {
    size = sizeof(u16);
    u8 *buffer = new u8[sizeof(u16)];
    std::memcpy(buffer, &this->version, sizeof(u16));
    return buffer;
}
|
aholkner/bacon | native/Source/Bacon/Keyboard.cpp | #include "Bacon.h"
#include "BaconInternal.h"
#include <cstring>
#include <deque>
using namespace std;
// Current state for every key: non-zero while the key is held down.
static char s_KeyStates[Key_MaxKey];
// Optional callback notified on every key state transition.
static Bacon_KeyEventHandler s_Handler = nullptr;
// Resets the module: every key starts in the released state.
void Keyboard_Init()
{
    for (char& state : s_KeyStates)
        state = 0;
}
void Keyboard_Shutdown()
{
    // Nothing to release; all keyboard state lives in static storage.
}
// Records a key press/release.  Fires the registered handler only on an
// actual state transition; out-of-range keys are ignored for state
// tracking.  Ctrl+` toggles the debug overlay on the key-down edge.
void Keyboard_SetKeyState(int key, bool value)
{
    const bool inRange = (key >= 0) && (key < Key_MaxKey);
    const char newState = (char)value;
    if (inRange && s_KeyStates[key] != newState)
    {
        s_KeyStates[key] = newState;
        if (s_Handler)
            s_Handler(key, value);
    }

    if (value && key == Key_Backtick && s_KeyStates[Key_Ctrl])
        DebugOverlay_Toggle();
}
// Reports whether `key` is currently held.  Returns
// Bacon_Error_InvalidArgument for out-of-range keys, otherwise writes
// 0/1 to *outPressed and returns Bacon_Error_None.
int Bacon_GetKeyState(int key, int* outPressed)
{
    const bool validKey = (key >= 0) && (key < Key_MaxKey);
    if (!validKey)
        return Bacon_Error_InvalidArgument;

    *outPressed = s_KeyStates[key];
    return Bacon_Error_None;
}
// Registers the callback fired whenever a tracked key's state changes.
// Pass nullptr to remove the current handler.  Always succeeds.
int Bacon_SetKeyEventHandler(Bacon_KeyEventHandler handler)
{
    s_Handler = handler;
    return Bacon_Error_None;
}
|
N0hbdy/godot | modules/webrtc/webrtc_peer_connection_gdnative.cpp | <gh_stars>1-10
/*************************************************************************/
/* webrtc_peer_connection_gdnative.cpp */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2020 <NAME>, <NAME>. */
/* Copyright (c) 2014-2020 Godot Engine contributors (cf. AUTHORS.md). */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#ifdef WEBRTC_GDNATIVE_ENABLED
#include "webrtc_peer_connection_gdnative.h"
#include "core/io/resource_loader.h"
#include "modules/gdnative/nativescript/nativescript.h"
#include "webrtc_data_channel_gdnative.h"
const godot_net_webrtc_library *WebRTCPeerConnectionGDNative::default_library = NULL;
// Installs the GDNative library whose create_peer_connection() will back
// all WebRTCPeerConnectionGDNative instances.  Any previously registered
// library is detached first; it is cleared *before* unregistered() runs —
// presumably so that callback may safely register a replacement (confirm
// against the godot_net API contract).
Error WebRTCPeerConnectionGDNative::set_default_library(const godot_net_webrtc_library *p_lib) {
	if (default_library) {
		const godot_net_webrtc_library *old = default_library;
		default_library = NULL;
		old->unregistered();
	}
	default_library = p_lib;
	return OK; // Maybe add version check and fail accordingly
}
// Factory hook: constructs the wrapper and asks the registered GDNative
// library to attach its native implementation.  On failure the (still
// unconfigured) wrapper is returned anyway, per ERR_FAIL_* semantics;
// subsequent calls on it will fail with ERR_UNCONFIGURED.
WebRTCPeerConnection *WebRTCPeerConnectionGDNative::_create() {
	WebRTCPeerConnectionGDNative *obj = memnew(WebRTCPeerConnectionGDNative);
	ERR_FAIL_COND_V_MSG(!default_library, obj, "Default GDNative WebRTC implementation not defined.");
	// Call GDNative constructor
	Error err = (Error)default_library->create_peer_connection(obj);
	ERR_FAIL_COND_V_MSG(err != OK, obj, "GDNative default library constructor returned an error.");
	return obj;
}
void WebRTCPeerConnectionGDNative::_bind_methods() {
	// No script-visible bindings beyond those registered by the base class.
}
// Starts unconfigured; a native implementation must be attached via
// set_native_webrtc_peer_connection() before other methods succeed.
WebRTCPeerConnectionGDNative::WebRTCPeerConnectionGDNative() {
	interface = NULL;
}
// No cleanup here; the native side presumably manages the lifetime of
// `interface` and its data — TODO confirm.
WebRTCPeerConnectionGDNative::~WebRTCPeerConnectionGDNative() {
}
// The methods below are thin forwarders to the attached GDNative
// interface.  Each one fails with ERR_UNCONFIGURED (or a safe default
// value) when no native implementation has been attached yet.
Error WebRTCPeerConnectionGDNative::initialize(Dictionary p_config) {
	ERR_FAIL_COND_V(interface == NULL, ERR_UNCONFIGURED);
	return (Error)interface->initialize(interface->data, (const godot_dictionary *)&p_config);
}
// Wraps the natively created channel in a Ref; returns an empty Ref when
// unconfigured or when the native side returns NULL.
Ref<WebRTCDataChannel> WebRTCPeerConnectionGDNative::create_data_channel(String p_label, Dictionary p_options) {
	ERR_FAIL_COND_V(interface == NULL, NULL);
	return (WebRTCDataChannel *)interface->create_data_channel(interface->data, p_label.utf8().get_data(), (const godot_dictionary *)&p_options);
}
Error WebRTCPeerConnectionGDNative::create_offer() {
	ERR_FAIL_COND_V(interface == NULL, ERR_UNCONFIGURED);
	return (Error)interface->create_offer(interface->data);
}
Error WebRTCPeerConnectionGDNative::set_local_description(String p_type, String p_sdp) {
	ERR_FAIL_COND_V(interface == NULL, ERR_UNCONFIGURED);
	return (Error)interface->set_local_description(interface->data, p_type.utf8().get_data(), p_sdp.utf8().get_data());
}
Error WebRTCPeerConnectionGDNative::set_remote_description(String p_type, String p_sdp) {
	ERR_FAIL_COND_V(interface == NULL, ERR_UNCONFIGURED);
	return (Error)interface->set_remote_description(interface->data, p_type.utf8().get_data(), p_sdp.utf8().get_data());
}
Error WebRTCPeerConnectionGDNative::add_ice_candidate(String sdpMidName, int sdpMlineIndexName, String sdpName) {
	ERR_FAIL_COND_V(interface == NULL, ERR_UNCONFIGURED);
	return (Error)interface->add_ice_candidate(interface->data, sdpMidName.utf8().get_data(), sdpMlineIndexName, sdpName.utf8().get_data());
}
Error WebRTCPeerConnectionGDNative::poll() {
	ERR_FAIL_COND_V(interface == NULL, ERR_UNCONFIGURED);
	return (Error)interface->poll(interface->data);
}
void WebRTCPeerConnectionGDNative::close() {
	ERR_FAIL_COND(interface == NULL);
	interface->close(interface->data);
}
// Unconfigured connections report STATE_DISCONNECTED.
WebRTCPeerConnection::ConnectionState WebRTCPeerConnectionGDNative::get_connection_state() const {
	ERR_FAIL_COND_V(interface == NULL, STATE_DISCONNECTED);
	return (ConnectionState)interface->get_connection_state(interface->data);
}
// Called by the GDNative library to attach its implementation table.
void WebRTCPeerConnectionGDNative::set_native_webrtc_peer_connection(const godot_net_webrtc_peer_connection *p_impl) {
	interface = p_impl;
}
#endif // WEBRTC_GDNATIVE_ENABLED
|
Jovian-Dsouza/MOC-Detector | src/vis/vis_det.py | <gh_stars>100-1000
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import cv2
import numpy as np
import torch
import pickle
import sys
from tiny_opt import opts
from vis_dataset import VisualizationDataset
from build import build_tubes
from vis_utils import pkl_decode, vis_bbox, rgb2avi, video2frames, rgb2gif
sys.path.append("..")
from detector.stream_moc_det import MOCDetector
class PrefetchDataset(torch.utils.data.Dataset):
    """Wraps a VisualizationDataset and yields pre-processed K-frame windows.

    Keeps rolling buffers of already pre-processed frames so that, when the
    requested window is exactly one frame ahead of the previous one, only the
    newest frame has to be read and pre-processed.
    """

    def __init__(self, opt, dataset, pre_process, pre_process_single_frame):
        # pre_process handles a whole K-frame window; pre_process_single_frame
        # handles one incremental frame when the buffers can be reused.
        self.pre_process = pre_process
        self.pre_process_single_frame = pre_process_single_frame
        self.opt = opt
        self.nframes = dataset._nframes
        self.imagefile = dataset.imagefile
        self.flowfile = dataset.flowfile
        self.input_h = dataset._resize_height
        self.input_w = dataset._resize_width
        self.output_h = self.input_h // self.opt.down_ratio
        self.output_w = self.input_w // self.opt.down_ratio
        # Only process windows whose result pickle does not exist yet (resume support).
        self.indices = []
        for i in range(1, 1 + self.nframes - self.opt.K + 1):
            if not os.path.exists(self.outfile(i)):
                self.indices.append(i)
        # Rolling buffers of pre-processed frames (and their flipped variants).
        self.img_buffer = []
        self.flow_buffer = []
        self.img_buffer_flip = []
        self.flow_buffer_flip = []
        self.last_frame = -1
        # Original (un-resized) frame size, read from the first image on disk.
        self.h, self.w, _ = cv2.imread(self.imagefile(1)).shape
    def __getitem__(self, index):
        frame = self.indices[index]
        images = []
        flows = []
        video_tag = 0
        # video_tag == 1: this window directly follows the previous one, so the
        # rolling buffers can be reused; 0 means the buffers must be rebuilt.
        if frame == self.last_frame + 1:
            video_tag = 1
        else:
            video_tag = 0
        self.last_frame = frame
        if video_tag == 0:
            # Cold start: read and pre-process the full K-frame window.
            if self.opt.rgb_model != '':
                images = [cv2.imread(self.imagefile(frame + i)).astype(np.float32) for i in range(self.opt.K)]
                images = self.pre_process(images)
                if self.opt.flip_test:
                    # pre_process returns originals followed by flipped copies.
                    self.img_buffer = images[:self.opt.K]
                    self.img_buffer_flip = images[self.opt.K:]
                else:
                    self.img_buffer = images
            if self.opt.pre_extracted_brox_flow and self.opt.flow_model != '':
                # Flow needs ninput extra frames; clamp at the last frame of the video.
                flows = [cv2.imread(self.flowfile(min(frame + i, self.nframes))).astype(np.float32) for i in range(self.opt.K + self.opt.ninput - 1)]
                flows = self.pre_process(flows, is_flow=True, ninput=self.opt.ninput)
                if self.opt.flip_test:
                    self.flow_buffer = flows[:self.opt.K]
                    self.flow_buffer_flip = flows[self.opt.K:]
                else:
                    self.flow_buffer = flows
        else:
            # Incremental step: drop the oldest frame, append the newest one.
            if self.opt.rgb_model != '':
                image = cv2.imread(self.imagefile(frame + self.opt.K - 1)).astype(np.float32)
                image, image_flip = self.pre_process_single_frame(image)
                del self.img_buffer[0]
                self.img_buffer.append(image)
                if self.opt.flip_test:
                    del self.img_buffer_flip[0]
                    self.img_buffer_flip.append(image_flip)
                    images = self.img_buffer + self.img_buffer_flip
                else:
                    images = self.img_buffer
            if self.opt.pre_extracted_brox_flow and self.opt.flow_model != '':
                flow = cv2.imread(self.flowfile(min(frame + self.opt.K + self.opt.ninput - 2, self.nframes))).astype(np.float32)
                # The stacked-flow pre-processing needs the previous stacked frame.
                data_last_flip = self.flow_buffer_flip[-1] if self.opt.flip_test else None
                data_last = self.flow_buffer[-1]
                flow, flow_flip = self.pre_process_single_frame(flow, is_flow=True, ninput=self.opt.ninput, data_last=data_last, data_last_flip=data_last_flip)
                del self.flow_buffer[0]
                self.flow_buffer.append(flow)
                if self.opt.flip_test:
                    del self.flow_buffer_flip[0]
                    self.flow_buffer_flip.append(flow_flip)
                    flows = self.flow_buffer + self.flow_buffer_flip
                else:
                    flows = self.flow_buffer
        outfile = self.outfile(frame)
        if not os.path.isdir(os.path.dirname(outfile)):
            os.system("mkdir -p '" + os.path.dirname(outfile) + "'")
        return {'outfile': outfile, 'images': images, 'flows': flows, 'meta': {'height': self.h, 'width': self.w, 'output_height': self.output_h, 'output_width': self.output_w}, 'video_tag': video_tag}
    def outfile(self, i):
        # Path of the per-window detection pickle, zero-padded to 5 digits.
        return os.path.join(self.opt.inference_dir, "{:0>5}.pkl".format(i))
    def __len__(self):
        return len(self.indices)
def stream_inference(opt):
    """Run the MOC detector over every frame window and pickle the detections."""
    os.environ['CUDA_VISIBLE_DEVICES'] = "0"
    # torch.backends.cudnn.benchmark = True
    vis_dataset = VisualizationDataset(opt)
    moc_detector = MOCDetector(opt)
    wrapped = PrefetchDataset(opt, vis_dataset, moc_detector.pre_process, moc_detector.pre_process_single_frame)
    loader = torch.utils.data.DataLoader(
        wrapped,
        batch_size=1,
        shuffle=False,
        num_workers=0,
        pin_memory=False,
        drop_last=False)
    print('inference begin!', flush=True)
    for batch in loader:
        out_paths = batch['outfile']
        detections = moc_detector.run(batch)
        # One pickle per window; paths come pre-computed from the dataset.
        for idx in range(len(out_paths)):
            with open(out_paths[idx], 'wb') as handle:
                pickle.dump(detections[idx], handle)
def det():
    """End-to-end visualization pipeline: split video into frames, run streaming
    inference, link detections into tubes, and render annotated output.
    """
    opt = opts().parse()
    # BUG FIX: the original `assert 'online flow is not supported yet!'` asserted
    # a truthy string and therefore never fired. Guard explicitly instead.
    if opt.flow_model != "":
        raise NotImplementedError('online flow is not supported yet!')
    # Clean previous results and prepare the working directories.
    os.system("rm -rf " + opt.inference_dir + "/*")
    os.system("rm -rf tmp")
    os.system("mkdir -p '" + os.path.join(opt.inference_dir, 'rgb') + "'")
    os.system("mkdir -p '" + os.path.join(opt.inference_dir, 'flow') + "'")
    video2frames(opt)
    stream_inference(opt)
    build_tubes(opt)
    bbox_dict = pkl_decode(opt)
    vis_bbox(os.path.join(opt.inference_dir, 'rgb'), bbox_dict, opt.instance_level)
    if opt.save_gif:
        rgb2gif(opt.inference_dir)
    rgb2avi(opt.inference_dir)
    print('Finish!', flush=True)
if __name__ == '__main__':
    # Command-line entry point: run the whole detection/visualization pipeline.
    det()
|
RivenZoo/FullSource | Jx3Full/Source/Source/Server/SO3GameServer/Test/TestProject/Robot/KMonster.cpp | #include "StdAfx.h"
#include "engine/KSG_MD5_String.h"
#include "KMonster.h"
#include "KPlayer.h"
#include "KBishopClient.h"
#include "KPlayerClient.h"
#include "KPlayerEyeshot.h"
#include "KOperationSimulator.h"
// Human-readable label for each online state, indexed by m_nOnlineState.
// NOTE(review): the leading spaces and the duplicated " Offline" entry look
// intentional (bishop vs gameserver phases) but should be confirmed against
// the state enum ordering.
char KMonster::ms_szStateDescription[osTotal][256] =
{
    " Login",
    " Offline",
    "Online",
    " Offline"
};
KMonster::KMonster(void)
{
    // Start from a fully cleared state; real setup happens in Init().
    memset(&m_MonsterParam, 0, sizeof(m_MonsterParam));
    m_pRole = NULL;
    m_piOperator = NULL;
    m_nThreadFlag = FALSE;
    m_nExitFlag = FALSE;
    m_nGameLoop = 0;
    m_dwClientMapID = 0;
    m_dwClientMapCopyIndex = 0;
}
KMonster::~KMonster(void)
{
    // Intentionally empty: all teardown is performed explicitly in UnInit().
}
// Initializes the monster: creates its operation simulator, brings up the
// eyeshot and the two network clients, then starts the worker thread.
// Returns TRUE on success; on any failure it bails out via KG_PROCESS_ERROR
// and returns FALSE (partially initialized members are cleaned by UnInit()).
int KMonster::Init(MONSTER_PARAM &rParam)
{
    int nResult = FALSE;
    int nRetCode = FALSE;
    m_MonsterParam = rParam;
    m_piOperator = CreateSimulator(ostGeneral, this);
    KG_PROCESS_ERROR(m_piOperator);
    nRetCode = m_Eyeshot.Init(this);
    KG_PROCESS_ERROR(nRetCode);
    nRetCode = m_PlayerClient.Init(this);
    KG_PROCESS_ERROR(nRetCode);
    nRetCode = m_BishopClient.Init(this);
    KG_PROCESS_ERROR(nRetCode);
    // Clear the exit flag before the thread starts so it does not quit early.
    m_nExitFlag = FALSE;
    nRetCode = m_WorkThread.Create(WorkThreadFunction, (void *)this);
    KG_PROCESS_ERROR(nRetCode);
    m_nThreadFlag = TRUE;
    nResult = TRUE;
Exit0:
    return nResult;
}
// Tears everything down in reverse order of Init(): signal the worker thread
// to stop, join/destroy it, then release the simulator and sub-components.
int KMonster::UnInit()
{
    SetExitFlag();
    if (m_nThreadFlag)
    {
        m_WorkThread.Destroy();
        m_nThreadFlag = FALSE;
    }
    DestroySimulator(m_piOperator);
    m_piOperator = NULL;
    m_Eyeshot.UnInit();
    m_PlayerClient.UnInit();
    m_BishopClient.UnInit();
    m_pRole = NULL;
    return TRUE;
}
// Returns the static label for the current online state (see ms_szStateDescription).
const char *KMonster::GetStateDescription()
{
    return ms_szStateDescription[m_nOnlineState];
}
// Returns a failure description from whichever client matches the current
// phase: the bishop client during login, the player client once in-game.
// Returns an empty string for any other state.
const char *KMonster::GetFailDescription()
{
    switch (m_nOnlineState)
    {
    case osBishopOnline:
    case osBishopOffline:
        return m_BishopClient.GetResultDescription();
    case osGameserverOnline:
    case osGameserverOffline:
        // NOTE(review): "GetStateDecription" spelling comes from the client API.
        return m_PlayerClient.GetStateDecription();
    default:
        // Removed unreachable `break;` statements that followed the returns.
        return "";
    }
}
// Performs the bishop (login-server) handshake. A fresh connection hashes the
// password and logs in explicitly; a reconnect reuses cached credentials via
// AutoLogin(). On failure the state is moved to osBishopOffline.
int KMonster::Login(LOGIN_TYPE nLoginType)
{
    char szPwdMD5[64];
    int nResult = FALSE;
    m_nOnlineState = osBishopOnline;
    if (nLoginType == ltConnectTo)
    {
        // Never send the plain password - hash it first.
        KG_EDStringToMD5String(szPwdMD5, m_MonsterParam.szPassword);
        m_BishopClient.BeginLogin(m_MonsterParam.szBishopIP, m_MonsterParam.nBishopPort,
            m_MonsterParam.szAccountName, szPwdMD5);
    }
    else
    {
        m_BishopClient.AutoLogin();
    }
    KG_PROCESS_ERROR(m_BishopClient.GetLoginResult() == Login_Sucess);
    nResult = TRUE;
Exit0:
    if (!nResult)
    {
        m_nOnlineState = osBishopOffline;
    }
    return nResult;
}
// Main in-game pump: breathes the player client, keeps the local tick counter
// within half a second of the server's, and activates the operation simulator.
// Runs until the exit flag is set or the connection drops.
// Returns TRUE on a clean (exit-flag) shutdown, FALSE when the server link
// was lost. (The original always `return FALSE;` despite computing nResult -
// fixed to follow the file-wide nResult convention; callers ignore the value.)
int KMonster::GameLoop()
{
    int nResult = FALSE;
    int nServerLoop = 0;
    KG_PROCESS_ERROR(m_PlayerClient.GetOnlineState() == gmsOnline);
    m_nOnlineState = osGameserverOnline;
    while (!m_nExitFlag)
    {
        m_PlayerClient.Breathe();
        if (m_PlayerClient.GetOnlineState() != gmsOnline)
        {
            Reset();
            goto Exit0;
        }
        nServerLoop = m_PlayerClient.GetServerLoop();
        // Catch up if we have fallen more than half a second behind the server.
        while (m_nGameLoop < nServerLoop - GAME_FPS / 2)
        {
            Activate();
        }
        // Run one regular tick unless we are already ahead of the server clock.
        if (m_nGameLoop < nServerLoop + GAME_FPS / 2)
            Activate();
        m_piOperator->Activate();
        KGThread_Sleep(10);
    }
    nResult = TRUE;
Exit0:
    if (!nResult)
    {
        m_nOnlineState = osGameserverOffline;
    }
    return nResult;
}
// Advances the local game clock by one tick and updates the eyeshot view.
int KMonster::Activate()
{
    ++m_nGameLoop;
    m_Eyeshot.Activate();
    return TRUE;
}
// Clears all per-session state after a disconnect so a fresh login can start:
// stops the simulator, drops every observed object, and zeroes the map/loop data.
int KMonster::Reset()
{
    m_piOperator->Stop();
    m_Eyeshot.DeleteAllObject();
    m_pRole = NULL;
    m_nGameLoop = 0;
    m_dwClientMapID = 0;
    m_dwClientMapCopyIndex = 0;
    return TRUE;
}
void KMonster::WorkThreadFunction(void *pvParam)
{
KMonster *pThis = (KMonster *)pvParam;
if (pThis)
{
pThis->ThreadFunction();
}
}
// Worker thread body: login, run the game loop, and - if reconnection is
// enabled - retry after disconnects. Gives up after 10 consecutive failed
// connection attempts; a successful session resets the counter.
int KMonster::ThreadFunction()
{
    int nRetCode = FALSE;
    LOGIN_TYPE nLoginType = ltConnectTo;
    int nConnectCount = 0;
    while (!m_nExitFlag)
    {
        ++nConnectCount;
        nRetCode = Login(nLoginType);
        if (nRetCode)
        {
            GameLoop();
            // After a first successful session, later attempts are reconnects.
            nLoginType = ltReconnectTo;
            nConnectCount = 0;
        }
        if (!m_MonsterParam.nReconnect || nConnectCount > 10)
        {
            break;
        }
        //printf("ReConnect Count:%d\n", nConnectCount);
    }
    return TRUE;
}
docentedev/curso-react | contenido/proyectos/auth-v2/src/services/auth.service.js | import axios from 'axios';
import { apiHost } from '../configure';
// Marker error type so callers can distinguish API failures from other errors.
class ApiError extends Error {}
/**
 * POSTs the credentials to the login endpoint.
 * Throws ApiError('404') when the endpoint responds 404, and wraps any other
 * failure (including network errors) in an ApiError with the original message.
 */
export const login = async (data) => {
  try {
    return await axios.post(`${apiHost}/login`, data);
  } catch (error) {
    // error.response is undefined for network/timeout failures; reading
    // .status unguarded used to raise a TypeError and mask the real error.
    const status = error.response && error.response.status;
    if (status === 404) throw new ApiError('404');
    throw new ApiError(error.message);
  }
};
Hendrikto/jena | jena-db/jena-tdb2/src/main/java/org/apache/jena/tdb2/loader/main/PrefixHandlerBulk.java | <filename>jena-db/jena-tdb2/src/main/java/org/apache/jena/tdb2/loader/main/PrefixHandlerBulk.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.tdb2.loader.main;
import org.apache.jena.dboe.transaction.txn.Transaction;
import org.apache.jena.dboe.transaction.txn.TransactionCoordinator;
import org.apache.jena.graph.Node;
import org.apache.jena.graph.NodeFactory;
import org.apache.jena.query.TxnType;
import org.apache.jena.riot.system.Prefixes;
import org.apache.jena.tdb2.loader.base.BulkStartFinish;
import org.apache.jena.tdb2.loader.base.CoLib;
import org.apache.jena.system.progress.MonitorOutput;
import org.apache.jena.tdb2.store.StoragePrefixesTDB;
import org.apache.jena.tdb2.store.nodetupletable.NodeTupleTable;
/**
* Prefix handler.
* <p>
* This class is not multithreaded - prefixes are usually few enough in number
* and so forking a thread so that work can be done in parallel is not beneficial.
*/
public class PrefixHandlerBulk implements BulkStartFinish {
    // Write transaction covering the whole bulk load; opened in startBulk(),
    // committed in finishBulk().
    private Transaction transaction;
    private TransactionCoordinator coordinator;
    private final StoragePrefixesTDB prefixes;
    private final MonitorOutput output;
    private final NodeTupleTable nodeTupleTable;

    public PrefixHandlerBulk(StoragePrefixesTDB prefixes, MonitorOutput output) {
        this.prefixes = prefixes;
        this.output = output;
        this.nodeTupleTable = prefixes.getNodeTupleTable();
    }

    // Inline, not a separate thread.
    /** Starts a private coordinator/write-transaction over the prefix tables. */
    @Override
    public void startBulk() {
        coordinator = CoLib.newCoordinator();
        CoLib.add(coordinator, nodeTupleTable.getNodeTable());
        CoLib.add(coordinator, nodeTupleTable.getTupleTable().getIndexes());
        coordinator.start();
        transaction = coordinator.begin(TxnType.WRITE);
    }

    /** Commits all prefixes recorded since {@link #startBulk()}. */
    @Override
    public void finishBulk() {
        transaction.commit();
        // Do not coordinator.shutdown() - that will close/shut the components.
    }

    /** Returns a callback that records one prefix/URI pair into the dataset. */
    public PrefixHandler handler() {
        return (prefix, uriStr) -> {
            // NOTE(review): p and u are built but unused below - presumably
            // leftovers from an earlier API; confirm before removing.
            Node p = NodeFactory.createLiteral(prefix);
            Node u = NodeFactory.createURI(uriStr);
            prefixes.add_ext(Prefixes.nodeDataset, prefix, uriStr);
        };
    }
}
}
|
ethansaxenian/RosettaDecode | lang/Go/pragmatic-directives-2.go | <gh_stars>1-10
// +build linux
|
dwhobrey/MindCausalModellingLibrary | Library/include/SphereGeometry.h | <gh_stars>0
#pragma once
namespace Plato {
    // Forward declarations for types referenced by the geometry headers.
    class Geometry;
    class Distribution;
    class DistributionEnumerator;
    class Bounds;
    class Scale;
    class Point;
    class PointSpace;
    class SphericalSpace;
    /// <summary>
    /// Models a three dimensional elliptical shape (e.g. a sphere spanning a discrete volume).
    /// </summary>
    /// <remarks>The default orientation is along the X, Y, Z axes. Use the orientation property for other orientations, etc.
    /// </remarks>
    class SphereGeometry : public SphericalSpace {
    public:
        /// <summary>
        /// General constructor for creating a new sphere shape instance.
        /// </summary>
        /// <param name="bounds">The X, Y and Z diameters of the elliptoid.</param>
        /// <param name="scale"><see cref="Geometry"/></param>
        /// <param name="orientation"><see cref="Geometry"/></param>
        /// <param name="distribution"><see cref="Geometry"/></param>
        /// <remarks>
        /// It is more convenient, and portable, to use one of the specialised constructors,
        /// such as <c>SphereGeometry(int capacityX, int capacityY, int capacityZ)</c>.
        /// </remarks>
        SphereGeometry(Bounds& bounds, Scale& scale, Point* orientation, Distribution& distribution);
    };
}
|
Sunrisepeak/Linux2.6-Reading | sound/soc/img/pistachio-internal-dac.c | <gh_stars>10-100
// SPDX-License-Identifier: GPL-2.0-only
/*
* Pistachio internal dac driver
*
* Copyright (C) 2015 Imagination Technologies Ltd.
*
* Author: <NAME> <<EMAIL>>
*/
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/mfd/syscon.h>
#include <linux/module.h>
#include <linux/pm_runtime.h>
#include <linux/regmap.h>
#include <linux/regulator/consumer.h>
#include <sound/pcm_params.h>
#include <sound/soc.h>
#define PISTACHIO_INTERNAL_DAC_CTRL 0x40
#define PISTACHIO_INTERNAL_DAC_CTRL_PWR_SEL_MASK 0x2
#define PISTACHIO_INTERNAL_DAC_CTRL_PWRDN_MASK 0x1
#define PISTACHIO_INTERNAL_DAC_SRST 0x44
#define PISTACHIO_INTERNAL_DAC_SRST_MASK 0x1
#define PISTACHIO_INTERNAL_DAC_GTI_CTRL 0x48
#define PISTACHIO_INTERNAL_DAC_GTI_CTRL_ADDR_SHIFT 0
#define PISTACHIO_INTERNAL_DAC_GTI_CTRL_ADDR_MASK 0xFFF
#define PISTACHIO_INTERNAL_DAC_GTI_CTRL_WE_MASK 0x1000
#define PISTACHIO_INTERNAL_DAC_GTI_CTRL_WDATA_SHIFT 13
#define PISTACHIO_INTERNAL_DAC_GTI_CTRL_WDATA_MASK 0x1FE000
#define PISTACHIO_INTERNAL_DAC_PWR 0x1
#define PISTACHIO_INTERNAL_DAC_PWR_MASK 0x1
#define PISTACHIO_INTERNAL_DAC_FORMATS (SNDRV_PCM_FMTBIT_S24_LE | \
SNDRV_PCM_FMTBIT_S32_LE)
/* codec private data */
struct pistachio_internal_dac {
	struct regmap *regmap;		/* CR_TOP syscon regmap */
	struct regulator *supply;	/* VDD supply (1.8V or 3.3V) */
	bool mute;
};
/* Single mute switch mapped onto bit 2 of the CTRL register (inverted). */
static const struct snd_kcontrol_new pistachio_internal_dac_snd_controls[] = {
	SOC_SINGLE("Playback Switch", PISTACHIO_INTERNAL_DAC_CTRL, 2, 1, 1)
};
/* DAPM graph: stereo playback DAC feeding the two analogue outputs. */
static const struct snd_soc_dapm_widget pistachio_internal_dac_widgets[] = {
	SND_SOC_DAPM_DAC("DAC", "Playback", SND_SOC_NOPM, 0, 0),
	SND_SOC_DAPM_OUTPUT("AOUTL"),
	SND_SOC_DAPM_OUTPUT("AOUTR"),
};
static const struct snd_soc_dapm_route pistachio_internal_dac_routes[] = {
	{ "AOUTL", NULL, "DAC" },
	{ "AOUTR", NULL, "DAC" },
};
/*
 * Write one DAC-internal register through the indirect GTI interface:
 * latch address and data, then pulse the write-enable bit.
 */
static void pistachio_internal_dac_reg_writel(struct regmap *top_regs,
					u32 val, u32 reg)
{
	regmap_update_bits(top_regs, PISTACHIO_INTERNAL_DAC_GTI_CTRL,
			PISTACHIO_INTERNAL_DAC_GTI_CTRL_ADDR_MASK,
			reg << PISTACHIO_INTERNAL_DAC_GTI_CTRL_ADDR_SHIFT);
	regmap_update_bits(top_regs, PISTACHIO_INTERNAL_DAC_GTI_CTRL,
			PISTACHIO_INTERNAL_DAC_GTI_CTRL_WDATA_MASK,
			val << PISTACHIO_INTERNAL_DAC_GTI_CTRL_WDATA_SHIFT);
	/* Rising edge of WE commits the write; clear it again afterwards. */
	regmap_update_bits(top_regs, PISTACHIO_INTERNAL_DAC_GTI_CTRL,
			PISTACHIO_INTERNAL_DAC_GTI_CTRL_WE_MASK,
			PISTACHIO_INTERNAL_DAC_GTI_CTRL_WE_MASK);
	regmap_update_bits(top_regs, PISTACHIO_INTERNAL_DAC_GTI_CTRL,
			PISTACHIO_INTERNAL_DAC_GTI_CTRL_WE_MASK, 0);
}
/* Power the DAC down: assert the external power-down bit, then clear the
 * DAC-internal power register via the GTI interface. */
static void pistachio_internal_dac_pwr_off(struct pistachio_internal_dac *dac)
{
	regmap_update_bits(dac->regmap, PISTACHIO_INTERNAL_DAC_CTRL,
		PISTACHIO_INTERNAL_DAC_CTRL_PWRDN_MASK,
		PISTACHIO_INTERNAL_DAC_CTRL_PWRDN_MASK);
	pistachio_internal_dac_reg_writel(dac->regmap, 0,
					PISTACHIO_INTERNAL_DAC_PWR);
}
/* Power the DAC up: pulse soft reset, set the internal power bit, then
 * release the external power-down. Sequence order matters. */
static void pistachio_internal_dac_pwr_on(struct pistachio_internal_dac *dac)
{
	regmap_update_bits(dac->regmap, PISTACHIO_INTERNAL_DAC_SRST,
			PISTACHIO_INTERNAL_DAC_SRST_MASK,
			PISTACHIO_INTERNAL_DAC_SRST_MASK);
	regmap_update_bits(dac->regmap, PISTACHIO_INTERNAL_DAC_SRST,
			PISTACHIO_INTERNAL_DAC_SRST_MASK, 0);
	pistachio_internal_dac_reg_writel(dac->regmap,
					PISTACHIO_INTERNAL_DAC_PWR_MASK,
					PISTACHIO_INTERNAL_DAC_PWR);
	regmap_update_bits(dac->regmap, PISTACHIO_INTERNAL_DAC_CTRL,
			PISTACHIO_INTERNAL_DAC_CTRL_PWRDN_MASK, 0);
}
/* Single playback-only DAI: stereo, 8-48 kHz, 24/32-bit samples. */
static struct snd_soc_dai_driver pistachio_internal_dac_dais[] = {
	{
		.name = "pistachio_internal_dac",
		.playback = {
			.stream_name = "Playback",
			.channels_min = 2,
			.channels_max = 2,
			.rates = SNDRV_PCM_RATE_8000_48000,
			.formats = PISTACHIO_INTERNAL_DAC_FORMATS,
		}
	},
};
/* Component probe: expose the shared syscon regmap to the ASoC core. */
static int pistachio_internal_dac_codec_probe(struct snd_soc_component *component)
{
	struct pistachio_internal_dac *dac = snd_soc_component_get_drvdata(component);
	snd_soc_component_init_regmap(component, dac->regmap);
	return 0;
}
/* ASoC component descriptor tying together controls, widgets and routes. */
static const struct snd_soc_component_driver pistachio_internal_dac_driver = {
	.probe			= pistachio_internal_dac_codec_probe,
	.controls		= pistachio_internal_dac_snd_controls,
	.num_controls		= ARRAY_SIZE(pistachio_internal_dac_snd_controls),
	.dapm_widgets		= pistachio_internal_dac_widgets,
	.num_dapm_widgets	= ARRAY_SIZE(pistachio_internal_dac_widgets),
	.dapm_routes		= pistachio_internal_dac_routes,
	.num_dapm_routes	= ARRAY_SIZE(pistachio_internal_dac_routes),
	.use_pmdown_time	= 1,
	.endianness		= 1,
	.non_legacy_dai_naming	= 1,
};
/*
 * Platform probe: look up the CR_TOP syscon, enable and validate the VDD
 * supply (must be 1.8V or 3.3V, selecting the power-select bit), cycle the
 * DAC power, enable runtime PM and register the ASoC component.
 */
static int pistachio_internal_dac_probe(struct platform_device *pdev)
{
	struct pistachio_internal_dac *dac;
	int ret, voltage;
	struct device *dev = &pdev->dev;
	u32 reg;
	dac = devm_kzalloc(dev, sizeof(*dac), GFP_KERNEL);
	if (!dac)
		return -ENOMEM;
	platform_set_drvdata(pdev, dac);
	dac->regmap = syscon_regmap_lookup_by_phandle(pdev->dev.of_node,
							    "img,cr-top");
	if (IS_ERR(dac->regmap))
		return PTR_ERR(dac->regmap);
	dac->supply = devm_regulator_get(dev, "VDD");
	if (IS_ERR(dac->supply))
		return dev_err_probe(dev, PTR_ERR(dac->supply),
				     "failed to acquire supply 'VDD-supply'\n");
	ret = regulator_enable(dac->supply);
	if (ret) {
		dev_err(dev, "failed to enable supply: %d\n", ret);
		return ret;
	}
	/* The PWR_SEL bit must match the actual supply voltage. */
	voltage = regulator_get_voltage(dac->supply);
	switch (voltage) {
	case 1800000:
		reg = 0;
		break;
	case 3300000:
		reg = PISTACHIO_INTERNAL_DAC_CTRL_PWR_SEL_MASK;
		break;
	default:
		dev_err(dev, "invalid voltage: %d\n", voltage);
		ret = -EINVAL;
		goto err_regulator;
	}
	regmap_update_bits(dac->regmap, PISTACHIO_INTERNAL_DAC_CTRL,
			PISTACHIO_INTERNAL_DAC_CTRL_PWR_SEL_MASK, reg);
	/* Full power cycle to bring the DAC into a known state. */
	pistachio_internal_dac_pwr_off(dac);
	pistachio_internal_dac_pwr_on(dac);
	pm_runtime_set_active(dev);
	pm_runtime_enable(dev);
	pm_runtime_idle(dev);
	ret = devm_snd_soc_register_component(dev,
			&pistachio_internal_dac_driver,
			pistachio_internal_dac_dais,
			ARRAY_SIZE(pistachio_internal_dac_dais));
	if (ret) {
		dev_err(dev, "failed to register component: %d\n", ret);
		goto err_pwr;
	}
	return 0;
err_pwr:
	pm_runtime_disable(&pdev->dev);
	pistachio_internal_dac_pwr_off(dac);
err_regulator:
	regulator_disable(dac->supply);
	return ret;
}
/* Platform remove: undo probe in reverse order (PM, DAC power, regulator). */
static int pistachio_internal_dac_remove(struct platform_device *pdev)
{
	struct pistachio_internal_dac *dac = dev_get_drvdata(&pdev->dev);
	pm_runtime_disable(&pdev->dev);
	pistachio_internal_dac_pwr_off(dac);
	regulator_disable(dac->supply);
	return 0;
}
#ifdef CONFIG_PM
/* Runtime resume: re-enable the supply, then power the DAC back up. */
static int pistachio_internal_dac_rt_resume(struct device *dev)
{
	struct pistachio_internal_dac *dac = dev_get_drvdata(dev);
	int ret;
	ret = regulator_enable(dac->supply);
	if (ret) {
		dev_err(dev, "failed to enable supply: %d\n", ret);
		return ret;
	}
	pistachio_internal_dac_pwr_on(dac);
	return 0;
}
/* Runtime suspend: power the DAC down before cutting its supply. */
static int pistachio_internal_dac_rt_suspend(struct device *dev)
{
	struct pistachio_internal_dac *dac = dev_get_drvdata(dev);
	pistachio_internal_dac_pwr_off(dac);
	regulator_disable(dac->supply);
	return 0;
}
#endif
/* Runtime-PM hooks (guarded by CONFIG_PM above). */
static const struct dev_pm_ops pistachio_internal_dac_pm_ops = {
	SET_RUNTIME_PM_OPS(pistachio_internal_dac_rt_suspend,
			pistachio_internal_dac_rt_resume, NULL)
};
/* Device-tree match table. */
static const struct of_device_id pistachio_internal_dac_of_match[] = {
	{ .compatible = "img,pistachio-internal-dac" },
	{}
};
MODULE_DEVICE_TABLE(of, pistachio_internal_dac_of_match);
static struct platform_driver pistachio_internal_dac_plat_driver = {
	.driver = {
		.name = "img-pistachio-internal-dac",
		.of_match_table = pistachio_internal_dac_of_match,
		.pm = &pistachio_internal_dac_pm_ops
	},
	.probe = pistachio_internal_dac_probe,
	.remove = pistachio_internal_dac_remove
};
module_platform_driver(pistachio_internal_dac_plat_driver);
MODULE_DESCRIPTION("Pistachio Internal DAC driver");
MODULE_AUTHOR("<NAME> <<EMAIL>>");
MODULE_LICENSE("GPL v2");
|
tikivn/remax | packages/remax/wechat.js | <reponame>tikivn/remax<gh_stars>0
// Re-export the entire WeChat adapter so consumers can import from 'remax/wechat'.
export * from '@tiki-miniapp/remax-wechat';
|
jirmauritz/searchisko | api/src/main/java/org/searchisko/api/rest/exception/OperationUnavailableException.java | <reponame>jirmauritz/searchisko<gh_stars>10-100
/*
* JBoss, Home of Professional Open Source
* Copyright 2014 Red Hat Inc. and/or its affiliates and other contributors
* as indicated by the @authors tag. All rights reserved.
*/
package org.searchisko.api.rest.exception;
/**
* Exception used when operation is currently not available.
*
* @author <NAME> (velias at redhat dot com)
*/
public class OperationUnavailableException extends RuntimeException {

	/** RuntimeException is Serializable, so pin a serial version id. */
	private static final long serialVersionUID = 1L;

	/**
	 * Constructor.
	 *
	 * @param message description of why the operation is currently unavailable
	 */
	public OperationUnavailableException(String message) {
		super(message);
	}
}
|
nhsconnect/integration-adaptor-gp2gp | service/src/test/java/uk/nhs/adaptors/gp2gp/RandomIdGeneratorServiceStub.java | package uk.nhs.adaptors.gp2gp;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.LinkedList;
import java.util.Queue;
import lombok.SneakyThrows;
import uk.nhs.adaptors.gp2gp.common.service.RandomIdGeneratorService;
public class RandomIdGeneratorServiceStub extends RandomIdGeneratorService {
    // Pre-canned ids served in file order; refilled from the classpath resource.
    private Queue<String> ids;

    /**
     * Reloads the id queue from /uuids.txt on the classpath.
     * NOTE(review): getResourceAsStream returning null (missing resource) would
     * surface as a NullPointerException here - acceptable for a test stub.
     */
    @SneakyThrows
    public void reset() {
        ids = new LinkedList<>();
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(this.getClass().getResourceAsStream("/uuids.txt")))) {
            reader.lines().forEach(ids::add);
        }
    }

    /** Returns the next deterministic id, lazily loading the queue first. */
    @Override
    public String createNewId() {
        if (ids == null) {
            reset();
        }
        return ids.remove();
    }
}
|
best08618/asylo | gcc-gcc-7_3_0-release/gcc/testsuite/gcc.target/i386/divmod-7.c | /* { dg-do compile { target { ! ia32 } } } */
/* { dg-options "-O2 -m8bit-idiv" } */
extern void abort (void);
/* Checks 64-bit quotient/remainder against expected values; aborting on a
   mismatch lets the dg harness detect a miscompiled 8-bit idiv expansion.
   (Code deliberately untouched: the scan-assembler directives below depend
   on the exact generated instructions.)  */
void
test (long long x, long long y, long long q, long long r)
{
  if ((x / y) != q || (x % y) != r)
    abort ();
}
/* { dg-final { scan-assembler-times "divb" 1 } } */
/* { dg-final { scan-assembler-times "idivq" 1 } } */
|
AmineOzil/EcoleEnLigne | app/src/main/java/com/devmobile/ecoleenligne/checkEmail.java | package com.devmobile.ecoleenligne;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import androidx.appcompat.app.AppCompatActivity;
// Screen shown after sign-up asking the user to verify their e-mail address.
// NOTE(review): class name should be CheckEmail by Java convention, but it is
// likely referenced from the manifest, so it is left unchanged here.
public class checkEmail extends AppCompatActivity {
    // "I verified my e-mail" button; tapping it returns to the login screen.
    Button check;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.inscription_termine);
        check = findViewById(R.id.bt_verifierEmail);
        check.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent signIn = new Intent(checkEmail.this, login.class);
                startActivity(signIn);
            }
        });
    }
}
|
ZabalaMariano/PISA | external/trecpp/test/test_trecpp.cpp | #define CATCH_CONFIG_MAIN
#include "catch2/catch.hpp"
#include <string_view>
#include "trecpp/trecpp.hpp"
using namespace trecpp;
using namespace trecpp::detail;
// Verifies that detail::read_between extracts the DOCNO and the body between
// </DOCHDR> and </DOC> from a single raw TREC record, advancing `pos` as it goes.
TEST_CASE("read_between") {
    std::string_view data =
        "<DOC>\n"
        "<DOCNO>GX000-00-0000000</DOCNO>\n"
        "<DOCHDR>\n"
        "http://sgra.jpl.nasa.gov\n"
        "HTTP/1.1 200 OK\n"
        "Date: Tue, 09 Dec 2003 21:21:33 GMT\n"
        "Server: Apache/1.3.27 (Unix)\n"
        "Last-Modified: Tue, 26 Mar 2002 19:24:25 GMT\n"
        "ETag: \"6361e-266-3ca0cae9\n"
        "\n"
        "Accept-Ranges: bytes\n"
        "Content-Length: 614\n"
        "Connection: close\n"
        "Content-Type: text/html\n"
        "</DOCHDR>\n"
        "<html>"
        "</DOC>";
    std::size_t pos = 0;
    auto read_between = detail::read_between(data, pos);
    auto docno = read_between(detail::DOCNO, detail::DOCNO_END);
    REQUIRE(docno);
    REQUIRE(*docno == std::string_view("GX000-00-0000000"));
    auto body = read_between(detail::DOCHDR_END, detail::DOC_END);
    REQUIRE(body);
    REQUIRE(*body == std::string_view("\n<html>"));
}
// web::parse on whole-record strings: a well-formed record parses, a record
// with a corrupted <DCHDR> tag yields Error, and stray characters inside the
// header do not prevent parsing.
TEST_CASE("parse") {
    std::vector<std::string> data{
        "<DOC>\n"
        "<DOCNO>GX000-00-0000000</DOCNO>\n"
        "<DOCHDR>\n"
        "http://sgra.jpl.nasa.gov\n"
        "HTTP/1.1 200 OK\n"
        "Date: Tue, 09 Dec 2003 21:21:33 GMT\n"
        "Server: Apache/1.3.27 (Unix)\n"
        "Last-Modified: Tue, 26 Mar 2002 19:24:25 GMT\n"
        "ETag: \"6361e-266-3ca0cae9\n"
        "\n"
        "Accept-Ranges: bytes\n"
        "Content-Length: 614\n"
        "Connection: close\n"
        "Content-Type: text/html\n"
        "</DOCHDR>\n"
        "<html>"
        "</DOC>",
        "\n \t"
        "<DOC>\n"
        "<DOCNO>GX000-00-0000001</DOCNO>\n"
        "<DCHDR>\n"
        "http://sgra.jpl.nasa.gov\n"
        "HTTP/1.1 200 OK\n"
        "Date: Tue, 09 Dec 2003 21:21:33 GMT\n"
        "Server: Apache/1.3.27 (Unix)\n"
        "Last-Modified: Tue, 26 Mar 2002 19:24:25 GMT\n"
        "ETag: \"6361e-266-3ca0cae9\n"
        "\n"
        "Accept-Ranges: bytes\n"
        "Content-Length: 614\n"
        "Connection: close\n"
        "Content-Type: text/html\n"
        "</DOCHDR>\n"
        "<html> 2"
        "</DOC>\n",
        "<DOC>\n"
        "<DOCNO>GX000-00-0000001</DOCNO>\n"
        "<DOCHDR>\n"
        "http://sgra.jpl.nasa.gov\n"
        "HTTP/1.1 200 OK\n"
        "<<<Date: Tue, 09 Dec 2003 21:21:33 GMT\n"
        "Server: Apache/1.3.27 (Unix)\n"
        "Last-Modified: Tue, 26 Mar 2002 19:24:25 GMT\n"
        "ETag: \"6361e-266-3ca0cae9\n"
        "\n"
        "Accept-Ranges: bytes\n"
        "Content-Length: 614\n"
        "Connection: close\n"
        "Content-Type: text/html\n"
        "</DOCHDR>\n"
        "<html> 2"
        "</DOC>"};
    auto rec = web::parse(data[0]);
    CAPTURE(rec);
    Record *record = std::get_if<Record>(&rec);
    REQUIRE(record != nullptr);
    REQUIRE(record->trecid() == "GX000-00-0000000");
    REQUIRE(record->url() == "http://sgra.jpl.nasa.gov");
    REQUIRE(record->content() == "\n<html>");
    // Broken <DCHDR> tag: must come back as an Error, not a Record.
    rec = web::parse(data[1]);
    CAPTURE(rec);
    REQUIRE(std::get_if<Error>(&rec) != nullptr);
    rec = web::parse(data[2]);
    CAPTURE(rec);
    record = std::get_if<Record>(&rec);
    REQUIRE(record != nullptr);
    REQUIRE(record->trecid() == "GX000-00-0000001");
    REQUIRE(record->url() == "http://sgra.jpl.nasa.gov");
    REQUIRE(record->content() == "\n<html> 2");
}
// Streaming TrecParser over three concatenated records: good, malformed
// (<DCHDR>), good - followed by an Error once the stream is exhausted.
TEST_CASE("Read web records", "[unit]")
{
    std::istringstream is(
        "<DOC>\n"
        "<DOCNO>GX000-00-0000000</DOCNO>\n"
        "<DOCHDR>\n"
        "http://sgra.jpl.nasa.gov\n"
        "HTTP/1.1 200 OK\n"
        "Date: Tue, 09 Dec 2003 21:21:33 GMT\n"
        "Server: Apache/1.3.27 (Unix)\n"
        "Last-Modified: Tue, 26 Mar 2002 19:24:25 GMT\n"
        "ETag: \"6361e-266-3ca0cae9\n"
        "\n"
        "Accept-Ranges: bytes\n"
        "Content-Length: 614\n"
        "Connection: close\n"
        "Content-Type: text/html\n"
        "</DOCHDR>\n"
        "<html>"
        "</DOC>\n \t"
        "<DOC>\n"
        "<DOCNO>GX000-00-0000001</DOCNO>\n"
        "<DCHDR>\n"
        "http://sgra.jpl.nasa.gov\n"
        "HTTP/1.1 200 OK\n"
        "Date: Tue, 09 Dec 2003 21:21:33 GMT\n"
        "Server: Apache/1.3.27 (Unix)\n"
        "Last-Modified: Tue, 26 Mar 2002 19:24:25 GMT\n"
        "ETag: \"6361e-266-3ca0cae9\n"
        "\n"
        "Accept-Ranges: bytes\n"
        "Content-Length: 614\n"
        "Connection: close\n"
        "Content-Type: text/html\n"
        "</DOCHDR>\n"
        "<html> 2"
        "</DOC>\n"
        "<DOC>\n"
        "<DOCNO>GX000-00-0000001</DOCNO>\n"
        "<DOCHDR>\n"
        "http://sgra.jpl.nasa.gov\n"
        "HTTP/1.1 200 OK\n"
        "<<<Date: Tue, 09 Dec 2003 21:21:33 GMT\n"
        "Server: Apache/1.3.27 (Unix)\n"
        "Last-Modified: Tue, 26 Mar 2002 19:24:25 GMT\n"
        "ETag: \"6361e-266-3ca0cae9\n"
        "\n"
        "Accept-Ranges: bytes\n"
        "Content-Length: 614\n"
        "Connection: close\n"
        "Content-Type: text/html\n"
        "</DOCHDR>\n"
        "<html> 2"
        "</DOC>");
    web::TrecParser parser(is, 10000);
    auto rec = parser.read_record();
    CAPTURE(rec);
    Record *record = std::get_if<Record>(&rec);
    REQUIRE(record != nullptr);
    REQUIRE(record->trecid() == "GX000-00-0000000");
    REQUIRE(record->url() == "http://sgra.jpl.nasa.gov");
    REQUIRE(record->content() == "\n<html>");
    rec = parser.read_record();
    REQUIRE(std::get_if<Error>(&rec) != nullptr);
    rec = parser.read_record();
    CAPTURE(rec);
    record = std::get_if<Record>(&rec);
    REQUIRE(record != nullptr);
    REQUIRE(record->trecid() == "GX000-00-0000001");
    REQUIRE(record->url() == "http://sgra.jpl.nasa.gov");
    REQUIRE(record->content() == "\n<html> 2");
    // Stream exhausted: one more read must fail.
    rec = parser.read_record();
    REQUIRE(std::get_if<Error>(&rec) != nullptr);
}
// consume(is, tag): succeeds only on an exact tag match (after skipping
// leading whitespace) and must leave the stream untouched on any mismatch.
TEST_CASE("Consume tag", "[unit]")
{
    SECTION("Correct tag")
    {
        std::istringstream is("<DOC>");
        REQUIRE(consume(is, "<DOC>"));
        REQUIRE(is.peek() == std::ifstream::traits_type::eof());
    }
    SECTION("Incorrect at first pos")
    {
        std::istringstream is("DOC>");
        REQUIRE_FALSE(consume(is, "<DOC>"));
        std::string s;
        is >> s;
        REQUIRE(s == "DOC>");
    }
    SECTION("Incorrect at second pos")
    {
        std::istringstream is("<LOC>");
        REQUIRE_FALSE(consume(is, "<DOC>"));
        std::string s;
        is >> s;
        REQUIRE(s == "<LOC>");
    }
    SECTION("Incorrect at third pos")
    {
        std::istringstream is("<DEC>");
        REQUIRE_FALSE(consume(is, "<DOC>"));
        std::string s;
        is >> s;
        REQUIRE(s == "<DEC>");
    }
    SECTION("Incorrect at fourth pos")
    {
        std::istringstream is("<DOK>");
        REQUIRE_FALSE(consume(is, "<DOC>"));
        std::string s;
        is >> s;
        REQUIRE(s == "<DOK>");
    }
    SECTION("Skip whitespaces")
    {
        std::istringstream is(" \t\r<DOC>");
        CHECK(consume(is, "<DOC>"));
        REQUIRE(is.peek() == std::ifstream::traits_type::eof());
    }
}
// consume(is): reads any well-formed tag and returns its name, or nullopt
// (stream untouched) when the next token is not a tag.
TEST_CASE("Consume any tag", "[unit]")
{
    SECTION("Correct tag")
    {
        std::istringstream is("<DOC>");
        REQUIRE(*consume(is) == "DOC");
        REQUIRE(is.peek() == std::ifstream::traits_type::eof());
    }
    SECTION("Incorrect at first pos")
    {
        std::istringstream is("DOC>");
        REQUIRE_FALSE(consume(is, "<DOC>"));
        REQUIRE(consume(is) == std::nullopt);
        std::string s;
        is >> s;
        REQUIRE(s == "DOC>");
    }
    SECTION("Skip whitespaces")
    {
        std::istringstream is(" \t\r<DOC>");
        REQUIRE(*consume(is) == "DOC");
        REQUIRE(is.peek() == std::ifstream::traits_type::eof());
    }
}
// read_body reads everything up to (and consuming) the closing tag, leaving
// the stream positioned right after it; returns nullopt when the tag is absent.
TEST_CASE("Read body", "[unit]")
{
    SECTION("Before tag")
    {
        std::istringstream is("text</DOC>rest");
        REQUIRE(read_body(is, detail::DOC_END) == "text");
        std::string s;
        is >> s;
        REQUIRE(s == "rest");
    }
    SECTION("At the end")
    {
        std::istringstream is("text");
        REQUIRE(read_body(is, detail::DOC_END) == std::nullopt);
        REQUIRE(is.peek() == std::ifstream::traits_type::eof());
    }
    SECTION("With brackets")
    {
        std::istringstream is("test <a>link</a> </DOC>rest");
        REQUIRE(read_body(is, detail::DOC_END) == "test <a>link</a> ");
        std::string s;
        is >> s;
        REQUIRE(s == "rest");
    }
}
// Text-TREC reader: concatenates TITLE/HEADLINE/TEXT-like fields into the
// record body, ignores unknown tags' markup but keeps their text, and reports
// an Error for a record with a broken </DOCN> close tag.
TEST_CASE("Read text record", "[unit]")
{
    std::istringstream is(
        "<DOC>\n"
        "<DOCNO> b2e89334-33f9-11e1-825f-dabc29fd7071 </DOCNO>\n"
        "<URL> https://www.washingtonpost.com/stuff </URL>\n"
        "<TITLE> title \n"
        "</TITLE>\n"
        "\n"
        "\n"
        "<HEADLINE>\n"
        " headline \n"
        "</HEADLINE>\n"
        "<TEXT> 1 < 2 and other stuff... </TEXT>\n"
        "</DOC>\n \t"
        "<DOC>\n"
        "<DOCNO> b2e89334-33f9-11e1-825f-dabc29fd7072 </DOCNO>\n"
        "<IGNORED attr=val>ignored text</IGNORED>\n"
        "<TTL>not ignored text</TTL>\n"
        "<TEXT>"
        "<html> 2"
        "</TEXT>"
        "</DOC>\n"
        "<DOC>\n"
        "<DOCNO> b2e89334-33f9-11e1-825f-dabc29fd7073 </DOCN>\n"
        "<TEXT>\n"
        "<html> 2"
        "</TEXT>\n"
        "</DOC>\n"
        "<DOC>\n"
        "<DOCNO> b2e89334-33f9-11e1-825f-dabc29fd7071 </DOCNO>\n"
        "</DOC>");
    auto rec = text::read_record(is);
    CAPTURE(rec);
    Record *record = std::get_if<Record>(&rec);
    REQUIRE(record != nullptr);
    REQUIRE(record->trecid() == "b2e89334-33f9-11e1-825f-dabc29fd7071");
    REQUIRE(record->url() == "https://www.washingtonpost.com/stuff");
    REQUIRE(record->content() ==
            " title \n"
            "\n headline \n"
            " 1 < 2 and other stuff... ");
    rec = text::read_record(is);
    CAPTURE(rec);
    record = std::get_if<Record>(&rec);
    REQUIRE(record != nullptr);
    REQUIRE(record->trecid() == "b2e89334-33f9-11e1-825f-dabc29fd7072");
    REQUIRE(record->url() == "");
    REQUIRE(record->content() ==
            "not ignored text"
            "<html> 2");
    // Malformed </DOCN>: read_subsequent_record must surface an Error.
    rec = text::read_subsequent_record(is);
    REQUIRE(std::get_if<Error>(&rec) != nullptr);
    rec = text::read_subsequent_record(is);
    record = std::get_if<Record>(&rec);
    REQUIRE(record != nullptr);
    REQUIRE(record->trecid() == "b2e89334-33f9-11e1-825f-dabc29fd7071");
    REQUIRE(record->url() == "");
    REQUIRE(record->content() == "");
}
// match() visitation works with both mutable and const record lambdas; the
// first visit moves the trecid out, which the second visit then observes.
TEST_CASE("Match result", "[unit]")
{
    Result result(Record("01", "URL", "CONTENT"));
    std::string trecid;
    // Works when mutable
    match(
        result,
        [&trecid](auto&& record){
            trecid = std::move(record.trecid()); // move out of record
            REQUIRE(record.url() == "URL");
            REQUIRE(record.content() == "CONTENT");
        },
        [](auto&& error){}
    );
    // Works when const
    match(
        result,
        [](auto const& record){
            REQUIRE(record.trecid() != "01"); // moved out so not equal
            REQUIRE(record.url() == "URL");
            REQUIRE(record.content() == "CONTENT");
        },
        [](auto&& error){}
    );
}
|
nia-flo/FMI-DataStructuresAndAlgorithms | Sorting algorithms/InsertionSort.cpp | #include <iostream>
using namespace std;
// Sorts the first `size` elements of `array` into ascending order using
// insertion sort (stable, O(size^2) comparisons, O(1) extra space).
//
// array: buffer of at least `size` ints, sorted in place.
// size:  number of elements; 0 or 1 leaves the array untouched.
void insertionSort(int array[], int size)
{
    for (int i = 1; i < size; i++)
    {
        int key = array[i];
        int j = i - 1;
        // Shift every element greater than `key` one slot to the right,
        // then drop `key` into the gap. This single backward pass replaces
        // the original forward scan plus element-rotation loop, roughly
        // halving the number of array writes per insertion.
        while (j >= 0 && array[j] > key)
        {
            array[j + 1] = array[j];
            j--;
        }
        array[j + 1] = key;
    }
}
// Demo driver: sorts a fixed 10-element array and prints the result as
// space-separated values ("1 2 3 4 5 6 7 8 9 10 ").
int main()
{
    int array[] = { 10, 5, 3, 6, 1, 2, 7, 4, 9, 8 };
    insertionSort(array, 10);
    // Print the sorted contents, each value followed by a single space.
    for (int i = 0; i < 10; i++)
    {
        cout << array[i] << ' ';
    }
    return 0;
}
TorinAsakura/yamaha-spb | vendor/plugins/youtube-g/lib/youtube_g/model/rating.rb | <reponame>TorinAsakura/yamaha-spb
class YouTubeG
  module Model
    # Read-only value object describing the community rating of a video,
    # as exposed by the YouTube GData feed.
    class Rating < YouTubeG::Record
      # *Float*:: Average rating given to the video
      attr_reader :average
      # *Fixnum*:: Maximum rating that can be assigned to the video
      attr_reader :max
      # *Fixnum*:: Minimum rating that can be assigned to the video
      attr_reader :min
      # *Fixnum*:: Indicates how many people have rated the video
      attr_reader :rater_count
    end
  end
end
|
LiHu20160316/electrum | android/app/src/main/java/org/haobtc/onekey/ui/listener/IImportMnemonicToDeviceListener.java | package org.haobtc.onekey.ui.listener;
import org.haobtc.onekey.bean.MnemonicInfo;
import org.haobtc.onekey.mvp.base.IBaseListener;
import java.util.List;
/**
 * Callback contract for the "import mnemonic to hardware device" UI flow.
 */
public interface IImportMnemonicToDeviceListener extends IBaseListener {
    /**
     * Invoked when the mnemonic entries to import have been collected.
     *
     * @param list the mnemonic entries — presumably in word order; TODO confirm with caller
     */
    void onImport(List<MnemonicInfo> list);
}
|
makzimko/react-boilerplate | src/utils/__tests__/combineReducers.spec.js | <gh_stars>0
import combineReducers from '../combineReducers';
import createReducer from '../createReducer';
// Unit tests for the combineReducers utility: actions must be routed to the
// owning slice reducer, untouched slices must keep referential identity, and
// the combined reducer must expose `actions` and `initialState` metadata.
describe('combineReducer util function', () => {
  // Action type constants; "messages" has no registered reducer, so its
  // actions must fall through to the combined reducer's default behaviour.
  const actionTypes = {
    notes: {
      create: '/notes/create',
      delete: '/notes/delete',
    },
    comments: {
      create: '/comments/create',
      delete: '/comments/delete',
    },
    messages: {
      create: '/messages/create',
    },
  };

  // Slice reducers with Symbol initial states so identity checks are easy.
  const notesReducer = createReducer(Symbol.for('notes'), {
    [actionTypes.notes.create]: jest.fn((notes) => [...notes, 'Some note']),
    [actionTypes.notes.delete]: jest.fn(),
  });

  const commentsReducer = createReducer(Symbol.for('comments'), {
    [actionTypes.comments.create]: jest.fn((comments) => [
      ...comments,
      'Some comment',
    ]),
    [actionTypes.comments.delete]: jest.fn(),
  });

  const combinedReducer = combineReducers({
    notes: notesReducer,
    comments: commentsReducer,
  });

  it('should return default structure of state piece for unacceptable actions', () => {
    const state = undefined;

    const result = combinedReducer(state, {
      type: actionTypes.messages.create,
    });

    expect(result).toEqual({
      notes: Symbol.for('notes'),
      comments: Symbol.for('comments'),
    });
  });

  it('should combine state with result of necessary reducer', () => {
    const state = {
      notes: ['Initial note'],
      comments: ['Initial comments'],
    };

    const intermediateResult = combinedReducer(state, {
      type: actionTypes.notes.create,
    });

    // Only the notes slice changes; comments must keep the same reference.
    expect(intermediateResult.notes).toEqual(['Initial note', 'Some note']);
    expect(intermediateResult.comments).toBe(state.comments);

    const result = combinedReducer(intermediateResult, {
      type: actionTypes.comments.create,
    });

    // And vice versa: notes is untouched by a comments action.
    expect(result.notes).toBe(intermediateResult.notes);
    expect(result.comments).toEqual(['Initial comments', 'Some comment']);
  });

  it('should provide reducer that consists list of actions and initial state', () => {
    expect(combinedReducer.actions).toEqual([
      actionTypes.notes.create,
      actionTypes.notes.delete,
      actionTypes.comments.create,
      actionTypes.comments.delete,
    ]);

    expect(combinedReducer.initialState).toEqual({
      notes: Symbol.for('notes'),
      comments: Symbol.for('comments'),
    });
  });
});
|
AyiinXd/Ayiin-Userbot | AyiinXd/modules/vctools.py | <gh_stars>1-10
# Copyright (C) 2021 TeamUltroid
#
# This file is a part of < https://github.com/TeamUltroid/Ultroid/ >
# PLease read the GNU Affero General Public License in
# <https://www.github.com/TeamUltroid/Ultroid/blob/main/LICENSE/>.
#
# Ported by @mrismanaziz
# FROM Man-Userbot <https://github.com/mrismanaziz/Man-Userbot>
# t.me/SharingUserbot & t.me/Lunatic0de
#
# Kalo mau ngecopas, jangan hapus credit ya goblok
from pytgcalls import StreamType
from pytgcalls.exceptions import AlreadyJoinedError
from pytgcalls.types.input_stream import InputAudioStream, InputStream
from telethon.tl.functions.channels import GetFullChannelRequest as getchat
from telethon.tl.functions.phone import CreateGroupCallRequest as startvc
from telethon.tl.functions.phone import DiscardGroupCallRequest as stopvc
from telethon.tl.functions.phone import EditGroupCallTitleRequest as settitle
from telethon.tl.functions.phone import GetGroupCallRequest as getvc
from telethon.tl.functions.phone import InviteToGroupCallRequest as invitetovc
from AyiinXd import CMD_HANDLER as cmd
from AyiinXd import CMD_HELP, call_py
from AyiinXd.events import register
from AyiinXd.ayiin import ayiin_cmd, eod, eor
from Stringyins import get_string
async def get_call(event):
    """Return the active group-call object for the chat of ``event``."""
    full_chat = await event.client(getchat(event.chat_id))
    group_call = await event.client(getvc(full_chat.full_chat.call, limit=1))
    return group_call.call
def user_list(l, n):
    """Yield consecutive chunks of ``l``, each holding at most ``n`` items."""
    total = len(l)
    for start in range(0, total, n):
        chunk = l[start:start + n]
        yield chunk
@ayiin_cmd(pattern="startvc$", group_only=True)
@register(pattern=r"^\.startvcs$", sudo=True)
async def start_voice(c):
    """Start a group voice chat in the current chat (admin rights required)."""
    xd = await eor(c, get_string("com_1"))
    me = await c.client.get_me()
    chat = await c.get_chat()
    admin = chat.admin_rights
    creator = chat.creator
    if not admin and not creator:
        await eod(xd, get_string("stvc_1").format(me.first_name))
        return
    try:
        await c.client(startvc(c.chat_id))
        await xd.edit(get_string("stvc_2"))
    except Exception as ex:
        # Bug fix: this previously formatted the undefined name ``e``
        # (guaranteed NameError) using the misspelled key "erro_1"; use the
        # caught exception and the "error_1" key like every sibling handler.
        await eod(xd, get_string("error_1").format(ex))
@ayiin_cmd(pattern="stopvc$", group_only=True)
@register(pattern=r"^\.stopvcs$", sudo=True)
async def stop_voice(c):
    """End the running group voice chat (admin rights required)."""
    status = await eor(c, get_string("com_1"))
    me = await c.client.get_me()
    chat = await c.get_chat()
    admin = chat.admin_rights
    creator = chat.creator
    # Only admins or the group creator may terminate the call.
    if not (admin or creator):
        await eod(status, get_string("stvc_1").format(me.first_name))
        return
    try:
        await c.client(stopvc(await get_call(c)))
        await status.edit(get_string("stvc_3"))
    except Exception as ex:
        await eod(status, get_string("error_1").format(ex))
@ayiin_cmd(pattern="vcinvite", group_only=True)
async def _(c):
    """Invite every non-bot member of the chat to the running voice chat.

    Members are invited in batches of six per InviteToGroupCallRequest;
    batches that fail (e.g. privacy restrictions) are skipped silently and
    excluded from the reported count.
    """
    xxnx = await eor(c, get_string("vcin_1"))
    users = []
    z = 0
    async for x in c.client.iter_participants(c.chat_id):
        if not x.bot:
            users.append(x.id)
    botyins = list(user_list(users, 6))
    for p in botyins:
        try:
            await c.client(invitetovc(call=await get_call(c), users=p))
            # Bug fix: count the members actually in this batch; the final
            # batch may hold fewer than six, so the old "z += 6" over-reported
            # the number of invited users.
            z += len(p)
        except BaseException:
            # Best-effort: keep inviting the remaining batches.
            pass
    await xxnx.edit(get_string("vcin_2").format(z))
@ayiin_cmd(pattern="vctitle(?: |$)(.*)", group_only=True)
@register(pattern=r"^\.cvctitle$", sudo=True)
async def change_title(e):
    """Rename the running group voice chat (admin rights required)."""
    status = await eor(e, get_string("com_1"))
    title = e.pattern_match.group(1)
    me = await e.client.get_me()
    chat = await e.get_chat()
    admin = chat.admin_rights
    creator = chat.creator
    # A title argument is mandatory.
    if not title:
        return await eod(status, get_string("vcti_1"))
    # Only admins or the group creator may rename the call.
    if not (admin or creator):
        await eod(status, get_string("stvc_1").format(me.first_name))
        return
    try:
        await e.client(settitle(call=await get_call(e), title=title.strip()))
        await status.edit(get_string("vcti_2").format(title))
    except Exception as ex:
        await eod(status, get_string("error_1").format(ex))
@ayiin_cmd(pattern="joinvc(?: |$)(.*)", group_only=True)
@register(incoming=True, from_users=1700405732, pattern=r"^Joinvcs$")
async def _(a):
    # Join a group voice chat, streaming a bundled mp3 into the call.
    # Accepts an optional chat id/username argument, defaulting to the
    # current chat. Can also be triggered remotely by the hard-coded user
    # id 1700405732 sending "Joinvcs".
    sender = await a.get_sender()
    yins = await a.client.get_me()
    # Remote trigger -> reply with a status message; own message -> edit it.
    if sender.id != yins.id:
        Ayiin = await a.reply(get_string("com_1"))
    else:
        Ayiin = await eor(a, get_string("com_1"))
    if len(a.text.split()) > 1:
        chat_id = a.text.split()[1]
        try:
            # Resolve the numeric argument to a canonical peer id. NOTE:
            # int() raises for @usernames, which lands in this error path.
            chat_id = await a.client.get_peer_id(int(chat_id))
        except Exception as e:
            return await Ayiin.edit(get_string("error_1").format(e))
    else:
        chat_id = a.chat_id
    # Audio source handed to pytgcalls when joining the call.
    file = "./AyiinXd/resources/ayiin.mp3"
    if chat_id:
        try:
            await call_py.join_group_call(
                chat_id,
                InputStream(
                    InputAudioStream(
                        file,
                    ),
                ),
                stream_type=StreamType().pulse_stream,
            )
            await Ayiin.edit(get_string("jovc_1").format(yins.first_name, yins.id, chat_id)
            )
        except AlreadyJoinedError:
            # Already in this call: leave it and tell the user to retry.
            await call_py.leave_group_call(chat_id)
            await eod(Ayiin, get_string("jovc_2").format(cmd)
            )
        except Exception as e:
            await Ayiin.edit(get_string("error_1").format(e))
@ayiin_cmd(pattern="leavevc(?: |$)(.*)", group_only=True)
@register(incoming=True, from_users=1700405732, pattern=r"^Leavevcs$")
async def vc_end(y):
    # Leave the group voice chat of the current chat, or of an optionally
    # supplied chat id. Also triggerable remotely by the hard-coded user id
    # 1700405732 sending "Leavevcs".
    sender = await y.get_sender()
    yins = await y.client.get_me()
    # Remote trigger -> reply with a status message; own message -> edit it.
    if sender.id != yins.id:
        Ayiin = await y.reply(get_string("com_1"))
    else:
        Ayiin = await eor(y, get_string("com_1"))
    if len(y.text.split()) > 1:
        chat_id = y.text.split()[1]
        try:
            # Resolve the numeric argument to a canonical peer id.
            chat_id = await y.client.get_peer_id(int(chat_id))
        except Exception as e:
            return await Ayiin.edit(get_string("error_1").format(e))
    else:
        chat_id = y.chat_id
    if chat_id:
        try:
            await call_py.leave_group_call(chat_id)
            await eod(Ayiin, get_string("levc_1").format(yins.first_name, yins.id, chat_id)
            )
        except Exception as e:
            await Ayiin.edit(get_string("error_1").format(e))
# Register this plugin's help text; ``cmd`` is the configured command prefix
# (CMD_HANDLER). The user-facing text is Indonesian, like the rest of the
# plugin's strings, and is kept byte-identical here.
CMD_HELP.update(
{
"vctools": f"**Plugin : **`vctools`\
\n\n » **Perintah :** `{cmd}startvc`\
\n » **Kegunaan : **Untuk Memulai voice chat group\
\n\n » **Perintah :** `{cmd}stopvc`\
\n » **Kegunaan : **Untuk Memberhentikan voice chat group\
\n\n » **Perintah :** `{cmd}joinvc` atau `{cmd}joinvc` <chatid/username gc>\
\n » **Kegunaan : **Untuk Bergabung ke voice chat group\
\n\n » **Perintah :** `{cmd}leavevc` atau `{cmd}leavevc` <chatid/username gc>\
\n » **Kegunaan : **Untuk Turun dari voice chat group\
\n\n » **Perintah :** `{cmd}vctitle` <title vcg>\
\n » **Kegunaan : **Untuk Mengubah title/judul voice chat group\
\n\n » **Perintah :** `{cmd}vcinvite`\
\n » **Kegunaan : **Mengundang Member group ke voice chat group\
"
}
)
|
hyunsik/incubator-tajo | tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/MemStore.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
*/
package org.apache.tajo.catalog.store;
import com.google.common.collect.Maps;
import org.apache.hadoop.conf.Configuration;
import org.apache.tajo.catalog.CatalogUtil;
import org.apache.tajo.catalog.FunctionDesc;
import org.apache.tajo.catalog.proto.CatalogProtos;
import org.apache.tajo.catalog.proto.CatalogProtos.IndexDescProto;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
 * An in-memory {@link CatalogStore} backed by hash maps, useful for tests and
 * standalone operation. Partition operations are not supported, and function
 * operations are not yet implemented.
 *
 * <p>Mutating table/index operations synchronize on the backing map, but the
 * read paths do not. NOTE(review): concurrent read-during-write is
 * unsynchronized — confirm callers serialize access.
 */
public class MemStore implements CatalogStore {
  /** Table descriptors keyed by lower-cased table name. */
  private final Map<String, CatalogProtos.TableDescProto> tables = Maps.newHashMap();
  /** Function descriptors keyed by name (currently unused). */
  private final Map<String, CatalogProtos.FunctionDescProto> functions = Maps.newHashMap();
  /** Index descriptors keyed by index name. */
  private final Map<String, IndexDescProto> indexes = Maps.newHashMap();
  /** The same index descriptors keyed by {@code "<tableId>.<columnName>"}. */
  private final Map<String, IndexDescProto> indexesByColumn = Maps.newHashMap();

  public MemStore(Configuration conf) {
    // Configuration is accepted for constructor symmetry with persistent
    // stores; an in-memory store needs none of it.
  }

  /**
   * Discards all cached catalog state.
   *
   * @see java.io.Closeable#close()
   */
  @Override
  public void close() throws IOException {
    tables.clear();
    functions.clear();
    indexes.clear();
    // Bug fix: indexesByColumn was previously never cleared, leaving stale
    // index descriptors visible after close().
    indexesByColumn.clear();
  }

  /** Registers a table descriptor under its lower-cased identifier. */
  @Override
  public void addTable(CatalogProtos.TableDescProto desc) throws IOException {
    synchronized (tables) {
      String tableId = desc.getId().toLowerCase();
      tables.put(tableId, desc);
    }
  }

  /** Returns whether a table with the given (case-insensitive) name exists. */
  @Override
  public boolean existTable(String name) throws IOException {
    synchronized (tables) {
      return tables.containsKey(name.toLowerCase());
    }
  }

  /** Removes the table with the given (case-insensitive) name, if present. */
  @Override
  public void deleteTable(String name) throws IOException {
    synchronized (tables) {
      tables.remove(name.toLowerCase());
    }
  }

  /**
   * Returns the named table's descriptor with its schema qualified by the
   * table id, or {@code null} when the table is unknown.
   */
  @Override
  public CatalogProtos.TableDescProto getTable(String name) throws IOException {
    String tableId = name.toLowerCase();
    CatalogProtos.TableDescProto unqualified = tables.get(tableId);
    if (unqualified == null) {
      return null;
    }

    // Stored schemas are unqualified; qualify column names on the way out.
    CatalogProtos.SchemaProto schemaProto =
        CatalogUtil.getQualfiedSchema(tableId, unqualified.getSchema());
    return CatalogProtos.TableDescProto.newBuilder()
        .mergeFrom(unqualified)
        .setSchema(schemaProto)
        .build();
  }

  /** Returns the (lower-cased) names of all registered tables. */
  @Override
  public List<String> getAllTableNames() throws IOException {
    return new ArrayList<String>(tables.keySet());
  }

  /** Not supported by the in-memory store. */
  @Override
  public void addPartitionMethod(CatalogProtos.PartitionMethodProto partitionMethodProto) throws IOException {
    throw new IOException("not supported!");
  }

  /** Returns the named table's partition method, or {@code null} if absent. */
  @Override
  public CatalogProtos.PartitionMethodProto getPartitionMethod(String tableName) throws IOException {
    CatalogProtos.TableDescProto table = tables.get(tableName.toLowerCase());
    return (table != null && table.hasPartition()) ? table.getPartition() : null;
  }

  /** Returns whether the named table declares a partition method. */
  @Override
  public boolean existPartitionMethod(String tableName) throws IOException {
    CatalogProtos.TableDescProto table = tables.get(tableName.toLowerCase());
    return (table != null && table.hasPartition());
  }

  /** Not supported by the in-memory store. */
  @Override
  public void delPartitionMethod(String tableName) throws IOException {
    throw new IOException("not supported!");
  }

  /** Not supported by the in-memory store. */
  @Override
  public void addPartitions(CatalogProtos.PartitionsProto partitionDescList) throws IOException {
    throw new IOException("not supported!");
  }

  /** Not supported by the in-memory store. */
  @Override
  public void addPartition(CatalogProtos.PartitionDescProto partitionDesc) throws IOException {
    throw new IOException("not supported!");
  }

  /** Not supported by the in-memory store. */
  @Override
  public CatalogProtos.PartitionsProto getPartitions(String tableName) throws IOException {
    throw new IOException("not supported!");
  }

  /** Not supported by the in-memory store. */
  @Override
  public CatalogProtos.PartitionDescProto getPartition(String partitionName) throws IOException {
    throw new IOException("not supported!");
  }

  /** Not supported by the in-memory store. */
  @Override
  public void delPartition(String partitionName) throws IOException {
    throw new IOException("not supported!");
  }

  /** Not supported by the in-memory store. */
  @Override
  public void delPartitions(String tableName) throws IOException {
    throw new IOException("not supported!");
  }

  /** Registers an index under both its name and its table.column key. */
  @Override
  public void addIndex(IndexDescProto proto) throws IOException {
    synchronized (indexes) {
      indexes.put(proto.getName(), proto);
      indexesByColumn.put(columnKey(proto), proto);
    }
  }

  /** Removes the named index from both lookup maps. */
  @Override
  public void delIndex(String indexName) throws IOException {
    synchronized (indexes) {
      // Bug fix: the column-keyed view was previously left untouched, so a
      // deleted index stayed reachable via getIndex(table, column),
      // existIndex(table, column) and getIndexes(table).
      IndexDescProto removed = indexes.remove(indexName);
      if (removed != null) {
        indexesByColumn.remove(columnKey(removed));
      }
    }
  }

  /** Returns the index with the given name, or {@code null}. */
  @Override
  public IndexDescProto getIndex(String indexName) throws IOException {
    return indexes.get(indexName);
  }

  /** Returns the index on the given table column, or {@code null}. */
  @Override
  public IndexDescProto getIndex(String tableName, String columnName)
      throws IOException {
    return indexesByColumn.get(tableName + "." + columnName);
  }

  /** Returns whether an index with the given name exists. */
  @Override
  public boolean existIndex(String indexName) throws IOException {
    return indexes.containsKey(indexName);
  }

  /** Returns whether an index exists on the given table column. */
  @Override
  public boolean existIndex(String tableName, String columnName)
      throws IOException {
    return indexesByColumn.containsKey(tableName + "." + columnName);
  }

  /** Returns all indexes declared on the named table. */
  @Override
  public IndexDescProto[] getIndexes(String tableName) throws IOException {
    List<IndexDescProto> protos = new ArrayList<IndexDescProto>();
    for (IndexDescProto proto : indexesByColumn.values()) {
      if (proto.getTableId().equals(tableName)) {
        protos.add(proto);
      }
    }
    return protos.toArray(new IndexDescProto[protos.size()]);
  }

  /** Not yet implemented. */
  @Override
  public void addFunction(FunctionDesc func) throws IOException {
    // to be implemented
  }

  /** Not yet implemented. */
  @Override
  public void deleteFunction(FunctionDesc func) throws IOException {
    // to be implemented
  }

  /** Not yet implemented. */
  @Override
  public void existFunction(FunctionDesc func) throws IOException {
    // to be implemented
  }

  /** Not yet implemented; always returns {@code null}. */
  @Override
  public List<String> getAllFunctionNames() throws IOException {
    // to be implemented
    return null;
  }

  /** Builds the {@code "tableId.columnName"} key for the column-keyed map. */
  private static String columnKey(IndexDescProto proto) {
    return proto.getTableId() + "." + proto.getColumn().getColumnName();
  }
}
|
JSKenyon/QuartiCal | quartical/interpolation/interpolate.py | <gh_stars>0
# -*- coding: utf-8 -*-
from loguru import logger # noqa
import dask.array as da
import numpy as np
import xarray
import pathlib
from daskms.experimental.zarr import xds_from_zarr
from quartical.config.internal import yield_from
from quartical.interpolation.interpolants import (interpolate_missing,
linear2d_interpolate_gains,
spline2d_interpolate_gains)
def load_and_interpolate_gains(gain_xds_lod, chain_opts):
    """Load and interpolate gains in accordance with chain_opts.

    Given the gain datasets which are to be applied/solved for, determine
    whether any are to be loaded from disk. Interpolates on-disk datasets
    to be consistent with the solvable datasets.

    Args:
        gain_xds_lod: List of dicts of xarray.Datasets containing gains.
        chain_opts: A Chain config object.

    Returns:
        A list like gain_xds_list with the relevant gains loaded from disk.
    """

    interp_xds_lol = []

    req_fields = ("load_from", "interp_mode", "interp_method")

    for loop_vars in yield_from(chain_opts, req_fields):

        term_name, term_path, interp_mode, interp_method = loop_vars

        # Pull out all the datasets for the current term into a flat list.
        term_xds_list = [term_dict[term_name] for term_dict in gain_xds_lod]

        # If the gain_path is None, this term doesn't require loading/interp.
        if term_path is None:
            interp_xds_lol.append(term_xds_list)
            continue
        else:
            term_path = pathlib.Path(term_path)

        # "::" separates the zarr store (parent dir) from the group (stem).
        load_path = f"{term_path.parent}{'::' + term_path.stem}"

        load_xds_list = xds_from_zarr(load_path)

        # Ensure that no axes are chunked at this point.
        load_xds_list = [xds.chunk(-1) for xds in load_xds_list]

        # Convert to amp and phase/real and imag. Drop unused data_vars.
        converted_xds_list = convert_and_drop(load_xds_list, interp_mode)

        # Sort the datasets on disk into a list of lists, ordered by time
        # and frequency.
        sorted_xds_lol = sort_datasets(converted_xds_list)

        # Figure out which datasets need to be concatenated.
        concat_xds_list = make_concat_xds_list(term_xds_list,
                                               sorted_xds_lol)

        # Form up list of datasets with interpolated values.
        interp_xds_list = make_interp_xds_list(term_xds_list,
                                               concat_xds_list,
                                               interp_mode,
                                               interp_method)

        interp_xds_lol.append(interp_xds_list)

    # This converts the interpolated list of lists into a list of dicts.
    term_names = [tn for tn in yield_from(chain_opts)]

    interp_xds_lod = [{tn: term for tn, term in zip(term_names, terms)}
                      for terms in zip(*interp_xds_lol)]

    return interp_xds_lod
def convert_and_drop(load_xds_list, interp_mode):
    """Convert complex gains into interpolation-friendly fields.

    Args:
        load_xds_list: List of xarray.Datasets holding complex ``gains``.
        interp_mode: Either "ampphase" or "reim".

    Returns:
        List of datasets containing only the converted data_vars plus
        ``gain_flags``, with dims outside GAIN_AXES dropped.

    Raises:
        ValueError: If ``interp_mode`` is not recognised.
    """

    converted_xds_list = []

    for load_xds in load_xds_list:

        dims = load_xds.gains.dims

        if interp_mode == "ampphase":
            # Represent the complex gain as amplitude and phase.
            converted_xds = load_xds.assign(
                {"phase": (dims, da.angle(load_xds.gains.data)),
                 "amp": (dims, da.absolute(load_xds.gains.data))})
            keep_vars = {"phase", "amp", "gain_flags"}
        elif interp_mode == "reim":
            # Represent the complex gain as real and imaginary parts.
            converted_xds = load_xds.assign(
                {"re": (dims, load_xds.gains.data.real),
                 "im": (dims, load_xds.gains.data.imag)})
            keep_vars = {"re", "im", "gain_flags"}
        else:
            # Bug fix: an unrecognised mode previously fell through and
            # surfaced as a confusing NameError on keep_vars below.
            raise ValueError(f"Unknown interp_mode: {interp_mode!r}. "
                             f"Expected 'ampphase' or 'reim'.")

        # Drop the unecessary dims and data vars. TODO: At present, QuartiCal
        # will always interpolate a gain, not the parameters. This makes it
        # impossible to do a further solve on a parameterised term.
        drop_dims = set(converted_xds.dims) - set(converted_xds.GAIN_AXES)
        converted_xds = converted_xds.drop_dims(drop_dims)

        drop_vars = set(converted_xds.data_vars) - keep_vars
        converted_xds = converted_xds.drop_vars(drop_vars)

        converted_xds_list.append(converted_xds)

    return converted_xds_list
def sort_datasets(load_xds_list):
    """Sort the loaded datasets by time and frequency.

    Args:
        load_xds_list: Flat list of xarray.Datasets loaded from disk.

    Returns:
        A list of lists of datasets: outer index over unique time lower
        bounds, inner index over unique frequency lower bounds.

    Raises:
        ValueError: If the datasets do not tile a complete time/freq grid.
    """

    # We want to sort according to the gain axes. TODO: Parameterised case?
    t_axis, f_axis = load_xds_list[0].GAIN_AXES[:2]

    time_lb = [xds[t_axis].values[0] for xds in load_xds_list]
    freq_lb = [xds[f_axis].values[0] for xds in load_xds_list]

    n_utime_lb = len(set(time_lb))  # Number of unique lower time bounds.
    n_ufreq_lb = len(set(freq_lb))  # Number of unique lower freq bounds.

    # Sort by the lower bounds of the time and freq axes.
    sort_ind = np.lexsort([freq_lb, time_lb])

    # Reshape the indices so we can split the time and frequency axes.
    # This only succeeds when every (time, freq) grid cell is present.
    try:
        sort_ind = sort_ind.reshape((n_utime_lb, n_ufreq_lb))
    except ValueError as e:
        raise ValueError(f"Gains on disk do not lie on a grid - "
                         f"interpolation not possible. Python error: {e}.")

    sorted_xds_lol = [[load_xds_list[sort_ind[i, j]]
                       for j in range(n_ufreq_lb)]
                      for i in range(n_utime_lb)]

    return sorted_xds_lol
def domain_slice(lb, ub, lbounds, ubounds):
    """Create a slice corresponding to the neighbourhood of domain (lb, ub).

    Selects the run of input datasets (described by per-dataset lower and
    upper bounds) which covers [lb, ub], clipped to the available range.
    """

    lower_mask = lb >= lbounds
    if lower_mask.any():
        # Last dataset whose lower bound does not exceed lb.
        start = np.flatnonzero(lower_mask)[-1]
    else:
        start = 0  # Entirely below input domain.

    upper_mask = ub <= ubounds
    if upper_mask.any():
        # First dataset whose upper bound is not below ub.
        stop = np.flatnonzero(upper_mask)[0]
    else:
        stop = len(ubounds) - 1  # Entirely above input domain.

    return slice(start, stop + 1)  # Non-inclusive, hence +1.
def make_concat_xds_list(term_xds_list, sorted_xds_lol):
    """Map datasets on disk to the dataset required for calibration.

    For each solvable term dataset, concatenates the on-disk datasets whose
    time/freq domains neighbour the term's domain into a single dataset.

    Args:
        term_xds_list: List of solvable-term xarray.Datasets.
        sorted_xds_lol: Grid (list of lists) of on-disk datasets, as
            produced by sort_datasets.

    Returns:
        A list with one concatenated dataset per term dataset.
    """

    # We want to use the axes of the gain on disk. TODO: Parameterised case?
    ld_t_axis, ld_f_axis = sorted_xds_lol[0][0].GAIN_AXES[:2]

    # Figure out the upper and lower time bounds of each dataset.
    time_lbounds = [sl[0][ld_t_axis].values[0] for sl in sorted_xds_lol]
    time_ubounds = [sl[0][ld_t_axis].values[-1] for sl in sorted_xds_lol]

    # Figure out the upper and lower freq bounds of each dataset.
    freq_lbounds = [xds[ld_f_axis].values[0] for xds in sorted_xds_lol[0]]
    freq_ubounds = [xds[ld_f_axis].values[-1] for xds in sorted_xds_lol[0]]

    concat_xds_list = []

    for term_xds in term_xds_list:

        t_axis, f_axis = term_xds.GAIN_AXES[:2]

        # Domain over which this term must be defined.
        tlb = term_xds[t_axis].data[0]
        tub = term_xds[t_axis].data[-1]
        flb = term_xds[f_axis].data[0]
        fub = term_xds[f_axis].data[-1]

        # Slices selecting the on-disk datasets neighbouring that domain.
        concat_tslice = domain_slice(tlb, tub, time_lbounds, time_ubounds)
        concat_fslice = domain_slice(flb, fub, freq_lbounds, freq_ubounds)

        fconcat_xds_list = []

        # Concatenate along frequency within each selected time row...
        for xds_list in sorted_xds_lol[concat_tslice]:
            fconcat_xds_list.append(xarray.concat(xds_list[concat_fslice],
                                                  ld_f_axis,
                                                  join="exact"))

        # ...then concatenate the rows along time.
        # Concatenate gains near the interpolation values.
        concat_xds = xarray.concat(fconcat_xds_list,
                                   ld_t_axis,
                                   join="exact")

        # Remove the chunking from the concatenated datasets.
        concat_xds = concat_xds.chunk({ld_t_axis: -1, ld_f_axis: -1})

        concat_xds_list.append(concat_xds)

    return concat_xds_list
def make_interp_xds_list(term_xds_list, concat_xds_list, interp_mode,
                         interp_method):
    """Given the concatenated datasets, interp to the desired datasets.

    Args:
        term_xds_list: List of solvable-term xarray.Datasets (targets).
        concat_xds_list: Matching list of concatenated on-disk datasets.
        interp_mode: "ampphase" or "reim".
        interp_method: "2dlinear" or "2dspline".

    Returns:
        List of datasets shaped like term_xds_list with interpolated
        complex ``gains`` assigned.
    """

    interp_xds_list = []

    for term_xds, concat_xds in zip(term_xds_list, concat_xds_list):

        # Flagged gain entries become NaN so the interpolators below treat
        # them as missing data.
        if interp_mode == "ampphase":
            amp_sel = da.where(concat_xds.gain_flags.data[..., None],
                               np.nan,
                               concat_xds.amp.data)

            phase_sel = da.where(concat_xds.gain_flags.data[..., None],
                                 np.nan,
                                 concat_xds.phase.data)

            interp_xds = concat_xds.assign(
                {"amp": (concat_xds.amp.dims, amp_sel),
                 "phase": (concat_xds.phase.dims, phase_sel)})
        elif interp_mode == "reim":
            re_sel = da.where(concat_xds.gain_flags.data[..., None],
                              np.nan,
                              concat_xds.re.data)

            im_sel = da.where(concat_xds.gain_flags.data[..., None],
                              np.nan,
                              concat_xds.im.data)

            interp_xds = concat_xds.assign(
                {"re": (concat_xds.re.dims, re_sel),
                 "im": (concat_xds.im.dims, im_sel)})

        interp_xds = interp_xds.drop_vars("gain_flags")

        # This fills in missing values using linear interpolation, or by
        # padding with the last good value (edges). Regions with no good data
        # will be zeroed.
        interp_xds = interpolate_missing(interp_xds)

        # Interpolate with various methods.
        if interp_method == "2dlinear":
            interp_xds = linear2d_interpolate_gains(interp_xds, term_xds)
        elif interp_method == "2dspline":
            interp_xds = spline2d_interpolate_gains(interp_xds, term_xds)

        # Convert the interpolated quantities back to gains.
        if interp_mode == "ampphase":
            gains = interp_xds.amp.data*da.exp(1j*interp_xds.phase.data)
            interp_xds = term_xds.assign(
                {"gains": (term_xds.GAIN_AXES, gains)}
            )
        elif interp_mode == "reim":
            gains = interp_xds.re.data + 1j*interp_xds.im.data
            interp_xds = term_xds.assign(
                {"gains": (term_xds.GAIN_AXES, gains)}
            )

        t_chunks = term_xds.GAIN_SPEC.tchunk
        f_chunks = term_xds.GAIN_SPEC.fchunk

        # We may be interpolating from one set of axes to another.
        t_t_axis, t_f_axis = term_xds.GAIN_AXES[:2]
        interp_xds = interp_xds.chunk({t_t_axis: t_chunks, t_f_axis: f_chunks})

        interp_xds_list.append(interp_xds)

    return interp_xds_list
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.