branch_name stringclasses 149 values | text stringlengths 23 89.3M | directory_id stringlengths 40 40 | languages listlengths 1 19 | num_files int64 1 11.8k | repo_language stringclasses 38 values | repo_name stringlengths 6 114 | revision_id stringlengths 40 40 | snapshot_id stringlengths 40 40 |
|---|---|---|---|---|---|---|---|---|
refs/heads/master | <repo_name>bomeara/treevo_paper<file_sep>/analyses_cluster_fast/Run_An_Emp_TimeReg_2019-07-31.sh
#!/bin/bash
# Cluster launcher: runs the An_Emp_TimeReg TreEvo analysis R script.
Rscript Run_An_Emp_TimeReg_2019-07-31.R
<file_sep>/analyses_cluster/Run_Aq_Emp_3Opt2Bound.sh
#!/bin/bash
# Cluster launcher: runs the Aq_Emp_3Opt2Bound TreEvo analysis R script.
Rscript Run_Aq_Emp_3Opt2Bound.R
<file_sep>/old/replicating_FPK_simulation.R
# FPK model is a four param intrinsic model
# three to describe landscape
# one to describe sigma (dispersion parameter)
# bounds are treated as two additional parameters
# but these are intended to be nuisance parameters
# Boucher et al essentially fix these bounds
# at a distance far from observed trait values
# actualistic bounds on function
# Compute "actualistic" trait bounds for the FPK model, following Boucher et
# al.: place each bound half the observed trait range beyond the observed
# minimum / maximum, so the bounds act as distant nuisance parameters.
# Returns c(lowerBound, upperBound).
getTraitBoundsFPK <- function(x) {
    halfSpan <- (max(x) - min(x)) / 2
    c(min(x) - halfSpan, max(x) + halfSpan)
}
# Distribute density across the grid cells of origSequence that overlap an
# interval of width origIntLength centered on the current trait value.
#
# Arguments:
#   trait         - current (scalar) trait value
#   origIntLength - width of one grid cell on the original trait scale
#   origSequence  - grid of trait values, length grainScaleFPK
#   grainScaleFPK - number of grid cells
# Returns a numeric vector of length grainScaleFPK giving the overlap length
# for each cell (most entries zero); the caller normalizes it to a probability.
getTraitIntervalDensityFPK<-function(trait,origIntLength,
        origSequence,grainScaleFPK){
    #### example dataset for testing
    # grainScaleFPK<-100
    # origIntLength<-0.23
    # origSequence<-(-20:(-20+grainScaleFPK))*origIntLength
    # trait<-(-1.83)
    # trait<-max(origSequence)
    # trait<-min(origSequence)
    ####################################################
    # interval of width origIntLength centered on the trait value
    traitRange<-c(trait-origIntLength/2,
        trait+origIntLength/2)
    #
    intDensity<-rep(NA,grainScaleFPK)
    # overlap of each interior cell [origSequence[i], origSequence[i+1]]
    # with the trait interval (clamped at zero for non-overlapping cells)
    intDensity[2:(grainScaleFPK-1)]<-sapply(2:(grainScaleFPK-1), function(i) max(0,
        min(origSequence[i+1],traitRange[2])-max(origSequence[i],traitRange[1])))
    #
    # special calculations for first and last
    # NOTE(review): the two edge cells are all-or-nothing - a trait interval
    # that only partially overlaps the first/last cell gets zero density there,
    # so sum(intDensity) can fall below origIntLength near the bounds. The
    # caller renormalizes the result, but confirm this truncation is intended.
    if(traitRange[2]<origSequence[2]){
        intDensity[1]<-origIntLength
    }else{
        intDensity[1]<-0
    }
    #
    if(traitRange[1]>origSequence[grainScaleFPK-1]){
        intDensity[grainScaleFPK]<-origIntLength
    }else{
        intDensity[grainScaleFPK]<-0
    }
    #
    #if(length(intDensity)!=grainScaleFPK){
    # stop("intDensity is not calculated with correct length")
    # }
    #
    #sum(intDensity)==origIntLength
    return(intDensity)
}
#' @rdname landscapeFPK_Intrinsic
#' @export
# Intrinsic trait-evolution model: a discrete-time Fokker-Planck-Kolmogorov
# (FPK) model, loosely based on function Sim_FPK from package BBMV.
#
# The potential surface V(x) = a*x^4 + b*x^2 + c*x describes a landscape whose
# height corresponds to the tendency to change in that direction; it allows
# multiple optima of different heights, and collapses to BM (V(x)=0) and to
# OU1 (V(x)=((alpha/sigma^2)*x^2)-((2*alpha*theta/(sigma^2))*x)), per Boucher
# et al.
#
# Arguments:
#   params          - c(a, b, c, sigma, lowerBound, upperBound):
#                     params[1:3] shape the landscape, params[4] is the
#                     dispersion sigma, params[5:6] are the trait bounds
#                     (nuisance parameters fixed far from observed values)
#   states          - current trait value
#   timefrompresent - unused here (kept for the TreEvo intrinsic-model API)
#   grainScaleFPK   - number of grid cells discretizing the trait interval
# Returns the simulated trait DISPLACEMENT (new position minus states) for
# one time step.
#
# NOTE(review): everything up to potentialMatrix depends only on params, not
# on states - it could be pre-calculated once per parameter draw (e.g. via
# lexical scoping) instead of on every call.
landscapeFPK_Intrinsic <- function(params, states, timefrompresent,
    grainScaleFPK = 100 # sim controls
    ) {
    #
    # landscape descriptor function evaluated
    # over the arbitrary interval (-1.5 : 1.5)
    arbSequence<-seq(from=-1.5,to=1.5,
        length.out=grainScaleFPK)
    #
    # get bounds from params
    bounds <- params[5:6]
    #
    # translate to original trait scale
    origSequence<-seq(from=bounds[1],to=bounds[2],
        length.out=grainScaleFPK)
    # width of one grid cell on the original trait scale
    origIntLength<-abs((bounds[2]-bounds[1])/(grainScaleFPK-1))
    # potentialVector is a numeric vector (length grainScaleFPK) holding
    # V(x)=a*x^4+b*x^2+c*x on the arbitrary interval
    potentialVector<-potentialFunFPK(
        x=arbSequence,
        a=params[1],b=params[2],c=params[3])
    #
    # Coefficient of Diffusion of the Model
    dCoeff <- log((params[4])^2/2) # log((sigma^2)/2)
    #
    # Transition matrix describing prob of evolving between two sites in the
    # trait grid in an infinitesimal time step. Create and diagonalize the
    # discretized transition matrix; used forward in time for simulation only.
    #
    # make empty matrices (expD later holds the exponentiated eigenvalues)
    expD <- tranMatrix <- matrix(0,grainScaleFPK,grainScaleFPK)
    # fill the tridiagonal transition rates between neighboring cells
    for (i in 1:(grainScaleFPK)){
        if(i>1){
            tranMatrix[i-1,i] <- exp((potentialVector[i]-potentialVector[i-1])/2)
        }
        if(i<grainScaleFPK){
            tranMatrix[i+1,i] <- exp((potentialVector[i]-potentialVector[i+1])/2)
        }
        # rate of staying in place is negative sum of neighboring cells
        neighbors<-c(ifelse(i>1,tranMatrix[i-1,i],0),
            ifelse(i<grainScaleFPK,tranMatrix[i+1,i],0)
            )
        tranMatrix[i,i] <- (-sum(neighbors))
    }
    # eigenvalues and eigenvectors of transition matrix
    # take only real components
    eigTranMatrix <- lapply(eigen(tranMatrix),Re)
    #
    # invert the eigenvector matrix (tiny tolerance: matrix is near-singular)
    solvedEigenvectors <- solve(eigTranMatrix$vectors,tol = 1e-30)
    #
    # scale expected dispersion to the original trait scale:
    # squared distance between points in resolution of trait scale
    # (tau from Boucher et al.'s original code)
    origScaler <- origIntLength^2
    # assign dispersion to diagonal of expD
    diag(expD) <- exp(exp(dCoeff)/origScaler*eigTranMatrix$values)
    # previous time-dep version from Boucher et al's code
    # diag(expD) <- exp(t*diag_expD)
    #
    # dot product of eigenvectors, expD and solved eigenvectors gives the
    # matrix of potential for future trait values, not yet conditioned on
    # the current value
    potentialMatrix <- eigTranMatrix$vectors%*%expD%*%solvedEigenvectors
    #
    ###############################################
    #######################################################
    #
    # need a vector, length = grainScaleFPK,
    # with zeroes in intervals far from current trait value
    # and with density distributed over interval=origIntLength
    # centered around the original trait value
    intDensity<-getTraitIntervalDensityFPK(
        trait=states,
        origIntLength=origIntLength,
        origSequence=origSequence,
        grainScaleFPK=grainScaleFPK)
    #
    # take product to get potential with respect to position
    probDivergence <- potentialMatrix %*% intDensity
    # round all up to zero at least (numerical noise can go negative)
    probDivergence[probDivergence<0] <- 0
    # convert potential to a probability
    probDivergence<-probDivergence / sum(probDivergence)
    #
    # sample a new trait position on the grid
    # from this probability distribution
    newTraitPosition <- sample(
        x=origSequence,
        size=1,
        prob=probDivergence)
    # subtract the current trait position so to get divergence
    newDisplacement<-newTraitPosition-states
    return(newDisplacement)
}
# Evolutionary potential under the FPK model: V(x) = a*x^4 + b*x^2 + c*x.
# Vectorized over x; a, b, c are the three landscape-shape parameters.
potentialFunFPK <- function(x, a, b, c) {
    a * (x^4) + b * (x^2) + c * x
}
# Plot an FPK model as either the macroevolutionary landscape (normalized
# exp(-V), the default) or the raw potential V rescaled to its maximum.
#
# Arguments:
#   params        - c(a, b, c, sigma, lowerBound, upperBound); sigma unused here
#   grainScaleFPK - number of points at which the curve is evaluated
#   traitName     - x-axis label prefix
#   plotLandscape - TRUE for exp(-V) landscape, FALSE for rescaled potential
plotFPKmodel <- function(params,
        grainScaleFPK = 1000,
        traitName = "Trait",
        plotLandscape = TRUE
        ) {
    lowerBound <- params[5]
    upperBound <- params[6]
    # trait values on the original scale (the x axis)
    traitSeq <- seq(from = lowerBound, to = upperBound,
        length.out = grainScaleFPK)
    # V(x)=a*x^4+b*x^2+c*x evaluated on the arbitrary (-1.5, 1.5) interval
    vValues <- potentialFunFPK(
        a = params[1], b = params[2], c = params[3],
        x = seq(from = -1.5, to = 1.5,
            length.out = grainScaleFPK))
    #
    if (plotLandscape) {
        # normalized landscape - equation from Boucher's BBMV tutorial (??)
        cellWidth <- (upperBound - lowerBound) / grainScaleFPK
        yValues <- exp(-vValues) / sum(exp(-vValues) * cellWidth)
        yAxisLabel <- "Macroevolutionary Landscape (N*exp(-V))"
    } else {
        yValues <- vValues / max(vValues)
        yAxisLabel <- "Evolutionary Potential (Rescaled to Max Potential)"
    }
    #
    plot(traitSeq, yValues, type = "l",
        xlab = paste0(traitName, " (Original Scale)"),
        ylab = yAxisLabel)
}
# Demo: simulate a trait dataset and explore a two-peak FPK landscape.
set.seed(444)
traitData<-rnorm(100,0,1)
# need traits to calculate bounds
bounds<-getTraitBoundsFPK(traitData)
# fixed starting trait value (comment said "at random", but 0 is used)
trait<-0
# two peak symmetric landscape example
# (a, b, c shape the landscape; c=0 makes it symmetric)
params<-c(
    a=2,
    b=-4,
    c=0,
    sigma=1,
    bounds)
plotFPKmodel(params)
# simulate under this model - simulated trait DIVERGENCE
landscapeFPK_Intrinsic(params=params, states=trait, timefrompresent=NULL)
# simulate n time-steps, repeat many times, plot results
# Run nSteps successive draws from landscapeFPK_Intrinsic, accumulating each
# displacement onto the current trait value; returns the final trait value.
repeatSimSteps <- function(params, trait, nSteps) {
    for (step in seq_len(nSteps)) {
        stepDisplacement <- landscapeFPK_Intrinsic(
            params = params, states = trait, timefrompresent = NULL)
        trait <- trait + stepDisplacement
    }
    trait
}
# replicate the 20-step simulation 30 times and plot the outcome distribution
repSim<-replicate(30,repeatSimSteps(params,trait,20))
hist(repSim,main="Simulated Trait Values")
# uneven two peak landscape example (c != 0 tilts the landscape)
params<-c(
    a=2,
    b=-4,
    c=0.3,
    sigma=1,
    bounds)
plotFPKmodel(params)
# simulate under this model - simulated trait DIVERGENCE
landscapeFPK_Intrinsic(params=params, states=trait, timefrompresent=NULL)
# repeat the multi-step simulation under the tilted landscape
repSim<-replicate(30,repeatSimSteps(params,trait,20))
hist(repSim,main="Simulated Trait Values")
<file_sep>/old/simulations_setup_script_05-31-19.R
###################################################
# Individual Empirical Analyses and Simulations
##################################################
# Control Box
# number of simulated trait datasets to do for simulated-trait runs
nSimTrait <- 10
# error for run with mis-specified prior on sigmasq in a pure-BM model
# the mean of the normal prior is multiplied by this value
# 100 = mean of rate prior is off by two orders of magnitude!
ratePriorError <- 100
# simulation resolution
generation.time <- 100000
# control parameters for multicore and simulation resolution
multicore <- TRUE
coreLimit <- 6
# control parameters for MCMC / ABC
nRuns <- 2
nStepsPRC <- 3
numParticles <- 20
nInitialSimsPerParam <- 10
nInitialSims <- 10
##################################################
# NOTE(review): hard-coded absolute Windows path - breaks on any other machine
setwd("d://dave//workspace//treevo_paper//")
library(ape)
library(TreEvo)
######################################
# get empirical data
#
# 1) Anolis
# repulsion - adaptive landscape dynamics - multi optima
#
# obtain anolis tree - from Poe et al. 2017 (SystBiol)
# their time-tree
anolisTree <- read.tree(
    file="datasets//anolis_PoeEtAl2018_datedMCCw0.5burnin.tre"
    )
# make into a multiPhylo list
anolisTreeList <- list(anolisTree = anolisTree)
class(anolisTreeList) <- "multiPhylo"
#
# obtain anolis trait data -
# Snout-Vent body-size data from Poe et al. 2018 (AmNat)
anolisTrait <- read.table(
    "datasets//anolis_lntraits_matched_tabdelim_07-24-18.txt",
    header=TRUE,row.names=1
    )
# first column is the (log) snout-vent length used in the analyses
anolisSize <- anolisTrait[,1]
#
# 2) Aquilegia
# whittall et al. model of nectar spur increase in size
#
# obtain aquilegia tree (from Whittall and Hodges 2007?)
aquilegiaTree <- read.tree(
    "datasets//aquilegia_Whttall&Hodges2007_figuredMCC.tre"
    )
# make into a multiPhylo list
aquilegiaTreeList <- list(aquilegiaTree = aquilegiaTree)
class(aquilegiaTreeList) <- "multiPhylo"
#
# obtain aquilegia trait data (from Whittall and Hodges 2007?)
# need both nectur spur lengths and regime data
# NOTE(review): unlike the other files this path has no "datasets//" prefix -
# confirm the working directory actually contains aquilegia_traitData.txt
aquilegiaTrait <- read.table("aquilegia_traitData.txt",
    header=FALSE, row.names=1)
#
# get just nectur spur length
aquilegiaSpurLength <- aquilegiaTrait[,2]
# and take the natural log
# (note that the third column of the table was already the natural log)
# previous code from Brian had 'log(data[,3])' - log of a log
aquilegiaSpurLength <- log(aquilegiaSpurLength)
#
# legacy aquilegia code from <NAME>:
#
# assume generation time of 10 years (its a perennial plant),
# following Cooper et al. Plos ONe 2010
# Genetic Variation at Nuclear loci fails to distinguish group is about 3 MY,
# So =>> phy height is 3.
# Thus each unit = 1,000,000 years or 100,000 generations
#
# TreeYears=100000
# timeStep <- 1/TreeYears
# totalTreeLength=TreeYears*sum(phy$edge.length) #how many generations are represented
# number of expected polinator shifts based on parsimony is 7:
# parsimonyShifts=7
# pollinatorShiftRate=parsimonyShifts/totalTreeLength
# aquilegia regimes - pollinator syndromes
aquilegiaPollinators <- aquilegiaTrait[,14]
# regimes coded 0, 1, 2
# 0 is bumble-bee, 1 is humming-bird, 2 is hawkmoth
# this probably won't be used directly?
# could use for post-analysis comparisons? Hmm
###############################################################################
# generate sets of ideal trees for doing simulations on
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# each tree is built with ape::stree; "left" = pectinate
idealTrees <- list(
    #
    balanced_n8 = stree(
        n=8,
        type = "balanced", tip.label = NULL
        ),
    balanced_n16 = stree(
        n=16,
        type = "balanced", tip.label = NULL
        ),
    balanced_n64 = stree(
        n=64,
        type = "balanced", tip.label = NULL
        ),
    #
    pectinate_n8 = stree(
        n=8,
        type = "left", tip.label = NULL
        ),
    pectinate_n16 = stree(
        n=16,
        type = "left", tip.label = NULL
        ),
    pectinate_n64 = stree(
        n=64,
        type = "left", tip.label = NULL
        ),
    #
    star_n8 = stree(
        n=8,
        type = "star", tip.label = NULL
        ),
    star_n16 = stree(
        n=16,
        type = "star", tip.label = NULL
        ),
    star_n64 = stree(
        n=64,
        type = "star", tip.label = NULL
        )
    )
# make multiPhylo
# BUG FIX: the original read class(ideaTrees) - a typo naming an undefined
# object, so idealTrees never received the "multiPhylo" class
class(idealTrees) <- "multiPhylo"
#
# compress tip labels? No, I don't think that works for trees of different sizes
# trees <- .compressTipLabel(trees)
######################################################################################
# time to get table, process the inputs listed
#
# get simulation run table
# BUG FIX: the original call was missing the comma after `file=...` and had a
# trailing comma after stringsAsFactors=FALSE - neither parses in R
simRunTable <- read.csv(
    file="simulation_sets_parameters_table.csv",
    header=TRUE,
    stringsAsFactors=FALSE
    )
#
# number of analyses
nAnalyses <- nrow(simRunTable)
#
# names of analyses
analysesNames <- simRunTable$runLabel
#
# which analyses are independent of (or dependent on) a previous run
whichIndependentPrevRun <- which(
    !as.logical(simRunTable$dependentPrevRun)
    )
whichDependentPrevRun <- which(
    as.logical(simRunTable$dependentPrevRun)
    )
#
# create list for saving analysis output
# BUG FIX: assigning names to an *empty* list() errors out ("'names' attribute
# must be the same length as the vector"); preallocate nAnalyses slots first
analysisOutput <- vector("list", nAnalyses)
# use actual run labels as names (not bare numbers)
names(analysisOutput) <- analysesNames
#
# Run the analyses.
#
# run all independent analyses (those that do not need a previous run's output)
for (i in whichIndependentPrevRun){
    analysisOutput[[i]] <- runAnalysis(
        # one-row data.frame of settings for this analysis
        runParameters = simRunTable[i, , drop = FALSE],
        # inputs needed from script above
        nSimTrait = nSimTrait,
        ratePriorError = ratePriorError,
        #
        anolisTreeList = anolisTreeList,
        anolisSize = anolisSize,
        aquilegiaTreeList = aquilegiaTreeList,
        aquilegiaSpurLength = aquilegiaSpurLength,
        idealTrees = idealTrees,
        #
        # NULL: independent analyses need no extracted posterior output
        indepAnalyses_intrinsicOut = NULL,
        indepAnalyses_extrinsicOut = NULL,
        #
        # presets
        generation.time = generation.time,
        multicore = multicore,
        coreLimit = coreLimit,
        nRuns = nRuns,
        nStepsPRC = nStepsPRC,
        numParticles = numParticles,
        nInitialSimsPerParam = nInitialSimsPerParam,
        nInitialSims = nInitialSims
        )
}
#############################
# dependent analyses
########################################
#indep runs that dep runs depend on :
#
# INTRINSIC
# An_Emp_BrownMotion
# An_Emp_Disp
# An_Emp_Bound
# An_Emp_DispBound
# An_Emp_Bound_BoundByStartingState
# An_Emp_Bound_BoundByMinValue
# An_Emp_Bound_BoundOneRangeAway
# An_Emp_TimeReg
# Aq_Emp_3Opt2Bound
# Aq_Emp_BrownMotion
# EXTRINSIC
# An_Emp_DispBound
# An_Emp_Disp
#############################
# get the stuff necessary for doing the dependent analyses
#
# get model parameters from runs
# that will be used for dependent simulations
# use extract on all indep analyses now
# then can call these later for dependent analyses
# without having to extract the same data many times
#
# pull fitted intrinsic-model info out of each independent analysis result
indepAnalyses_intrinsicOut <- lapply(
    analysisOutput[whichIndependentPrevRun],
    extractIntrinsic_from_prcOut
    )
#
# likewise for the extrinsic models
indepAnalyses_extrinsicOut <- lapply(
    analysisOutput[whichIndependentPrevRun],
    extractExtrinsic_from_prcOut
    )
# make sure named correctly
names(indepAnalyses_intrinsicOut) <- analysesNames[whichIndependentPrevRun]
names(indepAnalyses_extrinsicOut) <- analysesNames[whichIndependentPrevRun]
#
# run all dependent analyses
for (i in whichDependentPrevRun){
    analysisOutput[[i]] <- runAnalysis(
        runParameters = simRunTable[i, , drop = FALSE],
        # inputs needed from script above
        nSimTrait = nSimTrait,
        ratePriorError = ratePriorError,
        #
        anolisTreeList = anolisTreeList,
        anolisSize = anolisSize,
        aquilegiaTreeList = aquilegiaTreeList,
        aquilegiaSpurLength = aquilegiaSpurLength,
        idealTrees = idealTrees,
        #
        # posteriors from the independent runs, extracted above
        indepAnalyses_intrinsicOut = indepAnalyses_intrinsicOut,
        indepAnalyses_extrinsicOut = indepAnalyses_extrinsicOut,
        #
        # presets
        generation.time = generation.time,
        multicore = multicore,
        coreLimit = coreLimit,
        nRuns = nRuns,
        nStepsPRC = nStepsPRC,
        numParticles = numParticles,
        nInitialSimsPerParam = nInitialSimsPerParam,
        nInitialSims = nInitialSims
        )
}
<file_sep>/analyses_cluster_fast/Run_An_Emp_Bound_2019-07-31.sh
#!/bin/bash
# Cluster launcher: runs the An_Emp_Bound TreEvo analysis R script.
Rscript Run_An_Emp_Bound_2019-07-31.R
<file_sep>/analyses_cluster_fast/Run_An_Emp_BrownMotion_2019-07-31.sh
#!/bin/bash
# Cluster launcher: runs the An_Emp_BrownMotion TreEvo analysis R script.
Rscript Run_An_Emp_BrownMotion_2019-07-31.R
<file_sep>/analyses_cluster/Run_Aq_Emp_3Opt2Bound_2019-07-14.sh
#!/bin/bash
# Cluster launcher: runs the Aq_Emp_3Opt2Bound TreEvo analysis R script.
Rscript Run_Aq_Emp_3Opt2Bound_2019-07-14.R
<file_sep>/analyses_cluster/simulations_setup_script.R
###################################################
# Individual Empirical Analyses and Simulations
##################################################
# Control Box
# number of simulated trait datasets to do for simulated-trait runs
nSimTrait <- 10
# error for run with mis-specified prior on sigmasq in a pure-BM model
# the mean of the normal prior is multiplied by this value
# 100 = mean of rate prior is off by two orders of magnitude!
ratePriorError <- 100
# root age for idealized simulated trees from time=0
# similar to Anolis root depth (51.49056)
idealTreeDepth <- 50
# simulation resolution
# recc default is 1000
generation.time <- 1000
# control parameters for multicore and simulation resolution
multicore <- TRUE
coreLimit <- 24
# control parameters for MCMC / ABC
nRuns <- 2 # use 2 - recc default is 2
#(for testing, use 1)
nStepsPRC <- 5 # use 5 - recc default is 5
#(for testing, use 2)
numParticles <- 300 # use 300 - recc default is 300
#(for testing, use 5)
nInitialSimsPerParam <- 100 # use 100 - recc default is 100
#(for testing, use 10)
# NOTE(review): comment says "use NULL" but 100 is set - confirm intended
nInitialSims <- 100 # use NULL - default is NULL = 100 per param
#(for testing, use 5)
#### miscellaneous controls
# save data during runs?
saveData <- FALSE
# print out progress to terminal?
verboseParticles <- FALSE
###########################
# FOR CONTINUING FROM A PREVIOUS TEST
# (...if output from previous analyses exist at all)
continueFromPrevious <- FALSE
# the framework script consumes the settings above when sourced:
# source(
# "simulations_framework_script.R"
# )
<file_sep>/analyses_cluster_fast/Run_Aq_Emp_3Opt2Bound_2019-07-31.sh
#!/bin/bash
# Cluster launcher: runs the Aq_Emp_3Opt2Bound TreEvo analysis R script.
Rscript Run_Aq_Emp_3Opt2Bound_2019-07-31.R
<file_sep>/analyses_cluster_fast/Run_Aq_Emp_BrownMotion_2019-07-31.sh
#!/bin/bash
# Cluster launcher: runs the Aq_Emp_BrownMotion TreEvo analysis R script.
Rscript Run_Aq_Emp_BrownMotion_2019-07-31.R
<file_sep>/analyses_cluster/Run_An_Emp_Disp_2019-07-14.sh
#!/bin/bash
# Cluster launcher: runs the An_Emp_Disp TreEvo analysis R script.
Rscript Run_An_Emp_Disp_2019-07-14.R
<file_sep>/analyses_cluster/Run_Aq_Emp_BrownMotion.sh
#!/bin/bash
# Cluster launcher: runs the Aq_Emp_BrownMotion TreEvo analysis R script.
Rscript Run_Aq_Emp_BrownMotion.R
<file_sep>/analyses_cluster_July2021/empirical_evaluation_10-10-18.R
# This function calculates Effective Sample Size (ESS) on results.
# Performs the best when results are from multiple runs.
# NOTE(review): `results` / `resultsBM` are assumed to be TreEvo run output
# loaded elsewhere - this file does not define them
pairwiseESS(results$particleDataFrame)
# bayesCoverageProb
# for comparing true / generating parameter values to the posteriors of analyses done on that data
# plotUnivariatePosteriorVsPrior
# plot priors versus their posteriors - useful for runs with bad prior on BM?
# ```
# BUG FIX: the backtick fence above (left over from R-markdown) was a bare
# line that does not parse as R; it is now commented out
# examples of getting density coordinates and summary statistics from distributions
priorKernal<-getUnivariatePriorCurve(priorFn="normal", priorVariables=c(28,2),
    nPoints=100000, from=NULL, to=NULL, prob=0.95)
postKernal<-getUnivariatePosteriorCurve(acceptedValues=results$particleDataFrame$starting_1,
    from=NULL, to=NULL, prob=0.95)
# let's compare this (supposed) prior against the posterior in a plot
plotUnivariatePosteriorVsPrior(posteriorCurve=postKernal, priorCurve=priorKernal,
    label="parameter", trueValue=NULL, prob=0.95)
# ```
# BUG FIX: second stray R-markdown fence, also commented out
# plotPosteriors
# for each free parameter in the posterior, a plot is made of the distribution of values estimate in the last generation
# can also be used to visually compare against true (generating) parameter values in a simulation.
plotPosteriors(particleDataFrame=resultsBM$particleDataFrame,
    priorsMat=resultsBM$PriorMatrix)
# highestPostDens
# get weighted mean, standard deviation, upper and lower highest posterior density (HPD) for each free parameter in posterior.
highestPostDens(results$particleDataFrame, percent=0.95, returnData=FALSE)
# plotABC_3D
# Plot posterior density distribution for each generation in 3d plot window
#############################################################################################
# notes from conversation with <NAME> (05-09-18)
#
# so I'm doing approximate bayesian computation and the question is, what do I want to show to the reader
# I want to show posterior parameter estimates from real data,
# and show that they are very different from parameter estimates made under other models,
# or under the same model but with simulated data, for scenarios with a small number of models
# what i do is make the same series of posterior predictive checks
# and demonstrate how your prefered model better recapitulates the data it was fit to
#
#
############################################################
# ECDF
# ECDF - empirical cumulative distribution function = the ranked order accumulation curve
# http://stat.ethz.ch/R-manual/R-devel/library/stats/html/ecdf.html
# ecdf is a cool way of summarizing the entire dataset graphically
#
# how well does simulations under a fit model reproduce ecdf or the density of the original data?
# if your model does a better job of doing that, then it is straight up a better model
# it also goes beyond parameter estimates and towards the model describing the data
#
# bayes wants to describe more than just the expected value. it is greedy and wants to describe the whole posterior
# the posterior predictive distribution describes all data sets that are consistent with the model, given the original input information
# if the PPD doesn't look like the empirical data, then the model is not describing your data
#####################################################################################
# from 06-21-18
# okay so the general sketch is particles from the posterior, simulate under this set of parameters N times,
# and compare the original ECDF for each parameter to the simulated
# my other idea:
# draw parameter estimates from some posterior particle, simulate under those parameters,
# then test if 'true' generating parameters are actually within the 95% HDF of the simulated posterior
# deals with how we don't really understand how adequate the models are for giving unbiased estimates of parameters
# checkAdequacy # sixAnalysesTwoModels
#checkAdequacy <- function(){
# }
#I mean, writing a function that just takes arguments: tree, params, etc. and returns results would be good
#a lot of this is (dataset) and six corresponding analyses
#fit model A, model B to real data, then simulate under model A, model B and fits both model A and B to both
#(where A is usually BM)
<file_sep>/analyses_cluster/Run_An_Emp_TimeReg_2019-07-14.sh
#!/bin/bash
# Cluster launcher: runs the An_Emp_TimeReg TreEvo analysis R script.
Rscript Run_An_Emp_TimeReg_2019-07-14.R
<file_sep>/old/aquilegia_models_09-20-18.R
# September 2018
# Make two more models for Aquilegia
# 1) trait values have three optima on gradient, with some rate of switching to next-largest optima, cannot reverse
# 2) trait values evolve in three regimes with successive upper bounds on gradient
#(so only two upper-bounds, highest regime has no bounds)
# with some rate of switching to next-largest regime, cannot reverse
# addendum - maybe make rate of switching to next optima dependent on trait value?
# Discrete-time OU-like intrinsic model: three optima on a gradient, with some
# rate of switching to the next-largest optimum (no reversal), and each regime
# having its own maximum trait bound.
#
# Parameter layout:
#   params[1]   sigma - dispersion of the per-step normal draw
#   params[2]   alpha - strength of attraction to an optimum
#   params[3]   rho   - exponent scaling the distance-based regime weighting
#   params[4:6] max trait boundary for each of the three regimes
#   params[7:9] theta (optimum) value for each of the three regimes
#
# Optima represent fixed trait values conveying adaptive benefit; proximity of
# a population to an optimum makes it more likely to be under that regime, so
# a lineage circling one plateau can suddenly be drawn toward another optimum
# and show large shifts. `timefrompresent` is unused (TreEvo intrinsic API).
# Returns the trait DISPLACEMENT for one time step.
multiOptima3IntrinsicMaxBoundary3 <- function(params, states, timefrompresent) {
    sigma <- params[1]
    alpha <- params[2]
    rho <- params[3]
    maxBound <- params[4:6]
    theta <- params[7:9]
    #
    # what regime does the lineage sit in?
    # (1) cannot be in a regime whose max bound it has surpassed
    unsurpassedBound <- maxBound >= states
    # lineages in the first regime cannot jump straight to the third
    if (all(unsurpassedBound)) {
        unsurpassedBound[3] <- FALSE
    }
    #
    # (2) chance of being in the next highest regime:
    # probabilistic weights inverse to distance from each theta,
    # raised to the power of rho (scaling parameter)
    # NOTE(review): weight becomes Inf if states exactly equals a theta -
    # confirm this "lock onto the optimum" behavior is intended
    thetaWeights <- (1/abs(theta-states))^rho
    #
    # (3) combine 1+2, rescale so weights sum to 1 as probability
    thetaWeights <- thetaWeights*as.numeric(unsurpassedBound)
    thetaWeights <- thetaWeights/sum(thetaWeights)
    #
    # sample a theta/bound regime
    regime <- sample(1:length(theta), 1, prob = thetaWeights)
    theta <- theta[regime]
    maxBound <- maxBound[regime]
    #
    # draw the displacement (current state subtracted via the OU mean term)
    # BUG FIX: the original passed `sd = sd`, i.e. the stats::sd *function*,
    # which errors at runtime; the dispersion parameter sigma was assigned
    # above but never used - it is the intended standard deviation
    newdisplacement <- rpgm::rpgm.rnorm(
        n = length(states),
        mean = (theta-states)*alpha,
        sd = sigma)
    # truncate at the regime's max bound: rather than go above the maximum,
    # move the new state exactly onto the maximum
    for (i in seq_along(newdisplacement)) {
        newstate <- newdisplacement[i]+states[i]
        if (newstate>maxBound) {
            newdisplacement[i] <- maxBound-states[i]
        }
    }
    #
    return(newdisplacement)
}
# Discrete-time OU-like intrinsic model: three regimes with successive upper
# bounds on a gradient - only ****two**** upper bounds, the highest regime is
# unbounded - with some rate of switching to the next-largest regime and no
# reversal.
#
# Parameter layout:
#   params[1]   sigma - dispersion of the per-step normal draw
#   params[2]   alpha - strength of attraction to an optimum
#   params[3]   rho   - exponent scaling the distance-based regime weighting
#   params[4:5] max trait boundary for the two lower regimes
#   params[6:8] theta (optimum) value for each of the three regimes
#
# See multiOptima3IntrinsicMaxBoundary3 for the regime-switching rationale.
# `timefrompresent` is unused (TreEvo intrinsic API).
# Returns the trait DISPLACEMENT for one time step.
multiOptima3IntrinsicMaxBoundary2 <- function(params, states, timefrompresent) {
    sigma <- params[1]
    alpha <- params[2]
    rho <- params[3]
    # add an infinite max bound to the last regime
    maxBound <- c(params[4:5],Inf)
    theta <- params[6:8]
    #
    # what regime does the lineage sit in?
    # (1) cannot be in a regime whose max bound it has surpassed
    unsurpassedBound <- maxBound >= states
    # lineages in the first regime cannot jump straight to the third
    if (all(unsurpassedBound)) {
        unsurpassedBound[3] <- FALSE
    }
    #
    # (2) chance of being in the next highest regime:
    # probabilistic weights inverse to distance from each theta,
    # raised to the power of rho (scaling parameter)
    thetaWeights <- (1/abs(theta-states))^rho
    #
    # (3) combine 1+2, rescale so weights sum to 1 as probability
    thetaWeights <- thetaWeights*as.numeric(unsurpassedBound)
    thetaWeights <- thetaWeights/sum(thetaWeights)
    #
    # sample a theta/bound regime
    regime <- sample(1:length(theta), 1, prob = thetaWeights)
    theta <- theta[regime]
    #
    # draw the displacement (current state subtracted via the OU mean term)
    # BUG FIX: the original passed `sd = sd`, i.e. the stats::sd *function*,
    # which errors at runtime; the dispersion parameter sigma was assigned
    # above but never used - it is the intended standard deviation
    newdisplacement <- rpgm::rpgm.rnorm(
        n = length(states),
        mean = (theta-states)*alpha,
        sd = sigma)
    # truncate at the regime's max bound (the third regime is unbounded):
    # rather than go above the maximum, move the state exactly onto it
    if (regime != 3) {
        maxBound <- maxBound[regime]
        for (i in seq_along(newdisplacement)) {
            newstate <- newdisplacement[i]+states[i]
            if (newstate>maxBound) {
                newdisplacement[i] <- maxBound-states[i]
            }
        }
    }
    #
    return(newdisplacement)
}
# old aquilegia model from O'Meara
# designating priors for old aquilegia model from legacy code
# from B O'Meara
#
#do fixed for param2 b/c dont have a lot of data to use
# intrinsicPriorsFns=c("exponential","fixed","uniform","uniform")
#
#intrinsicPriorsValues=matrix(
# c(
# # regular rate/sd of change = 0.08 is based on best guess of average rate of change, it seems
# rep(1/0.08 , 2),
# # jump rate of change - fixed at zero because:
# #"do fixed for param2 b/c dont have a lot of data to use" ??
# c(0,0),
# # mean of shift (if not zero, then its a trend model)
# c(-maxCharDistance, maxCharDistance),
# # rate of a shift
# 0, 0.00001
# )
# , nrow=2, byrow=FALSE)
#
# hmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm no
# old aquilegia model from O'Meara, slightly rewritten
# levy walk model
# Levy-walk-like intrinsic model (old aquilegia model from O'Meara, slightly
# rewritten): background Brownian-like change punctuated by rare jumps.
#
# Parameter layout:
#   params[1] sd of the background normal process (mean zero)
#   params[2] sd of the jump size for a non-background shift
#   params[3] mean of the jump (a trend if non-zero)
#   params[4] per-generation probability of a non-background shift/jump
# `timefrompresent` is unused. Returns the trait DISPLACEMENT for one step.
pollinatorShiftIntrinsic <- function(params, states, timefrompresent) {
    # one uniform draw decides jump vs. background this generation
    isJump <- runif(1, 0, 1) < params[4]
    if (isJump) {
        # non-background shift: jump with its own mean and sd
        rnorm(n = length(states), mean = params[3], sd = params[2])
    } else {
        # background process: zero-mean normal displacement
        rnorm(n = length(states), mean = 0, sd = params[1])
    }
}
<file_sep>/old/simulations_setup_script_10-10-18.R
###################################################
# Individual Empirical Analyses and Simulations
##################################################
# Control Box
# number of simulated trait datasets to do for simulated-trait runs
nSimTrait <- 10
# error for run with mis-specified prior on sigmasq in a pure-BM model
# the mean of the normal prior is multiplied by this value
# 100 = mean of rate prior is off by two orders of magnitude!
ratePriorError <- 100
# control parameters for multicore and simulation resolution
multicore <- TRUE
coreLimit <- 6
generation.time <- 10000
# control parameters for MCMC / ABC
nRuns <- 2
nStepsPRC <- 3
numParticles <- 20
nInitialSimsPerParam <- 10
nInitialSims <- 10
##################################################
# NOTE(review): hard-coded absolute Windows path - breaks on any other machine
setwd("d://dave//workspace//treevo_paper//")
library(ape)
library(TreEvo)
######################################
# get empirical data
# 1) Anolis
# repulsion - adaptive landscape dynamics - multi optima
# obtain anolis tree - from Poe et al. 2017 (SystBiol)
# their time-tree
anolisTree<-read.tree(
    file="datasets//anolis_PoeEtAl2018_datedMCCw0.5burnin.tre"
    )
# make into a multiPhylo list
anolisTreeList <- list(anolisTree = anolisTree)
class(anolisTreeList) <- "multiPhylo"
# obtain anolis trait data -
# Snout-Vent body-size data from Poe et al. 2018 (AmNat)
anolisTrait<-read.table(
    "datasets//anolis_lntraits_matched_tabdelim_07-24-18.txt",
    header=TRUE,row.names=1
    )
# first column is the (log) snout-vent length used in the analyses
anolisSize<-anolisTrait[,1]
# 2) Aquilegia
# whittall et al. model of nectar spur increase in size
# obtain aquilegia tree (from Whittall and Hodges 2007?)
aquilegiaTree<-read.tree(
    "datasets//aquilegia_Whttall&Hodges2007_figuredMCC.tre"
    )
# make into a multiPhylo list
aquilegiaTreeList <- list(aquilegiaTree = aquilegiaTree)
class(aquilegiaTreeList) <- "multiPhylo"
# obtain aquilegia trait data (from Whittall and Hodges 2007?)
# need both nectur spur lengths and regime data
# NOTE(review): unlike the other reads this path has no "datasets//" prefix -
# confirm the working directory actually contains aquilegia_traitData.txt
aquilegiaTrait<-read.table("aquilegia_traitData.txt",
    header=FALSE, row.names=1)
# get just nectur spur length
aquilegiaSpurLength<-aquilegiaTrait[,2]
# and take the natural log
# (note that the third column of the table was already the natural log)
# previous code from Brian had 'log(data[,3])' - log of a log
aquilegiaSpurLength<-log(aquilegiaSpurLength)
# legacy aquilegia code from <NAME>:
#
# assume generation time of 10 years (its a perennial plant),
# following Cooper et al. Plos ONe 2010
# Genetic Variation at Nuclear loci fails to distinguish group is about 3 MY,
# So =>> phy height is 3.
# Thus each unit = 1,000,000 years or 100,000 generations
#
# TreeYears=100000
# timeStep<-1/TreeYears
# totalTreeLength=TreeYears*sum(phy$edge.length) #how many generations are represented
# number of expected polinator shifts based on parsimony is 7:
# parsimonyShifts=7
# pollinatorShiftRate=parsimonyShifts/totalTreeLength
# aquilegia regimes - pollinator syndromes
aquilegiaPollinators<-aquilegiaTrait[,14]
# regimes coded 0, 1, 2
# 0 is bumble-bee, 1 is humming-bird, 2 is hawkmoth
# this probably won't be used directly?
# could use for post-analysis comparisons? Hmm
###############################################################################
# generate sets of ideal trees for doing simulations on
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# each tree is built with ape::stree; "left" = pectinate
idealTrees<-list(
    #
    balanced_n8 = stree(n=8,
        type = "balanced", tip.label = NULL),
    balanced_n16 = stree(n=16,
        type = "balanced", tip.label = NULL),
    balanced_n64 = stree(n=64,
        type = "balanced", tip.label = NULL),
    #
    pectinate_n8 = stree(n=8,
        type = "left", tip.label = NULL),
    pectinate_n16 = stree(n=16,
        type = "left", tip.label = NULL),
    pectinate_n64 = stree(n=64,
        type = "left", tip.label = NULL),
    #
    star_n8 = stree(n=8,
        type = "star", tip.label = NULL),
    star_n16 = stree(n=16,
        type = "star", tip.label = NULL),
    star_n64 = stree(n=64,
        type = "star", tip.label = NULL)
    )
# make multiPhylo
# BUG FIX: the original read class(ideaTrees) - a typo naming an undefined
# object, so idealTrees never received the "multiPhylo" class
class(idealTrees)<-"multiPhylo"
# compress tip labels? No, I don't think that works for trees of different sizes
# trees<-.compressTipLabel(trees)
######################################################################################
# time to get table, process the inputs listed
#
# get simulation run table
# BUG FIX: the original call was missing the comma after the file= argument
# and carried a trailing comma, so the read.csv() call did not parse
simRunTable <- read.csv(
    file = "simulation_sets_parameters_table.csv",
    header = TRUE,
    stringsAsFactors = FALSE
    )
#
# number of analyses
nAnalyses <- nrow(simRunTable)
#
# which analyses are independent or dependent
# (dependentPrevRun flags runs that need parameter estimates from a prior run)
whichIndependentPrevRun <- which(!as.logical(simRunTable$dependentPrevRun))
whichDependentPrevRun <- which(as.logical(simRunTable$dependentPrevRun))
#
# create list for saving analysis output
# BUG FIX: was list() followed by names()<-, which errors because the names
# vector is longer than the zero-length list; preallocate instead
analysisOutput <- vector(mode = "list", length = nAnalyses)
names(analysisOutput) <- seq_len(nAnalyses)
#
# Let's run the analyses!
#
# NOTE(review): everything from here to the end of this section is an
# unfinished draft -- both loop bodies lack a right-hand side, and the bare
# names / stray "name=value," lines below do not parse. Kept verbatim; fill
# in the doRun calls (see the later cluster scripts) before using.
#
# run all independent analyses
for (i in whichIndependentPrevRun){
	analysisOutput[[i]]<-
	}
#
# run all dependent analyses
for (i in whichDependentPrevRun){
	analysisOutput[[i]]<-
	}
# inputs needed from script above
nSimTrait
ratePriorError
anolisTreeList
aquilegiaTreeList
anolisSize
aquilegiaSpurLength
idealTrees
generation.time=generation.time,
multicore=multicore,
coreLimit=coreLimit,
nRuns = nRuns,
nStepsPRC = nStepsPRC,
numParticles = numParticles,
nInitialSimsPerParam = nInitialSimsPerParam,
nInitialSims = nInitialSims
generation.time
multicore
coreLimit
nRuns
nStepsPRC
numParticles
nInitialSimsPerParam
nInitialSims
################################################
# define MCMC / ABC control parameter list
# (bundled so it can be appended to the doRun_prc argument list via do.call)
controlsList <- list(
	# standard controls, don't need to be changed
	standardDevFactor=0.2,
	epsilonProportion=0.7,
	epsilonMultiplier=0.7,
	stopRule = FALSE,
	plot=FALSE,
	verboseParticles=FALSE,
	#
	# controls that may need to be changed
	# NOTE(review): generation.time, multicore, coreLimit, nRuns, nStepsPRC,
	# numParticles, nInitialSimsPerParam and nInitialSims must already be
	# defined in the calling environment -- they are not set in this script
	generation.time=generation.time,
	multicore=multicore,
	coreLimit=coreLimit,
	nRuns = nRuns,
	nStepsPRC = nStepsPRC,
	numParticles = numParticles,
	nInitialSimsPerParam = nInitialSimsPerParam,
	nInitialSims = nInitialSims
)
#
##################################################
# rate prior error
# sigmasq state prior has an error if "rexp_with_mean_NOT_at_true_sigmasq"
#
if (prior != "rexp_with_mean_NOT_at_true_sigmasq"){
	# then do *NOT* apply the error to the sigmasq prior
	# (a multiplier of 1 leaves the prior rate unchanged where it is used below)
	ratePriorError <- 1
}
##########################################
#
# doRun.Intrinsic: translate the run table's intrinsic-model label into a
# TreEvo intrinsic function plus matching prior specifications.
#
if (doRun.Intrinsic == "Pure_BM") {
    # plain Brownian motion: a single parameter (the rate), exponential prior;
    # ratePriorError deliberately mis-centers the prior for biased-prior runs
    intrinsicFunctionToFit <- brownianIntrinsic
    intrinsicArgList <- list(
        intrinsicPriorsFns = c("exponential"),
        intrinsicPriorsValues = list(10 * ratePriorError)
    )
} else if (doRun.Intrinsic == "BM_LowerBound") {
    # Brownian motion with a minimum boundary: exponential prior on the rate,
    # normal prior (mean -10, sd 1) on the bound's position
    intrinsicFunctionToFit <- boundaryMinIntrinsic
    intrinsicArgList <- list(
        intrinsicPriorsFns = c("exponential", "normal"),
        intrinsicPriorsValues = list(10, c(-10, 1))
    )
}
#
# "3Opt2Bound": three-optima model with upper bounds on the two lower regimes
if (doRun.Intrinsic == "3Opt2Bound"){
	intrinsicFunctionToFit <- multiOptima3IntrinsicMaxBoundary2
	#
	intrinsicArgList <- list(
		# breakdown of params:
		# params[1] is dispersion (sigma)
		# params[2] is alpha (strength of attraction to an optima)
		# params[3] is rho, an exponent scaling the weighting of distance to optima
		# this parameter will control switching optima
		# params[4:5] is the max boundary, for the two lower regimes
		# params[6:8] describes theta (optima) values for each of the three regimes
		intrinsicPriorsFns=c(
			# we'll make rate an exponential prior, rate 10
			"exponential",
			# we'll make alpha an exponential prior, rate 10
			"exponential",
			# we'll make rho an exponential, rate 1
			"exponential",
			# let's place bounds based on whitall and hodges:
			"uniform","uniform",
			# normal priors for optima
			"normal","normal","normal"
		),
		# values pair one-to-one with the prior functions above:
		# exponential rates (10, 10, 1); uniform (min, max) ranges for the
		# two boundaries; normal (mean, sd) pairs for the three optima
		intrinsicPriorsValues=list(
			10, 10, 1,
			c(15,20),c(20,25),
			c(10,1), c(20,1), c(30,1)
		)
	)
}
#
if (doRun.Intrinsic == "Time_AutoRegressive_Model"){
	intrinsicFunctionToFit <- autoregressiveIntrinsic
	# BUG FIX: autoregressiveIntrinsic (a discrete-time OU) takes THREE
	# parameters -- sd (sigma), attractor (character mean), and attraction
	# (alpha) -- but only two priors were specified here, mismatching the
	# model. Add an exponential prior (rate 10) for the attraction
	# parameter, matching the corrected setup in the later cluster scripts.
	intrinsicArgList <- list(
		intrinsicPriorsFns=c("exponential","normal","exponential"),
		intrinsicPriorsValues=list(10, c(-10, 1), 10)
	)
}
#
# doRun.Extrinsic: translate the run table's extrinsic-model label into a
# TreEvo extrinsic function plus matching prior specifications.
#
if (doRun.Extrinsic == "Null") {
    # no between-lineage interaction: a single fixed dummy parameter
    extrinsicFunctionToFit <- nullExtrinsic
    extrinsicArgList <- list(
        extrinsicPriorsFns = c("fixed"),
        extrinsicPriorsValues = list(0)
    )
} else if (doRun.Extrinsic == "Displacement") {
    # character displacement; ExponentiallyDecayingPushExtrinsic takes
    # parameters params = sd, maximumForce, halfDistance (in that order)
    extrinsicFunctionToFit <- ExponentiallyDecayingPushExtrinsic
    extrinsicArgList <- list(
        extrinsicPriorsFns = c("exponential", "normal", "exponential"),
        extrinsicPriorsValues = list(10, c(1, 1), 10)
    )
}
#########################################
# nTraitSetsPerSimTree: one trait set per tree, unless this run analyzes
# SIMULATED trait data, in which case nSimTrait datasets go with each tree.
nTraitSetsPerSimTree <- if (empiricalTraitData == "SIMULATED") nSimTrait else 1
#
#########################################
#
# treeSet
#
# defaults: a single (empirical) tree type with a single tip count
nSimTreeTypes <- 1
nTipNumbersPerSimTreeType <- 1
#
if (treeSet == "empirical_anolis_tree") {
    treeList <- anolisTreeList
} else if (treeSet == "empirical_Aquilegia_tree") {
    treeList <- aquilegiaTreeList
} else if (treeSet == "Ideal_Simulated") {
    # nine idealized trees: 3 shapes x 3 tip counts
    treeList <- idealTrees
    nSimTreeTypes <- 3
    nTipNumbersPerSimTreeType <- 3
}
####################################
#
# nDoRun
#
# number of doRun statements for this analysis-run:
# tree types x tip-counts-per-type x trait-sets-per-tree
nDoRun <- nSimTreeTypes * nTipNumbersPerSimTreeType * nTraitSetsPerSimTree
# should be one 1, 10 or 90... probably
#
################################################
# need to make trait data for every tree in treeList
#
# NOTE(review): this entire section is an unfinished draft -- several
# assignments have no right-hand side, there are stray/unbalanced
# parentheses, and most of the simTrait.* branches are empty. Kept verbatim
# with annotations; the working version lives in the cluster scripts.
#
for (tree_i in 1:length(treeList)){
	#
	# traitDataList will be a list with each element corresponding to a tree
	# and sub list corresponding to trait data to be analyzed on that tree
	#
	# NOTE(review): traitDataList is re-created on every loop iteration here,
	# but indexed by tree_i below -- earlier trees' entries are discarded
	traitDataList<-list()
	# empiricalTraitData
	#
	if(empiricalTraitData == "Anolis_Size_Data"){
		# need a list of trait sets (of length 1)
		traitDataList[[tree_i]] <-list(anolisSize = anolisSize)
	}
	#
	if(empiricalTraitData == "Aquilegia_Nectar_Spur_Data"){
		# need a list of trait sets (of length 1)
		traitDataList[[tree_i]] <-list(aquilegiaSpurLength = aquilegiaSpurLength)
	}
	#
	if(empiricalTraitData == "SIMULATED"){
		#
		# simTrait.Intrinsic
		# ALSO need estimates of parameters from previous analyses needed for later simulations
		# need to make part of output from doRun if not already
		#
		if(is.na(simTrait.Intrinsic)){
			stop("The intrinsic model for a simulated trait dataset is given as NA")
		}else{
			# ANOLIS BASED MODELS
			if(simTrait.Intrinsic == "An_Emp_BrownMotion"){
				simTraitIntrinsicArgs <- list(
					intfn = brownianIntrinsic,
					#whatever run is An_Emp_BrownMotion
					intPar = An_Emp_BrownMotion$parMeansList$intrinsic,
					startPar = An_Emp_BrownMotion$parMeansList$starting
				)
				# need function that simply outputs a list with those parameters
				# (median?) expectations from the last MCMC generation
				# this function would be run with doRun such that doRun would include with output
				# these parameter estimates would be given as a list
				# split into 3 vectors: starting/intrinsic/extrinsic parameters
				# formatted for immediate use as parameter estimates for doSimulation
				# with matching intrinsic/extrinsic functions
				# NOTE(review): the ")" below is a stray extra closing paren
				)
			}
			#
			if(simTrait.Intrinsic == "An_Emp_Disp"){
			}
			#
			if(simTrait.Intrinsic == "An_Emp_DispBound"){
			}
			#
			if(simTrait.Intrinsic == "An_Emp_Bound"){
			}
			#
			if(simTrait.Intrinsic == "An_Emp_Bound_BoundByStartingState"){
			}
			#
			if(simTrait.Intrinsic == "An_Emp_Bound_BoundByMinValue"){
			}
			#
			if(simTrait.Intrinsic == "An_Emp_Bound_BoundOneRangeAway"){
			}
			#
			if(simTrait.Intrinsic == "An_Emp_TimeReg"){
			}
			#
			if(simTrait.Intrinsic == "Aq_Emp_3Opt2Bound"){
			}
			#
			if(simTrait.Intrinsic == "Aq_Emp_BrownMotion"){
			}
			#
		}
		#
		# simTrait.Extrinsic
		#
		if(is.na(simTrait.Extrinsic)){
			stop("The extrinsic model for a simulated trait dataset is given as NA")
		}else{
			if(simTrait.Extrinsic == "Null"){
				simTraitExtrinsicArgs <- list(
					extfn = nullExtrinsic,
					extPar = c(0),
				)
			}
			#
			if(simTrait.Extrinsic == "An_Emp_Disp"){
				simTraitExtrinsicArgs <- list(
					extfn = ExponentiallyDecayingPushExtrinsic,
					extPar = anolisBMrun$parMeansList$extrinsic, #whatever run is An_Emp_BrownMotion
				)
			}
			#
			if(simTrait.Extrinsic == "An_Emp_Disp"){
			}
			#
			if(simTrait.Extrinsic == "An_Emp_DispBound"){
			}
			#
		}
	#####################
	# now have to simulate traits
	# NOTE(review): intrinsicFn reads $intFn but the list element above is
	# named intfn (case mismatch, would yield NULL), and exFn is never
	# defined anywhere in this script
	simChar <- doSimulation(
		phy = treeList[[tree_i]],
		intrinsicFn = simTraitIntrinsicArgs$intFn,
		extrinsicFn = exFn,
		startingValues = simTraitIntrinsicArgs$startPar, #root state
		intrinsicValues = simTraitIntrinsicArgs$intPar,
		extrinsicValues = c(0),
		generation.time = generation.time
	)
	# save as a list of trait sets
	# NOTE(review): right-hand side missing -- draft
	traitDataList <-
	}
# now run doRun across trees, trait datasets
#
# NOTE(review): should probably be seq_along(traitDataList), not
# length(traitDataList), to iterate over every trait set
for (trait_j in length(traitDataList)){
	# define job name
	# NOTE(review): right-hand side missing -- draft
	jobNameRun <-
	#
	traitDataToUse <- traitDataList [[trait_j]]
	#
	# NOTE(review): several arguments below are left unfinished
	# (phy, intrinsicFn, extrinsicFn have no values; jobName is empty)
	doRun_out <- do.call(what = doRun_prc,
		# arguments
		args = c(
			phy = ,
			traits = traitDataToUse,
			intrinsicFn =
			extrinsicFn =
			intrinsicArgList,
			extrinsicArgList,
			# starting state prior
			startingPriorsFns = "normal",
			startingPriorsValues = matrix(
				c(mean(traitDataToUse[, 1]) , sd(traitDataToUse[, 1]))),
			#
			#
			jobName = paste(),
			# give control arguments
			controlsList
		)
	)
}
}
# NOTE(review): stray closing paren -- draft
)
###################################################
###### TWO tests of treevo
#################################################
# Unresolved question: Number of particles? Number of generations?
#########################
### (1) test basic BM
# fixed sigma square (rate)
#Two different BM priors: unif with truth at say 25th percentile, rexp with mean not at true value (just to be realistic). Similar for root state.
#############################
### (2) interaction model - repulsion between species with max bound
# assume log transformed traits, so no min bound
# use realistic tree set
# Some models with
# no actual repulsion
# moderate (species cross in trait space)
# high (no touching happens).
# Models vary distance of max to the trait data
# max is very close (most species bounce off it based on starting values),
# max is moderately far (start hitting in last 25% of sim),
# very far (never hit)
# That’s nine different parameter values,
# all of which I guess are scaled effective to the BM rate.
# Could do it as six:
#1) moderate repulsion, max bound is very close
#2) moderate repulsion, max bound is moderately close
#3) moderate repulsion, max bound is very far
#4) no repulsion, max bound is moderately close
#5) high repulsion, max bound is moderately close
#########################
# 3) Time - autoregressive model with optimum based on a factor that changes through time (like O2 concentration)
# empirical tree, like in simulations
# Parameters of the regression:
# a single variable function to convert O2 to optimal gill size or whatever
# strength of pull
# BM wiggle
# and presumably the tracked env factor will be analyzed many times over
<file_sep>/analyses_cluster/Run_An_Emp_TimeReg.sh
#!/bin/bash
# Launch the An_Emp_TimeReg TreEvo analysis via its companion R script.
Rscript Run_An_Emp_TimeReg.R
<file_sep>/trials and tests/plotting_aquilegia_poster_11-02-18.R
# aquilegia_models_test_10-10-18.R
library(ape)
library(TreEvo)
source("D:\\dave\\workspace\\treevo_paper\\analyses\\aquilegia_models_09-20-18.R")
# simulate n time-steps, repeat many times, plot results
repeatSimSteps <- function(params, trait = 0, nSteps, fun){
	# Iterate an intrinsic model for nSteps discrete time-steps and return
	# the final trait value.
	#
	# params - parameter vector passed straight through to `fun`
	# trait  - starting trait value (default 0)
	# nSteps - number of time-steps to simulate
	# fun    - intrinsic model with signature (params, states, timefrompresent)
	#          returning a *displacement*, which is added to the current trait
	#
	# BUG FIX: seq_len() rather than 1:nSteps, so nSteps = 0 performs zero
	# iterations instead of looping over c(1, 0).
	for (i in seq_len(nSteps)) {
		trait <- trait + fun(
			params = params, states = trait, timefrompresent = NA)
	}
	trait
}
set.seed(1)
# NOTE(review): user-specific absolute path; must be edited per machine
setwd("d://dave//workspace//treevo_paper//")
# obtain aquilegia tree (from Whittall and Hodges 2007?)
aquilegiaTree<-read.tree("datasets//aquilegia_Whttall&Hodges2007_figuredMCC.tre")
# make into a multiPhylo list
aquilegiaTreeList <- list(aquilegiaTree = aquilegiaTree)
class(aquilegiaTreeList) <- "multiPhylo"
# obtain aquilegia trait data (from Whittall and Hodges 2007?)
# need both nectar spur lengths and regime data
#
aquilegiaTrait<-read.table("datasets//aquilegia_traitData.txt", header=FALSE, row.names=1)
# get just nectar spur length
aquilegiaSpurLength<-aquilegiaTrait[,2]
# and take the natural log
# (note that the third column of the table was already the natural log)
# previous code from Brian had 'log(data[,3])' - log of a log
aquilegiaSpurLength<-log(aquilegiaSpurLength)
# aquilegia regimes - pollinator syndromes
aquilegiaPollinators<-aquilegiaTrait[,14]
# regimes coded 0, 1, 2
# 0 is bumble-bee, 1 is humming-bird, 2 is hawkmoth
# this probably won't be used directly?
# could use for post-analysis comparisons? Hmm
library(ggplot2)
# histogram of log spur length, filled by pollinator regime
aqData<-data.frame(aquilegiaSpurLength=aquilegiaSpurLength,
	aquilegiaPollinators=as.factor(aquilegiaPollinators)
	)
ggplot(aqData,
	aes(x=aquilegiaSpurLength, fill=aquilegiaPollinators)) +
	geom_histogram(show.legend=FALSE)
# base-graphics version: overlay histograms by successively excluding regimes
#plot the entire data set (everything)
breaks<-seq(1,5,by=0.5)
hist(aquilegiaSpurLength, breaks=breaks, col="Yellow",main="")
#then everything except one sub group (2 in this case)
hist(aquilegiaSpurLength[aquilegiaPollinators!=2],
	breaks=breaks, col="Red", add=TRUE)
#then everything except two sub groups (1&2 in this case)
hist(aquilegiaSpurLength[aquilegiaPollinators!=2 & aquilegiaPollinators!=1],
	breaks=breaks, col="Blue", add=TRUE)
##########################################################################################
# 2 bounds, 3 optima
# params[1] is dispersion (sigma)
# params[2] is alpha (strength of attraction to an optima)
# params[3] is rho, an exponent scaling the weighting of distance to optima
# this parameter will control switching optima
# params[4:5] is the max boundary, for the two lower regimes
# params[6:8] describes theta (optima) values for each of the three regimes
#
# twelve panels, each a fresh random draw of sigma and rho,
# to visualize how much the simulated trait distributions vary
layout(matrix(1:12,4,3))
par(mar=c(2,2,0,0))
for(i in 1:12){
	# same model above, with more switching between weak optima, high diffusion
	params<-c(
		sigma=runif(1,0.2,0.4),
		alpha=0.1,
		rho=runif(1,0.6,0.8),
		maxbounds=c(20,40),
		theta=c(10,30,50)
	)
	# 31 replicate lineages (matching the Aquilegia tip count), 100 steps each
	repSim<-replicate(31,
		repeatSimSteps(params,trait = 0, nSteps = 100,
			fun = multiOptima3IntrinsicMaxBoundary2
		)
	)
	hist(repSim,main="Simulated Trait Values",
		breaks=20,axes=FALSE,ann=FALSE)
	Axis(side=1, labels=FALSE)
	Axis(side=2, labels=FALSE)
}
<file_sep>/old/multiOptimaIntrinsic.R
# moser_multi-optima-single evolutionary-regime-model.R
# 08-06-18
# multi optima single evolutionary regime model
# MOSER?
#' @rdname intrinsicModels
#' @export
multiOptimaIntrinsic <- function(params, states, timefrompresent) {
    # A discrete-time OU model with multiple optima in the same regime, with
    # equal attraction (alpha) to all optima (theta 1:N).
    #
    # breakdown of params:
    # params[1]   is dispersion (sigma)
    # params[2]   is alpha (strength of attraction to an optima)
    # params[3]   is rho, an exponent scaling the weighting of distance to
    #             optima; this parameter will control switching optima
    # params[4:n] describes theta values; n - 3 = N, the number of optima
    #
    # In this model, optima represent fixed trait values conveying adaptive
    # benefit; the proximity of a population to an optimum makes it more
    # likely to be under that regime, with the draw to any specific optimum
    # inverse to the distance from it. A lineage at an optimum may thus show
    # large variance circling the plateau, then suddenly be drawn to another
    # optimum and make a giant shift toward it.
    sigma <- params[1]
    alpha <- params[2]
    rho <- params[3]
    theta <- params[-(1:3)]
    #
    # inverse-distance weights, raised to the scaling exponent rho
    # NOTE(review): if states exactly equals an optimum the weight is Inf and
    # sample() will fail after normalization -- assumed not to happen with
    # continuous traits; confirm if exact hits are possible upstream
    thetaWeights <- (1/abs(theta - states))^rho
    # rescale so the weights sum to 1, as probabilities
    thetaWeights <- thetaWeights/sum(thetaWeights)
    # draw one optimum; guard the single-optimum case because sample(x, 1)
    # with a length-1 numeric x samples from 1:x instead of returning x
    if (length(theta) > 1) {
        theta <- sample(theta, 1, prob = thetaWeights)
    }
    # subtract current states because we want displacement
    # BUG FIX: the sd argument was `sd` (the base R function, since no local
    # `sd` exists) instead of the dispersion parameter `sigma`, so this call
    # always failed as originally written
    newdisplacement <- rpgm::rpgm.rnorm(n = length(states),
        mean = (theta - states)*alpha, sd = sigma)
    return(newdisplacement)
}
# three optima model, with strong attraction
# (quick interactive check: one displacement step starting from state 0)
set.seed(1)
params<-c(
	sigma=0.1,
	alpha=0.7,
	rho=1,
	theta=c(-20,20,50)
)
# single draw; the displacement prints when run interactively
multiOptimaIntrinsic(params=params, states=0, timefrompresent=NA)
# simulate n time-steps, repeat many times, plot results
repeatSimSteps <- function(params, trait = 0, nSteps){
	# Iterate multiOptimaIntrinsic for nSteps discrete time-steps, starting
	# from `trait`, and return the final trait value. Each call returns a
	# displacement, which is added to the current trait.
	#
	# BUG FIX: seq_len() rather than 1:nSteps, so nSteps = 0 performs zero
	# iterations instead of looping over c(1, 0).
	for (i in seq_len(nSteps)) {
		trait <- trait + multiOptimaIntrinsic(
			params = params, states = trait, timefrompresent = NA)
	}
	trait
}
# 300 replicate lineages, 100 steps each; histogram of final trait values
repSim<-replicate(300,repeatSimSteps(params,trait=0,100))
hist(repSim,main="Simulated Trait Values",breaks=20)
# same model above, with more switching between optima
# (rho lowered from 1 to 0.5 flattens the distance weighting)
set.seed(1)
params<-c(
	sigma=0.1,
	alpha=0.7,
	rho=0.5,
	theta=c(-20,20,50)
)
# single draw; the displacement prints when run interactively
multiOptimaIntrinsic(params=params, states=0, timefrompresent=NA)
# simulate n time-steps, repeat many times, plot results
repeatSimSteps <- function(params, trait = 0, nSteps){
	# Iterate multiOptimaIntrinsic for nSteps discrete time-steps, starting
	# from `trait`, and return the final trait value. Each call returns a
	# displacement, which is added to the current trait.
	#
	# BUG FIX: seq_len() rather than 1:nSteps, so nSteps = 0 performs zero
	# iterations instead of looping over c(1, 0).
	for (i in seq_len(nSteps)) {
		trait <- trait + multiOptimaIntrinsic(
			params = params, states = trait, timefrompresent = NA)
	}
	trait
}
# 300 replicates of a 100-step walk under the weaker-rho model; plot results
repSim<-replicate(300,repeatSimSteps(params,trait=0,100))
hist(repSim,main="Simulated Trait Values",breaks=20)
<file_sep>/old/analyses_script_07-20-18.R
###################################################
# Individual Empirical Analyses and Simulations
##################################################
# NOTE(review): user-specific absolute path; must be edited per machine
setwd("d://dave//workspace//treevo_paper//")
library(ape)
library(TreEvo)
######################################
# get empirical data
# obtain anolis tree - from Poe et al. 2017 (SystBiol)
# their time-tree
anolisTree <- read.tree(file="datasets//anolis_PoeEtAl2018_datedMCCw0.5burnin.tre")
# obtain anolis trait data -
# Snout-Vent body-size data from Poe et al. 2018 (AmNat)
anolisTrait <- read.table("datasets//anolis_lntraits_matched_tabdelim_07-24-18.txt",
	header=TRUE,row.names=1)
anolisSize <- anolisTrait[,1]
# obtain aquilegia tree (from Whittall and Hodges 2007?)
aquilegiaTree <- read.tree("datasets//aquilegia_Whttall&Hodges2007_figuredMCC.tre")
# obtain aquilegia trait data (from Whittall and Hodges 2007?)
# need both nectar spur lengths and regime data
#
# BUG FIX: path was missing the "datasets//" directory prefix that every
# other data read in this script uses
aquilegiaTrait <- read.table("datasets//aquilegia_traitData.txt", header=FALSE, row.names=1)
# get just nectar spur length
aquilegiaSpurLength <- aquilegiaTrait[,2]
# and take the natural log
# (note that the third column of the table was already the natural log)
# previous code from Brian had 'log(data[,3])' - log of a log
aquilegiaSpurLength <- log(aquilegiaSpurLength)
# aquilegia regimes - pollinator syndromes
aquilegiaPollinators <- aquilegiaTrait[,14]
# regimes coded 0, 1, 2
# 0 is bumble-bee, 1 is humming-bird, 2 is hawkmoth
######
# old aquilegia code
#assume generation time of 10 years (its a perennial plant),
# following Cooper et al. Plos ONe 2010
# genetic variation at nuclear loci fails to distinguish the groups;
# the group is about 3 MY old, so phy height is 3, and each unit = 1,000,000 years or 100,000 generations
# TreeYears=100000
# timeStep<-1/TreeYears
# totalTreeLength=TreeYears*sum(phy$edge.length) #how many generations are represented
# number of expected polinator shifts based on parsimony is 7:
# parsimonyShifts=7
# pollinatorShiftRate=parsimonyShifts/totalTreeLength
#####
##############################################################################
# need to reconstruct regimes down the aquilegia tree
###############################################################################
# generate sets of ideal trees for doing simulations on
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# Build the full set of idealized tree shapes (balanced, pectinate, star)
# at 8, 16 and 64 tips; element names follow the "<shape>_n<tips>" pattern,
# in the same order as before (all balanced, then pectinate, then star).
idealTrees <- local({
    shapeTypes <- c(balanced = "balanced", pectinate = "left", star = "star")
    trees <- list()
    for (shapeName in names(shapeTypes)) {
        for (nTips in c(8, 16, 64)) {
            trees[[paste0(shapeName, "_n", nTips)]] <-
                stree(n = nTips, type = shapeTypes[[shapeName]], tip.label = NULL)
        }
    }
    trees
})
#############################################################################
# analyses
#############################################
# runLabel = An_Emp-DispBound
# treeSet = empirical-anolis_tree
# empiricalTraitData = Anolis_Size_Data
# doRun.Intrinsic = BM_w/_LowerBound
# doRun.Extrinsic = Displacement
# prior = standard_(uniform)
# NOTE(review): both assignments below were left without a right-hand side.
# Because R keeps completing an expression past comment lines, parsing
# continues to the next non-comment code, so intrinsicModel and
# extrinsicModel end up chained onto the next assignment in the file.
# This is an unfinished draft; fill in the intended model functions here.
intrinsicModel<-
extrinsicModel<-
#############################################
# runLabel = An_Emp-Bound
# treeSet = empirical-anolis_tree
# empiricalTraitData = Anolis_Size_Data
# doRun.Intrinsic = BM_w/_LowerBound
# doRun.Extrinsic = Null
# prior = standard_(uniform)
#############################################
# runLabel = An_SimDispBound-DispBound
# treeSet = empirical-anolis_tree
# simTrait.Intrinsic = An_Emp-DispBound
# simTrait.Extrinsic = An_Emp-DispBound
# doRun.Intrinsic = BM_w/_LowerBound
# doRun.Extrinsic = Displacement
# prior = standard_(uniform)
# nSimTrait = 10
#############################################
# runLabel = An_SimBound-DispBound
# treeSet = empirical-anolis_tree
# simTrait.Intrinsic = An_Emp-Bound
# simTrait.Extrinsic = Null
# doRun.Intrinsic = Pure_BM
# doRun.Extrinsic = Displacement
# prior = standard_(uniform)
# nSimTrait = 10
#############################################
# runLabel = An_Emp-BrownMotion
# treeSet = empirical-anolis_tree
# empiricalTraitData = Anolis_Size_Data
# doRun.Intrinsic = Pure_BM
# doRun.Extrinsic = Null
# prior = standard_(uniform)
#############################################
# runLabel = An_Emp-Disp
# treeSet = empirical-anolis_tree
# empiricalTraitData = Anolis_Size_Data
# doRun.Intrinsic = Pure_BM
# doRun.Extrinsic = Displacement
# prior = standard_(uniform)
#############################################
# runLabel = An_Emp-TimeReg
# treeSet = empirical-anolis_tree
# empiricalTraitData = Anolis_Size_Data
# doRun.Intrinsic = Pure_BM
# doRun.Extrinsic = Null
# prior = standard_(uniform)
#############################################
# runLabel = Aq_Emp-3Opt
# treeSet = empirical-Aquilegia_tree
# empiricalTraitData = Aquilegia_Nectar_Spur_Data
# doRun.Intrinsic = 3-Optima
# doRun.Extrinsic = Null
# prior = standard_(uniform)
#' @rdname intrinsicModels
#' @export
autoregressiveMultOPtimaIntrinsic <- function(params, states, timefrompresent) {
    # Discrete-time OU step: every character shares one sd, one attractor
    # (the character mean), and one attraction strength.
    #   params[1] sd (sigma) of the random step
    #   params[2] attractor, i.e. the character mean pulled toward
    #   params[3] attraction (alpha); in this model it should lie between
    #             zero and one
    stepSd <- params[1]
    pullTarget <- params[2]
    pullStrength <- params[3]
    # mean displacement points from the current states toward the attractor
    # (current states subtracted because a displacement is returned)
    return(rpgm::rpgm.rnorm(n = length(states),
        mean = (pullTarget - states) * pullStrength, sd = stepSd))
}
#############################################
# runLabel = Aq_Emp-BrownMotion
# treeSet = empirical-Aquilegia_tree
# empiricalTraitData = Aquilegia_Nectar_Spur_Data
# doRun.Intrinsic = Pure_BM
# doRun.Extrinsic = Null
# prior = standard_(uniform)
#############################################
# runLabel = Aq_Sim3Opt-3Opt
# treeSet = empirical-Aquilegia_tree
# simTrait.Intrinsic = Aq_Emp-3Opt
# simTrait.Extrinsic = Null
# doRun.Intrinsic = 3-Optima
# doRun.Extrinsic = Null
# prior = standard_(uniform)
# nSimTrait = 10
#############################################
# runLabel = Aq_SimBM-3Opt
# treeSet = empirical-Aquilegia_tree
# simTrait.Intrinsic = Aq_Emp-BrownMotion
# simTrait.Extrinsic = Null
# doRun.Intrinsic = 3-Optima
# doRun.Extrinsic = Null
# prior = standard_(uniform)
# nSimTrait = 10
#############################################
# runLabel = Ideal_SimBM-BM
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = An_Emp-BrownMotion
# simTrait.Extrinsic = Null
# doRun.Intrinsic = Pure_BM
# doRun.Extrinsic = Null
# prior = standard_(uniform)
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
#############################################
# runLabel = Ideal_SimBMpriorBiased
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = An_Emp-BrownMotion
# simTrait.Extrinsic = Null
# doRun.Intrinsic = Pure_BM
# doRun.Extrinsic = Null
# prior = rexp_with_mean_*not*_at_true_sigmasq
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
#############################################
# runLabel = Ideal_SimBM-Disp
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = An_Emp-BrownMotion
# simTrait.Extrinsic = Null
# doRun.Intrinsic = Pure_BM
# doRun.Extrinsic = Displacement
# prior = standard_(uniform)
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
#############################################
# runLabel = Ideal_SimDisp-Disp
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = An_Emp-Disp
# simTrait.Extrinsic = An_Emp-Disp
# doRun.Intrinsic = Pure_BM
# doRun.Extrinsic = Displacement
# prior = standard_(uniform)
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
#############################################
# runLabel = Ideal_SimBound-DispBound
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = An_Emp-Bound
# simTrait.Extrinsic = Null
# doRun.Intrinsic = BM_w/_LowerBound
# doRun.Extrinsic = Displacement
# prior = standard_(uniform)
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
#############################################
# runLabel = Ideal_SimDispBoundNear-DispBound
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = An_Emp-Bound_BoundByStartingState
# simTrait.Extrinsic = An_Emp-Bound
# doRun.Intrinsic = BM_w/_LowerBound
# doRun.Extrinsic = Displacement
# prior = standard_(uniform)
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
#############################################
# runLabel = Ideal_SimDispBoundMod-DispBound
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = An_Emp-Bound_BoundByMinValue
# simTrait.Extrinsic = An_Emp-Bound
# doRun.Intrinsic = BM_w/_LowerBound
# doRun.Extrinsic = Displacement
# prior = standard_(uniform)
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
#############################################
# runLabel = Ideal_SimDispBoundFar-DispBound
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = An_Emp-Bound_BoundOneRangeAway
# simTrait.Extrinsic = An_Emp-Bound
# doRun.Intrinsic = BM_w/_LowerBound
# doRun.Extrinsic = Displacement
# prior = standard_(uniform)
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
#############################################
# runLabel = Ideal_SimDispBound-DispBound
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = An_Emp-DispBound
# simTrait.Extrinsic = An_Emp-DispBound
# doRun.Intrinsic = BM_w/_LowerBound
# doRun.Extrinsic = Displacement
# prior = standard_(uniform)
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
#############################################
# runLabel = Ideal_SimBM-TimeReg
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = An_Emp-BrownMotion
# simTrait.Extrinsic = Null
# doRun.Intrinsic = Time-AutoRegressive_Model
# doRun.Extrinsic = Null
# prior = standard_(uniform)
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
#############################################
# runLabel = Ideal_SimTimeReg-TimeReg
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = An_Emp-TimeReg
# simTrait.Extrinsic = Null
# doRun.Intrinsic = Time-AutoRegressive_Model
# doRun.Extrinsic = Null
# prior = standard_(uniform)
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
#############################################
# runLabel = Ideal_SimBM-3Opt
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = Aq_Emp-BrownMotion
# simTrait.Extrinsic = Null
# doRun.Intrinsic = 3-Optima
# doRun.Extrinsic = Null
# prior = standard_(uniform)
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
#############################################
# runLabel = Ideal_Sim3Opt-3Opt
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = Aq_Emp-3Opt
# simTrait.Extrinsic = Null
# doRun.Intrinsic = 3-Optima
# doRun.Extrinsic = Null
# prior = standard_(uniform)
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
<file_sep>/analyses_cluster/Run_An_Emp_BrownMotion.sh
#!/bin/bash
Rscript Run_An_Emp_BrownMotion.R
<file_sep>/analyses_cluster/Run_An_Emp_Bound_2019-07-14.sh
#!/bin/bash
Rscript Run_An_Emp_Bound_2019-07-14.R
<file_sep>/analyses_cluster_July2021/functions_for_analysis.R
extractIntrinsic_from_prcOut <- function(prcOut){
    # Pull the fitted intrinsic model and its parameter means out of a
    # doRun_prc result: uses the first run's first element, returning the
    # intrinsic function plus the intrinsic and starting parameter means.
    firstRun <- prcOut[[1]][[1]]
    list(
        intrinsicFn = firstRun$intrinsicFn,
        intrinsicValues = firstRun$parMeansList$intrinsic,
        startingValues = firstRun$parMeansList$starting
    )
}
extractExtrinsic_from_prcOut <- function(prcOut){
    # Pull the fitted extrinsic model and its parameter means out of a
    # doRun_prc result (first run, first element).
    firstRun <- prcOut[[1]][[1]]
    list(
        extrinsicFn = firstRun$extrinsicFn,
        extrinsicValues = firstRun$parMeansList$extrinsic
    )
}
cleanSimTraitData <- function(simulatedTraitData){
    # Collapse simulated trait output (a data.frame-like object with a
    # 'states' column and taxa as row names) into a named vector of trait
    # values keyed by taxon name.
    setNames(simulatedTraitData$states, rownames(simulatedTraitData))
}
setupRunAnalysis <- function(
runParameters,
nSimTrait,
ratePriorError,
#
anolisTreeList, anolisSize,
aquilegiaTreeList, aquilegiaSpurLength,
idealTrees,
#
indepAnalyses_intrinsicOut,
indepAnalyses_extrinsicOut
){
##################################################
# rate prior error
# sigmasq state prior has an error
# if "rexp_with_mean_NOT_at_true_sigmasq"
#
if(runParameters$prior != "rexp_with_mean_NOT_at_true_sigmasq"){
# then do *NOT* apply the error to the sigmasq prior
# Reset ratePriorError to 1
ratePriorError <- 1
}
##########################################
#
#
# doRun.Intrinsic
#
if(runParameters$doRun.Intrinsic == "Pure_BM"){
intrinsicFunctionToFit <- brownianIntrinsic
#
intrinsicArgList <- list(
intrinsicPriorsFns = c("exponential"),
intrinsicPriorsValues = list(10 * ratePriorError)
)
}
#
if(runParameters$doRun.Intrinsic == "BM_LowerBound"){
intrinsicFunctionToFit <- boundaryMinIntrinsic
#
intrinsicArgList <- list(
intrinsicPriorsFns=c("exponential", "normal"),
intrinsicPriorsValues=list(10, c(-10, 1))
)
}
#
if(runParameters$doRun.Intrinsic == "3Opt2Bound"){
intrinsicFunctionToFit <- multiOptima3IntrinsicMaxBoundary2
#
intrinsicArgList <- list(
# breakdown of params:
# params[1] is dispersion (sigma)
# params[2] is alpha (strength of attraction to an optima)
# params[3] is rho, an exponent scaling the weighting of distance to optima
# this parameter will control switching optima
# params[4:5] is the max boundary, for the two lower regimes regimes
# params[6:8] describes theta (optima) values for each of the three regimes
intrinsicPriorsFns=c(
# we'll make rate an exponential prior, rate 10
"exponential",
# we'll make alpha an exponential prior, rate 10
"exponential",
# well make rho an exponential, rate 1
"exponential",
# let's place bounds based on whitall and hodges:
"uniform","uniform",
# normal priors for optima
"normal", "normal", "normal"
),
intrinsicPriorsValues=list(
10, 10, 1,
c(15,20),c(20,25),
c(10,1), c(20,1), c(30,1)
)
)
}
#
if(runParameters$doRun.Intrinsic == "Time_AutoRegressive_Model"){
intrinsicFunctionToFit <- autoregressiveIntrinsic
# this model has three parameters:
# sigma (sigma), attractor (character mean), attraction (alpha)
#
intrinsicArgList <- list(
intrinsicPriorsFns=c("exponential", "normal", "exponential"),
intrinsicPriorsValues=list(10, c(-10, 1), 10)
)
}
# doRun.Extrinsic
#
if(runParameters$doRun.Extrinsic =="Null"){
extrinsicFunctionToFit <- nullExtrinsic
#
extrinsicArgList <- list(
extrinsicPriorsFns = c("fixed"),
extrinsicPriorsValues = list(0)
)
}
#
if(runParameters$doRun.Extrinsic == "Displacement"){
extrinsicFunctionToFit <- ExponentiallyDecayingPushExtrinsic
#
extrinsicArgList <- list(
extrinsicPriorsFns = c("exponential", "normal", "exponential"),
# \code{ExponentiallyDecayingPushExtrinsic}
# with parameters \code{params = sd, maximumForce, halfDistance}
extrinsicPriorsValues = list(10, c(1,1), 10)
)
}
#########################################
#
# treeSet and nDoRun
#
# calculate the number of doRun statements for each analysis-run
# should be one 1, 10 or 90... probably
# 1 if empirical tree, empirical trait data
# 10 if empirical tree and 10 simulated trait datasets
# 90 if 9 simulated trees and 10 simulated trait datasets for *each* tree
#
#
# if the treeSet is "Ideal-Simulated"
# then the number of simulated tree types and
# number of tip-totals per simulated tree type is 3, other 1
# number of analyses also depends on if empiricalTraitData is "SIMULATED"
#
if(runParameters$treeSet == "empirical_anolis_tree"){
treeList <- anolisTreeList
#
if(runParameters$empiricalTraitData == "Anolis_Size_Data"){
# nDoRun equal to length(anolisTreeList) (should be 1)
nDoRun <- length(anolisTreeList)
# need a two-level list of trait sets (one tree, one trait dataset)
traitDataList <- list(list(anolisSize = anolisSize))
#
message(paste0(
"Performing a single analysis with the empirical Anolis phylogeny,\n",
" and empirical Anolis size trait data."
))
}
if(runParameters$empiricalTraitData == "SIMULATED"){
# nDoRun equal to nSimTrait * # of trees
nDoRun <- nSimTrait * length(anolisTreeList)
#
message(paste0(
"Performing ", nDoRun,
" analyses on the empirical Anolis phylogeny,\n with ",
nDoRun,
" seperately-simulated trait datasets."
))
}
}
#
if(runParameters$treeSet == "empirical_Aquilegia_tree"){
treeList <- aquilegiaTreeList
#
if(runParameters$empiricalTraitData == "Aquilegia_Nectar_Spur_Data"){
# nDoRun equal to length(aquilegiaTreeList) (should be 1)
nDoRun <- length(aquilegiaTreeList)
# need a two-level list of trait sets (one tree, one trait dataset)
traitDataList <- list(list(aquilegiaSpurLength = aquilegiaSpurLength))
#
message(paste0(
"Performing a single analysis with the empirical Aquilegia phylogeny,\n",
" and empirical Aquilegia Nectar Spur trait data."
))
}
if(runParameters$empiricalTraitData == "SIMULATED"){
# nDoRun equal to nSimTrait * # of trees
nDoRun <- nSimTrait * length(aquilegiaTreeList)
#
message(paste0(
"Performing ", nDoRun,
" analyses on the empirical Aquilegia phylogeny,\n with ",
nDoRun,
" seperately-simulated trait datasets."
))
}
}
#
if(runParameters$treeSet == "Ideal_Simulated"){
treeList <- idealTrees
#
# calculate the number of doRun statements for this analysis-run
# nSimTrait multiplied by the number of trees
# should be 9
# product of nSimTreeTypes (3) and nTipNumbersPerSimTreeType (3)
nDoRun <- nSimTrait * length(idealTrees)
message(paste0(
"Performing ", nDoRun,
" analyses for ", 3,
" simulated 'idealized' phylogeny classes,\n",
" with ", 3, " sets of tip values each,\n",
" and ", nSimTrait,
" trait datasets simulated for each tree."
))
#
}
#
#####################################################################
# Get Simulated Trait Data
#
# traitDataList will be a list with each element corresponding to a tree
# and sub list corresponding to trait data to be analyzed on that tree
#
if(runParameters$empiricalTraitData == "SIMULATED"){
# go through each tree from tree list
# iterate, generate nSimTrait simulated trait datasets
# produce a list where each item represents a tree
# and each subitem of each list item is a trait dataset
#
# first make the empty list
#
traitDataList <- list()
#
simTrait.Intrinsic <- runParameters$simTrait.Intrinsic
simTrait.Extrinsic <- runParameters$simTrait.Extrinsic
#
# simTrait.Intrinsic
# ALSO need estimates of parameters from
# previous analyses needed for later simulations
# need to make part of output from doRun if not already
#
if(is.na(simTrait.Intrinsic)){
stop("The intrinsic model for a simulated trait dataset is given as NA")
}else{
# call respective analysis, take parameters from it
simTraitIntrinsicArgs <- list(
intrinsicFn = indepAnalyses_intrinsicOut
[[simTrait.Intrinsic]]$intrinsicFn,
intrinsicValues = indepAnalyses_intrinsicOut
[[simTrait.Intrinsic]]$intrinsicValues,
startingValues = indepAnalyses_intrinsicOut
[[simTrait.Intrinsic]]$startingValues
)
}
#
# simTrait.Extrinsic
#
if(is.na(runParameters$simTrait.Extrinsic)){
stop("The extrinsic model for a simulated trait dataset is given as NA")
}else{
if(simTrait.Extrinsic == "Null"){
simTraitExtrinsicArgs <- list(
extrinsicFn = nullExtrinsic,
extrinsicValues = c(0)
)
}else{
simTraitExtrinsicArgs <- list(
extrinsicFn = indepAnalyses_extrinsicOut
[[simTrait.Extrinsic]]$extrinsicFn,
extrinsicValues = indepAnalyses_extrinsicOut
[[simTrait.Extrinsic]]$extrinsicValues
)
}
}
#####################
# now have to simulate traits
# save to the list of trait sets
#
# reporting messages
#
if(length(treeList)>1){
message(paste0(
"Simulating ", nSimTrait,
" trait datasets on ",
length(treeList), " phylogenies..."
))
}else{
message(paste0(
"Simulating ", nSimTrait,
" trait datasets on the empirical phylogeny..."
))
}
message(" (...This may take a while...) ")
#
#
for(tree_i in 1:length(treeList)){
#
traitDataThisTree <- list()
#
for(trait_i in 1:nSimTrait){
#
simulatedTraitData <- doSimulation(
phy = treeList[[tree_i]],
intrinsicFn = simTraitIntrinsicArgs$intrinsicFn,
extrinsicFn = simTraitExtrinsicArgs$extrinsicFn,
startingValues = simTraitIntrinsicArgs$startingValues,
intrinsicValues = simTraitIntrinsicArgs$intrinsicValues,
extrinsicValues = simTraitExtrinsicArgs$extrinsicValues,
generation.time = generation.time
)
#
traitDataThisTree[[trait_i]] <- cleanSimTraitData(simulatedTraitData)
}
#
traitDataList[[tree_i]] <- traitDataThisTree
}
}
##################
runLabel <- runParameters$runLabel
#
res <- list(
treeList = treeList,
traitDataList = traitDataList,
runLabel = runLabel,
nDoRun = nDoRun,
intrinsicFunctionToFit = intrinsicFunctionToFit,
extrinsicFunctionToFit = extrinsicFunctionToFit,
intrinsicArgList = intrinsicArgList,
extrinsicArgList = extrinsicArgList
)
#
return(res)
}
doRunAnalysis <- function(
    treeList,
    traitDataList,
    runLabel,
    nDoRun,
    intrinsicFunctionToFit,
    extrinsicFunctionToFit,
    intrinsicArgList,
    extrinsicArgList,
    #
    # presets, passed straight through to doRun_prc()
    generation.time,
    multicore,
    coreLimit,
    numParticles,
    nStepsPRC,
    nRuns,
    nInitialSims,
    nInitialSimsPerParam,
    saveData,
    verboseParticles
    ){
    # Run an ABC-PRC analysis (TreEvo::doRun_prc) for every combination of
    # tree (treeList[[i]]) and trait dataset (traitDataList[[i]][[j]]).
    #
    # Arguments:
    #   treeList        -- list of phylo objects to analyze
    #   traitDataList   -- two-level list: per tree, per trait dataset
    #   runLabel        -- label used to build per-run job names
    #   nDoRun          -- expected number of doRun calls; recorded as an
    #                      attribute on the returned list (not enforced here)
    #   intrinsicFunctionToFit / extrinsicFunctionToFit -- TreEvo model fns
    #   intrinsicArgList / extrinsicArgList -- lists holding the prior
    #                      functions and prior values for each model
    #   remaining args  -- ABC / MCMC control presets (see ?doRun_prc)
    #
    # Returns: a two-level list mirroring traitDataList, holding the
    #   doRun_prc() output for each (tree, trait) pair, with attribute
    #   "nDoRun" set to the nDoRun argument.
    message("###############################")
    message("Now doing doRun analyses...")
    #
    # Preallocate the two-level output list.
    # (seq_along replaces 1:length(...), which misbehaves on empty lists.)
    doRun_out <- vector("list", length(treeList))
    #
    for (i in seq_along(treeList)){
        # first iterate over trees
        message("####################")
        message(paste0(
            "Analyzing tree ",
            i, "..."
            ))
        #
        treeToUse <- treeList[[i]]
        #
        # preallocate per-tree results, one slot per trait dataset
        doRun_out_ThisTree <- vector("list", length(traitDataList[[i]]))
        #
        for (j in seq_along(traitDataList[[i]])){
            #
            # job name: run label + tree/trait indices + date stamp
            jobNameRun <- paste0(
                runLabel,
                "_tree_", i,
                "_trait_", j,
                "_", format(Sys.time(), "%m-%d-%y")
                )
            #
            message("####################")
            message(paste0(
                "Analyzing ",
                jobNameRun, "..."
                ))
            #
            traitDataToUseForThisRun <- traitDataList[[i]][[j]]
            #
            doRun_out_ThisTree[[j]] <- doRun_prc(
                ##############
                phy = treeToUse,
                traits = traitDataToUseForThisRun,
                #
                intrinsicFn = intrinsicFunctionToFit,
                extrinsicFn = extrinsicFunctionToFit,
                #
                # starting-state prior: normal centered on the observed
                # trait mean, with the observed trait standard deviation
                startingPriorsFns = "normal",
                startingPriorsValues =
                    list(c(
                        mean(traitDataToUseForThisRun),
                        sd(traitDataToUseForThisRun)
                        )),
                ###########################
                #
                intrinsicPriorsFns =
                    intrinsicArgList$intrinsicPriorsFns,
                intrinsicPriorsValues =
                    intrinsicArgList$intrinsicPriorsValues,
                #########
                #
                extrinsicPriorsFns =
                    extrinsicArgList$extrinsicPriorsFns,
                extrinsicPriorsValues =
                    extrinsicArgList$extrinsicPriorsValues,
                #########
                #
                jobName = jobNameRun,
                #
                ################################################
                # MCMC / ABC control parameters
                #
                # controls that may need to be changed
                generation.time = generation.time,
                multicore = multicore,
                coreLimit = coreLimit,
                #
                numParticles = numParticles,
                nStepsPRC = nStepsPRC,
                nRuns = nRuns,
                nInitialSims = nInitialSims,
                nInitialSimsPerParam = nInitialSimsPerParam,
                #
                saveData = saveData,
                verboseParticles = verboseParticles,
                #
                #
                # standard controls, don't need to be changed
                standardDevFactor = 0.20,
                epsilonProportion = 0.7,
                epsilonMultiplier = 0.7,
                #
                validation = "CV",
                scale = TRUE,
                variance.cutoff = 95,
                #niter.goal = 5,
                #
                stopRule = FALSE,
                stopValue = 0.05,
                maxAttempts = Inf
                #
                )
        }
        doRun_out[[i]] <- doRun_out_ThisTree
    }
    #
    ###########################################################
    #
    # record the expected number of doRun calls as an attribute
    attr(doRun_out, "nDoRun") <- nDoRun
    #
    return(doRun_out)
}
<file_sep>/old/simulation_start_here.R
# Legacy convenience launcher: sources the simulation setup script from a
# hard-coded Windows path (machine-specific -- update before running elsewhere).
source("d://dave//workspace//treevo_paper//analyses//simulations_setup_script.R")
<file_sep>/analyses_cluster/Run_Aq_Emp_BrownMotion_2019-07-14.sh
#!/bin/bash
# Launch the Aquilegia empirical Brownian-motion analysis as an R batch job.
Rscript Run_Aq_Emp_BrownMotion_2019-07-14.R
<file_sep>/analyses_cluster/Run_An_Emp_Bound.sh
#!/bin/bash
# Launch the Anolis empirical bounded-model analysis as an R batch job.
Rscript Run_An_Emp_Bound.R
<file_sep>/analyses/autoregressiveWanderingUnknownOptimumIntrinsic.R
#' @rdname intrinsicModels
#' @export
autoregressiveWanderingUnknownOptimumIntrinsic <- function(params, states, timefrompresent) {
    # Discrete-time autoregressive (OU-like) intrinsic model: each step is a
    # normal draw whose mean pulls the lineage toward a fixed attractor,
    # with the same sd, mean, and attraction for all characters.
    #
    # params[1] -- sigma, the per-step standard deviation
    # params[2] -- the attractor (i.e. the character-state mean)
    # params[3] -- alpha, the attraction strength (intended range: 0 to 1)
    #
    # NOTE(review): despite the "WanderingUnknownOptimum" name, the optimum
    # here is a fixed parameter. Original design notes (10-20-18) observed
    # that a truly time-varying optimum (e.g. tracking an environmental
    # variable like O2 concentration) cannot be done this way, because model
    # instances cannot share a per-time-point optimum; that would require an
    # additional environmental-data argument.
    # `timefrompresent` is accepted for interface compatibility but unused.
    stepSD <- params[1]
    trackedOptimum <- params[2]
    pullStrength <- params[3]
    # Return a displacement (not an absolute state): the expected step is
    # the distance to the optimum scaled by the attraction strength.
    newdisplacement <- TreEvo:::rnormFastZig(
        nZig = length(states),
        meanZig = (trackedOptimum - states) * pullStrength,
        sdZig = stepSD)
    return(newdisplacement)
} <file_sep>/analyses_cluster/Run_An_Emp_Disp.sh
#!/bin/bash
# Launch the Anolis empirical displacement-model analysis as an R batch job.
Rscript Run_An_Emp_Disp.R
<file_sep>/analyses/simulations_framework_script.R
# simulation framework driver for the TreEvo paper analyses
# (shouldn't need to be changed)
##################################################
library(ape)
library(TreEvo)
# hard requirement on the TreEvo version: earlier releases lack features
# this script depends on
if(packageVersion("TreEvo") < "0.21.0"){
stop("Update TreEvo first!")
}
# report the package versions actually in use, for reproducibility
message(paste0(
"TreEvo Version Used: ",
packageVersion("TreEvo")
))
message(paste0(
"ape Version Used: ",
packageVersion("ape")
))
# NOTE(review): setwd() hard-codes a cluster-specific path; the script only
# runs as-is on that machine -- confirm/update the path before running elsewhere.
setwd("/share/bomeara/treevo_paper//")
# helper functions used throughout the rest of this script
source(".//analyses//functions_for_analysis.R")
source(".//analyses//functions_for_aquilegia_models.R")
#############################################
#
# 1) Anolis empirical dataset
# (repulsion / adaptive-landscape dynamics -- multiple optima)
#
# Anolis time-tree: MCC tree from Poe et al. 2017 (Syst Biol),
# dated, with 0.5 burnin
anolisTree <- read.tree(
file="datasets//anolis_PoeEtAl2018_datedMCCw0.5burnin.tre"
)
#
# Anolis trait data: snout-vent body-size (already ln-transformed)
# from Poe et al. 2018 (Am Nat); first column is the size trait
anolisTrait <- read.table(
"datasets//anolis_lntraits_matched_tabdelim_07-24-18.txt",
header=TRUE,row.names=1
)
#
# extract the first column as a named numeric vector
# (single-bracket column extraction drops to a vector, so names are
# re-attached from the table's row names)
anolisSize <- anolisTrait[,1] # ,drop = FALSE]
names(anolisSize) <- rownames(anolisTrait)
#
# reconcile tree and trait data: keep only taxa present in both
#
# reorder/crop the trait vector to match the tree's tip labels;
# taxa missing from the table become NA entries
anolisSize <- anolisSize[anolisTree$tip.label]
names(anolisSize) <- anolisTree$tip.label
# drop any tree tips that lack size data
anyMatchesNA <- is.na(anolisSize)
if(any(anyMatchesNA)){
droppers <- names(anolisSize)[anyMatchesNA]
message(paste0(
"The following OTUs (",
length(droppers),
") on the Anolis tree do not appear to\n",
" have size data and thus will be dropped: \n ",
paste0(strwrap(
paste0(droppers, collapse=", ")
),collapse="\n ")
))
anolisTree <- drop.tip(anolisTree, droppers)
# re-crop the trait vector to the pruned tree
anolisSize <- anolisSize[anolisTree$tip.label]
names(anolisSize) <- anolisTree$tip.label
}
# wrap the single tree as a one-element multiPhylo list, so downstream
# code can treat empirical and simulated tree sets uniformly
anolisTreeList <- list(anolisTree = anolisTree)
class(anolisTreeList) <- "multiPhylo"
#
################################################
#
# 2) Aquilegia empirical dataset
# (Whittall et al. model of nectar spur length increase)
#
# Aquilegia MCC tree, as figured in Whittall and Hodges 2007
aquilegiaTree <- read.tree(
"datasets//aquilegia_Whttall&Hodges2007_figuredMCC.tre"
)
# remove the root edge so the tree starts at the root node proper
aquilegiaTree$root.edge <- NULL
#
# Aquilegia trait data (Whittall and Hodges 2007):
# contains both nectar spur lengths and pollinator-regime codings
#
aquilegiaTrait <- read.table(
"datasets//aquilegia_traitData.txt",
header=FALSE, row.names=1
)
#
# column 2 = raw nectar spur length, extracted as a named vector
aquilegiaSpurLength <- aquilegiaTrait[,2] # , drop = FALSE]
names(aquilegiaSpurLength) <- rownames(aquilegiaTrait)
# take the natural log of the raw lengths
# (column 3 of the table was ALREADY the natural log; earlier code from
# Brian used 'log(data[,3])', which was a log of a log -- hence the switch
# to logging the raw column 2 here)
aquilegiaSpurLength <- log(aquilegiaSpurLength)
# reorder/crop the trait vector to match the tree's tip labels
aquilegiaSpurLength <- aquilegiaSpurLength[aquilegiaTree$tip.label]
names(aquilegiaSpurLength) <- aquilegiaTree$tip.label
#
# column 14 = pollinator syndrome (selective regime) per taxon
aquilegiaPollinators <- aquilegiaTrait[,14]
names(aquilegiaPollinators) <- rownames(aquilegiaTrait)
# reorder/crop to the tree's tip labels
aquilegiaPollinators <- aquilegiaPollinators[aquilegiaTree$tip.label]
names(aquilegiaPollinators) <- aquilegiaTree$tip.label
#
# regimes coded 0, 1, 2:
# 0 = bumble-bee, 1 = humming-bird, 2 = hawkmoth
# not used directly in the analyses below; potentially useful for
# post-analysis comparisons
#
# reconcile tree and trait data -- taxa are matched/dropped relative to
# the spur-length vector ONLY (not the pollinator codings)
#
# drop any tree tips lacking spur-length data
anyMatchesNA <- is.na(aquilegiaSpurLength)
if(any(anyMatchesNA)){
droppers <- names(aquilegiaSpurLength)[anyMatchesNA]
message(paste0(
"The following OTUs(",
length(droppers),
") on the Aquilegia tree do not appear to\n",
" have spur length data and thus will be dropped: \n ",
paste0(strwrap(
paste0(droppers, collapse=", ")
),collapse="\n ")
))
aquilegiaTree <- drop.tip(aquilegiaTree, droppers)
# re-crop both trait vectors to the pruned tree
aquilegiaSpurLength <- aquilegiaSpurLength[aquilegiaTree$tip.label]
aquilegiaPollinators <- aquilegiaPollinators[aquilegiaTree$tip.label]
names(aquilegiaSpurLength) <- aquilegiaTree$tip.label
names(aquilegiaPollinators) <- aquilegiaTree$tip.label
}
# wrap the single tree as a one-element multiPhylo list, mirroring the
# treatment of the Anolis tree above
aquilegiaTreeList <- list(aquilegiaTree = aquilegiaTree)
class(aquilegiaTreeList) <- "multiPhylo"
#
###############################################
# legacy aquilegia code from <NAME>:
#
# assume generation time of 10 years (its a perennial plant),
# following Cooper et al. Plos ONe 2010
# Genetic Variation at Nuclear loci fails to distinguish group is about 3 MY,
# So =>> phy height is 3.
# Thus each unit = 1,000,000 years or 100,000 generations
#
# TreeYears=100000
# timeStep <- 1/TreeYears
# totalTreeLength=TreeYears*sum(phy$edge.length) #how many generations are represented
# number of expected polinator shifts based on parsimony is 7:
# parsimonyShifts=7
# pollinatorShiftRate=parsimonyShifts/totalTreeLength
#
#
###############################################################################
# generate sets of ideal trees for doing simulations on
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
#
# Build the set of idealized simulated trees: three topology classes
# (star, balanced, pectinate) crossed with three tip counts (8, 16, 64).
idealTrees <- list(
#
star_n8 = stree(
n=8,
type = "star", tip.label = NULL
),
star_n16 = stree(
n=16,
type = "star", tip.label = NULL
),
star_n64 = stree(
n=64,
type = "star", tip.label = NULL
),
#
balanced_n8 = stree(
n=8,
type = "balanced", tip.label = NULL
),
balanced_n16 = stree(
n=16,
type = "balanced", tip.label = NULL
),
balanced_n64 = stree(
n=64,
type = "balanced", tip.label = NULL
),
#
# "left" = fully pectinate (ladder/comb) topology
pectinate_n8 = stree(
n=8,
type = "left", tip.label = NULL
),
pectinate_n16 = stree(
n=16,
type = "left", tip.label = NULL
),
pectinate_n64 = stree(
n=64,
type = "left", tip.label = NULL
)
)
#
# stree() returns trees without branch lengths, so assign them
idealTrees <- lapply(idealTrees, compute.brlen)
# scale every tree so its depth matches idealTreeDepth
# (idealTreeDepth is assumed to be set by the setup script -- TODO confirm)
idealTrees <- lapply(idealTrees,
function(x) {
x$edge.length <- x$edge.length * idealTreeDepth
return(x)
}
)
#
# resolve polytomies (star trees especially) into artificial bifurcations
idealTrees <- lapply(idealTrees, multi2di)
#
# sanity check: all simulated trees must be ultrametric
if(!all(sapply(idealTrees,is.ultrametric))){
stop("Not all idealized simulated trees came out as ultrametric ?!")
}
#
# mark the list as a multiPhylo collection
class(idealTrees) <- "multiPhylo"
#
# compress tip labels? No, I don't think that works for trees of different sizes
# trees <- .compressTipLabel(trees)
#
######################################################################################
message("##############################")
message("######### Beginning Analyses ############")
#
# Read the table that parametrizes every analysis-run, and derive the
# bookkeeping structures used by the run loops below.
#
# one row per analysis-run; columns include runLabel, dependentPrevRun,
# model choices, etc.
simRunTable <- read.csv(
file="analyses//simulation_sets_parameters_table.csv",
header=TRUE,
stringsAsFactors=FALSE
)
#
# number of analyses
nAnalyses <- nrow(simRunTable)
#
# names of analyses (used to key the output list)
analysesNames <- simRunTable$runLabel
#
# row indices of runs that are independent of earlier results, and of
# those that depend on (simulate from) earlier results
whichIndependentPrevRun <- which(
!as.logical(simRunTable$dependentPrevRun)
)
whichDependentPrevRun <- which(
as.logical(simRunTable$dependentPrevRun)
)
#
# output list, one slot per analysis; each slot initially holds its own
# name string -- a slot still equal to its name marks an unrun analysis
analysisOutput <- as.list(analysesNames)
# keyed by the run labels, not by numeric index
# names(analysisOutput) <- 1:nAnalyses
names(analysisOutput) <- analysesNames
#
# start with an empty analysisSetup; loadAnalysisSetup flags whether an
# old setup should be restored from disk
analysisSetup <- list()
loadAnalysisSetup <- FALSE
#
#
############################################
#
#
# Optionally resume from the most recently saved output and setup files.
if(continueFromPrevious){
    #
    # find the most recently modified file in ./saved_output/
    outFiles <- file.info(list.files(
        "./saved_output/", full.names = TRUE
        ))
    outFiles <- rownames(outFiles)[which.max(outFiles$mtime)]
    # if there are any files...
    if(length(outFiles) > 0){
        # candidate replacement for analysisOutput
        analysisOutputOld <- readRDS(file=outFiles)
        # BUG FIX: the original test used '==', which errors when the
        # loaded object is a list (the normal case); identical() is safe.
        if(identical(analysisOutputOld, "analysisOutput")){
            stop("Somehow the old output is just the string 'analysisOutput'...?")
        }
        # Sanity-check the loaded object against the current run table.
        # BUG FIX: compare the names of the *loaded* object (the fresh
        # analysisOutput was just built from analysesNames, so comparing
        # against it was vacuously TRUE); also use scalar '&&' here.
        if((length(analysisOutputOld) == length(analysesNames))
            && identical(analysesNames, names(analysisOutputOld))){
            message("Loading output file from previous run...")
            analysisOutput <- analysisOutputOld
            #
            # also load old analysisSetup
            loadAnalysisSetup <- TRUE
        }else{
            warning(paste0(
                "Format of previous output file does not match current script expectations\n",
                "Beginning analyses without loading previous output file..."
                ))
            if(any(sapply(analysisOutput,length) > 1)){
                stop("How did analysisOutput get non-fresh data without restarting from old?")
            }
        }
    }else{
        message(paste0(
            "No output files from previous runs found\n",
            "Beginning analyses without loading any previous output file..."
            ))
    }
}
#
#
# also load the most recent analysisSetup if old output was loaded
if(loadAnalysisSetup){
    # find the most recently modified file in ./saved_setup/
    outFiles <- file.info(list.files(
        "./saved_setup/", full.names = TRUE
        ))
    outFiles <- rownames(outFiles)[which.max(outFiles$mtime)]
    # if there are any files...
    if(length(outFiles) > 0){
        # restore the in-progress setup
        analysisSetup <- readRDS(file=outFiles)
        #
        message("Loading analysis setup from previous run...")
    }else{
        message("No previous analysis setup found.")
    }
}
#
################################################################
# sanity check: the (possibly reloaded) output list must still be keyed
# by the current run labels
if(!identical(analysesNames, names(analysisOutput))){
stop(
"analysisOutput seems to be corrupt - names do not match analysesNames"
)
}
#
##############################################
# build a fresh, date-stamped save-file name for the output of this session
saveFileName <- paste0(
".//saved_output//",
"analysisOutput_saved_",
format(Sys.time(), "%m-%d-%y"),
".rds"
)
#
# save the initial (possibly resumed) output immediately, so the file
# exists even if the session dies early
saveRDS(analysisOutput,
file = saveFileName
)
#
# same for the setup file
saveSetupName <- paste0(
".//saved_setup//",
"analysisSetup_saved_",
format(Sys.time(), "%m-%d-%y"),
".rds"
)
#
# save initial file
saveRDS(analysisSetup,
file = saveSetupName
)
#
######################################
#
# Let's run the analyses!
#
# Run all independent analyses: each fits a model directly to an
# empirical tree + empirical trait dataset, with no reliance on
# parameter estimates from earlier runs.
message("###############################################")
message("######### Independent Analyses ##############")
#
for (i in whichIndependentPrevRun){
    # A slot still holding its own name string has not been run yet.
    # BUG FIX: the original '==' comparison errors once the slot holds a
    # results list (e.g. when resuming from a previous session);
    # identical() returns FALSE there and the completed run is skipped.
    if(identical(analysisOutput[[i]], analysesNames[i])){
        #
        message("#######################################")
        message("###### Now running -- ", analysesNames[i], " #########")
        #
        # the one-row parameter table for this analysis
        runParameters <- simRunTable[i, , drop = FALSE]
        #
        # build a fresh setup unless one was restored from a previous run
        if(identical(analysisSetup, list())){
            analysisSetup <- setupRunAnalysis(
                runParameters = runParameters,
                #
                # inputs needed from script above
                nSimTrait = nSimTrait,
                ratePriorError = ratePriorError,
                #
                anolisTreeList = anolisTreeList,
                anolisSize = anolisSize,
                aquilegiaTreeList = aquilegiaTreeList,
                aquilegiaSpurLength = aquilegiaSpurLength,
                idealTrees = idealTrees,
                #
                # independent runs do not draw on earlier results
                indepAnalyses_intrinsicOut = NULL,
                indepAnalyses_extrinsicOut = NULL
            )
            #
            # persist the setup so the session can be resumed mid-analysis
            saveRDS(analysisSetup,
                file = saveSetupName
                )
        }else{
            # a restored setup must belong to this analysis
            if(!identical(analysisSetup$runLabel, runParameters$runLabel)){
                stop(paste0(
                    "Loaded analysisSetup does not match expected run label.\n",
                    "Maybe delete old files?"
                    ))
            }
        }
        #################
        # now doRun!
        #
        analysisOutput[[i]] <- doRunAnalysis(
            treeList = analysisSetup$treeList,
            traitDataList = analysisSetup$traitDataList,
            runLabel = analysisSetup$runLabel,
            nDoRun = analysisSetup$nDoRun,
            intrinsicFunctionToFit = analysisSetup$intrinsicFunctionToFit,
            extrinsicFunctionToFit = analysisSetup$extrinsicFunctionToFit,
            intrinsicArgList = analysisSetup$intrinsicArgList,
            extrinsicArgList = analysisSetup$extrinsicArgList,
            #
            # presets
            generation.time = generation.time,
            multicore = multicore,
            coreLimit = coreLimit,
            nRuns = nRuns,
            nStepsPRC = nStepsPRC,
            numParticles = numParticles,
            nInitialSimsPerParam = nInitialSimsPerParam,
            nInitialSims = nInitialSims,
            saveData = saveData,
            verboseParticles = verboseParticles
        )
        #
        # test that analysis output is still keyed correctly
        if(!identical(analysesNames,names(analysisOutput))){
            stop("analysisOutput seems to be corrupt - names do not match analysesNames")
        }
        #
        # checkpoint the output after every completed analysis
        saveRDS(analysisOutput,
            file = saveFileName
            )
        # clear the setup (and persist the cleared state) so the next
        # analysis builds its own
        analysisSetup <- list()
        saveRDS(analysisSetup,
            file = saveSetupName
            )
    }
}
#############################
# dependent analyses
########################################
#indep runs that dep runs depend on :
#
# INTRINSIC
# An_Emp_BrownMotion
# An_Emp_Disp
# An_Emp_Bound
# An_Emp_DispBound
# An_Emp_Bound_BoundByStartingState
# An_Emp_Bound_BoundByMinValue
# An_Emp_Bound_BoundOneRangeAway
# An_Emp_TimeReg
# Aq_Emp_3Opt2Bound
# Aq_Emp_BrownMotion
# EXTRINSIC
# An_Emp_DispBound
# An_Emp_Disp
#
# BUT NOTICE THAT SOME OF THESE DO NOT HAVE
# CORRESPONDING INDEP ANALYSES !
#
# actual indep analyses performed:
# An_Emp_DispBound
# An_Emp_Bound
# An_Emp_BrownMotion
# An_Emp_Disp
# An_Emp_TimeReg
# Aq_Emp_3Opt2Bound
# Aq_Emp_BrownMotion
#
# ones not covered by indep analyses
# An_Emp_Bound_BoundByStartingState
# An_Emp_Bound_BoundByMinValue
# An_Emp_Bound_BoundOneRangeAway
#
#############################
# get the stuff necessary for doing the dependent analyses
#
# get model parameters from runs
# that will be used for dependent simulations
# use extract on all indep analyses now
# then can call these later for dependent analyses
# without have to extract same data many times
#
# Note that following functions will only look at first analysis
# this doesn't matter - all indep analyses should only have one analysis
# one empirical tree, one empirical trait, thus only one analysis to examine
#
# Extract the fitted intrinsic/extrinsic model parameters from every
# completed independent analysis; the dependent runs simulate trait data
# under these estimates. The extractors look only at the first analysis
# in each output -- which is fine, since every independent run has exactly
# one (empirical) tree and one trait dataset, hence one analysis.
indepAnalyses_intrinsicOut <- lapply(
analysisOutput[whichIndependentPrevRun],
extractIntrinsic_from_prcOut
)
#
indepAnalyses_extrinsicOut <- lapply(
analysisOutput[whichIndependentPrevRun],
extractExtrinsic_from_prcOut
)
#
# key the extracted parameter sets by their source analysis names
names(indepAnalyses_intrinsicOut) <- analysesNames[whichIndependentPrevRun]
names(indepAnalyses_extrinsicOut) <- analysesNames[whichIndependentPrevRun]
#
# Manually add the three bound-variant intrinsic models that have no
# corresponding independent analysis; all are copies of the fitted
# An_Emp_Bound model with only the bound parameter ('intrinsic_2') replaced.
boundInt <- indepAnalyses_intrinsicOut$An_Emp_Bound
#
# An_Emp_Bound_BoundByStartingState:
# the bound sits exactly at the starting state, giving
# diffusion-away-from-a-left-hand-wall dynamics
boundIntStarting <- boundInt
# set bound equal to starting state
boundIntStarting$intrinsicValues['intrinsic_2'] <- boundIntStarting$startingValues[1]
indepAnalyses_intrinsicOut$An_Emp_Bound_BoundByStartingState <- boundIntStarting
#
# An_Emp_Bound_BoundByMinValue:
# the bound is at the minimum value observed for anolisSize
boundIntMin <- boundInt
# set bound equal to minimum size observed
boundIntMin$intrinsicValues['intrinsic_2'] <- min(anolisSize)
indepAnalyses_intrinsicOut$An_Emp_Bound_BoundByMinValue <- boundIntMin
#
# An_Emp_Bound_BoundOneRangeAway:
# a very distant bound -- one full observed range (max - min) below the min
oneRange <- max(anolisSize) - min(anolisSize)
oneRangeAway <- min(anolisSize) - oneRange
boundOneR <- boundInt
# set bound one observed range below the minimum size observed
boundOneR$intrinsicValues['intrinsic_2'] <- oneRangeAway
indepAnalyses_intrinsicOut$An_Emp_Bound_BoundOneRangeAway<- boundOneR
#
################################################################
# Run all dependent analyses: these simulate trait data under the model
# parameters extracted above, then fit models to the simulated data.
#
message("#############################################")
message("######### Dependent Analyses ##############")
#
for (i in whichDependentPrevRun){
    # A slot still holding its own name string has not been run yet.
    # BUG FIX: the original '==' comparison errors once the slot holds a
    # results list (e.g. when resuming from a previous session);
    # identical() returns FALSE there and the completed run is skipped.
    if(identical(analysisOutput[[i]], analysesNames[i])){
        #
        message("#####################################")
        message("###### Now running -- ", analysesNames[i], " ##########")
        #
        # the one-row parameter table for this analysis
        runParameters <- simRunTable[i, , drop = FALSE]
        #
        # build a fresh setup unless one was restored from a previous run
        if(identical(analysisSetup, list())){
            analysisSetup <- setupRunAnalysis(
                runParameters = runParameters,
                #
                # inputs needed from script above
                nSimTrait = nSimTrait,
                ratePriorError = ratePriorError,
                #
                anolisTreeList = anolisTreeList,
                anolisSize = anolisSize,
                aquilegiaTreeList = aquilegiaTreeList,
                aquilegiaSpurLength = aquilegiaSpurLength,
                idealTrees = idealTrees,
                #
                # dependent runs simulate data from the parameter
                # estimates of the independent runs
                indepAnalyses_intrinsicOut =
                    indepAnalyses_intrinsicOut,
                indepAnalyses_extrinsicOut =
                    indepAnalyses_extrinsicOut
            )
            #
            # persist the setup so the session can be resumed mid-analysis
            saveRDS(analysisSetup,
                file = saveSetupName
                )
        }else{
            # a restored setup must belong to this analysis
            if(!identical(analysisSetup$runLabel, runParameters$runLabel)){
                stop(paste0(
                    "Loaded analysisSetup does not match expected run label.\n",
                    "Maybe delete old files?"
                    ))
            }
        }
        #################
        # now doRun!
        #
        analysisOutput[[i]] <- doRunAnalysis(
            treeList = analysisSetup$treeList,
            traitDataList = analysisSetup$traitDataList,
            runLabel = analysisSetup$runLabel,
            nDoRun = analysisSetup$nDoRun,
            intrinsicFunctionToFit = analysisSetup$intrinsicFunctionToFit,
            extrinsicFunctionToFit = analysisSetup$extrinsicFunctionToFit,
            intrinsicArgList = analysisSetup$intrinsicArgList,
            extrinsicArgList = analysisSetup$extrinsicArgList,
            #
            # presets
            generation.time = generation.time,
            multicore = multicore,
            coreLimit = coreLimit,
            nRuns = nRuns,
            nStepsPRC = nStepsPRC,
            numParticles = numParticles,
            nInitialSimsPerParam = nInitialSimsPerParam,
            nInitialSims = nInitialSims,
            saveData = saveData,
            verboseParticles = verboseParticles
        )
        #
        # test that analysis output is still keyed correctly
        if(!identical(analysesNames,names(analysisOutput))){
            stop("analysisOutput seems to be corrupt - names do not match analysesNames")
        }
        #
        # checkpoint the output after every completed analysis
        saveRDS(analysisOutput,
            file = saveFileName
            )
        # clear the setup (and persist the cleared state) so the next
        # analysis builds its own
        analysisSetup <- list()
        saveRDS(analysisSetup,
            file = saveSetupName
            )
    }
}
<file_sep>/post-analysis-comparison/post-analysis_test_workflow_08-22-19.Rmd
---
title: "Post-Analysis Evaluation of Simulations for TreEvo"
author: "<NAME>"
date: "August 13, 2019"
output: pdf_document
---
First things first: which directory do we want to load the data files from?
```{r}
# Directory containing the saved .rda analysis output files.
dir <- "~//treevo_paper//analyses_cluster_fast//"
```
Load these, then remove all empty runs so we only analyze output for runs that we have results for.
```{r echo=FALSE}
# Discover saved .rda files in `dir`, load one Data_ file to restore the
# workspace (which defines the `analysisOutput` skeleton), then load every
# Results_ file into its named slot of `analysisOutput`.
files <- list.files(dir)
# identify all .rda files
# BUG FIX: escape the dot and anchor at end-of-name, so names merely
# containing "Xrda" or ".rda" mid-string cannot match
filesRDA <- files[grep(pattern="\\.rda$", files)]
# identify all results files
filesResults <- filesRDA[grep(pattern="Results_", filesRDA)]
# identify all data files; loading any one restores the shared workspace,
# so which one we pick should be arbitrary -- use the first
filesData <- filesRDA[grep(pattern="Data_", filesRDA)]
load(paste0(dir, filesData[1]))
# recover each analysis name by stripping the "Results_" prefix, the
# ".rda" suffix, and the trailing "_YYYY-MM-DD" date stamp (11 characters)
analysisNames_Results <- sapply(filesResults,
    function(x){
        x <- gsub(x, pattern="Results_", replacement="")
        x <- gsub(x, pattern="\\.rda$", replacement="")
        # remove date and last '_'
        substr(x, start = 1, stop = nchar(x)-11)
    }
)
# for now, just crash if there's duplicates
if(length(analysisNames_Results) > length(unique(analysisNames_Results))){
    stop("There's duplicate results files")
}
# load results sequentially; each file defines `result`, which is placed
# as a sub-object in the pre-existing list `analysisOutput`
for(i in seq_along(analysisNames_Results)){
    fileLoc <- paste0(dir, filesResults[i])
    #print(fileLoc)
    load(fileLoc)
    analysisOutput[[ analysisNames_Results[i] ]] <- result
}
# drop all slots that never received results (still name strings, not lists)
analysisOutput <- analysisOutput[sapply(analysisOutput, is.list)]
```
We'll also load a number of packages as well, particularly `TreEvo` and `ape`.
```{r echo=FALSE}
# Load the packages and enforce the same TreEvo version requirement as
# the analysis scripts, reporting versions for reproducibility.
library(ape)
library(TreEvo)
# hard requirement: TreEvo >= 0.21.0
if(packageVersion("TreEvo") < "0.21.0"){
stop("Update TreEvo first!")
}
message(paste0(
"TreEvo Version Used: ",
packageVersion("TreEvo")
))
message(paste0(
"ape Version Used: ",
packageVersion("ape")
))
```
## Effective Sample Size
The `pairwiseESS` function calculates Effective Sample Size (ESS) on results, and performs best when pairwise comparisons can be made between results that come from multiple runs.
```{r echo=FALSE}
# Compute ESS for every analysis that has results, keyed by analysis name.
ESS <- as.list(names(analysisOutput))
names(ESS) <- names(analysisOutput)
for(i in seq_along(analysisOutput)){
    nTree <- length(analysisOutput[[i]])
    # BUG FIX: 'for(j in nTree)' visited only the last tree index;
    # seq_len() visits every tree (identical when there is a single tree).
    for(j in seq_len(nTree)){
        nTrait <- length(analysisOutput[[i]][[j]])
        for (k in seq_len(nTrait)){
            # NOTE(review): with multiple trees/traits, ESS[[i]] retains
            # only the last result, as in the original code (which ran on
            # one tree x one trait) -- nest the storage if that changes.
            ESS[[i]] <- suppressMessages(pairwiseESS(analysisOutput[[i]][[j]][[k]]))
        }
    }
    #print(names(analysisOutput))
    #print(ESS[[i]])
}
```
Thus, calculations of ESS should be done across an entire analysis composed of multiple, otherwise identical ABC PRC runs.
```{r}
ESS
```
ESS should be evaluated just as you would gauge true sample-size in more normal situations - is 7 sufficient sample size? Is 70? Is 700?
## Visually comparing Prior and Posterior Parameter Distributions Across Runs
This function plots priors versus their posteriors - this will be useful for runs with bad prior on BM.
Note that the following code will skip parameters whose posterior distributions are highly discontinuous, suggesting complex multimodal distributions that are not best considered via smoothed density kernals.
```{r fig.height=2.5, fig.width=3, echo = FALSE}
for(i in 1:length(analysisOutput)){
nTree <- length(analysisOutput[[i]])
for(j in nTree){
nTrait <- length(analysisOutput[[i]][[j]])
for (k in nTrait){
nRunsFound <- length(analysisOutput[[i]][[j]][[k]])
for(l in 1:nRunsFound){
analysisFound <- analysisOutput[[i]][[j]][[k]][[l]]
#
name_analysisFound <- analysisFound$input.data["jobName",]
#
print(name_analysisFound)
print(paste0("Run ",l))
#
whichNonFixedPriors <- which(sapply(
analysisFound$priorList,
function(x) x$fun != "fixed"
))
#
nPar <- length(whichNonFixedPriors)
#
for(m in 1:nPar){
# first need to get prior and posterior kernals
whichPrior <- whichNonFixedPriors[m]
# parameter name
parName <- names(analysisFound$priorList)[[whichPrior]]
priorKernal<-suppressMessages(getUnivariatePriorCurve(
priorFn=analysisFound$priorList[[whichPrior]]$fun,
priorVariables=analysisFound$priorList[[whichPrior]]$params,
nPoints=100000,
from=NULL,
to=NULL,
alpha=0.95
))
postKernal<-suppressMessages(getUnivariatePosteriorCurve(
acceptedValues=analysisFound$particleDataFrame[ , 6 + m],
from=NULL,
to=NULL,
alpha=0.95
))
suppressMessages(
plotUnivariatePosteriorVsPrior(
posteriorCurve = postKernal,
priorCurve = priorKernal,
label = parName
)
)
}
}
}
}
}
```
Because we did multiple runs, the most useful way to look at these plots is compare parameter estimates from different runs and see if there is convergence. In this case, we can see runs with poor ESS do not look like they have converged well on the same suite of parameter estimates.
The function `highestPostDens` returns for the weighted mean, standard deviation, upper and lower highest posterior density (HPD) for each free parameter in posterior. This probably isn't very useful when we can plot the distributions and compare them, like above.
```{r eval = FALSE}
highestPostDens(results$particleDataFrame, percent=0.95, returnData=FALSE)
```
#########################
## plotABC_3D
This function plots posterior density distribution for each generation in a three-dimensional plot window. Unfortunately, due to `gpclib` not being available on Windows machines, it isn't available for the author of this document at this very moment.
```{r eval = FALSE, echo=FALSE}
for(i in 1:length(analysisOutput)){
nTree <- length(analysisOutput[[i]])
for(j in nTree){
nTrait <- length(analysisOutput[[i]][[j]])
for (k in nTrait){
nRunsFound <- length(analysisOutput[[i]][[j]][[k]])
for(l in 1:nRunsFound){
analysisFound <- analysisOutput[[i]][[j]][[k]][[l]]
#
print(analysisFound$input.data["jobName",])
#
whichNonFixedPriors <- which(sapply(
analysisFound$priorList,
function(x) x$fun != "fixed"
))
#
nPar <- length(whichNonFixedPriors)
#
for(m in 1:nPar){
plotABC_3D(
particleDataFrame = results[[1]]$particleDataFrame,
parameter = 6 + m,
show.particles = "none",
plot.parent = FALSE,
realParam = FALSE,
realParamValues = NA
)
}
}
}
}
}
```
##########################
# Methods for comparing Results of Analyses Based on Simulated Data To True Values
## plotPosteriors
For each free parameter in the posterior, a plot is made of the distribution of values estimate in the last generation. This can also be used to visually compare against true (generating) parameter values in a simulation.
```{r eval = FALSE}
plotPosteriors(particleDataFrame=resultsBM$particleDataFrame,
priorsMat=resultsBM$PriorMatrix)
```
## testMultivarOutlierHDR
This tests if an 'outlier' (some sample) is within a multivariate cloud of particles at some alpha
Very useful for testing if the generating parameters are within the particles for a simulation for dependant analyses - not so useful for indep analyses though!
```{r eval = FALSE}
particleMatrix <- NA
generatingParams <- NA
testMultivarOutlierHDR(
dataMatrix = particleMatrix,
outlier = generatingParams,
alpha = 0.8,
pca = TRUE
)
```
#########################
## NOTES
### Notes from conversation with <NAME> (05-09-18)
So I'm doing approximate bayesian computation and the question is, what do I want to show to the reader
I want to show posterior parameter estimates from real data, and show that they are very different from parameter estimates made under other models, or under the same model but with simulated data, for scenarios with a small number of models.
To show this, I want to make the same series of posterior predictive checks and demonstrate how your prefered model better recapitulates the data it was fit to.
#### ECDF
ECDF is the empirical cumulative distribution function, also known as the ranked order accumulation curve, available as the function `ecdf` in R.
ECDF is a cool way of summarizing the entire dataset graphically.
Basic question: How well does simulations under a fit model reproduce ecdf or the density of the original data? If your model does a better job of doing that, then it is straight up a better model. It also goes beyond parameter estimates and towards the model describing the data
Bayesian analysis wants to describe more than just the expected value. it is greedy and wants to describe the whole posterior. The posterior predictive distribution describes all data sets that are consistent with the model, given the original input information. If the PPD doesn't look like the empirical data, then the model is not describing your data.
### More notes, from 06-21-18
The general sketch is (a) sample particles from the posterior, simulate under this set of parameters N times, and compare the original ECDF for each parameter to the simulated.
A different other idea:
a) First, draw parameter estimates from some posterior particle, simulate under those parameters
b) Then test if 'true' generating parameters are actually within the 95% HDF of the simulated posterior
This approach deals with how we don't really understand how adequate the models are for giving unbiased estimates of parameters.
One could imagine writing a function that just takes arguments: tree, params, etc. and returns results for a particular comparison between two models (possible function names `checkAdequacy` or `sixAnalysesTwoModels`). Basic idea would be you have a dataset as input, two models of interest, and you would then follow this up with six corresponding analyses.
a) Fit model A and model B to real data
b) Simulate under model A and model B fitted parameters from the posterior
c) Then fit both model A and B to both sets of simulated data.
In this case, model A would generally be some simple 'null' model that we wish to compare against, such as BM.
<file_sep>/old/simulations_body_script_10-10-18.R
#############################################################################
# analyses
#############################################
# runLabel = An_Emp-DispBound
# treeSet = empirical-anolis_tree
# empiricalTraitData = Anolis_Size_Data
# doRun.Intrinsic = BM_w/_LowerBound
# doRun.Extrinsic = Displacement
# prior = standard_(uniform)
intrinsicModel<-
extrinsicModel<-
#############################################
# runLabel = An_Emp-Bound
# treeSet = empirical-anolis_tree
# empiricalTraitData = Anolis_Size_Data
# doRun.Intrinsic = BM_w/_LowerBound
# doRun.Extrinsic = Null
# prior = standard_(uniform)
#############################################
# runLabel = An_SimDispBound-DispBound
# treeSet = empirical-anolis_tree
# simTrait.Intrinsic = An_Emp-DispBound
# simTrait.Extrinsic = An_Emp-DispBound
# doRun.Intrinsic = BM_w/_LowerBound
# doRun.Extrinsic = Displacement
# prior = standard_(uniform)
# nSimTrait = 10
#############################################
# runLabel = An_SimBound-DispBound
# treeSet = empirical-anolis_tree
# simTrait.Intrinsic = An_Emp-Bound
# simTrait.Extrinsic = Null
# doRun.Intrinsic = Pure_BM
# doRun.Extrinsic = Displacement
# prior = standard_(uniform)
# nSimTrait = 10
#############################################
# runLabel = An_Emp-BrownMotion
# treeSet = empirical-anolis_tree
# empiricalTraitData = Anolis_Size_Data
# doRun.Intrinsic = Pure_BM
# doRun.Extrinsic = Null
# prior = standard_(uniform)
#############################################
# runLabel = An_Emp-Disp
# treeSet = empirical-anolis_tree
# empiricalTraitData = Anolis_Size_Data
# doRun.Intrinsic = Pure_BM
# doRun.Extrinsic = Displacement
# prior = standard_(uniform)
#############################################
# runLabel = An_Emp-TimeReg
# treeSet = empirical-anolis_tree
# empiricalTraitData = Anolis_Size_Data
# doRun.Intrinsic = Pure_BM
# doRun.Extrinsic = Null
# prior = standard_(uniform)
#############################################
# runLabel = Aq_Emp-3Opt
# treeSet = empirical-Aquilegia_tree
# empiricalTraitData = Aquilegia_Nectar_Spur_Data
# doRun.Intrinsic = 3-Optima
# doRun.Extrinsic = Null
# prior = standard_(uniform)
#############################################
# runLabel = Aq_Emp-BrownMotion
# treeSet = empirical-Aquilegia_tree
# empiricalTraitData = Aquilegia_Nectar_Spur_Data
# doRun.Intrinsic = Pure_BM
# doRun.Extrinsic = Null
# prior = standard_(uniform)
#############################################
# runLabel = Aq_Sim3Opt-3Opt
# treeSet = empirical-Aquilegia_tree
# simTrait.Intrinsic = Aq_Emp-3Opt
# simTrait.Extrinsic = Null
# doRun.Intrinsic = 3-Optima
# doRun.Extrinsic = Null
# prior = standard_(uniform)
# nSimTrait = 10
#############################################
# runLabel = Aq_SimBM-3Opt
# treeSet = empirical-Aquilegia_tree
# simTrait.Intrinsic = Aq_Emp-BrownMotion
# simTrait.Extrinsic = Null
# doRun.Intrinsic = 3-Optima
# doRun.Extrinsic = Null
# prior = standard_(uniform)
# nSimTrait = 10
#############################################
# runLabel = Ideal_SimBM-BM
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = An_Emp-BrownMotion
# simTrait.Extrinsic = Null
# doRun.Intrinsic = Pure_BM
# doRun.Extrinsic = Null
# prior = standard_(uniform)
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
#############################################
# runLabel = Ideal_SimBMpriorBiased
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = An_Emp-BrownMotion
# simTrait.Extrinsic = Null
# doRun.Intrinsic = Pure_BM
# doRun.Extrinsic = Null
# prior = rexp_with_mean_*not*_at_true_sigmasq
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
#############################################
# runLabel = Ideal_SimBM-Disp
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = An_Emp-BrownMotion
# simTrait.Extrinsic = Null
# doRun.Intrinsic = Pure_BM
# doRun.Extrinsic = Displacement
# prior = standard_(uniform)
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
#############################################
# runLabel = Ideal_SimDisp-Disp
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = An_Emp-Disp
# simTrait.Extrinsic = An_Emp-Disp
# doRun.Intrinsic = Pure_BM
# doRun.Extrinsic = Displacement
# prior = standard_(uniform)
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
#############################################
# runLabel = Ideal_SimBound-DispBound
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = An_Emp-Bound
# simTrait.Extrinsic = Null
# doRun.Intrinsic = BM_w/_LowerBound
# doRun.Extrinsic = Displacement
# prior = standard_(uniform)
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
#############################################
# runLabel = Ideal_SimDispBoundNear-DispBound
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = An_Emp-Bound_BoundByStartingState
# simTrait.Extrinsic = An_Emp-Bound
# doRun.Intrinsic = BM_w/_LowerBound
# doRun.Extrinsic = Displacement
# prior = standard_(uniform)
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
#############################################
# runLabel = Ideal_SimDispBoundMod-DispBound
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = An_Emp-Bound_BoundByMinValue
# simTrait.Extrinsic = An_Emp-Bound
# doRun.Intrinsic = BM_w/_LowerBound
# doRun.Extrinsic = Displacement
# prior = standard_(uniform)
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
#############################################
# runLabel = Ideal_SimDispBoundFar-DispBound
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = An_Emp-Bound_BoundOneRangeAway
# simTrait.Extrinsic = An_Emp-Bound
# doRun.Intrinsic = BM_w/_LowerBound
# doRun.Extrinsic = Displacement
# prior = standard_(uniform)
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
#############################################
# runLabel = Ideal_SimDispBound-DispBound
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = An_Emp-DispBound
# simTrait.Extrinsic = An_Emp-DispBound
# doRun.Intrinsic = BM_w/_LowerBound
# doRun.Extrinsic = Displacement
# prior = standard_(uniform)
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
#############################################
# runLabel = Ideal_SimBM-TimeReg
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = An_Emp-BrownMotion
# simTrait.Extrinsic = Null
# doRun.Intrinsic = Time-AutoRegressive_Model
# doRun.Extrinsic = Null
# prior = standard_(uniform)
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
#############################################
# runLabel = Ideal_SimTimeReg-TimeReg
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = An_Emp-TimeReg
# simTrait.Extrinsic = Null
# doRun.Intrinsic = Time-AutoRegressive_Model
# doRun.Extrinsic = Null
# prior = standard_(uniform)
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
#############################################
# runLabel = Ideal_SimBM-3Opt
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = Aq_Emp-BrownMotion
# simTrait.Extrinsic = Null
# doRun.Intrinsic = 3-Optima
# doRun.Extrinsic = Null
# prior = standard_(uniform)
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
#############################################
# runLabel = Ideal_Sim3Opt-3Opt
# treeSet = Ideal-Simulated
# simTrait.Intrinsic = Aq_Emp-3Opt
# simTrait.Extrinsic = Null
# doRun.Intrinsic = 3-Optima
# doRun.Extrinsic = Null
# prior = standard_(uniform)
# idealTreeSets = c("Ideal-Balanced", "Ideal-Pectinate", "Ideal-Star")
# nTipSets = c(8, 16, 64)
# nSimTrait = 10
<file_sep>/README.md
# treevo_paper
Simulations and other analyses for treevo paper
<file_sep>/analyses_cluster/Run_An_Emp_DispBound_2019-07-14.sh
#!/bin/bash
Rscript Run_An_Emp_DispBound_2019-07-14.R
<file_sep>/analyses_cluster_fast/Run_An_Emp_Disp_2019-07-31.sh
#!/bin/bash
Rscript Run_An_Emp_Disp_2019-07-31.R
<file_sep>/analyses_cluster/Run_An_Emp_DispBound.R
library(ape)
library(TreEvo)
load("Data_An_Emp_DispBound.rda")
# get package versions
if(packageVersion("TreEvo") < "0.21.0"){
stop("Update TreEvo first!")
}
message(paste0(
"TreEvo Version Used: ",
packageVersion("TreEvo")
))
message(paste0(
"ape Version Used: ",
packageVersion("ape")
))
result <- doRunAnalysis(
treeList = analysisSetup$treeList,
traitDataList = analysisSetup$traitDataList,
runLabel = analysisSetup$runLabel,
nDoRun = analysisSetup$nDoRun,
intrinsicFunctionToFit = analysisSetup$intrinsicFunctionToFit,
extrinsicFunctionToFit = analysisSetup$extrinsicFunctionToFit,
intrinsicArgList = analysisSetup$intrinsicArgList,
extrinsicArgList = analysisSetup$extrinsicArgList,
#
# presets
generation.time = generation.time,
multicore = multicore,
coreLimit = coreLimit,
nRuns = nRuns,
nStepsPRC = nStepsPRC,
numParticles = numParticles,
nInitialSimsPerParam = nInitialSimsPerParam,
nInitialSims = nInitialSims,
saveData = saveData,
verboseParticles = verboseParticles
)
save(result, file="Results_An_Emp_DispBound.rda")
<file_sep>/trials and tests/aquilegia_models_test_10-10-18.R
# aquilegia_models_test_10-10-18.R
library(TreEvo)
source("D:\\dave\\workspace\\treevo_paper\\analyses\\aquilegia_models_09-20-18.R")
# simulate n time-steps, repeat many times, plot results
repeatSimSteps<-function(params,trait=0,nSteps,fun){
for(i in 1:nSteps){
# add to original trait value to get new trait value
trait<-trait+fun(
params=params, states=trait, timefrompresent=NA)
}
trait
}
set.seed(1)
#########################################################################
# 3 bounds, 3 optima
# params[1] is dispersion (sigma)
# params[2] is alpha (strength of attraction to an optima)
# params[3] is rho, an exponent scaling the weighting of distance to optima
# this parameter will control switching optima
# params[4:6] is the max boundary, for each of the three regimes
# params[7:9] describes theta (optima) values for each of the three regimes
# 3 bounds, 3 optima with weak-ish attraction
params<-c(
sigma=0.1,
alpha=0.3,
rho=1,
maxbounds=c(20,40,60),
theta=c(10,30,50)
)
multiOptima3IntrinsicMaxBoudary3(params=params, states=0, timefrompresent=NA)
repSim<-replicate(300,
repeatSimSteps(params,trait = 0, nSteps = 100,
fun = multiOptima3IntrinsicMaxBoudary3
)
)
hist(repSim,main="Simulated Trait Values",breaks=20)
# same model above, with more switching between optima
params<-c(
sigma=0.1,
alpha=0.5,
rho=0.3,
maxbounds=c(20,40,60),
theta=c(10,30,50)
)
multiOptima3IntrinsicMaxBoudary3(params=params, states=0, timefrompresent=NA)
repSim<-replicate(300,
repeatSimSteps(params,trait = 0, nSteps = 100,
fun = multiOptima3IntrinsicMaxBoudary3
)
)
hist(repSim,main="Simulated Trait Values",breaks=20)
##########################################################################################
# 2 bounds, 3 optima
# params[1] is dispersion (sigma)
# params[2] is alpha (strength of attraction to an optima)
# params[3] is rho, an exponent scaling the weighting of distance to optima
# this parameter will control switching optima
# params[4:5] is the max boundary, for the two lower regimes regimes
# params[6:8] describes theta (optima) values for each of the three regimes
# 2 bounds, 3 optima with strong-ish attraction
params<-c(
sigma=0.1,
alpha=0.3,
rho=1,
maxbounds=c(20,40),
theta=c(10,30,50)
)
multiOptima3IntrinsicMaxBoudary2(params=params, states=0, timefrompresent=NA)
repSim<-replicate(300,
repeatSimSteps(params,trait = 0, nSteps = 100,
fun = multiOptima3IntrinsicMaxBoudary2
)
)
hist(repSim,main="Simulated Trait Values",breaks=20)
# same model above, with more switching between weak optima, high diffusion
params<-c(
sigma=0.7,
alpha=0.1,
rho=0.5,
maxbounds=c(20,40),
theta=c(10,30,50)
)
multiOptima3IntrinsicMaxBoudary2(params=params, states=0, timefrompresent=NA)
repSim<-replicate(300,
repeatSimSteps(params,trait = 0, nSteps = 100,
fun = multiOptima3IntrinsicMaxBoudary2
)
)
hist(repSim,main="Simulated Trait Values",breaks=20)
<file_sep>/datasets/autoregressiveWanderingUnknownOptimumIntrinsic.R
#' @rdname intrinsicModels
#' @export
autoregressiveWanderingUnknownOptimumIntrinsic <- function(params, states, timefrompresent) {
# 3) Time - autoregressive model with optimum based on a
# factor that changes through time (like O2 concentration)
# and presumably the tracked env factor will be analyzed many times over
#
# Parameters of the regression:
# a single variable function to convert O2 to optimal gill size or whatever
# strength of pull
# BM wiggle
#
# 10-20-18
# can't do it with an unknown optimum because the model function doesn't talk to other
# instances of the model, so how would they know what the optimum is at a particular time-point
# ->>>>>>>>>>>>>>>>
# We need a dataset with an environmental dataset
# that we can treat as an optimum being tracked
# but what data? this will require another argument.
#
# 10-23-18
# Previous discussions of what models we wanted to test included discussion a "time / autoregressive" model
# as far as I can tell from our previous conversations, we had meant for this to be an autoregressive model where the optimum follows some known environmental predictor through time
# (note that the way treevo is designed, it is impossible as far as I can tell to do an arbitrary environmental predictor)
# (i.e. an unknown autoregressive optima that varies over time)
# a model that follows a known environmental predictor could be coded
# it would require some small but not minor changes to the treevo code base, particularly that the variable would need to be an extra argument to all intrinsic models
# and the way time is passed to the intrinsic models would need to be in user-defined time units - i.e. the same units as the tree / matrix of enviromental variable itself
# Previous discussions of what models we wanted to test included discussion a "time / autoregressive" model
# as far as I can tell from our previous conversations, we had meant for this to be an autoregressive model where the optimum follows some known environmental predictor through time
# (note that the way treevo is designed, it is impossible as far as I can tell to do an arbitrary environmental predictor)
# (i.e. an unknown autoregressive optima that varies over time)
# a model that follows a known environmental predictor could be coded, but it would require some small but not minor changes to the treevo code base, particularly that the variable would need to be an extra argument to all intrinsic models
# and the way time is passed to the intrinsic models would need to be in user-defined time units - i.e. the same units as the tree / matrix of enviromental variable itself
#
#
# Brian 10-23-18
#
# environVariable DEFINED within the function - then doesn't need to be input carried through doRun
# and instead is in the function by default
# okay, that should work
# need to get the environmental variable
environVariable[timefrompresent]
#a discrete time OU, same sd, mean, and attraction for all chars
#params[1] is sd (sigma), params[2] is attractor (ie. character mean), params[3] is attraction (ie. alpha)
sd <- params[1]
attractor <- params[2]
attraction <- params[3] #in this model, this should be between zero and one
#subtract current states because we want displacement
newdisplacement <- rpgm::rpgm.rnorm(
n = length(states),
mean = (attractor-states)*attraction,
sd = sd)
return(newdisplacement)
}
<file_sep>/analyses_cluster_fast/Run_An_Emp_DispBound_2019-07-31.sh
#!/bin/bash
Rscript Run_An_Emp_DispBound_2019-07-31.R
<file_sep>/analyses_cluster/Run_An_Emp_DispBound.sh
#!/bin/bash
Rscript Run_An_Emp_DispBound.R
<file_sep>/analyses_cluster/Run_An_Emp_BrownMotion_2019-07-14.sh
#!/bin/bash
Rscript Run_An_Emp_BrownMotion_2019-07-14.R
| 5e587a35a098e5bee0462f22c340c6d512e4b30a | [
"Markdown",
"R",
"RMarkdown",
"Shell"
] | 40 | Shell | bomeara/treevo_paper | 56a0e523651218b2965952718a98895b375b6e39 | 7bf6ced4e7463e3ea74c9870006996d18a4d7799 |
refs/heads/master | <repo_name>quirozariel21/convertJsonToJavaObject<file_sep>/src/coderoad/cr24/model/ListInspector.java
package coderoad.cr24.model;
import java.util.List;
import org.codehaus.jackson.annotate.JsonProperty;
public class ListInspector {
@JsonProperty("inspector")
private List<Inspector> listInspector;
public List<Inspector> getListInspector() {
return listInspector;
}
public void setListInspector(List<Inspector> listInspector) {
this.listInspector = listInspector;
}
}
<file_sep>/src/coderoad/cr24/seleniumConnector/OverrideClass.java
package coderoad.cr24.seleniumConnector;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.events.WebDriverEventListener;
public class OverrideClass implements WebDriverEventListener{
@Override
public void afterChangeValueOf(WebElement arg0, WebDriver arg1) {
// TODO Auto-generated method stub
//System.out.println("LISTENER afterChangeValueOf");
}
@Override
public void afterClickOn(WebElement arg0, WebDriver arg1) {
// TODO Auto-generated method stub
//System.out.println("LISTENER afterClickOn");
}
@Override
public void afterFindBy(By arg0, WebElement arg1, WebDriver arg2) {
// TODO Auto-generated method stub
//System.out.println("LISTENER afterFindBy");
}
@Override
public void afterNavigateBack(WebDriver arg0) {
// TODO Auto-generated method stub
//System.out.println("LISTENER afterNavigateBack");
}
@Override
public void afterNavigateForward(WebDriver arg0) {
// TODO Auto-generated method stub
///System.out.println("LISTENER afterNavigateForward");
}
@Override
public void afterNavigateTo(String arg0, WebDriver arg1) {
// TODO Auto-generated method stub
//System.out.println("LISTENER afterNavigateTo");
}
@Override
public void afterScript(String arg0, WebDriver arg1) {
// TODO Auto-generated method stub
//System.out.println("LISTENER DESPUES DE afterScript");
}
@Override
public void beforeChangeValueOf(WebElement arg0, WebDriver arg1) {
// TODO Auto-generated method stub
//System.out.println("LISTENER ANTES De beforeChangeValueOf");
}
@Override
public void beforeClickOn(WebElement arg0, WebDriver arg1) {
// TODO Auto-generated method stub
//System.out.println("LISTENER ANTES DE beforeClickOn");
}
@Override
public void beforeFindBy(By arg0, WebElement arg1, WebDriver arg2) {
// TODO Auto-generated method stub
//System.out.println("LISTENER ANTES DE beforeFindBy");
}
@Override
public void beforeNavigateBack(WebDriver arg0) {
// TODO Auto-generated method stub
//System.out.println("LISTENER ANTES DE beforeNavigateBack");
}
@Override
public void beforeNavigateForward(WebDriver arg0) {
// TODO Auto-generated method stub
//System.out.println("LISTENER ANTES DE beforeNavigateForward");
}
@Override
public void beforeNavigateTo(String arg0, WebDriver arg1) {
//// TODO Auto-generated method stub
//System.out.println("LISTENER ANTES DE beforeNavigateTo");
}
@Override
public void beforeScript(String arg0, WebDriver arg1) {
// TODO Auto-generated method stub
//System.out.println("LISTENER ANTES DE beforeScript");
}
@Override
public void onException(Throwable arg0, WebDriver arg1) {
System.out.println("LISTENER ERRROR onException");
System.out.println("LISTENER ERRROR onException"+arg0.getMessage());
System.out.println("LISTENER ERRROR onException"+arg1.getCurrentUrl());
//String newWindow= arg1.getWindowHandle();
//System.out.println("newWindow "+newWindow);
//arg1.switchTo().window(newWindow);
//arg1.quit();
//arg1.close();
}
}
<file_sep>/src/coderoad/cr24/selenium/ConvertJsonToJavaObject.java
package coderoad.cr24.selenium;
import java.io.File;
import java.io.IOException;
import org.codehaus.jackson.JsonParseException;
import org.codehaus.jackson.JsonParser;
import org.codehaus.jackson.map.ObjectMapper;
import coderoad.cr24.exception.JsonException;
import coderoad.cr24.model.JsonSelenium;
import static coderoad.cr24.utils.UtilsMethods.*;
public class ConvertJsonToJavaObject {
private String filePath;
public ConvertJsonToJavaObject(){
}
public ConvertJsonToJavaObject(String filePath){
this.filePath=filePath;
}
public JsonSelenium convertJsonToJavaObject(String filePath){
ObjectMapper mapper=new ObjectMapper();
JsonSelenium jsonSelenium=null;
try {
jsonSelenium = mapper.readValue(new File(PATH_FILE_JSON), JsonSelenium.class);
//jsonSelenium = mapper.readValue(filePath,JsonSelenium.class);
String jsonString=mapper.writerWithDefaultPrettyPrinter().writeValueAsString(jsonSelenium);
checkValidJson(jsonString);
System.out.println(jsonString);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (JsonException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return jsonSelenium;
}
private void checkValidJson(final String json) throws JsonException{
try {
final JsonParser parser = new ObjectMapper().getJsonFactory().createJsonParser(json);
while (parser.nextToken() != null) {}
} catch (JsonParseException jpe) {
throw new JsonException("The contactJson contains a JSON malformed", jpe);
} catch (IOException ioe) { //
throw new JsonException("The contactJson cannot be readed", ioe);
}
}
}
<file_sep>/README.md
# convertJsonToJavaObject
Convierte un archivo en formato a un objeto Java.
<file_sep>/src/selenium/org/ClassTest.java
package selenium.org;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import org.openqa.selenium.By;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.OutputType;
import org.openqa.selenium.TakesScreenshot;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.openqa.selenium.firefox.FirefoxProfile;
//import com.gargoylesoftware.htmlunit.WebConsole;
public class ClassTest {
public static void main(String []args) throws InterruptedException, IOException{
FirefoxProfile profile = new FirefoxProfile();
profile.setPreference("browser.download.dir", "C:\\Users\\aquiroz\\Documents");
profile.setPreference("browser.download.folderList", 2);
profile.setPreference("browser.helperApps.alwaysAsk.force", false);
profile.setPreference("browser.download.manager.showWhenStarting",false);
profile.setPreference("browser.helperApps.neverAsk.saveToDisk","application/zip, application/x-zip, application/x-zip-compressed, application/download, application/octet-stream");
WebDriver driver= new FirefoxDriver(profile);
driver.get("http://www.wikipedia.org");
driver.quit();
/*
try{
// Create file
FileWriter fstream = new FileWriter("C:\\Users\\aquiroz\\Documents\\banco.html");
BufferedWriter out = new BufferedWriter(fstream);
out.write(driver.getPageSource());
//Close the output stream
out.close();
}catch (Exception e){//Catch exception if any
System.err.println("Error: " + e.getMessage());
}
String page=driver.getPageSource();
File file=new File("C:\\Users\\aquiroz\\Documents\\bancoafomericaSelenium.html");
FileWriter fw=new FileWriter(file);
fw.write(page);
fw.close();
*/
/*
driver.get("http://www.wikipedia.org");
WebElement link;
link=driver.findElement(By.linkText("English"));
link.click();
File srcFile=((TakesScreenshot)driver).getScreenshotAs(OutputType.FILE);
org.apache.commons.io.FileUtils.copyFile(srcFile, new File("C:\\Users\\aquiroz\\Pictures\\screenShot1.png"));
Thread.sleep(5000);
WebElement searchBox;
searchBox=driver.findElement(By.id("searchInput"));
searchBox.sendKeys("Software");
searchBox.submit();
File srcFile2=((TakesScreenshot)driver).getScreenshotAs(OutputType.FILE);
org.apache.commons.io.FileUtils.copyFile(srcFile2, new File("C:\\Users\\aquiroz\\Pictures\\screenShot2.png"));
Thread.sleep(5000);
*/
driver.quit();
}
}
| fd15ed5f0bec7a1d378845db5a54e8fa69da6cab | [
"Markdown",
"Java"
] | 5 | Java | quirozariel21/convertJsonToJavaObject | 2d06b1de45509b8a7f053de107db9f57c16d4b1c | 61f2c717235d5a032377c31a2d397c744d64b633 |
refs/heads/master | <file_sep>function generateDashboard(data,geom){
var map = new lg.map('#map').geojson(geom).nameAttr('Mun_Name').joinAttr('Mun_Code').zoom(7.6).center([17.5,121.3]);
var pred_abs_category = new lg.column("#prediction+abs_category").label("Priority Index Categories (based on #)").axisLabels(false);
var pred_abs_damages = new lg.column("#prediction+abs_damage").label("Damaged Houses (#)").axisLabels(false);
var pred_perc_damages = new lg.column("#prediction+perc_damage").label("Damaged Houses (% of HHs)").axisLabels(false);
var pred_cat_weightedsum = new lg.column("#prediction+category_new").label("Priority Index (based on %)").axisLabels(false);
var pred_abs_weightedsum = new lg.column("#prediction+weightedsum").label("Weighted sum (#)").axisLabels(false)
.colorAccessor(function(d){ if (d>2000) {return 4;} else if (d>1000) {return 3;} else if (d>=500) {return 2;} else if (d>=200) {return 1;} else if (d<200) {return 0;}});
var pred_perc_weightedsum = new lg.column("#prediction+perc_damage_new").label("Weighted sum (% of HHs)").axisLabels(false)
.colorAccessor(function(d){ if (d>0.2) {return 4;} else if (d>0.1) {return 3;} else if (d>=0.05) {return 2;} else if (d>=0.02) {return 1;} else if (d<0.02) {return 0;}});
var actual_abs_damages = new lg.column("#actual+abs_damage").label("Damaged Houses (#)");
var actual_perc_damages = new lg.column("#actual+perc_damage").label("Damaged Houses (% of HHs)").axisLabels(false);
var actual_abs_weightedsum = new lg.column("#actual+weightedsum").label("Weighted Sum (#)").axisLabels(false)
.colorAccessor(function(d){ if (d>2000) {return 4;} else if (d>1000) {return 3;} else if (d>=500) {return 2;} else if (d>=200) {return 1;} else if (d<200) {return 0;}});
var actual_perc_weightedsum = new lg.column("#actual+perc_damage_new").label("Weighted Sum (% of HHs)").axisLabels(false)
.colorAccessor(function(d){ if (d>0.2) {return 4;} else if (d>0.1) {return 3;} else if (d>=0.05) {return 2;} else if (d>=0.02) {return 1;} else if (d<0.02) {return 0;}});
var diff_perc = new lg.column("#diff+perc").label("%-point difference").axisLabels(false)
//.colorAccessor(function(d){ if (d>0.2) {return 4;} else if (d>0.05) {return 3;} else if (d>=-0.05) {return 2;} else if (d>=-0.2) {return 1;} else if (d<-0.2) {return 0;}})
.colors(['#d7191c','#fdae61','#ffffbf','#DA70D6','#8B008B']);
var diff_gap = new lg.column("#diff+gap").label("% difference").axisLabels(false)
.colorAccessor(function(d){ if (d>.5) {return 4;} else if (d>0.2) {return 3;} else if (d>=-0.2) {return 2;} else if (d>=-0.5) {return 1;} else if (d<-0.5) {return 0;}})
.colors(['#d7191c','#fdae61','#ffffbf','#DA70D6','#8B008B']);
var diff_perc_new = new lg.column("#diff+perc_new").label("%-point difference").axisLabels(false)
.colorAccessor(function(d){ if (d>0.10) {return 4;} else if (d>0.05) {return 3;} else if (d>=-0.05) {return 2;} else if (d>=-0.10) {return 1;} else if (d<-0.10) {return 0;}})
.colors(['#d7191c','#fdae61','#ffffbf','#DA70D6','#8B008B']);
var diff_gap_new = new lg.column("#diff+gap_new").label("% difference").axisLabels(false)
.colorAccessor(function(d){ if (d>.5) {return 4;} else if (d>0.2) {return 3;} else if (d>=-0.2) {return 2;} else if (d>=-0.5) {return 1;} else if (d<-0.5) {return 0;}})
.colors(['#d7191c','#fdae61','#ffffbf','#DA70D6','#8B008B']);
lg.colors(["#ffffb2","#fecc5c","#fd8d3c","#f03b20","#bd0026"]);
var group1 = 3;
var group2 = 2;
var group3 = 2;
var group4 = 0;
var grid1 = new lg.grid('#grid1')
.data(data)
.width($('#grid1').width())
.height(5000)
.nameAttr('#adm3+name')
.joinAttr('#adm3+code')
.hWhiteSpace(4)
.vWhiteSpace(4)
.margins({top: 250, right: 20, bottom: 30, left: 200})
.columns([pred_cat_weightedsum,pred_abs_weightedsum,pred_perc_weightedsum,actual_abs_weightedsum,actual_perc_weightedsum,diff_perc_new])
;
$('#run1').on('click',function(){
lg._gridRegister = [];
$('#run2').css({'background-color': 'grey' });
$('#run1').css({'background-color': '#BF002D' });
$('#map-container').html('<div id="map"></div>');
$('#grid1').html('');
grid1 = new lg.grid('#grid1')
.data(data)
.width($('#grid1').width())
.height(5000)
.nameAttr('#adm3+name')
.joinAttr('#adm3+code')
.hWhiteSpace(4)
.vWhiteSpace(4)
.margins({top: 250, right: 20, bottom: 30, left: 200})
.columns([pred_abs_category,pred_abs_damages,pred_perc_damages,actual_abs_damages,actual_perc_damages,diff_perc])
;
lg.init();
initlayout(data,diff_perc,'#diff+perc');
$("#map").width($("#map").width());
});
$('#run2').on('click',function(){
lg._gridRegister = [];
$('#run1').css({'background-color': 'grey' });
$('#run2').css({'background-color': '#BF002D' });
$('#map-container').html('<div id="map"></div>');
$('#grid1').html('');
grid1 = new lg.grid('#grid1')
.data(data)
.width($('#grid1').width())
.height(5000)
.nameAttr('#adm3+name')
.joinAttr('#adm3+code')
.hWhiteSpace(4)
.vWhiteSpace(4)
.margins({top: 250, right: 20, bottom: 30, left: 200})
.columns([pred_cat_weightedsum,pred_abs_weightedsum,pred_perc_weightedsum,actual_abs_weightedsum,actual_perc_weightedsum,diff_perc_new])
;
lg.init();
initlayout(data,diff_perc_new,'#diff+perc_new');
$("#map").width($("#map").width());
});
lg.init();
initlayout(data,diff_perc_new,'#diff+perc_new');
$("#map").width($("#map").width());
function initlayout(data,sort_indicator1,sort_indicator2){
//sort table and color map by priority after loading dashboard
var newdata = [];
data.forEach(function(d){
newdata.push({'key':d['#adm3+code'],'value':d[sort_indicator2]});
});
map.colorMap(newdata,sort_indicator1);
grid1._update(data,grid1.columns(),sort_indicator1,'#adm3+name');
//////////////////////////////////////////
//Create the category lines above the grid
//////////////////////////////////////////
var g = d3.select('#grid1').select('svg').select('g').append('g');
//Add the number of variables per group
var offset_hor = 0;
var offset_vert = -30;
//horizontal line 1
g.append('line').attr("x1", 0+offset_hor)
.attr("y1", offset_vert)
.attr("x2", (lg._gridRegister[0]._properties.boxWidth)*group1+(lg._gridRegister[0]._hWhiteSpace)*(group1-1)+offset_hor)
.attr("y2", offset_vert)
.attr("stroke-width", 1)
.attr("stroke", "black");
//horizontal line 2
g.append('line').attr("x1", (lg._gridRegister[0]._properties.boxWidth+lg._gridRegister[0]._hWhiteSpace)*group1+offset_hor)
.attr("y1", offset_vert)
.attr("x2", (lg._gridRegister[0]._properties.boxWidth)*(group1+group2)+(lg._gridRegister[0]._hWhiteSpace)*(group1+group2-1)+offset_hor)
.attr("y2", offset_vert)
.attr("stroke-width", 1)
.attr("stroke", "black");
//horizontal line 3
g.append('line').attr("x1", (lg._gridRegister[0]._properties.boxWidth+lg._gridRegister[0]._hWhiteSpace)*(group1+group2)+offset_hor)
.attr("y1", offset_vert)
.attr("x2", (lg._gridRegister[0]._properties.boxWidth)*(group1+group2+group3)+(lg._gridRegister[0]._hWhiteSpace)*(group1+group2+group3-1)+offset_hor)
.attr("y2", offset_vert)
.attr("stroke-width", 1)
.attr("stroke", "black");
/* //horizontal line 4
g.append('line').attr("x1", (lg._gridRegister[0]._properties.boxWidth+lg._gridRegister[0]._hWhiteSpace)*(group1+group2+group3)+offset_hor)
.attr("y1", offset_vert)
.attr("x2", (lg._gridRegister[0]._properties.boxWidth+lg._gridRegister[0]._hWhiteSpace)*(group1+group2+group3+group4))
.attr("y2", offset_vert)
.attr("stroke-width", 1)
.attr("stroke", "black"); */
//vertical line 1.1
g.append('line').attr("x1", 0+offset_hor)
.attr("y1", offset_vert)
.attr("x2", 0+offset_hor)
.attr("y2", (offset_vert-5))
.attr("stroke-width", 1)
.attr("stroke", "black");
//vertical line 1.2
g.append('line').attr("x1", lg._gridRegister[0]._properties.boxWidth*(group1)+(lg._gridRegister[0]._hWhiteSpace)*(group1-1)+offset_hor)
.attr("y1", offset_vert)
.attr("x2", lg._gridRegister[0]._properties.boxWidth*(group1)+(lg._gridRegister[0]._hWhiteSpace)*(group1-1)+offset_hor)
.attr("y2", (offset_vert-5))
.attr("stroke-width", 1)
.attr("stroke", "black");
//vertical line 2.1
g.append('line').attr("x1", (lg._gridRegister[0]._properties.boxWidth+lg._gridRegister[0]._hWhiteSpace)*(group1)+offset_hor)
.attr("y1", offset_vert)
.attr("x2", (lg._gridRegister[0]._properties.boxWidth+lg._gridRegister[0]._hWhiteSpace)*(group1)+offset_hor)
.attr("y2", (offset_vert-5))
.attr("stroke-width", 1)
.attr("stroke", "black");
//vertical line 2.2
g.append('line').attr("x1", lg._gridRegister[0]._properties.boxWidth*(group1+group2)+(lg._gridRegister[0]._hWhiteSpace)*(group1+group2-1)+offset_hor)
.attr("y1", offset_vert)
.attr("x2", lg._gridRegister[0]._properties.boxWidth*(group1+group2)+(lg._gridRegister[0]._hWhiteSpace)*(group1+group2-1)+offset_hor)
.attr("y2", (offset_vert-5))
.attr("stroke-width", 1)
.attr("stroke", "black");
//vertical line 3.1
g.append('line').attr("x1", (lg._gridRegister[0]._properties.boxWidth+lg._gridRegister[0]._hWhiteSpace)*(group1+group2)+offset_hor)
.attr("y1", offset_vert)
.attr("x2", (lg._gridRegister[0]._properties.boxWidth+lg._gridRegister[0]._hWhiteSpace)*(group1+group2)+offset_hor)
.attr("y2", (offset_vert-5))
.attr("stroke-width", 1)
.attr("stroke", "black");
//vertical line 3.2
g.append('line').attr("x1", lg._gridRegister[0]._properties.boxWidth*(group1+group2+group3)+(lg._gridRegister[0]._hWhiteSpace)*(group1+group2+group3-1)+offset_hor)
.attr("y1", offset_vert)
.attr("x2", lg._gridRegister[0]._properties.boxWidth*(group1+group2+group3)+(lg._gridRegister[0]._hWhiteSpace)*(group1+group2+group3-1)+offset_hor)
.attr("y2", (offset_vert-5))
.attr("stroke-width", 1)
.attr("stroke", "black");
/* //vertical line 4.1
g.append('line').attr("x1", (lg._gridRegister[0]._properties.boxWidth+lg._gridRegister[0]._hWhiteSpace)*(group1+group2+group3)+offset_hor)
.attr("y1", offset_vert)
.attr("x2", (lg._gridRegister[0]._properties.boxWidth+lg._gridRegister[0]._hWhiteSpace)*(group1+group2+group3)+offset_hor)
.attr("y2", (offset_vert-5))
.attr("stroke-width", 1)
.attr("stroke", "black");
//vertical line 4.2
g.append('line').attr("x1", (lg._gridRegister[0]._properties.boxWidth+lg._gridRegister[0]._hWhiteSpace)*(group1+group2+group3+group4))
.attr("y1", offset_vert)
.attr("x2", (lg._gridRegister[0]._properties.boxWidth+lg._gridRegister[0]._hWhiteSpace)*(group1+group2+group3+group4))
.attr("y2", (offset_vert-5))
.attr("stroke-width", 1)
.attr("stroke", "black"); */
//horizontal text 1
g.append('text').attr('x', lg._gridRegister[0]._properties.boxWidth*(group1/2)+offset_hor)
.attr('y', (offset_vert+15))
.text('Predictions')
.style("text-anchor", "middle")
.attr("font-size",12);
//horizontal text 2
g.append('text').attr('x', (lg._gridRegister[0]._properties.boxWidth+lg._gridRegister[0]._hWhiteSpace)*(group1+group2/2)+offset_hor)
.attr('y', (offset_vert+15))
.text('Actuals')
.style("text-anchor", "middle")
.attr("font-size",12);
//horizontal text 3
g.append('text').attr('x', (lg._gridRegister[0]._properties.boxWidth+lg._gridRegister[0]._hWhiteSpace)*(group1+group2+group3/2)+offset_hor)
.attr('y', (offset_vert+15))
.text('Difference')
.style("text-anchor", "middle")
.attr("font-size",12);
/* //horizontal text 4
g.append('text').attr('x', (lg._gridRegister[0]._properties.boxWidth+lg._gridRegister[0]._hWhiteSpace)*(group1+group2+group3+group4/2)+offset_hor)
.attr('y', (offset_vert+15))
.text('Demographics')
.style("text-anchor", "middle")
.attr("font-size",12); */
}
}
// Convert a HXL-proxy 2D array (first row = HXL tags) into an array of
// objects keyed by the tag. Attributes after the tag ("#x+b+a") are sorted
// so that equivalent tags normalize to the same key ("#x+a+b").
// `headers` is accepted for call-site compatibility but is not used.
function hxlProxyToJSON(input,headers){
    var records = [];
    var tagList = [];
    input.forEach(function(row, rowIndex){
        if (rowIndex === 0) {
            // Header row: normalize every tag by sorting its attributes.
            row.forEach(function(cell){
                var pieces = cell.split('+');
                var tag = pieces[0];
                if (pieces.length > 1) {
                    var attributes = pieces.slice(1);
                    attributes.sort();
                    attributes.forEach(function(attribute){
                        tag += '+' + attribute;
                    });
                }
                tagList.push(tag);
            });
        } else {
            // Data row: pair each cell with its normalized header tag.
            var record = {};
            row.forEach(function(cell, cellIndex){
                record[tagList[cellIndex]] = cell;
            });
            records.push(record);
        }
    });
    return records;
}
// Pin the map container once the page is scrolled past the anchor element,
// and release it again when scrolled back above it.
function stickydiv(){
    var scrollTop = $(window).scrollTop();
    var anchorTop = $('#sticky-anchor').offset().top;
    if (scrollTop > anchorTop) {
        $('#map-container').addClass('sticky');
    } else {
        $('#map-container').removeClass('sticky');
    }
}
// Re-evaluate the sticky map position on every scroll event.
$(window).scroll(function(){
    stickydiv();
});
//load data
var dataCall = $.ajax({
    type: 'GET',
    url: 'data/gap_data.json', //https://proxy.hxlstandard.org/data.json?merge-keys01=%23adm2%2Bcode&strip-headers=on&filter01=merge&merge-url01=https%3A//docs.google.com/spreadsheets/d/1klRixK82iRk1JnDOpAqKrry4VQiFcTGrfFZWr9ih-Z8/pub%3Fgid%3D777123392%26single%3Dtrue%26output%3Dcsv&url=https%3A//docs.google.com/spreadsheets/d/1OlxhQ_ejRKNvohbnfJ7yJPKD6U6pXcPPfsFnwBbP2nc/pub%3Fgid%3D0%26single%3Dtrue%26output%3Dcsv&filter02=select&select-query02-01=%23indicator%2Bcategory%21%3D1&merge-tags01=%23affected%2Bdeaths%2C%23affected%2Bmissing%2C%23affected%2Bwounded%2C%23affected%2Binshelter%2C%23affected%2Bbuildings%2Bdestroyed%2C%23affected%2Bbuildings%2Bpartially%2C%23affected%2Bschools
    dataType: 'json',
});
//load geometry (TopoJSON of the admin boundaries)
var geomCall = $.ajax({
    type: 'GET',
    url: 'data/geom.json',
    dataType: 'json',
});
//when both ready construct dashboard
// NOTE(review): geom and overview are assigned without var/let, so they
// become implicit globals -- presumably intentional for console debugging,
// but worth confirming.
$.when(dataCall, geomCall).then(function(dataArgs,geomArgs){
    geom = topojson.feature(geomArgs[0],geomArgs[0].objects.geom);
    overview = hxlProxyToJSON(dataArgs[0],false);
    generateDashboard(overview,geom);
});
| 92a52a4b723cddd10d48831a83651406fc811364 | [
"JavaScript"
] | 1 | JavaScript | jannisvisser/Priority-Index-Haima-Accuracy | 503c8a87e5b17ffe70910f132b46efb48c695c1e | e722a14878975cd7e381edaca1dcd07119055f83 |
refs/heads/master | <file_sep>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pandas
def path():
    '''Helper function to get data path within project.

    Resolves the current working directory and derives the project's
    ``data`` directory from it. Note the checks are plain substring
    matches on the full path string.

    Returns
    -------
    path_to_data : string
        POSIX path to the data directory, with a trailing slash.
    '''
    from pathlib import Path
    current = Path('.').resolve()
    current_string = current.absolute().as_posix()
    if 'src' in current_string:
        # Running from inside src/ -> data/ is a sibling directory.
        current = current.parent / 'data'
    elif 'data' in current_string:
        # Already inside the data directory; use it as-is.
        pass
    else:
        # Project root (or elsewhere): descend into data/.
        current = current / 'data'
    return f'{current.absolute().as_posix()}/'
def data(file='KumonTaskData.csv'):
    '''Helper function to load data.

    Reads the given CSV from the project data directory (see ``path()``),
    parsing the "Masked Words" column as a list of comma-separated,
    whitespace-stripped strings, and drops rows with missing values.

    Returns
    -------
    df: csv file read in
    '''
    # Converter: "a, b" -> ["a", "b"] for the masked-word column.
    split = lambda text: [word.strip() for word in text.split(',')]
return pandas.read_csv(path()+file, converters={'Masked Words': split}).dropna()<file_sep># nyu-ds1016
Group Project for NYU-DSGA 1016, Computational Cognitive Modeling (2020 Spring Semester)
## Group Members
* Wangrui (<NAME> | wh916
* Gabriella (<NAME> | gh1408
* <NAME> | sr5388
## Topic: Neural Networks - Language
_Selected from [CCM Project Site](https://brendenlake.github.io/CCM-site/final_project_ideas.html)_
>Exploring lexical and grammatical structure in BERT or GPT2. What do powerful pre-trained models learn about lexical and grammatical structure? Explore the learned representations of a state-of-the-art language model (BERT, GPT2, etc.) in systematic ways, and discuss the learned representation in relation to how children may acquire this structure through learning.
## Approved Proposal
>Exploring connections between BERT and child language acquisition. Using Kumon center data, we plan to examine in context masked word prediction using the hugging-face pre-trained BERT model. While we do not know what specific mistakes were made by Kumon students, we know which exercises they performed well and poorly at. We plan to divide these exercises into 3 groups: 1) students do perfectly, 2) half of the students do poorly, 3) most students do poorly. We want to know if BERT performs perfectly on all exercises or, if it makes mistakes, what kinds of mistakes they are.
>
>Additionally, we would love to explore the performances of each BERT layer when performing these masked-word predictions. We are currently looking at how to analyze BERTs attention at specific hidden-layers and if there are other metrics of performance we should consider without having to retrain BERT
## Timeline
_Adapted from [CCM Site](https://brendenlake.github.io/CCM-site/#final-project)_
- [x] (26-Mar) Initial Meeting
- [x] (02-Apr) Proposal Review
- [x] (06-Apr) Proposal Submission [complete](#approved-proposal)
* The final project proposal is due Monday, April 6 (one half page written). Please submit via email to <EMAIL> with the file name lastname1-lastname2-lastname3-ccm-proposal.pdf.
* https://piazza.com/class/k5cskqm4l1d4ei?cid=87
- [x] (30-Apr) [Demo Notebook](src/demo.ipynb)
- [x] (01-May) Friday 1PM EST [Zoom meeting](https://nyu.zoom.us/j/5079167320) with [Prof Cournane](https://wp.nyu.edu/cournane/)
- [x] (12-May) Review
- [x] (13-May) Final Project Due
* The final project is due Wednesday 5/13. Please submit via email to <EMAIL> with the file name lastname1-lastname2-lastname3-ccm-final.pdf.
* https://piazza.com/class/k5cskqm4l1d4ei?cid=197
## Dataset sources:
* Kumon
## Code Dependencies:
- python3, pandas, & numpy
- [pytorch](https://github.com/pytorch/pytorch)
- [transformers](https://github.com/huggingface/transformers)
## Working Documents
* [Report](Hou-Hurtado-Roy.pdf)
* [Google Drive](https://drive.google.com/drive/folders/16DHSToewAcIkIytzBF9Lzkr-OV284a1c)
## Road Map
- [x] set up git
- [x] update proposal
- [x] lit review draft
- [x] clean dataset
- [x] bert masked token model [demo](./src/demo.ipynb)
- [x] select tasks
- [x] compare task performance
## References:
* [Transformers Quickstart](https://huggingface.co/transformers/quickstart.html)
* [<NAME> - intpret_bert upstream repo](https://ganeshjawahar.github.io/)
* [Ganesh speaking about interpret_bert](https://vimeo.com/384961703)
* [Children First Language Acquisition At Age 1-3 Years Old In Balata](http://www.iosrjournals.org/iosr-jhss/papers/Vol20-issue8/Version-5/F020855157.pdf)
* [Caregivers' Role in Child's Language Acquisition](https://dspace.univ-adrar.dz/jspui/handle/123456789/2476)
* [The Acquisition of Syntax](https://linguistics.ucla.edu/people/hyams/28%20Hyams-Orfitelli.final.pdf)
* [Studies in Child Language: An Anthropological View: A First Language: The Early Stages](https://www.researchgate.net/publication/249422499_Studies_in_Child_Language_An_Anthropological_View_A_First_Language_The_Early_Stages_Roger_Brown_Language_Acquisition_and_Communicative_Choice_Susan_Ervin-Tripp_Studies_of_Child_Language_Development_Ch)
* [What do you Learn from Context? Probing for Sentence Structure in Contextualized Word Representations](https://openreview.net/pdf?id=SJzSgnRcKX)
* [Stanford WordBank Dataset](http://wordbank.stanford.edu/analyses)
* [A Structural Probe for Finding Syntax in Word Representations](https://nlp.stanford.edu/pubs/hewitt2019structural.pdf)
<file_sep>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import load
import torch
import pandas
from transformers import BertTokenizer, BertModel, BertForMaskedLM
# OPTIONAL: if you want to have more information on what's happening under the hood, activate the logger as follows
import logging
class MaskedTokenBert:
    '''Class for predicting masked tokens.

    Wraps a pre-trained BERT model (via huggingface transformers) to predict
    [MASK] tokens in Kumon exercise sentences and score the predictions.
    Typical flow: tokenize_data() -> model() -> predict() -> score(); later
    stages lazily trigger earlier ones via try/except probes on the
    attributes they need.
    '''
    def __init__(self, bert='bert-base-uncased', file='KumonTaskData.csv', debug=False):
        # debug=True is meant to raise verbosity to INFO.
        # NOTE(review): logging.basicConfig only configures the root logger
        # once per process, so the second call below is likely a no-op.
        logging.basicConfig(level=logging.ERROR)
        if debug:
            logging.basicConfig(level=logging.INFO)
        # Load pre-trained model tokenizer (vocabulary)
        self.bert = bert
        self.file = file
        self.tokenizer = BertTokenizer.from_pretrained(self.bert)
    def _load(self, file=None):
        # Loads list of questions from dataset; self.data holds task sentences.
        # NOTE(review): load.data(file) receives the raw argument, so when
        # file is None the default inside load.data is used even if self.file
        # was constructed with a different name -- confirm this is intended.
        self.file = file if file else self.file
        self.df = load.data(file)
        self.data = self.df.Task.values
        self.size = self.data.size
    def _load_line(self, line):
        # Use a single ad-hoc sentence as the dataset instead of the CSV.
        self.data = [line]
        self.size = 1
    def tokenize_data(self, text=None):
        '''Tokenizes self.data

        Builds tokens (self.t_data), vocabulary ids (self.t_idx), segment
        ids, [MASK] positions (self.masks) and the input tensors.
        '''
        # Check that proper data is loaded: if self.data is unset, the
        # AttributeError lands in the except and triggers a load.
        # NOTE(review): the bare except also swallows unrelated errors, and
        # when self.data already exists a supplied `text` is silently
        # ignored (the condition never raises, so _load_line never runs).
        try:
            if self.data and not text:
                pass
        except:
            self._load_line(text) if text else self._load(self.file)
        # Tokenize input
        self.t_data = [self.tokenizer.tokenize(i) for i in self.data]
        # Convert token to vocabulary indices
        self.t_idx = [self.tokenizer.convert_tokens_to_ids(i) for i in self.t_data]
        # Segment ids: 0 up to and including the first [SEP], 1 afterwards;
        # masks records the position of every [MASK] token per sentence.
        self.segment_ids = []
        self.masks = []
        for i in self.t_data:
            length = len(i)
            first_sep = i.index('[SEP]')+1
            self.segment_ids.append([0]*(first_sep) + [1]*(length-first_sep))
            self.masks.append([x for x, m in enumerate(i) if m == '[MASK]'])
        # Convert inputs to PyTorch tensors
        if torch.cuda.is_available():
            # If you have a GPU, put everything on cuda
            self.token_tensors = [torch.tensor([i]).to('cuda') for i in self.t_idx]
            self.segment_tensors = [torch.tensor([i]).to('cuda') for i in self.segment_ids]
        else:
            self.token_tensors = [torch.tensor([i]) for i in self.t_idx]
            self.segment_tensors = [torch.tensor([i]) for i in self.segment_ids]
    def model(self):
        '''Applies pre-trained model to self.data

        Fills self.encoded_layers (last hidden states) and self.predictions
        (masked-LM logits, one tensor per sentence).
        '''
        # Load pre-trained model (weights)
        # NOTE(review): this assignment rebinds the *instance* attribute
        # `model`, shadowing this method. After the first call, self.model is
        # the BertModel module, so predict()'s fallback `self.model()` would
        # invoke the network with no inputs instead of re-running this method.
        self.model = BertModel.from_pretrained(self.bert)
        self.mask_model = BertForMaskedLM.from_pretrained(self.bert)
        # Set the model in evaluation mode to deactivate the DropOut modules
        # This is IMPORTANT to have reproducible results during evaluation!
        self.model.eval()
        self.mask_model.eval()
        # If you have a GPU, put everything on cuda
        if torch.cuda.is_available():
            self.model.to('cuda')
            self.mask_model.to('cuda')
        # Check that data has been tokenized; AttributeError on self.t_data
        # triggers tokenization (bare except -- see note in tokenize_data).
        try:
            if self.t_data:
                pass
        except:
            self.tokenize_data()
        # Predict hidden states features for each layer
        self.outputs = []
        self.encoded_layers = []
        with torch.no_grad():
            # See the models docstrings for the detail of the inputs
            for i in range(self.size):
                self.outputs.append(self.model(input_ids=self.token_tensors[i],
                    token_type_ids=self.segment_tensors[i]))
                # First element is the hidden state of the last Bert model layer
                self.encoded_layers.append(self.outputs[i][0])
        # Predict best word for each masked token
        self.predictions = []
        with torch.no_grad():
            for i in range(self.size):
                # First element is all masked predictions
                self.predictions.append(self.mask_model(input_ids=self.token_tensors[i],
                    token_type_ids=self.segment_tensors[i])[0])
        # We have encoded our input sequence in a FloatTensor of shape (batch size, sequence length, model hidden dimension)
        assert tuple(self.encoded_layers[0].shape) == (1, len(self.t_idx[0]), self.model.config.hidden_size)
    def _decode(self, i):
        # Map a single vocabulary id back to its token string.
        return self.tokenizer.convert_ids_to_tokens([i])[0]
    def predict(self, top_n=5):
        '''Predicts the top_n (default=5) candidates for each [MASK]

        Returns a nested list: per sentence, per mask, the top_n token
        strings (also stored on self.p_items; ids in self.p_idx, logits in
        self.p_rank).
        '''
        # Check we already have predictions; else generate model
        # (bare except + method-shadowing caveat -- see note in model()).
        try:
            if self.predictions:
                pass
        except:
            self.model()
        # sort over model predictions and store indices, logits, and tokens
        self.p_idx = []
        self.p_rank = []
        self.p_items = []
        for i in range(self.size):
            idx = []
            rank = []
            items = []
            for m in self.masks[i]:
                # argsort of negated logits = descending order; keep top_n
                pred_idx = torch.argsort(-self.predictions[i][0, m])[:top_n]
                idx.append([x.item() for x in pred_idx])
                rank.append([self.predictions[i][0,m][x] for x in pred_idx])
                items.append([self._decode(x.item()) for x in pred_idx])
            self.p_idx.append(idx)
            self.p_rank.append(rank)
            self.p_items.append(items)
        return self.p_items
    def score(self, show_wrong=False):
        '''Returns list of bert's scores per task; show_wrong to print bad predictions

        Pages are scored with the Kumon rubric (0 wrong tasks -> 100.0,
        1 -> 80.0, 2 -> 70.0, more -> 69.0); per-task scores and BERT's top
        predictions are written back into self.df as "Bert Score" and
        "Bert Masks" columns.
        '''
        # Print a diagnostic line for a wrong prediction.
        # NOTE(review): the first f-string opens '(' without closing it --
        # cosmetic only, but probably unintended.
        def print_wrong(task, mask):
            actual = truth[task][mask]
            pred = preds[task][mask]
            print(f'({task},{mask}: actual={actual} bert={pred[0]}')
            if actual in pred:
                print(f"{actual} was bert's #{pred.index(actual)+1} choice")
        # Custom Kumon scoring metric: wrong-task count on a page -> score
        def kumon_score(num_wrong):
            d = {0:100.0, 1:80.0, 2:70.0}
            return d.get(num_wrong, 69.0)
        truth = self.df['Masked Words']
        preds = self.p_items
        right = []
        wrong = []
        self.page_scores = {}
        bert_predictions = []
        current_page = None
        wrong_on_page = 0
        # Iterate over each task
        for task in range(self.size):
            # Kumon scores are grouped by page; so here we track the current page
            if current_page != self.df['Workbook Page'][task]:
                if current_page:
                    self.page_scores.update({current_page: kumon_score(wrong_on_page)})
                current_page = self.df['Workbook Page'][task]
                wrong_on_page = 0
            wrong_on_task = 0
            bert_masks = []
            # Each task may have multiple masks; track how many are wrong
            for mask in range(len(truth[task])):
                try:
                    bert_masks.append(preds[task][mask][0])
                    if truth[task][mask] == preds[task][mask][0]:
                        right.append((task,mask))
                    else:
                        wrong.append((task,mask))
                        wrong_on_task += 1
                        if show_wrong:
                            print_wrong(task,mask)
                except:
                    # NOTE(review): bare except hides the real failure cause
                    print(f"{task},{mask} broke... moving on")
                    continue
            # If any masks are wrong - bert failed this task
            bert_predictions.append(bert_masks)
            if wrong_on_task:
                wrong_on_page += 1
        # Flush the score for the final page.
        self.page_scores.update({current_page: kumon_score(wrong_on_page)})
        # Putting values back into pandas for easy comparison/saving to file
        scores = []
        for task in range(self.size):
            scores.append(self.page_scores[self.df['Workbook Page'][task]])
        self.df.insert(3, "Bert Score", pandas.Series(scores))
        self.df.insert(6, "Bert Masks", pandas.Series(bert_predictions))
        print(f'Bert got {len(right)}/{len(right)+len(wrong)} correct.')
        return scores
| 60cd476b7ea0f708fda35622d6d8eabcb49a42bc | [
"Markdown",
"Python"
] | 3 | Python | sroy2/ccm-project | 77032e0d26fd210103ec206392dc6a23ef2f21f8 | 032752a62b2076821502ca765723d2eb318a7eb3 |
refs/heads/master | <file_sep># README
This is a "toy" app. There is absolutely zero security thinking in place.
## Screenshot

## Introduction
The following instructions should be done in top-down order:
- Configuration
- Build
- Drop DB (only if 'restarting')
- Create DB
- Launch
- Browse
- Add ASINs
## TL;DR (Quick) launch
- Run `./launch.sh`, follow instructions.
- Requires "Configuration"
## Configuration
Amazon Credentials
You must supply your own AMAZON_ACCESS_KEY_ID, AMAZON_ASSOCIATE_TAG and
AMAZON_SECRET_ACCESS_KEY into a file named `web-env-variables.env`. You may
copy the template `web-env-variables-template.env`, and replace all the
`placeholder` values.
Start at:
https://affiliate-program.amazon.com/home/account
Create API credentials at:
https://affiliate-program.amazon.com/assoc_credentials/home
Detailed official Documentation is at:
https://docs.aws.amazon.com/AWSECommerceService/latest/DG/Welcome.html
## Build the system
`docker-compose build`
## Drop the database
Stop the system:
`docker-compose down`
Drop the database files:
`rm -r -f ./tmp/db`
## Create the database
Launch:
`docker-compose up -d`
Wait until all services are fully initialized (~15 seconds), and then:
`docker-compose run --rm -w /myapp web rake db:create`
`docker-compose run --rm -w /myapp web rails db:migrate`
## Launch
`docker-compose up -d`
## Browse
http://localhost:3000/ is the default URL of this app.
## Web App Usage
1. Supply ASINs using the "Fetch Tool" textarea,
and submit it.
2. The table will be refreshed on success. On error,
it will not be refreshed.
## Sample ASIN strings
The first one is the baby product.
```
B002QYW8LW
B01N0X3NL5
B018LJZUGK
B079S8G3HC
B00JM5GW10
B07BBMV8MY
B01N9QVIRV
B01N1037CV
B010TQY7A8
B01MRG7T0D
B00000IGGJ
B00005JNBQ
B00006KGC0
B00006KGC2
B0001LJBTE
B00020HALU
B000001DQI
B000003BGP
B00000JWOJ
B000JKXBKC
B000MZ5DD2
B0777GLYJ5
B06XCM9LJ4
B01CB6YQ8M
B00WQEPSY6
B00A85EMVK
```
<file_sep>#!/bin/bash
# Abort early unless the Amazon credentials file has been created.
if [ ! -f web-env-variables.env ]; then
    echo "You must configure your own AMAZON ACCESS KEYS in [web-env-variables.env]!"
    exit 1
fi

docker-compose build
docker-compose up -d

# Wait ~30s for the services to come up, announcing progress exactly as
# before: 15s + 5s + 5s, then a one-second countdown for the last five.
echo "Sleep 30 seconds..."
sleep 15
echo "Sleep 15 seconds..."
sleep 5
echo "Sleep 10 seconds..."
sleep 5
for remaining in 5 4 3 2 1; do
    echo "Sleep ${remaining} seconds..."
    sleep 1
done

echo "If [create database] fails, just run [./launch.sh] again. Likely didn't sleep long enough."
docker-compose run --rm -w /myapp web rake db:create
docker-compose run --rm -w /myapp web rails db:migrate
docker-compose up -d

cat <<EOF
Sample Amazon ASINs (first one is the baby product):
B002QYW8LW
B01N0X3NL5
B018LJZUGK
B079S8G3HC
B00JM5GW10
B07BBMV8MY
B01N9QVIRV
B01N1037CV
B010TQY7A8
B01MRG7T0D
B00000IGGJ
B00005JNBQ
B00006KGC0
B00006KGC2
B0001LJBTE
B00020HALU
B000001DQI
B000003BGP
B00000JWOJ
B000JKXBKC
B000MZ5DD2
B0777GLYJ5
B06XCM9LJ4
B01CB6YQ8M
B00WQEPSY6
B00A85EMVK
EOF<file_sep>require 'test_helper'
require 'ostruct'
class ProductTest < ActiveSupport::TestCase
  # Validation: a Product without an ASIN must fail to save.
  test "should not be savable without data" do
    product = Product.new
    assert_not product.save, "Saved product without ASIN"
  end

  # Round-trip: create_from_fetch then update_from_fetch, using OpenStruct
  # as a stand-in for an Amazon API response item.
  test "create and update from fetch" do
    data = { :asin => "test" } # assume that Amazon can provide no other data besides ASIN
    response = OpenStruct.new(data)
    product = Product.create_from_fetch(response)
    assert product
    data = { :asin => "test", category: "test" } # assume that Amazon can provide no other data besides ASIN
    response = OpenStruct.new(data)
    assert Product.update_from_fetch(product, response)
  end
end
<file_sep>require "rapa"
class Product < ApplicationRecord
  # ASIN should be not-nil.
  validates :asin, presence: true

  # Works on POST /products/fetch_batch
  #
  # Fetches item attributes + sales rank for a batch of ASINs from the
  # Amazon Product Advertising API and upserts a Product row per item.
  # Configuration: See README.md for "how to configure Amazon Access Keys".
  def self.fetch_batch(asins)
    client = Rapa::Client.new(
      access_key_id: ENV["AMAZON_ACCESS_KEY_ID"],
      associate_tag: ENV["AMAZON_ASSOCIATE_TAG"],
      secret_access_key: ENV["AMAZON_SECRET_ACCESS_KEY"],
    )
    responses = client.list_items(
      item_ids: asins,
      id_type: "ASIN",
      domain: "com",
      response_groups: ["ItemAttributes", "SalesRank"]
    )
    responses.each do |item|
      product = Product.find_by(asin: item.asin)
      if product
        update_from_fetch(product, item)
      else
        create_from_fetch(item)
      end
    end
  end

  # A unit-testable "create" method, for the context of a "fetch_batch"
  # operation. nil-values are OK for all fields except ASIN.
  # Returns the created record.
  def self.create_from_fetch(response)
    Product.create(fetched_attributes(response).merge(asin: response.asin))
  end

  # A unit-testable "update" method, for the context of a "fetch_batch"
  # operation. nil-values are OK for all fields! Returns the #save result.
  def self.update_from_fetch(product, response)
    product.assign_attributes(fetched_attributes(response))
    product.save
  end

  # Shared attribute mapping for both create and update, replacing the
  # previously duplicated nil-guard ternaries. Dimension fields on the
  # response (item_*/package_*) may be nil; safe navigation keeps them nil.
  def self.fetched_attributes(response)
    attrs = {
      category: response.product_group,
      rank: response.sales_rank
    }
    %w[item_width item_height item_length
       package_width package_height package_length].each do |field|
      measurement = response.public_send(field)
      attrs[field.to_sym] = measurement&.value
      attrs[:"#{field}_units"] = measurement&.units
    end
    attrs
  end
  private_class_method :fetched_attributes
end
<file_sep>class CreateProducts < ActiveRecord::Migration[5.1]
  # Creates the products table: one row per Amazon product (identified by
  # its ASIN), storing category, sales rank, and item/package dimensions
  # as decimal value + free-text unit pairs.
  def change
    create_table :products do |t|
      t.string :asin, null: false        # Amazon Standard Identification Number
      t.text :category                   # Amazon "product group"
      t.bigint :rank                     # Amazon sales rank
      t.decimal :item_width
      t.decimal :item_height
      t.decimal :item_length
      t.text :item_width_units
      t.text :item_height_units
      t.text :item_length_units
      t.decimal :package_width
      t.decimal :package_height
      t.decimal :package_length
      t.text :package_width_units
      t.text :package_height_units
      t.text :package_length_units
      t.timestamps
    end
    # ASINs are the natural key; the unique index enforces one row per ASIN.
    add_index :products, :asin, unique: true
  end
end
<file_sep>FROM ruby:alpine
# build-base + postgresql-dev: native gem compilation (pg);
# nodejs + yarn: JS toolchain; tzdata: ActiveSupport time zones.
RUN apk --no-cache add build-base postgresql-dev nodejs tzdata yarn
RUN mkdir /myapp
WORKDIR /myapp
# Copy the gem manifests before the app source so the `bundle install`
# layer is cached independently of application-code changes.
COPY Gemfile /myapp/Gemfile
COPY Gemfile.lock /myapp/Gemfile.lock
RUN bundle install
# Set up the JS dependencies used by the front-end (React via Sprockets/JSX).
RUN yarn init --yes
RUN yarn add react react-dom
COPY . /myapp
<file_sep>Rails.application.routes.draw do
  # Products CRUD + "fetch_batch" action.
  # fetch_batch operates on a whole batch of ASINs rather than a single
  # record, so it is declared on the collection (POST /products/fetch_batch).
  resources :products do
    collection do
      post 'fetch_batch'
    end
  end
  # The "main", "root", "/" page where our React app lives.
  get 'welcome/index'
  root :to => 'welcome#index'
end
<file_sep>class AsinApp extends React.Component {
constructor(props) {
super(props);
this.handleProductsLoad = this.handleProductsLoad.bind(this);
this.refresh = this.refresh.bind(this);
this.state = {products: []};
}
handleProductsLoad(e){
let products = JSON.parse(e.currentTarget.responseText);
this.setState({ products: products });
}
componentDidMount(){
this.refresh();
}
refresh(){
let oReq = new XMLHttpRequest();
oReq.addEventListener("load", this.handleProductsLoad);
oReq.open("GET", "/products.json");
oReq.send();
}
render() {
return (
<div>
<h1>Amazon ASIN App</h1>
<hr />
<h2>Products Database</h2>
<ProductsTable products={this.state.products} />
<hr />
<h2>Fetch Tool</h2>
<FetchTool refresh={this.refresh} />
<hr />
</div>
);
}
}
// Read-only table of all products. Two header rows: the first groups the
// six dimension columns under "Item" and "Package"; the second labels each
// column. Rows are keyed by array index, which is fragile if the list is
// ever reordered -- NOTE(review): consider keying by ASIN.
class ProductsTable extends React.Component {
  render() {
    return (
      <table border="1">
        <thead>
          <tr>
            <th colSpan="3"></th>
            <th colSpan="3">Item</th>
            <th colSpan="3">Package</th>
          </tr>
          <tr>
            <th>ASIN</th>
            <th>Product Group (Category)</th>
            <th>Sales Rank</th>
            <th>Width</th>
            <th>Height</th>
            <th>Length</th>
            <th>Width</th>
            <th>Height</th>
            <th>Length</th>
          </tr>
        </thead>
        <tbody>
          {
            this.props.products.map(function(p, i){
              return (<ProductRow key={i} product={p} />);
            })
          }
        </tbody>
      </table>
    )
  }
}
class ProductRow extends React.Component {
constructor(props) {
super(props);
this.state = {
product: props.product
}
}
render(){
return (
<tr>
<td>
<a href={"https://www.amazon.com/dp/"+this.state.product.asin}>
{this.state.product.asin}
</a>
</td>
<td>{this.state.product.category}</td>
<td>{this.state.product.rank}</td>
<td>{this.state.product.item_width} {this.state.product.item_width_units}</td>
<td>{this.state.product.item_height} {this.state.product.item_height_units}</td>
<td>{this.state.product.item_length} {this.state.product.item_length_units}</td>
<td>{this.state.product.package_width} {this.state.product.package_width_units}</td>
<td>{this.state.product.package_height} {this.state.product.package_height_units}</td>
<td>{this.state.product.package_length} {this.state.product.package_length_units}</td>
</tr>
)
}
}
class FetchTool extends React.Component {
constructor(props) {
super(props);
this.state = {asin_list: '', status: '...'};
this.handleChange = this.handleChange.bind(this);
this.doFetch = this.doFetch.bind(this);
}
doFetch(e) {
e.preventDefault();
// get non-empty lines, as array
let asin_array = this.state.asin_list.split("\n").filter(word => word.length > 0);
let refresh = this.props.refresh;
let component = this;
let now = new Date();
component.setState( {status: 'Processing @ ' + now} );
fetch('/products/fetch_batch.json', {
method: 'POST', // or 'PUT'
body: JSON.stringify({asins: asin_array}),
headers: new Headers({
'Content-Type': 'application/json'
})
}).then(function(res){
res.json()
})
.catch(function(error) {
let now = new Date();
component.setState( {status: 'Error @ ' + now} );
console.error('Error:', error)
})
.then(function(response) {
let now = new Date();
component.setState( {status: 'Success @ ' + now} );
refresh();
});
}
handleChange(e) {
this.setState({asin_list: e.target.value});
}
render() {
return (
<div>
<form>
<label htmlFor="asin_list">List of ASINs, one per line.</label>
<textarea name="asin_list" id="asin_list" value={this.state.asin_list} onChange={this.handleChange}></textarea><br />
<p>* Since this is just a "toy" app, please limit the number of items to a small amount</p>
<p>Status: {this.state.status}</p>
<button onClick={this.doFetch}>Submit</button>
</form>
</div>
)
}
}
ReactDOM.render(
<AsinApp name="Main" />,
document.getElementById('asin_app')
);<file_sep>require 'test_helper'
# Integration tests covering the standard scaffold CRUD actions plus the
# custom fetch_batch endpoint. Fixture :one supplies a fully-populated
# product record.
class ProductsControllerTest < ActionDispatch::IntegrationTest
  setup do
    @product = products(:one)
  end
  # fetch_batch should accept an XHR JSON POST and acknowledge with
  # {"status":"ok"} regardless of whether the ASINs resolve.
  test "fetch tool post" do
    post "/products/fetch_batch", xhr: true, params: {asins: ["foo"]}
    assert_equal "{\"status\":\"ok\"}", @response.body
    assert_equal "application/json", @response.content_type
  end
  test "should get index" do
    get products_url
    assert_response :success
  end
  test "should get new" do
    get new_product_url
    assert_response :success
  end
  test "should create product" do
    assert_difference('Product.count') do
      # ASINs carry a unique index, so a random one avoids fixture collisions.
      random_asin = (0...12).map{ ('a'..'z').to_a[rand(26)] }.join
      post products_url, params: { product: { asin: random_asin, category: @product.category, item_height: @product.item_height, item_height_units: @product.item_height_units, item_length: @product.item_length, item_length_units: @product.item_length_units, item_width: @product.item_width, item_width_units: @product.item_width_units, package_height: @product.package_height, package_height_units: @product.package_height_units, package_length: @product.package_length, package_length_units: @product.package_length_units, package_width: @product.package_width, package_width_units: @product.package_width_units, rank: @product.rank } }
    end
    assert_redirected_to product_url(Product.last)
  end
  test "should show product" do
    get product_url(@product)
    assert_response :success
  end
  test "should get edit" do
    get edit_product_url(@product)
    assert_response :success
  end
  test "should update product" do
    patch product_url(@product), params: { product: { asin: @product.asin, category: @product.category, item_height: @product.item_height, item_height_units: @product.item_height_units, item_length: @product.item_length, item_length_units: @product.item_length_units, item_width: @product.item_width, item_width_units: @product.item_width_units, package_height: @product.package_height, package_height_units: @product.package_height_units, package_length: @product.package_length, package_length_units: @product.package_length_units, package_width: @product.package_width, package_width_units: @product.package_width_units, rank: @product.rank } }
    assert_redirected_to product_url(@product)
  end
  test "should destroy product" do
    assert_difference('Product.count', -1) do
      delete product_url(@product)
    end
    assert_redirected_to products_url
  end
end
| 9193f4fad2b391c45b1b435052c8fe668c10f5b6 | [
"Ruby",
"Markdown",
"JavaScript",
"Dockerfile",
"Shell"
] | 9 | Markdown | starlocke/amazon-asin-toy | 6861e4a8f2960c515e8d6228663666b30f83d8ef | d77bb2424ee2af9e440cee77d6a1699a2ae487a5 |
refs/heads/master | <file_sep># projectfirewall_testing
Scripts for testing with python
---------------------------------
Guide to repository
=====================
#########################################################################################################################################
## __Works only for linux__
## __Needs access to a DNS server__
## __Along with internet access__
#########################################################################################################################################
* 1. apt-get install git
* 2. git clone https://github.com/Boxx1483/projectfirewall_testing.git
* 3. cd projectfirewall_testing
* 4. chmod a+x bash && ./bash
* 5. Everything will be installed and launched.
<file_sep>Script must be run as root
<file_sep>import socket, nmap, os
# Discover this machine's outbound IP address: connecting a UDP socket
# toward a public address makes the OS pick the local interface/address
# that would be used, without actually sending any packet.
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
ip = s.getsockname()[0]
s.close()
# This script will test our network
# Global_Constants
# First two characters of the IP, used below to guess which side of the
# firewall we are on ('10' = inside, '19' = outside).
# NOTE(review): a two-character prefix cannot distinguish 192.x from 198.x,
# or 10.x from 100.x -- confirm the lab's address plan before relying on it.
ipclass = ip[0:2]
def main():
    """Dispatch to the inside or outside menu based on the IP prefix.

    Exits with a diagnostic when the address matches neither network.
    """
    if ipclass == '10':
        menu_inside()
        return
    if ipclass == '19':
        menu_outside()
        return
    print('You are not connected correctly')
    exit()
def print_menu():
    """Print the shared four-option test menu, framed by dashed rules."""
    menu_lines = [
        '-' * 25,
        '1. Test FW',
        '2. Test Webserver',
        '3. Test Work Station',
        '4. To exit program',
        '-' * 25,
    ]
    # A single joined print emits exactly the same output as one print()
    # per line (each line followed by a newline).
    print('\n'.join(menu_lines))
    return
def menu_inside():
    """Interactive menu used when running from inside the network (10.x).

    Shows the menu, runs the chosen test, then re-displays itself.
    """
    # Bug fix / consistency: clear the screen *before* printing the header.
    # The original printed 'Inside Menu' first and then cleared the screen,
    # erasing the title immediately; menu_outside already clears first.
    os.system("clear")
    print('Inside Menu')
    # calling function that prints the menu
    print_menu()
    # asking for user input menu option
    user_inside = str(input('Enter your choice:'))
    if user_inside == '1':
        testfw_in()
    elif user_inside == '2':
        testweb_in()
    elif user_inside == '3':
        testws_in()
    elif user_inside == '4':
        exit()
    else:
        print('kys')
    # Loop back to the menu after any action (recursion mirrors the
    # original structure; depth is bounded by user interaction).
    menu_inside()
def menu_outside():
    """Interactive menu used when running from outside the network ('19' prefix)."""
    # clearing screen when reloading menu
    os.system("clear")
    # printing the menu options
    print('Outside Menu')
    print_menu()
    # asking for user input menu option
    # NOTE(review): the variable is named user_inside even on the outside
    # menu (copy/paste); harmless, but a rename would aid readability.
    user_inside = str(input('Enter your choice:'))
    if user_inside == '1':
        testfw_out()
    elif user_inside == '2':
        testweb_out()
    elif user_inside == '3':
        testws_out()
    elif user_inside == '4':
        exit()
    else:
        print('kys')
    # Re-display the menu after any action.
    menu_outside()
def testfw_in():
    """Scan the firewall from inside the network (ports 1-123) and ping it.

    Prints each scanned host with its state and per-port open/closed status,
    then an up/down ping summary, and finally waits for Enter so the output
    stays visible before the menu clears the screen.
    """
    devicename = str(input('Enter IP of Firewall:'))
    nm = nmap.PortScanner()
    nm.scan(devicename, '1-123')
    print(nm.all_hosts())
    for host in nm.all_hosts():
        print('-' * 25)
        print('Host : %s (%s)' % (host, nm[host].hostname()))
        print('State : %s' % nm[host].state())
        for proto in nm[host].all_protocols():
            print('-' * 10)
            print('Protocol : %s' % proto)
            lport = nm[host][proto].keys()
            for port in lport:
                print('port : %s\tstate : %s' % (port, nm[host][proto][port]['state']))
    if ping_check(devicename):
        print(devicename, 'Is up')
    else:
        print(devicename, 'Is down')
    input()  # pause until Enter
def testweb_in():
    """Ping the web server from inside the network and report up/down.

    NOTE(review): despite the name this only pings; no HTTP-level check.
    """
    hostname = str(input('Enter IP of Webserver:'))
    if ping_check(hostname):
        print(hostname, 'Is up')
    else:
        print(hostname, 'Is down')
    input()  # pause so the result stays visible before the menu clears
def testws_in():
    """Ping the work station from inside the network and report up/down."""
    # Bug fix: the prompt said 'Enter IP of Webserver:' (copy/paste from
    # testweb_in) even though this option tests the work station.
    hostname = str(input('Enter IP of Work Station:'))
    if ping_check(hostname):
        print(hostname, 'Is up')
    else:
        print(hostname, 'Is down')
    input()  # pause so the result stays visible before the menu clears
def testfw_out():
    """Scan the firewall from outside the network (ports 1-512) and ping it.

    Same report format as testfw_in, but with the wider 1-512 port range.
    """
    devicename = str(input('Enter IP of Firewall:'))
    nm = nmap.PortScanner()
    nm.scan(devicename, '1-512')
    print(nm.all_hosts())
    for host in nm.all_hosts():
        print('-' * 25)
        print('Host : %s (%s)' % (host, nm[host].hostname()))
        print('State : %s' % nm[host].state())
        for proto in nm[host].all_protocols():
            print('-' * 10)
            print('Protocol : %s' % proto)
            lport = nm[host][proto].keys()
            for port in lport:
                print('port : %s\tstate : %s' % (port, nm[host][proto][port]['state']))
    if ping_check(devicename):
        print(devicename, 'Is up')
    else:
        print(devicename, 'Is down')
    input()  # pause until Enter
def ping_check(host_or_ip):
    """Return True when `host_or_ip` answers a single ICMP echo request.

    Security fix: the original shelled out with
    os.system("ping " + host_or_ip + " ..."), which allowed shell injection
    through the user-typed host string (e.g. "x; rm -rf ~"). Passing an
    argument list to subprocess.run avoids the shell entirely. The contract
    is preserved: True on exit status 0, False otherwise (including a
    missing ping binary or a ping that hangs past the timeout).
    """
    import subprocess
    try:
        result = subprocess.run(
            ["ping", "-c", "1", host_or_ip],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
            timeout=5,
        )
    except (OSError, subprocess.TimeoutExpired):
        return False
    return result.returncode == 0
def testweb_out():
    """Ping the web server from outside the network and report up/down.

    NOTE(review): identical body to testweb_in; only the menu wiring differs.
    """
    hostname = str(input('Enter IP of Webserver:'))
    if ping_check(hostname):
        print(hostname, 'Is up')
    else:
        print(hostname, 'Is down')
    input()  # pause so the result stays visible before the menu clears
def testws_out():
    """Ping the work station from outside the network and report up/down."""
    # Bug fix: the prompt said 'Enter IP of Webserver:' (copy/paste from
    # testweb_out) even though this option tests the work station.
    hostname = str(input('Enter IP of Work Station:'))
    if ping_check(hostname):
        print(hostname, 'Is up')
    else:
        print(hostname, 'Is down')
    input()  # pause so the result stays visible before the menu clears
main()
| 8f94756218f5e7c459a1d16ec38df2aee3a7ad7a | [
"Markdown",
"Python"
] | 3 | Markdown | Boxx1483/projectfirewall_testing | ca501429f9b30f6b23f17ff16b86c673793ec04a | d1a40c7ef09d87f19c3a7a26acdcbb60c76e89ff |
refs/heads/master | <repo_name>zhangxiaov/zc<file_sep>/ZC/ZFile.c
//
// ZFile.c
// ZC
//
// Created by zhangxinwei on 16/1/28.
// Copyright © 2016年 张新伟. All rights reserved.
//
#include "ZFile.h"
#include <fcntl.h>
#include <unistd.h>
/*
 * Create (or resize) `fileName` as a sparse file of exactly `size` bytes.
 *
 * Bug fix: the original called truncate(2), which fails with ENOENT when
 * the file does not exist yet -- so a function named "create" could never
 * actually create a file. Opening with O_CREAT and then ftruncate(2)
 * handles both the new-file and existing-file cases. Errors are reported
 * to stderr with errno context (perror) instead of a bare printf to stdout.
 */
void zfileCreateEmptySpecifySize(char* fileName, int size) {
    int fd = open(fileName, O_RDWR | O_CREAT, 0644);
    if (fd < 0) {
        perror("zfileCreateEmptySpecifySize: open");
        return;
    }
    if (ftruncate(fd, size) != 0) {
        perror("zfileCreateEmptySpecifySize: ftruncate");
    }
    close(fd);
}
//
// ZFile.h
// ZC
//
// Created by zhangxinwei on 16/1/28.
// Copyright © 2016年 张新伟. All rights reserved.
//
// File I/O helpers. Throughout this header, `self` is the file name
// (path) the operation applies to.
#ifndef ZFile_h
#define ZFile_h
#include <stdio.h>
#include "ZC.h"
void zfileCreate(char* fileName);
// Read `len` bytes starting at `startPos`. (translated; original said "read len bytes")
char* zfileRead(char* self, int startPos, int len);
// Write the pointed-to value at `startPos`.
bool zfileWrite(char* self, int startPos, void* valuePtr);
// Append `size` bytes of string data to the file. (translated)
void zfileWriteStringSpecifySize(char* self, char* strval, int size);
// Write `size` bytes of string data at position `pos`. (translated)
void zfileWriteStringSpecifySizeWithPos(char* self, char* strval, int size, unsigned long pos);
// Append `size` bytes of an unsigned long (8-byte value). (translated)
void zfileWriteULongSpecifySize(char* self, unsigned long intval, int size);
// Write `size` bytes of an unsigned long at position `pos` (8-byte value). (translated)
void zfileWriteULongSpecifySizeWithPos(char* self, unsigned long intval, int size, unsigned long pos);
// File size in bytes; `self` is the file name. (translated from garbled comment)
unsigned long zfileSize(char* self);
// Does the file exist (in directory `dir`)? (translated from garbled comment)
bool zfileIsExist(char* self, char* dir);
// Append an unsigned long in binary form. (translated)
void zfileWriteInt(char* self, unsigned long v);
// Write an unsigned long in binary form at position `pos`. (translated)
void zfileWriteIntWithPos(char* self, unsigned long v, unsigned long pos);
void zfileDestory(char* self);
// Create a sparse ("hole") file of the given size. (translated)
void zfileCreateEmptySpecifySize(char* fileName, int size);
#endif /* ZFile_h */
<file_sep>/ZC/mmap_test.c
#include<stdio.h>
#include<stdlib.h>
#include<string.h>
#include<fcntl.h>
#include<sys/mman.h>
#include<unistd.h>
// mmap demo: maps /tmp/zlog.log read/write and fills it with copies of a
// fixed test string. The file must already exist and be non-empty
// (mmap of length 0 fails).
//
// NOTE(review): several latent bugs in this demo, flagged but left as-is:
//  - printf("%d", ptr) prints a pointer with an int conversion (UB; use %p);
//  - `ptr` is advanced inside the copy loop, so the final munmap(ptr, ...)
//    is called on a pointer past the mapping start, and with totalLen
//    (10000 * len_s) rather than the mapped length `len` -- both are
//    undefined behavior per munmap(2);
//  - the copy loop can write up to len_s-1 bytes past `len` when len is
//    not a multiple of len_s.
int main_test(int argc,char *argv[]){
    int fd,len;
    char *ptr;
    // if(argc<2){
    // printf("please enter a file\n");
    // return 0;
    // }
    if((fd=open("/tmp/zlog.log",O_RDWR))<0){
        perror("open file error");
        return -1;
    }
    // Determine the file length by seeking to the end.
    len=lseek(fd,0,SEEK_END);
    // printf("file len := %d\n", len);
    char *s = "zadlfjasdlfjasdlfjasd";
    int len_s = strlen(s);
    // mmap protection flags must match the open() mode, otherwise it fails.
    // (translated from the original Chinese comment)
    ptr=mmap(NULL,len,PROT_READ|PROT_WRITE,MAP_SHARED,fd,0);
    printf("%d\n", ptr);
    if(ptr==MAP_FAILED){
        perror("mmap error");
        close(fd);
        return -1;
    }
    // Closing the fd after mmap is fine; the mapping stays valid. (translated)
    close(fd);
    printf("length is %ld\n",strlen(ptr));
    // Tile the mapping with copies of `s`; writes go through to the file
    // because the mapping is MAP_SHARED.
    for (int i = 0; i < len/len_s; i++) {
        memcpy(ptr, s, strlen(s));
        ptr += len_s;
    }
    printf("%d\n", ptr);
    int totalLen = 10000 * len_s;
    // Original comment claimed this "writes the changed file to memory";
    // it only unmaps -- MAP_SHARED already propagates the writes. (translated)
    munmap(ptr,totalLen);
    return 0;
}
//
// mmap_test.h
// oc_demo
//
// Created by zhangxinwei on 15/12/4.
// Copyright © 2015年 zhangxinwei. All rights reserved.
//
#ifndef mmap_test_h
#define mmap_test_h
#include <stdio.h>
#endif /* mmap_test_h */
<file_sep>/ZC/select_demo.h
//
// select_demo.h
// ZC
//
// Created by 张新伟 on 16/1/7.
// Copyright © 2016年 张新伟. All rights reserved.
//
#ifndef select_demo_h
#define select_demo_h
#include <stdio.h>
#include "ZC.h"
#endif /* select_demo_h */
<file_sep>/ZC/ZDB.h
//
// ZDB.h
// ZC
//
// Created by 张新伟 on 16/1/12.
// Copyright © 2016年 张新伟. All rights reserved.
//
// Tiny record store: a database is an mmap'd file of fixed-size JSON
// records plus an index array. There are no tables -- only records.
#ifndef ZDB_h
#define ZDB_h
#include <stdio.h>
#include "ZC.h"
#include "ZJson.h"
#include "ZIndex.h"
#include "ZMap.h"
typedef struct _zrecord zrecord;
typedef struct _zdb zdb;
// Open database handle.
struct _zdb {
    void* mmap;            // base address of the memory-mapped data file
    ZArray* indexArray;    // indexes over the records
};
// One fixed-size record slot: a NUL-terminated JSON string of at most 6 KiB.
struct _zrecord {
    char jsonStr[6*1024];
};
bool zdbCreate(char* fileName, int size);
void zdbClose(zdb* self, int size);
zdb* zdbInit(char* fileName);
// Read the record at slot `pos` as a raw JSON string.
char* zdbReadToString(zdb* self, int pos);
// Read the record at slot `pos` and parse it into a ZJson object.
ZJson* zdbReadToJson(zdb* self, int pos);
// Overwrite the record at slot `pos` with `val`.
void zdbUpdate(zdb* self, int pos, char* val);
// Append a record; the return value is an int (slot index per the .c file's
// usage -- NOTE(review): confirm against the implementation).
int zdbAdd(zdb* self, char* val);
#endif /* ZDB_h */
<file_sep>/ZC/backup.h
//
// backup.h
// ZC
//
// Created by 张新伟 on 16/1/3.
// Copyright © 2016年 张新伟. All rights reserved.
//
#ifndef backup_h
#define backup_h
#include <stdio.h>
#endif /* backup_h */
<file_sep>/ZC/test2.c
#include <stdio.h>
#include <winsock2.h>
#include <stdlib.h>
#include <string.h>
//#pragma comment(lib,"ws2_32.lib")
// Function prototype
void StreamClient(char *szServer, short nPort, char *szMessage);
// Helper macro for displaying errors.
// Bug fix: the format string was "\n%: %d\n" -- a bare '%' with no
// conversion specifier, which is undefined behavior and never printed the
// caller-supplied label. It must be "%s".
#define PRINTERROR(s) \
	fprintf(stderr,"\n%s: %d\n", s, WSAGetLastError())
////////////////////////////////////////////////////////////
// Entry point: initializes Winsock 2.0 and drives one SMTP session
// against smtp.126.com on port 25 (the "AUTH LOGIN\r\n" argument is
// unused by StreamClient, which hard-codes its own commands).
int main(int argc, char **argv)
{
	char hostname[255];                     // NOTE(review): unused
	WORD wVersionRequested = MAKEWORD(2,0);
	WSADATA wsaData;
	int nRet;
	short nPort;
	nPort =25;                              // standard SMTP port
	// NOTE(review): the WSAStartup return code is never checked.
	nRet = WSAStartup(wVersionRequested, &wsaData);
	StreamClient("smtp.126.com", nPort, "AUTH LOGIN\r\n");
	WSACleanup();
	return 0;
}
////////////////////////////////////////////////////////////
// Performs a scripted SMTP conversation (HELO, AUTH LOGIN, MAIL FROM,
// RCPT TO, DATA, message body, QUIT-less close) over a blocking TCP socket.
// Chinese comments below are translated to English; placeholder literals
// such as "用户名" (user name) / "密码" (password) and the <<EMAIL>>
// addresses are runtime strings the user must replace, so they are left
// untouched.
//
// NOTE(review): every recv() here uses sizeof(szBuf)+1, which permits a
// one-byte overrun of szBuf, and the received data is printed with %s
// without NUL-termination; also some send() calls use sizeof(literal),
// which transmits the trailing NUL. Flagged, not changed.
void StreamClient(char *szServer, short nPort, char *szMessage)
{
	printf("\nStream Client connecting to server: %s on port: %d",szServer, nPort);
	//
	// Find the server
	//
	LPHOSTENT lpHostEntry;
	lpHostEntry = gethostbyname(szServer);
	if (lpHostEntry == NULL)
	{
		PRINTERROR("gethostbyname()");
		return;
	}
	//
	// Create a TCP/IP stream socket
	//
	SOCKET theSocket;
	theSocket = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
	if (theSocket == INVALID_SOCKET){
		PRINTERROR("socket()");
		return;
	}
	//
	// Fill in the address structure
	//
	SOCKADDR_IN saServer;
	saServer.sin_family = AF_INET;
	saServer.sin_addr = *((LPIN_ADDR)*lpHostEntry->h_addr_list);
	saServer.sin_port = htons(nPort);
	//
	// connect to the server
	//
	int nRet;
	nRet = connect( theSocket, // Socket
	(LPSOCKADDR)&saServer, // Server address
	sizeof(struct sockaddr));// Length of server address structure
	printf("\n%d1lL=====================================%d\n",nRet,SOCKET_ERROR);
	if (nRet == SOCKET_ERROR)
	{
		PRINTERROR("socket()");
		closesocket(theSocket);
		return;
	}
	char szBuf[256];
	// strcpy(szBuf, "From the Client");
	// Build "HELO <local hostname> \r\n" as the opening SMTP greeting.
	char buf[350]="0";
	char server[250];
	gethostname(server,250);
	strcpy(buf, "HELO ");
	strcat(buf, server);
	strcat(buf, " \r\n");
	printf("%s:::%d",buf,strlen(buf));
	//
	// Wait for a reply
	//
	nRet = recv(theSocket,szBuf,sizeof(szBuf)+1,0); printf("\nData received OVER DATA: %s", szBuf);
	nRet = send(theSocket, buf, strlen(buf), 0);
	nRet = recv(theSocket, szBuf, sizeof(szBuf), 0); printf("\nData received2: %s", szBuf);
	// Send AUTH LOGIN to begin authentication. (translated)
	nRet = send(theSocket, "AUTH LOGIN \r\n", sizeof("AUTH LOGIN \r\n"), 0);
	nRet = recv(theSocket, szBuf, sizeof(szBuf), 0); printf("\nData received LOGIN: %s", szBuf);
	// Send the user name and password; both must be Base64-encoded before
	// sending -- transmit the encoded strings. (translated)
	nRet = send(theSocket,"用户名\r\n", strlen("用户名\r\n"),0);
	nRet = recv(theSocket, szBuf, sizeof(szBuf),0);printf("\nData receivedUSERNAME: %s", szBuf);
	// Send the password. (translated)
	nRet = send(theSocket,"密码\r\n", strlen("密码\r\n"), 0);
	nRet = recv(theSocket, szBuf, sizeof(szBuf),0); printf("\nData receivedUSERPASSWORD: %s", szBuf);
	// Send the sender mailbox (change this to your own address!). (translated)
	send(theSocket,"MAIL FROM:<<EMAIL>>\r\n",sizeof("MAIL FROM:<<EMAIL>>\r\n"),0);
	nRet = recv(theSocket, szBuf, sizeof(szBuf)+1, 0);printf("\nData received MAILFROM: %s", szBuf);
	// Send the recipient mailbox. (translated)
	nRet= send(theSocket,"RCPT TO:<<EMAIL>> \r\n",sizeof("RCPT TO:<<EMAIL>> \r\n"),0);
	nRet = recv(theSocket, szBuf, sizeof(szBuf)+1, 0); printf("\nData received TOMAIL: %s", szBuf);
	char MailData[1024] =
	"From: \"<EMAIL>\"\r\n"
	"To: maxinliang002@126.com\r\n"
	"Subject: IP Address\r\n\r\n"; // the subject can be changed (translated)
	// Tell the mail server we are about to send the message body. (translated)
	send(theSocket,"DATA\r\n", strlen("DATA\r\n"),0);
	nRet = recv(theSocket, szBuf, sizeof(szBuf)+1, 0); printf("\nData receivedSEND DATA: %s", szBuf);
	// Send the message headers. (translated)
	send(theSocket,MailData, strlen(MailData),0);
	// Send the message body. (translated)
	send(theSocket,"邮件内容来了!\r\n", strlen("邮件内容来了!\r\n"),0);
	// Send the end-of-message marker (CRLF "." CRLF). (translated)
	send(theSocket,"\r\n.\r\n", strlen("\r\n.\r\n"),0);
	// Read the server's final reply. (translated)
	nRet = recv(theSocket,szBuf,sizeof(szBuf)+1,0); printf("\nData received OVER DATA: %s", szBuf);
	//
	// Display the received data
	//
	//printf("\nData received3: %s", szBuf);
	closesocket(theSocket);
	return;
}
//
// test_sendMail.h
// ZC
//
// Created by zhangxinwei on 16/1/28.
// Copyright © 2016年 张新伟. All rights reserved.
//
#ifndef test_sendMail_h
#define test_sendMail_h
#include <stdio.h>
#endif /* test_sendMail_h */
<file_sep>/ZC/main.c
#include <stdio.h>
// State carried across calls of the cb() coroutine below.
typedef struct
{
	int i;      // loop counter, preserved between invocations
	int num;    // value yielded to the caller
	int state;  // resume point: 0 = fresh start, otherwise the __LINE__ of the yield
} task;

// Minimal stackless-coroutine macros (Duff's-device style):
// crBegin opens a switch on the saved state; crReturn records the current
// source line and yields a value, and the next call jumps straight to the
// matching `case __LINE__:` label; crEnd closes the switch.
#define crBegin(state) \
	switch (state) { case 0:
#define crReturn(state, ret) \
	(state) = __LINE__; return (ret); case __LINE__:
#define crEnd() \
	}
// Coroutine: each call yields the next value of the sequence 1..20,
// restarting at 1 after 20. All progress lives in *t, so independent
// task structs give independent sequences. t->state must be 0 before
// the first call.
int cb(task *t)
{
	crBegin(t->state);
	for (;;) {
		t->num = 1;
		for (t->i = 0; t->i < 20; t->i++) {
			crReturn(t->state, t->num);  // yield t->num, resume here next call
			t->num += 1;
		}
	}
	crEnd();
	return 0;  // unreachable: the for(;;) above never exits normally
}
// Hand-rolled coroutine using static locals and goto: the first ten calls
// return 0, 1, ..., 9; every call after that returns 0.
// NOTE(review): once the loop finishes, `state` remains 1, so later calls
// re-enter at LABEL1 with i already 10, fail the loop condition, and fall
// through to `return 0` -- the generator never restarts.
int function(void) {
	static int i, state = 0;
	switch (state) {
		case 0: goto LABEL0;
		case 1: goto LABEL1;
	}
LABEL0: /* start of function */
	for (i = 0; i < 10; i++) {
		state = 1; /* so we will come back to LABEL1 */
		return i;
LABEL1:; /* resume control straight after the return */
	}
	return 0;
}
// Demo driver: the task/cb() loop is commented out; currently it just
// calls function() five times, printing "0 1 2 3 4 ".
int main_main()
{
	// task t;
	// int i;
	//
	// t.state = 0;
	//
	// for (i = 0; i < 100; i++) {
	// printf("%d ", cb(&t));
	// }
	printf("%d ", function());
	printf("%d ", function());
	printf("%d ", function());
	printf("%d ", function());
	printf("%d ", function());
	return 0;
}<file_sep>/ZC/CString.h
//
// CString.h
// oc_demo
//
// Created by zhangxinwei on 15/12/8.
// Copyright © 2015年 zhangxinwei. All rights reserved.
//
// C-string helpers. `self` is the string the operation applies to.
// NOTE(review): implementations are outside this excerpt -- whether the
// Append* family mutates in place or returns fresh allocations cannot be
// confirmed here.
#ifndef CString_h
#define CString_h
#include <stdio.h>
#include "ZC.h"
int csIndexPrefixSkipSpace(char* self, char* prefix);
char* csLineByClientFd(int clientFd);
char* csReplaceCharAtLast(char* self, char c);
char* csAppend(char* self, char* str);
char* csAppendInt(char* self, int intVal);
char* csAppendChar(char* self, char c);
char* csIntToString(int intVal);
bool csIsEqual(char* self, char* str);
int csToInt(char* self);
// Is `prefix` a prefix of `self`? (translated)
bool csIsPrefix(char* self, char* prefix);
// Split helpers: return the part of `self` after / before separator `c`. (translated)
char* csSeparateToEnd(char* self, char* c);
char* csSeparateFromBegin(char* self, char* c);
#endif /* CString_h */
<file_sep>/ZC/backup.c
////
//// backup.c
//// ZC
////
//// Created by 张新伟 on 16/1/3.
//// Copyright © 2016年 张新伟. All rights reserved.
////
//
//#include "backup.h"
////
//// ZJson.c
//// oc_demo
////
//// Created by zhangxinwei on 15/12/7.
//// Copyright © 2015年 zhangxinwei. All rights reserved.
////
//
//#include "ZJson.h"
//#include <stdarg.h>
//#include <sys/malloc.h>
//#include <string.h>
//#include "ZTime.h"
//#include "CString.h"
//#include "ZStack.h"
//#include "ZMap.h"
//#include "ZArray.h"
//
//
//
//typedef struct _zjsonStr zjsonStr;
//
//struct _zjsonStr {
// char* str;
// int nextTokenIndex;
//};
//
//int hasStatus(int expectedStatus);
//int zjsonStrType(zjsonStr* jsonStr);
//void* zjsonStrReadNumber(zjsonStr* jsonStr);
//void* zjsonStrReadBoolean(zjsonStr* jsonStr);
//void zjsonStrReadNull(zjsonStr* jsonStr);
//char* zjsonStrReadString(zjsonStr* self);
//
//#define DOUBLE(a) ({\
//__typeof__(a) _x_in_DOUBLE = (a);\
//_x_in_DOUBLE + _x_in_DOUBLE;\
//})
//// type = 1 obj, = 0 array
//struct _CZ {
// char type;
// int typeSize;
//};
//
//// type = 11 obj, = 10 array
//struct _ZJson {
// char type;
// int typeSize;
// KV* data;
// char count;
//};
//
//struct _KV {
// char* key;
// void* val;
// KV* next;
//};
//
//typedef struct _zjsonNode zjsonNode;
//typedef struct _ZJson2 ZJson2;
//
//struct _ZJson2 {
// int type;
// int typeSize;
// int size;
// int count;
// zjsonNode* data;
// char* originString;
//};
//
//struct _zjsonNode {
// char* key;
// void* val;
// zjsonNode* next;
//};
//
//ZJson* zjsonArrayInitWithVal(int n,void* v1,...) {
// ZJson* jsonArray = (ZJson*)malloc(sizeof(ZJson));
// jsonArray->type = ZJSONARRAY;
// jsonArray->data = (KV*)malloc(sizeof(KV));
// jsonArray->count = n;
//
// va_list argp;
// va_start(argp, v1);
//
// KV* next = jsonArray->data;
//
// for (int i = 0; i < n; i++) {
// KV* kv = next;
// kv->val = v1;
//
// next = (KV*)malloc(sizeof(KV));
// next->next = NULL;
// kv->next = next;
//
// v1 = va_arg(argp, void*);
// }
//
// return jsonArray;
//}
//
//ZJson* zjsonInitWithJson(ZJson* json) {
//
// return 0;
//}
//
//ZJson2* zjsonInitWithString(char* jsonStr) {
// ZJson2* json = (ZJson2*)malloc(sizeof(ZJson2));
// json->type = ZJSON;
// json->data = (zjsonNode*)malloc(sizeof(zjsonNode));
// json->originString = jsonStr;
// char c = (char)(*jsonStr);
//
// zjsonStr* zjstr = (zjsonStr*)malloc(sizeof(zjsonStr));
// zjstr->str = jsonStr;
// zjstr->nextTokenIndex = 0;
//
// int i = 0;
// size_t len = strlen(jsonStr);
//
// ZStack* stack = zstackInit(10, typePtr);
//
// int status = STATUS_EXPECT_SINGLE_VALUE | STATUS_EXPECT_BEGIN_OBJECT | STATUS_EXPECT_BEGIN_ARRAY;
// while (i < len) {
//
// int type = zjsonStrType(zjstr);
// switch (type) {
// case idnumber:
// if (hasStatus(STATUS_EXPECT_SINGLE_VALUE)) {//single number
// zjsonNumber number = zjsonStrReadNumber(zjstr);
// zstackPush(stack, number);
// status = STATUS_EXPECT_END_DOCUMENT;
// continue;
// }
// if (hasStatus(STATUS_EXPECT_OBJECT_VALUE)) {
// zjsonNumber number = zjsonStrReadNumber(zjstr);
// char* key = zstackPop(stack);
// zmapPut(zstackPeek(stack), key, number);
// status = STATUS_EXPECT_COMMA | STATUS_EXPECT_END_OBJECT;
// continue;
// }
// if (hasStatus(STATUS_EXPECT_ARRAY_VALUE)) {
// zjsonNumber number = zjsonStrReadNumber(zjstr);
// zarrayAdd(zstackPeek(stack), number);
// status = STATUS_EXPECT_COMMA | STATUS_EXPECT_END_ARRAY;
// continue;
// }
// printf("Unexpected umber\n");
// break;
// case idbool:
// if (hasStatus(STATUS_EXPECT_SINGLE_VALUE)) {// single boolean:
// zjsonBoolean boolean = zjsonStrReadBoolean(zjstr);
// zstackPush(stack, boolean);
// status = STATUS_EXPECT_END_DOCUMENT;
// continue;
// }
// if (hasStatus(STATUS_EXPECT_OBJECT_VALUE)) {
// zjsonBoolean boolean = zjsonStrReadBoolean(zjstr);
//
// char* key = zstackPop(stack);
// zmapPut(zstackPeek(stack), key, boolean);
//
// status = STATUS_EXPECT_COMMA | STATUS_EXPECT_END_OBJECT;
// continue;
// }
// if (hasStatus(STATUS_EXPECT_ARRAY_VALUE)) {
// zjsonBoolean boolean = zjsonStrReadBoolean(zjstr);
// zarrayAdd(zstackPeek(stack), boolean);
// status = STATUS_EXPECT_COMMA | STATUS_EXPECT_END_ARRAY;
// continue;
// }
// printf("Unexpected bool\n");
// break;
// case idstring:
// if (hasStatus(STATUS_EXPECT_SINGLE_VALUE)) {// single string:
// char* str = zjsonStrReadString(zjstr);
// zstackPush(stack, str);
// status = STATUS_EXPECT_END_DOCUMENT;
// continue;
// }
// if (hasStatus(STATUS_EXPECT_OBJECT_KEY)) {
// char* str = zjsonStrReadString(zjstr);
// zstackPush(stack, str);
// status = STATUS_EXPECT_COLON;
// continue;
// }
// if (hasStatus(STATUS_EXPECT_OBJECT_VALUE)) {
// char* str = zjsonStrReadString(zjstr);
// char* key = zstackPop(stack);
// zmapPut(zstackPeek(stack), key, str);
// status = STATUS_EXPECT_COMMA | STATUS_EXPECT_END_OBJECT;
// continue;
// }
// if (hasStatus(STATUS_EXPECT_ARRAY_VALUE)) {
// char* str = zjsonStrReadString(zjstr);
// zarrayAdd(zstackPeek(stack), str);
// status = STATUS_EXPECT_COMMA | STATUS_EXPECT_END_ARRAY;
// continue;
// }
// printf("Unexpected string \n");
// break;
// case idnull:
// if (hasStatus(STATUS_EXPECT_SINGLE_VALUE)) {// single null:
// zjsonStrReadNull(zjstr);
// zstackPush(stack, NULL);
// status = STATUS_EXPECT_END_DOCUMENT;
// continue;
// }
// if (hasStatus(STATUS_EXPECT_OBJECT_VALUE)) {
// zjsonStrReadNull(zjstr);
// char* key = zstackPop(stack);
// zmapPut(zstackPeek(stack), key, NULL);
// status = STATUS_EXPECT_COMMA | STATUS_EXPECT_END_OBJECT;
// continue;
// }
// if (hasStatus(STATUS_EXPECT_ARRAY_VALUE)) {
// zjsonStrReadNull(zjstr);
// zarrayAdd(zstackPeek(stack), NULL);
// status = STATUS_EXPECT_COMMA | STATUS_EXPECT_END_ARRAY;
// continue;
// }
// printf("Unexpected null\n");
// break;
// case idobjbegin:// {
// if (hasStatus(STATUS_EXPECT_BEGIN_OBJECT)) {
// zstackPush(stack, zmapInit());
// status = STATUS_EXPECT_OBJECT_KEY | STATUS_EXPECT_BEGIN_OBJECT | STATUS_EXPECT_END_OBJECT;
// continue;
// }
// printf("Unexpected '{'\n");
// break;
// case idobjend:// }
// if (hasStatus(STATUS_EXPECT_END_OBJECT)) {
// ZMap* map = zstackPop(stack);
// if (zstackEmpty(stack)) {//root object
// zstackPush(stack, map);
// status = STATUS_EXPECT_END_DOCUMENT;
// continue;
// }
//
// int type = zstackPreValueType(stack);
// if (type == TYPE_OBJECT_KEY) {
// char* key = zstackPop(stack);
// zmapPut(zstackPeek(stack), key, map);
// status = STATUS_EXPECT_COMMA | STATUS_EXPECT_END_OBJECT;
// continue;
// }
// if (type == TYPE_ARRAY) {
// zarrayAdd(zstackPeek(stack), map);
// status = STATUS_EXPECT_COMMA | STATUS_EXPECT_END_ARRAY;
// continue;
// }
// }
// printf("Unexpected '}'\n");
// break;
// case idlistbegin:// [
// if (hasStatus(STATUS_EXPECT_BEGIN_ARRAY)) {
// zstackPush(stack, zarrayInit());
// status = STATUS_EXPECT_ARRAY_VALUE | STATUS_EXPECT_BEGIN_OBJECT | STATUS_EXPECT_BEGIN_ARRAY | STATUS_EXPECT_END_ARRAY;
// continue;
// }
// printf("Unexpected '['\n");
// break;
// case idlistend: // ]
// if (hasStatus(STATUS_EXPECT_END_ARRAY)) {
// ZArray* array = zstackPop(stack);
// if (zstackEmpty(stack)) {
// zstackPush(stack, array);
// status = STATUS_EXPECT_END_DOCUMENT;
// continue;
// }
//
// int type = zstackPreValueType(stack);
// if (type == TYPE_OBJECT_KEY) {// key: [ CURRENT ] ,}
// char* key = zstackPop(stack);
// zmapPut(zstackPeek(stack), key, array);
// status = STATUS_EXPECT_COMMA | STATUS_EXPECT_END_OBJECT;
// continue;
// }
// if (type == TYPE_ARRAY) {// xx, xx, [CURRENT] ,]
// zarrayAdd(zstackPeek(stack), array);
// status = STATUS_EXPECT_COMMA | STATUS_EXPECT_END_ARRAY;
// continue;
// }
// }
// printf("Unexpected char ']'\n");
// break;
//
// case idcolon://:
// if (status == STATUS_EXPECT_COLON) {
// status = STATUS_EXPECT_OBJECT_VALUE | STATUS_EXPECT_BEGIN_OBJECT | STATUS_EXPECT_BEGIN_ARRAY;
// continue;
// }
// printf("Unexpected colon char\n");
// break;
// case idcomma://,
// if (hasStatus(STATUS_EXPECT_COMMA)) {
// if (hasStatus(STATUS_EXPECT_END_OBJECT)) {
// status = STATUS_EXPECT_OBJECT_KEY;
// continue;
// }
// if (hasStatus(STATUS_EXPECT_END_ARRAY)) {
// status = STATUS_EXPECT_ARRAY_VALUE | STATUS_EXPECT_BEGIN_ARRAY | STATUS_EXPECT_BEGIN_OBJECT;
// continue;
// }
// }
// printf("Unexpected comma char\n");
// break;
// case idend:
// if (hasStatus(STATUS_EXPECT_END_DOCUMENT)) {
// void* json = zstackPop(stack);
// if (zstackEmpty(stack)) {
// return json;
// }
// }
// printf("Unexpected EOF \n");
// break;
// default:
// break;
// }
// }
//
// return json;
//}
//
//int hasStatus(int expectedStatus) {
// int status = STATUS_EXPECT_SINGLE_VALUE | STATUS_EXPECT_BEGIN_OBJECT | STATUS_EXPECT_BEGIN_ARRAY;
// return ((status & expectedStatus) > 0);
//}
//
//int zjsonStrType(zjsonStr* jsonStr) {
//
// return 0;
//}
//
//void* zjsonStrReadNumber(zjsonStr* jsonStr) {
//
// return NULL;
//}
//
//void* zjsonStrReadBoolean(zjsonStr* jsonStr) {
//
// return NULL;
//}
//
//void zjsonStrReadNull(zjsonStr* jsonStr) {
//
//}
//
//char* zjsonStrReadString(zjsonStr* self) {
// return NULL;
//}
//
//ZJson* zjsonInitWithKV(int n, void* k1,...) {
// ZJson* json = (ZJson*)malloc(sizeof(ZJson));
// json->type = ZJSON;
// json->data = (KV*)malloc(sizeof(KV));
// json->count = n;
//
// va_list argp;
// va_start(argp, k1);
//
// KV* next = json->data;
//
// for (int i = 0; i < n; i++) {
// KV* kv = next;
//
// // key
// CZ* k_obj = (CZ*)k1;
// kv->key = ((zs*)k_obj)->val;
//
// // val
// k1 = va_arg(argp, void*);
// kv->val = k1;
//
// next = (KV*)malloc(sizeof(KV));
// next->next = NULL;
// kv->next = next;
//
// k1 = va_arg(argp, void*);
// }
//
// return json;
//}
//
//char* zjsonArrayToString(ZJson* jsonArray) {
// char* str = "[";
//
// char type = jsonArray->type;
// int count = jsonArray->count;
// KV* data = jsonArray->data;
//
// KV* kv = data;
// for (int i = 0; i < count; i++) {
// char* k = kv->key;
//
// // val
// CZ* cz = (CZ*)kv->val;
// char type = cz->type;
// switch (type) {
// case ZI:
// str = csAppendInt(str, ((zi*)cz)->val);
// str = csAppend(str, ",");
// break;
// case ZJSONARRAY:
// str = csAppend(str, zjsonArrayToString(((ZJson*)cz)));
// break;
// case ZJSON:
// str = csAppend(str, zjsonToString(((ZJson*)cz)));
// break;
// case ZS:
// str = csAppend(str, "\"");
// str = csAppend(str, ((zs*)cz)->val);
// str = csAppend(str, "\",");
// break;
// default:
// break;
// }
//
// kv = kv->next;
// }
//
// str = csReplaceCharAtLast(str, ']');
// str = csAppend(str, ",");
// return str;
//}
//
//char* zjsonToString(ZJson* json) {
// char* str = "{";
//
// char type = json->type;
// int count = json->count;
// KV* data = json->data;
//
// // array
// if (type == ZJSONARRAY) {
// zjsonArrayToString(json);
// }else if (type == ZJSON) {//kv
//
// KV* kv = data;
// for (int i = 0; i < count; i++) {
// char* k = kv->key;
//
// // key
// str = csAppend(str, "\"");
// str = csAppend(str, k);
// str = csAppend(str, "\"");
//
// // printf("%s\n", str);
//
// str = csAppend(str, "\:");
// // val
// CZ* cz = (CZ*)kv->val;
// char type = cz->type;
// switch (type) {
// case ZI:
// str = csAppendInt(str, ((zi*)cz)->val);
// str = csAppend(str, ",");
// break;
// case ZJSONARRAY:
// str = csAppend(str, zjsonArrayToString(((ZJson*)cz)));
// break;
// case ZJSON:
// str = csAppend(str, zjsonToString(((ZJson*)cz)));
// break;
// case ZS:
// str = csAppend(str, "\"");
// str = csAppend(str, ((zs*)cz)->val);
// str = csAppend(str, "\",");
// break;
// default:
// break;
// }
//
// kv = kv->next;
// }
// }
//
// str = csReplaceCharAtLast(str, '}');
// return str;
//}
//
//void main_zjson() {
// //void main(){
//
// uint64_t s = ztimeSince1970();
// ZJson* json = zjsonInitWithKV(4, zs("name"), zs("张新伟"), zs("age"), zi(26), zs("mobile"), zs("15210743874"), zs("test"), zjsonArrayInitWithVal(2, zs("zhangxinwei"), zi(100)));
//
// char* str = zjsonToString(json);
// char* str2 = zjsonToString(json);
//
// printf("time := %llu\n", ztimeSince1970() - s);
// printf("%s\n", str);
// printf("%s\n", str2);
//
// printf("%d\n", strlen("正"));
//
//
// char* jsonString = "{\"name\":\"张新伟\",\"age\":25,\"job\":\"nojob\",\"array\":[11,true,\"test\",{\"arrayInnerK\":\"arrayInnerv\"}]}";
// printf("%c \n", *jsonString);
// ZJson2* json2 = zjsonInitWithString(jsonString);
//
// ZMap* map = json2->data;
//
// printf("%s \n", zmapGet(map, "name"));
//
// ZArray* array = zmapGet(map, "array");
//
// printf("%s \n", zarrayGet(array, 0));
//
// printf("%d \n", (1==3));
//}
//
//
//
//
//
//
//
//
//
//
// main string str, list l, map m - string str
// print "xxx"
// l = [1, 3, 4, 5];
// for i1, i2 in l print %d,%d,i1,i2
//
// m = {"k1":"v1", "k2":"v2"};
// l2 = m.keys
// for k in l2 print m[k]
//
// ret
<file_sep>/ZC/ZDB.c
//
// ZDB.c
// ZC
//
// Created by 张新伟 on 16/1/12.
// Copyright © 2016年 张新伟. All rights reserved.
//
#include "ZDB.h"
#include <fcntl.h>
#include <unistd.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include "ZMap.h"
#include <string.h>
#include <sys/malloc.h>
#include "ZTime.h"
#include <assert.h>
#include "CString.h"
static int globalPos = 0;   // byte offset of the next free record slot (shared append cursor)
// db program: no tables, just fixed-size records
// Map the whole of `fileName` read/write into memory (MAP_SHARED, so writes
// propagate back to the file). Returns NULL on mapping failure.
void* zfileToMmap(char* fileName) {
    int fd = open(fileName, O_RDWR);
    assert(fd > 0);
    struct stat st;
    fstat(fd, &st);
    void* addr = mmap(0, st.st_size, PROT_READ|PROT_WRITE, MAP_SHARED, fd, 0);
    // POSIX: the mapping stays valid after the descriptor is closed,
    // so close fd here instead of leaking it for the process lifetime.
    close(fd);
    if (addr == MAP_FAILED) {
        // mmap signals failure with MAP_FAILED ((void*)-1), *not* NULL;
        // normalise to NULL because callers (zdbInit) test `p == NULL`.
        return NULL;
    }
    // %p is the only portable printf conversion for pointers (%llu was UB).
    printf("%p\n", addr);
    printf("%p\n", (void*)((char*)addr + 1));
    printf("%p\n", (void*)((char*)addr + st.st_size));
    return addr;
}
// Append `val` as a new fixed-size record and return its byte offset.
// Returns 0 when `val` is longer than a record slot — note 0 is also the
// offset of the first record, so callers cannot distinguish the two;
// NOTE(review): confirm whether that ambiguity is intended.
unsigned long zdbAddByString(zdb* self, char* val) {
    int s = recordSize;
    // NOTE(review): strlen() is size_t while `s` is int — signed/unsigned compare.
    if (strlen(val) > s) {
        printf("over recordSize\n");
        return 0;
    }
    // Copy without a NUL terminator; readers presumably rely on the rest of
    // the slot being zero (the file is zero-filled by truncate()) — confirm.
    memcpy(self->mmap+globalPos, val, strlen(val));
    globalPos += recordSize;
    // TODO: maintain indexes here
    //
    return globalPos-recordSize;
}
// Insert a new record serialized from `json`, then update every configured
// index whose name matches a top-level key of the json. Returns the record's
// byte offset, or 0 when the serialized json exceeds the record size.
unsigned long zdbInsertByJson(zdb* self, ZJson* json) {
    unsigned long pos = 0;
    char* jsonStr = zjsonToString(json);
    size_t jsonStrLen = strlen(jsonStr);
    int size = recordSize;
    if ( jsonStrLen > size) {
        printf("over recordSize\n");
        return 0;
    }
    memcpy(self->mmap+globalPos, jsonStr, jsonStrLen);
    globalPos += recordSize;
    pos = globalPos-recordSize;
    // Index maintenance: for each index registered in indexArray, look up the
    // same-named top-level json key and append (value, pos) via zindexAppend.
    ZMap* map = json->data;
    ZArray* indexArray = self->indexArray;
    int count = 0;
    if (indexArray != NULL) {
        count = indexArray->len;
    }
    for (int i = 0; i < count; i++) {
        ZIndex* index = zarrayGet(indexArray, i);
        switch (index->valueType) {
            case typeInt:{
                // NOTE(review): zmapGet may return NULL if the key is absent;
                // csToInt(NULL) behaviour unverified from here.
                char* val = zmapGet(map, index->name);
                zindexAppend(index, csToInt(val), NULL, pos, typeInt);
            }
                break;
            case typeString:{
                char* val = zmapGet(map, index->name);
                zindexAppend(index, 0, val, pos, typeString);
            }
                break;
            default:
                break;
        }
    }
    return pos;
}
// Stub: updating a record from a ZJson (including index maintenance) is not
// implemented yet; zdbUpdate() below only rewrites the raw record bytes.
void zdbUpdateByJson(zdb* self, ZJson* json) {
}
// Overwrite the record stored at byte offset `pos` with `val`.
// Silently does nothing (after a diagnostic) when `val` exceeds the slot size.
void zdbUpdate(zdb* self, int pos, char* val) {
    // size_t avoids the signed/unsigned comparison of the original code.
    size_t len = strlen(val);
    if (len > (size_t)recordSize) {
        printf("over recordSize\n");
        return ;
    }
    // Clear the whole slot first: if the previous record was longer, its
    // trailing bytes would otherwise survive the update and corrupt any
    // subsequent read of this record.
    memset(self->mmap+pos, 0, recordSize);
    memcpy(self->mmap+pos, val, len);
}
// Read the record at byte offset `pos` and parse it as JSON.
ZJson* zdbReadToJson(zdb* self, int pos) {
    int s = recordSize;
    // Allocate one extra byte and NUL-terminate: the original handed a
    // non-terminated buffer to zjsonInitWithString, whose strlen-based
    // parsing could read past the end of the record copy.
    char* jsonStr = (char*)malloc(s + 1);
    memcpy(jsonStr, self->mmap+pos, s);
    jsonStr[s] = '\0';
    ZJson* json = zjsonInitWithString(jsonStr);
    // NOTE(review): jsonStr is never freed — presumably the parsed structure
    // keeps pointers into it; confirm before adding free(jsonStr).
    return json;
}
// Return a heap-allocated copy of the raw record at byte offset `pos`.
// The caller owns (and should free) the returned buffer.
char* zdbReadToString(zdb* self, int pos) {
    int s = recordSize;
    char* jsonStr = (char*)malloc(s + 1);
    memcpy(jsonStr, self->mmap+pos, s);
    // NUL-terminate so callers may treat the result as a C string; the
    // original returned recordSize unterminated bytes.
    jsonStr[s] = '\0';
    return jsonStr;
}
// Process-wide singleton database handle; the first call maps the backing
// file, later calls return the cached handle. Returns NULL on mapping failure.
zdb* zdbInit(char* fileName) {
    static zdb* db = NULL;
    if (db == NULL) {
        db = (zdb*)malloc(sizeof(zdb));
        db->indexArray = NULL;
        void* p = zfileToMmap(fileName);
        if (p == NULL) {
            // Roll back fully: the original left `db` pointing at a
            // half-built struct, so every subsequent call returned a handle
            // whose mmap field was uninitialised.
            free(db);
            db = NULL;
            return NULL;
        }
        db->mmap = p;
    }
    return db;
}
// Unmap the database file. `size` must equal the length that was originally
// mapped (the file size at zdbInit time); the singleton in zdbInit is NOT
// reset here — NOTE(review): a later zdbInit returns the stale, unmapped handle.
void zdbClose(zdb* self, int size) {
    munmap(self->mmap, size);
}
// Create (or open) the backing database file and grow it to `size` bytes.
// Returns true on success, false with a diagnostic on failure.
bool zdbCreate(char* fileName, int size) {
    // O_CREAT is required: plain O_RDWR cannot create a missing file, which
    // made this "create" function fail on its very first run.
    int fd = open(fileName, O_RDWR | O_CREAT, 0644);
    if (fd < 0) {
        printf("create new db file failed, desc:%s\n", strerror(errno));
        return false;
    }
    // Zero-fill the file to `size`; readers rely on unused slot bytes being 0.
    int ret = truncate(fileName, size);
    if (ret < 0) {
        printf("truncate new db file failed, desc:%s\n", strerror(errno));
        close(fd);   // don't leak the descriptor on the error path
        return false;
    }
    close(fd);
    return true;
}
// Smoke test for the record store: create the file, add/read records in a
// timing loop, then update records in place and re-read them.
// NOTE(review): this calls zdbAdd(), which is not defined in this file —
// presumably declared in ZDB.h (alias of zdbAddByString?); confirm.
void main_zdb() {
    //void main() {
    int len = sizeof(zrecord);
    char* dbName = "/tmp/test";
    int dbSize = 1000000;
    // NOTE(review): the file is created at 100000000 bytes but closed with
    // dbSize (1000000) below — the two sizes should probably agree.
    if (!zdbCreate(dbName, 100000000)) {
        return;
    }
    zdb* db = zdbInit(dbName);
    ZMap* map = zmapInit();
    zmapPut(map, "userName", "张新伟");
    zmapPut(map, "password", "<PASSWORD>");
    // Timing loop: 10k add+read round-trips.
    // NOTE(review): difftime expects time_t arguments — confirm the return
    // type of ztimeSince1970 matches.
    time_t s = ztimeSince1970();
    for (int i = 0; i < 10000; i++) {
        int pos = zdbAdd(db, zmapToString(map));
        zdbReadToString(db, pos);
    }
    printf("time = %.0f\n", difftime(ztimeSince1970(), s));
    // First record: add, print, update in place, print again.
    int pos = zdbAdd(db, zmapToString(map));
    printf("pos = %d,\n content = %s\n", pos, zdbReadToString(db, pos));
    zmapPut(map, "age", "26");
    zdbUpdate(db, pos, zmapToString(map));
    printf("pos = %d,\n content = %s\n", pos, zdbReadToString(db, pos));
    // Second record: same add/update round-trip.
    int pos2 = zdbAdd(db, zmapToString(map));
    printf("pos = %d,\n content = %s\n", pos2, zdbReadToString(db, pos2));
    zmapPut(map, "age", "26");
    zdbUpdate(db, pos2, zmapToString(map));
    printf("pos = %d,\n content = %s\n", pos2, zdbReadToString(db, pos2));
    zdbClose(db, dbSize);
}
| deae6b7917e1d8283a067f252a39e211606197aa | [
"C"
] | 13 | C | zhangxiaov/zc | 0baebb74239bc33ea09bd172c6721172114180c8 | d881c7c16ee8f7aa3c193cff617b35a7c36803f4 |
refs/heads/master | <repo_name>jpc0016/Image-Encrypt-Lab05<file_sep>/jpc0016_lab5.py
# This is a script intended to encrypt images in either Electronic Code Book (ECB) and Cipher Block Chaining (CBC) modes.
import getopt
from PIL import Image
from Crypto.Cipher import AES
from Crypto import Random
import math
import os
import sys
"""
The following function getopt() is intended to check command line arguments using sys.argy and assign them to variables in the script.
"""
try:
opts, args = getopt.getopt(sys.argv[1:], "i:o:k:m:")
except getopt.GetoptError, err:
usage(err)
for opt, arg in opts:
if opt in ('-i'):
global im
im = Image.open(arg)
elif opt in ('-o'):
global of
of = arg
elif opt in ('-k'):
global key
key = arg
if len(key) != 16:
sys.stderr.write("E: <key> must be 16 characters long")
sys.stderr.flush()
elif opt in ('-m'):
global mode
mode_in = arg
if mode_in == 'CBC':
mode = AES.MODE_CBC
elif mode_in == 'ECB':
mode = AES.MODE_ECB
else:
sys.stderr.write("E: <mode> must be 'CBC or 'ECB'")
sys.stderr.flush()
"""Convert image to byte object"""
byte_object = im.tobytes()
"""
Next step is to check length and pad bytes with PKCS7. This function Performs padding on the given plaintext to ensure that it is a multiple of the given block_size value in the parameter. Uses the PKCS7 standard for performing padding.
We will pass byte_object to _pad with a 16 byte block size.
"""
def _pad(text, block_size):
no_of_blocks = math.ceil(len(text)/float(block_size))
pad_value = int(no_of_blocks * block_size - len(text))
if pad_value == 0:
return text
elif pad_value >= 10:
pad_char = chr(70 - (15 - pad_value))
return text + pad_char * pad_value
else:
pad_char = chr(57 -(9 - pad_value))
return text + pad_char * pad_value
padded_output = _pad(byte_object, 16)
"""
Next step is to encrypt the bytes in the padded data. Set up a new cipher with key, mode and iv. Pass the cipher to encrypt() function.
ECB and CBC modes are used. The mode is already checked in the first elif statement.
The padded_output is passed to encrypt() with a key, encryption mode, and initialization vector (IV) set to encryption_suite
"""
# NOTE(review): the random IV is generated but never persisted or emitted,
# so a CBC ciphertext produced here cannot be decrypted later — TODO confirm
# whether decryption is ever intended.
iv = os.urandom(16)
if mode == AES.MODE_CBC:
    # CBC needs the IV; ECB takes no IV argument.
    encryption_suite = AES.new(key, mode, iv)
    cipher_text = encryption_suite.encrypt(padded_output)
else:
    encryption_suite = AES.new(key, mode)
    cipher_text = encryption_suite.encrypt(padded_output)
"""
This step involves converting bytes back to an image object using frombytes(). The function takes three arguments at a minimum: mode, size, and unpacked pixel data
New image object is subsequently saved as type jpg or png
"""
# NOTE(review): 'RGBA' assumes the input image has 4 bytes per pixel; for an
# RGB or grayscale source the byte count will not match im.size — TODO confirm.
encrypt_image = Image.frombytes('RGBA', im.size, cipher_text)
encrypt_image.save(of)
<file_sep>/README.md
# Image-Encrypt-Lab05
This is a script intended to encrypt images in either Electronic Code Book (ECB) or Cipher Block Chaining (CBC) mode.
| c3f8fdace9a4080699ab6ea80e4ad3fc7e9cd851 | [
"Markdown",
"Python"
] | 2 | Python | jpc0016/Image-Encrypt-Lab05 | b5ffed0c3a01e74b6e6bc3b7d665d5999ac4f18c | 038fc135d96105868324d86ac8695bd3c700251b |
refs/heads/master | <repo_name>i-r-b/wcicmedia<file_sep>/inventory/views.py
from django.shortcuts import render
from django.contrib import messages
# Create your views here.
from django.contrib.auth.mixins import LoginRequiredMixin,PermissionRequiredMixin
from django.urls import reverse
from django.shortcuts import get_object_or_404
from django.views import generic
from inventory.models import Additive, Chemical, Bottle
from . import models
class CreateChemical(LoginRequiredMixin,generic.CreateView):
    """Create a new Chemical from a basic model form (login required)."""
    fields = ('name','atomic_weight','cas_number')
    model = Chemical
class CreateBottle(LoginRequiredMixin,generic.CreateView):
    """Create a Bottle; stamps the logged-in user as the receiver."""
    # 'recieved' matches the (misspelled) model field name — do not "fix" here alone.
    fields = ('chemical','company','catalog_number','base_volume','lot_number','price','current_volume','recieved','expiration')
    model = Bottle
    def form_valid(self,form):
        """Attach request.user as recieved_by before saving."""
        self.object = form.save(commit=False)
        self.object.recieved_by = self.request.user
        self.object.save()
        return super().form_valid(form)
class CreateAdditive(LoginRequiredMixin,generic.CreateView):
    """Create an Additive; stamps the logged-in user as the maker."""
    # NOTE(review): the Additive model also has a required 'batchid' field that
    # this form omits — confirm it is populated elsewhere.
    fields = ('bottle','concentration','date_made','volume','filtered')
    model = Additive
    def form_valid(self,form):
        """Attach request.user as made_by before saving."""
        self.object = form.save(commit=False)
        self.object.made_by = self.request.user
        self.object.save()
        return super().form_valid(form)
class OpenBottle(LoginRequiredMixin,generic.UpdateView):
    """Mark a Bottle as opened: records the date and the opening user."""
    fields = ('date_opened',)
    model = Bottle
    template_name = 'inventory/bottle_open_form.html'
    def form_valid(self,form):
        """Attach request.user as opened_by before saving."""
        self.object = form.save(commit=False)
        self.object.opened_by = self.request.user
        self.object.save()
        return super().form_valid(form)
class ChemicalDetail(LoginRequiredMixin,generic.DetailView):
    """Chemical page: lists its bottles and the distinct company/catalog pairs."""
    model = Chemical
    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        # All bottles of this chemical, plus distinct (company, catalog_number)
        # pairs for grouping in the template.
        context['bottle_list'] = Bottle.objects.filter(chemical=context['chemical'])
        context['uniquecatnums'] = Bottle.objects.filter(chemical=context['chemical']).values("company",'catalog_number').distinct()
        return context
class BottleDetail(LoginRequiredMixin,generic.DetailView):
    """Bottle page: sibling bottles of the same catalog number vs. others."""
    model = Bottle
    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        # Same product (catalog number) vs. same chemical from other products.
        context['same_bottle_type'] = Bottle.objects.filter(catalog_number=context['bottle'].catalog_number)
        context['allbottles'] = Bottle.objects.filter(chemical=context['bottle'].chemical).exclude(catalog_number=context['bottle'].catalog_number)
        return context
class AdditiveDetail(LoginRequiredMixin,generic.DetailView):
    """Plain detail page for a single Additive."""
    model = Additive
class InventoryList(LoginRequiredMixin,generic.ListView):
    """All chemicals, with the distinct company/catalog pairs across all bottles."""
    model = Chemical
    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['uniquecatnums'] = Bottle.objects.values("company",'catalog_number').distinct()
        return context
class InventoryHome(LoginRequiredMixin,generic.TemplateView):
    """Static landing page for the inventory app."""
    template_name = 'inventory/inventory_base.html'
<file_sep>/mediarecipes/apps.py
from django.apps import AppConfig
class MediarecipesConfig(AppConfig):
    """Django app configuration for the mediarecipes app."""
    name = 'mediarecipes'
<file_sep>/mediarecipes/migrations/0001_initial.py
# Generated by Django 2.0.5 on 2018-11-15 22:57
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial schema for the mediarecipes app
    (Recipe, Request, Step and its pHStep/ReagentStep/SterilizeStep
    multi-table-inheritance children).

    NOTE(review): generated by `makemigrations`; only comments were added
    here — schema changes belong in a new migration, never in this file.
    """
    initial = True
    dependencies = [
        ('inventory', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Recipe',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date_created', models.DateTimeField()),
                ('name', models.CharField(max_length=256)),
                ('container', models.CharField(choices=[('a', 'Split Plate'), ('b', 'Thin Plate'), ('c', 'Thick Plate'), ('d', 'Plant Con'), ('e', 'Sundae Cup'), ('f', 'other')], max_length=100)),
                ('recipe_id', models.CharField(max_length=4, unique=True)),
                ('comments', models.TextField()),
                ('created_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['recipe_id'],
            },
        ),
        migrations.CreateModel(
            name='Request',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('volume', models.IntegerField()),
                ('number_requested', models.IntegerField()),
                ('date_requested', models.DateTimeField()),
                ('date_needed', models.DateField()),
                ('initial_comments', models.TextField(max_length=180)),
                ('completed', models.BooleanField(default=False)),
                ('date_cancelled', models.DateTimeField(blank=True, null=True)),
                ('batchid', models.CharField(blank=True, max_length=150)),
                ('container', models.CharField(blank=True, max_length=150)),
                ('volume_made', models.IntegerField(blank=True, null=True)),
                ('number_made', models.IntegerField(blank=True, null=True)),
                ('total_volume_made', models.IntegerField(blank=True, null=True)),
                ('date_made', models.DateTimeField(blank=True, null=True)),
                ('final_recipe', models.TextField(blank=True, max_length=180)),
                ('final_comments', models.TextField(blank=True, max_length=180)),
                ('number_contaminated', models.IntegerField(blank=True, null=True)),
                ('date_contamination_found', models.DateField(blank=True, null=True)),
                ('number_wasted', models.IntegerField(blank=True, null=True)),
                ('date_wasted', models.DateField(blank=True, null=True)),
                ('waste_comments', models.TextField(blank=True, max_length=180)),
                ('cancelled_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='cancelled_by_user', to=settings.AUTH_USER_MODEL)),
                ('made_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='made_by_user', to=settings.AUTH_USER_MODEL)),
                ('media_recipe', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mediarecipes.Recipe')),
                ('requested_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='requested_by_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Step',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('number', models.IntegerField()),
            ],
        ),
        # The three step subclasses below use multi-table inheritance: each
        # shares its primary key with a parent Step row via step_ptr.
        migrations.CreateModel(
            name='pHStep',
            fields=[
                ('step_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='mediarecipes.Step')),
                ('ingredient', models.TextField(choices=[('1', 'KOH'), ('2', 'NaOH'), ('3', 'HCl')])),
                ('ph_to', models.DecimalField(decimal_places=2, max_digits=4)),
            ],
            bases=('mediarecipes.step',),
        ),
        migrations.CreateModel(
            name='ReagentStep',
            fields=[
                ('step_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='mediarecipes.Step')),
                ('amount', models.DecimalField(decimal_places=3, max_digits=10)),
                ('unit', models.TextField(choices=[('1', 'g'), ('2', 'mg'), ('3', 'ug'), ('4', 'M'), ('5', 'mM'), ('6', 'uM')])),
                ('ingredient', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='inventory.Chemical')),
            ],
            bases=('mediarecipes.step',),
        ),
        migrations.CreateModel(
            name='SterilizeStep',
            fields=[
                ('step_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='mediarecipes.Step')),
                ('sterilize', models.TextField(choices=[('1', 'Autoclave for 25 minutes'), ('2', 'Filter Sterilize')])),
            ],
            bases=('mediarecipes.step',),
        ),
        migrations.AddField(
            model_name='step',
            name='recipe',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mediarecipes.Recipe'),
        ),
    ]
<file_sep>/mediarecipes/models.py
from django.db import models
from django.urls import reverse
from django.conf import settings
from django.forms import ModelForm
# Create your models here.
from django.contrib.auth import get_user_model
from inventory.models import Chemical, Bottle, Additive
User = get_user_model()
# Container types a recipe can be dispensed into.
CONTAINER_CHOICES =(
    ('a','Split Plate'),
    ('b','Thin Plate'),
    ('c','Thick Plate'),
    ('d','Plant Con'),
    ('e','Sundae Cup'),
    ('f','other')
)
# Mass/molarity units for reagent steps.
UNIT_CHOICES =(
    ('1','g'),
    ('2','mg'),
    ('3','ug'),
    ('4','M'),
    ('5','mM'),
    ('6','uM'),
)
# Acids/bases usable in pH-adjustment steps.
ACID_BASE_CHOICES =(
    ('1','KOH'),
    ('2','NaOH'),
    ('3','HCl'),
)
# Lifecycle states of a media Request.
REQUEST_STATUS_CHOICES =(
    ('1','Queued'),
    ('2', 'Started'),
    ('3', 'Complete'),
    ('4','Canceled'),
)
class Recipe(models.Model):
    """A named media recipe, identified by a short unique recipe_id.

    The actual preparation instructions live in the related Step subclasses
    (ReagentStep, pHStep, SterilizeStep), ordered by their `number` field.
    """
    created_by = models.ForeignKey(User,on_delete=models.CASCADE)
    date_created = models.DateTimeField()
    name = models.CharField(max_length=256)
    container = models.CharField(max_length=100,choices=CONTAINER_CHOICES)
    # Short human-facing identifier, e.g. "A001"; unique across recipes.
    recipe_id = models.CharField(max_length=4,unique=True)
    comments = models.TextField()
    def __str__(self):
        # e.g. "A001-LB agar"
        return str(self.recipe_id) + "-" + str(self.name)
    class Meta:
        ordering = ['recipe_id']
class Request(models.Model):
    """One batch request for a Recipe, tracked from submission through
    preparation to (optional) contamination/waste accounting.
    """
    # --- submission ---
    media_recipe = models.ForeignKey(Recipe,on_delete=models.CASCADE)
    volume = models.IntegerField()
    number_requested = models.IntegerField()
    requested_by = models.ForeignKey(User,related_name='requested_by_user',on_delete=models.CASCADE)
    date_requested = models.DateTimeField()
    date_needed = models.DateField()
    initial_comments = models.TextField(max_length=180)
    # --- lifecycle ---
    status = models.CharField(max_length=100,choices=REQUEST_STATUS_CHOICES)
    completed = models.BooleanField(default=False)
    cancelled_by = models.ForeignKey(User,related_name='cancelled_by_user',on_delete=models.CASCADE,null=True,blank=True)
    date_cancelled = models.DateTimeField(null=True,blank=True)
    # --- fulfilment (filled in when the batch is actually made) ---
    batchid = models.CharField(max_length=150,blank=True)
    container = models.CharField(max_length=150,blank=True)
    volume_made = models.IntegerField(null=True,blank=True)
    number_made = models.IntegerField(null=True,blank=True)
    total_volume_made = models.IntegerField(null=True,blank=True)
    made_by = models.ForeignKey(User,related_name='made_by_user',on_delete=models.CASCADE,null=True,blank=True)
    date_made = models.DateTimeField(null=True,blank=True)
    final_recipe = models.TextField(max_length=180,blank=True)
    final_comments = models.TextField(max_length=180,blank=True)
    # --- contamination / waste accounting ---
    number_contaminated = models.IntegerField(null=True,blank=True)
    date_contamination_found = models.DateField(null=True,blank=True)
    number_wasted = models.IntegerField(null=True,blank=True)
    date_wasted = models.DateField(null=True,blank=True)
    waste_comments = models.TextField(max_length=180,blank=True)
    def __str__(self):
        # e.g. "REQ42"
        return 'REQ'+str(self.pk)
class FinalStep(models.Model):
    """Records which concrete Additive/Bottle was actually used for a step
    when fulfilling a Request."""
    request = models.ForeignKey(Request, on_delete=models.CASCADE)
    number = models.IntegerField()
    additive = models.ForeignKey(Additive, on_delete=models.CASCADE)
    bottle = models.ForeignKey(Bottle, on_delete=models.CASCADE)
class Step(models.Model):
    """Base class (multi-table inheritance) for one ordered step of a Recipe."""
    recipe = models.ForeignKey(Recipe,on_delete=models.CASCADE)
    number = models.IntegerField()
class ReagentStep(Step):
    """Add `amount` `unit` of a Chemical."""
    ingredient = models.ForeignKey(Chemical,on_delete=models.CASCADE)
    amount = models.DecimalField(max_digits=10,decimal_places=3)
    unit = models.TextField(choices=UNIT_CHOICES)
class pHStep(Step):
    """Adjust pH to `ph_to` using the chosen acid/base."""
    ingredient = models.TextField(choices=ACID_BASE_CHOICES)
    ph_to = models.DecimalField(max_digits=4, decimal_places=2)
class SterilizeStep(Step):
    """Sterilization method for the batch (autoclave or filter)."""
    sterilize = models.TextField(choices=(('1','Autoclave for 25 minutes'),('2','Filter Sterilize')))
<file_sep>/inventory/models.py
from django.db import models
from django.urls import reverse
from django.conf import settings
from django.utils.text import slugify
# Create your models here.
from django.contrib.auth import get_user_model
User = get_user_model()
class Chemical(models.Model):
    """A chemical species in the inventory, keyed by CAS number.

    The slug is regenerated from `name` on every save and used in URLs.
    NOTE(review): slug is unique=True but derived from a non-unique name
    column — two chemicals with the same name but different atomic weights
    would collide on save; confirm that is acceptable.
    """
    name = models.CharField(max_length=256)
    atomic_weight = models.DecimalField(max_digits=10,decimal_places=3)
    cas_number = models.CharField(max_length=25,unique=True)
    slug = models.SlugField(allow_unicode=True,unique=True)
    def __str__(self):
        return self.name
    def save(self,*args,**kwargs):
        # Keep the slug in sync with the (possibly edited) name.
        self.slug = slugify(self.name)
        super().save(*args,**kwargs)
    def get_absolute_url(self):
        return reverse('inventory:chemicaldet',args=[self.slug])
    class Meta:
        ordering = ['name']
        unique_together = ('name','atomic_weight')
class Bottle(models.Model):
    """One physical bottle of a Chemical, tracked from receipt through
    opening to disposal (who did each, and when)."""
    chemical = models.ForeignKey(Chemical,on_delete=models.CASCADE)
    company = models.CharField(max_length=256)
    catalog_number = models.CharField(max_length=256)
    base_volume = models.DecimalField(default=0.0,max_digits=10,decimal_places=3)
    lot_number = models.CharField(max_length=256)
    price = models.DecimalField(max_digits=10,decimal_places=2)
    current_volume = models.DecimalField(default=0.0, max_digits=10,decimal_places=3)
    expiration = models.DateField(null=True,blank=True)
    # 'recieved' misspelling is load-bearing: forms/migrations reference it.
    recieved = models.DateField()
    recieved_by = models.ForeignKey(User,related_name='recieved_by_user',on_delete=models.CASCADE)
    date_opened = models.DateTimeField(null=True,blank=True)
    opened_by = models.ForeignKey(User,related_name='opened_by_user',blank=True,null=True, on_delete=models.CASCADE)
    date_discarded = models.DateTimeField(null=True,blank=True)
    discarded_by = models.ForeignKey(User,related_name='discarded_by_user',blank=True, null=True,on_delete=models.CASCADE)
    def __str__(self):
        """e.g. "Agar (opened: 2019-01-25) 7"; "opened: False" when unopened."""
        # Identity check (`is None`) instead of the original `== None`:
        # idiomatic, and safe against any custom __eq__ on the field value.
        if self.date_opened is None:
            is_opened = ['False']
        else:
            is_opened = str(self.date_opened).split()
        return self.chemical.name + " (opened: " + is_opened[0] + ") " + str(self.id)
    def get_absolute_url(self):
        return reverse('inventory:inventoryhome')
    class Meta:
        ordering = ['chemical']
class Additive(models.Model):
    """A working solution prepared from a Bottle at a given concentration."""
    bottle = models.ForeignKey(Bottle,on_delete=models.CASCADE)
    # Concentration in mg/ml (per __str__ below).
    concentration = models.DecimalField(max_digits=10,decimal_places=3)
    date_made = models.DateTimeField()
    made_by = models.ForeignKey(User,on_delete=models.CASCADE)
    batchid = models.CharField(max_length=256)
    filtered = models.BooleanField()
    volume = models.DecimalField(default=0.0,max_digits=10,decimal_places=3)
    # date_discarded = models.DateTimeField(null=True,blank=True)
    # discarded_by = models.ForeignKey(User,related_name='additive_discarded_by',blank=True, null=True,on_delete=models.CASCADE)
    def __str__(self):
        # Drop a trailing ".000" so whole-number concentrations print cleanly.
        # NOTE(review): this assumes str(DecimalField) always shows exactly
        # three decimal places (decimal_places=3) — confirm.
        if str(self.concentration).split(".")[1] == '000':
            stringConcentration = str(self.concentration).split(".")[0]
        else:
            stringConcentration = str(self.concentration)
        stringVersion = self.bottle.chemical.name + " (" + stringConcentration + " mg/ml)"
        return stringVersion
    def get_absolute_url(self):
        return reverse('inventory:inventoryhome')
    class Meta:
        ordering = ['bottle','concentration']
<file_sep>/mediarecipes/views.py
from django.shortcuts import render
from django.urls import reverse_lazy, reverse
from django.views.generic import TemplateView,CreateView,View, ListView,DetailView
from django.http import HttpResponseRedirect,QueryDict
from django.contrib.auth.mixins import LoginRequiredMixin
from django.forms import formset_factory
from .forms import RecipeForm, StepForm, StepFormSet, pHStepFormSet, SterilizeStepFormSet, RequestForm
from .models import Recipe, Step, pHStep, SterilizeStep, Request, ReagentStep
from inventory.models import Chemical,Bottle, Additive
from itertools import chain
import datetime
# Create your views here.
class MediaHome(TemplateView):
    """Static landing page for the mediarecipes app."""
    template_name = 'mediarecipes/media_home.html'
    # NOTE(review): TemplateView ignores `model`; this attribute is unused.
    model = Chemical
class RecipeCreateView(LoginRequiredMixin,View):
    """Create a Recipe together with its reagent, pH and sterilize steps.

    GET renders one RecipeForm plus three step formsets; POST validates all
    four and, if every one is valid, persists the Recipe and one row per
    formset entry, then redirects to the recipe list.
    NOTE(review): nothing is wrapped in a transaction — a failure while
    saving steps would leave a partially-created recipe; confirm acceptable.
    """
    login_url = '/accounts/login/'
    template_name = 'mediarecipes/recipe_form.html'
    model = Recipe
    def get(self,request):
        # Empty form + empty formsets; prefixes keep the three formsets'
        # field names distinct in the combined POST payload.
        recipeform = RecipeForm()
        phstepformset = pHStepFormSet(prefix='ph')
        sterilizestepformset = SterilizeStepFormSet(prefix='sterilize')
        step_formset = StepFormSet(prefix='steps')
        return render(request,'mediarecipes/recipe_form.html',{'recipeform':recipeform,
                                                               'step_formset':step_formset,
                                                               'phstepformset':phstepformset,
                                                               'sterilizestepformset':sterilizestepformset,
                                                               })
    def post(self,request):
        recipeform = RecipeForm(request.POST)
        step_formset = StepFormSet(request.POST,request.FILES,prefix='steps')
        phstepformset = pHStepFormSet(request.POST,request.FILES,prefix='ph')
        sterilizestepformset = SterilizeStepFormSet(request.POST,request.FILES,prefix='sterilize')
        if recipeform.is_valid() and step_formset.is_valid() and phstepformset.is_valid() and sterilizestepformset.is_valid():
            # Build the Recipe manually (rather than form.save()) so the
            # creator and timestamp can be stamped server-side.
            new_recipe = Recipe()
            new_recipe.name = recipeform.cleaned_data['name']
            new_recipe.container = recipeform.cleaned_data['container']
            new_recipe.recipe_id = recipeform.cleaned_data['recipe_id']
            new_recipe.comments = recipeform.cleaned_data['comments']
            new_recipe.created_by = request.user
            # NOTE(review): naive datetime with USE_TZ would warn — confirm settings.
            new_recipe.date_created = datetime.datetime.now()
            new_recipe.save()
            # One ReagentStep per entry in the 'steps' formset.
            for form in step_formset:
                new_step = ReagentStep()
                new_step.recipe = new_recipe
                new_step.number = form.cleaned_data['number']
                new_step.ingredient = form.cleaned_data['ingredient']
                new_step.amount = form.cleaned_data['amount']
                new_step.unit = form.cleaned_data['unit']
                new_step.save()
            # One pHStep per entry in the 'ph' formset.
            for phform in phstepformset:
                new_ph_step = pHStep()
                new_ph_step.recipe = new_recipe
                new_ph_step.number = phform.cleaned_data['number']
                new_ph_step.ingredient = phform.cleaned_data['ingredient']
                new_ph_step.ph_to = phform.cleaned_data['ph_to']
                new_ph_step.save()
            # One SterilizeStep per entry in the 'sterilize' formset.
            for steriform in sterilizestepformset:
                new_steriliize_step = SterilizeStep()
                new_steriliize_step.recipe = new_recipe
                new_steriliize_step.number = steriform.cleaned_data['number']
                new_steriliize_step.sterilize = steriform.cleaned_data['sterilize']
                new_steriliize_step.save()
            return HttpResponseRedirect('/mediarecipes/allrecipes/')
        else:
            # Re-render with bound (error-carrying) form/formsets.
            # NOTE(review): the pH and sterilize formsets are not passed back
            # here, so their errors are silently dropped — confirm intended.
            return render(request,'mediarecipes/recipe_form.html',{'recipeform':recipeform, 'step_formset':step_formset,})
class MediaRecipeListView(ListView):
    """All recipes (ordered by recipe_id via the model Meta)."""
    model = Recipe
class QueueListView(ListView):
    """All media requests, rendered as the work queue."""
    template_name = 'mediarecipes/queue.html'
    model = Request
class RecipeDetailView(DetailView):
    """Recipe page with its steps sorted by step number."""
    model = Recipe
    def get_context_data(self,**kwargs):
        context = super().get_context_data(**kwargs)
        context['orderedsteps'] = Step.objects.filter(recipe=context['object']).order_by('number')
        return context
class RequestDetailView(DetailView):
    """Request page: the recipe's ordered steps plus the stock available to
    fulfil them (opened bottles and all additives)."""
    model = Request
    def get_context_data(self,**kwargs):
        context = super().get_context_data(**kwargs)
        context['orderedsteps'] = Step.objects.filter(recipe=context['object'].media_recipe).order_by('number')
        # Only bottles that have been opened are offered for use.
        mybottlelist = Bottle.objects.exclude(opened_by__isnull=True)
        myadditivelist = Additive.objects.all()
        # myfilteredlist = chain(mybottlelist, myadditivelist)
        # print(myfilteredlist)
        context['mybottlelist'] = mybottlelist
        context['myadditivelist'] = myadditivelist
        return context
class RequestFormView(View):
    """Submit a new media Request; on success redirects to the queue."""
    model = Request
    success_url = '/mediarecipes/queue/'
    def get(self,request):
        requestform = RequestForm()
        return render(request,'mediarecipes/request_form.html',{'requestform':requestform,})
    def post(self,request):
        requestform = RequestForm(request.POST)
        if requestform.is_valid():
            # Build the Request manually so requester and timestamp are
            # stamped server-side rather than taken from the form.
            new_request = Request()
            new_request.media_recipe = requestform.cleaned_data['media_recipe']
            new_request.volume = requestform.cleaned_data['volume']
            new_request.number_requested = requestform.cleaned_data['number_requested']
            new_request.requested_by = request.user
            new_request.date_requested = datetime.datetime.now()
            new_request.date_needed = requestform.cleaned_data['date_needed']
            new_request.initial_comments = requestform.cleaned_data['initial_comments']
            # NOTE(review): `status` is never set here and has no default, so
            # new requests are saved with an empty status — confirm intended.
            new_request.save()
            return HttpResponseRedirect('/mediarecipes/queue/')
        else:
            # Re-render with the bound form so validation errors display.
            return render(request,'mediarecipes/request_form.html',{'requestform':requestform,})
<file_sep>/accounts/forms.py
from django.contrib.auth import get_user_model
from django.contrib.auth.forms import UserCreationForm
class UserCreateForm(UserCreationForm):
    """Sign-up form for the project's user model, keyed on email.

    The username is derived from the email on save, so the form only asks
    for email, names and the two password fields.
    """
    class Meta:
        # UserCreationForm's password fields are named password1/password2;
        # the previous tuple contained two identical '<PASSWORD>' placeholders
        # (an anonymisation artifact), which is not a valid field list.
        fields = ('email','first_name','last_name','password1','password2',)
        model = get_user_model()
    def __init__(self,*args,**kwargs):
        super().__init__(*args,**kwargs)
    def save(self, commit=True):
        """Save the user with username mirrored from the email address."""
        user = super(UserCreateForm, self).save(commit=False)
        user.username = self.cleaned_data["email"]
        if commit:
            user.save()
        return user
<file_sep>/testingifle.py
import ast

# Compare the *parsed* dict literals, not the raw strings: the two strings
# below differ only in key order, so plain string equality reported False
# even though the dictionaries they describe are equal.
dict1 = "{'step1':('Timentin',1),'step2':('Carb',2)}"
dict2 = "{'step2':('Carb',2),'step1':('Timentin',1)}"
same = ast.literal_eval(dict1) == ast.literal_eval(dict2)
print(same)
<file_sep>/mediarecipes/templates/mediarecipes/media_home.html
{% extends "mediarecipes/mediarecipes_base.html" %}
{# Placeholder landing page for the mediarecipes app (rendered by MediaHome). #}
{% block content %}
    <h1>We made it!</h1>
{% endblock %}
<file_sep>/mediarecipes/migrations/0002_request_status.py
# Generated by Django 2.0.5 on 2019-01-25 23:24
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: adds the `status` lifecycle field to Request.

    The one-off default '1' (Queued) was used only to backfill existing rows
    (preserve_default=False), so the model itself has no default.
    """
    dependencies = [
        ('mediarecipes', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='request',
            name='status',
            field=models.CharField(choices=[('1', 'Queued'), ('2', 'Started'), ('3', 'Complete'), ('4', 'Canceled')], default='1', max_length=100),
            preserve_default=False,
        ),
    ]
<file_sep>/mediarecipes/admin.py
from django.contrib import admin
from .models import Recipe, Request, Step, pHStep, SterilizeStep, ReagentStep
# Register your models here.
# Inline editors so a recipe's three step types can be edited on the
# Recipe admin page itself.
class ReagentStepInLine(admin.TabularInline):
    model = ReagentStep
class pHStepInLine(admin.TabularInline):
    model = pHStep
class SterilizeStepInLine(admin.TabularInline):
    model = SterilizeStep
class RecipeAdmin(admin.ModelAdmin):
    """Recipe admin with all step types editable inline."""
    inlines = [
        ReagentStepInLine,
        pHStepInLine,
        SterilizeStepInLine,
    ]
# Register everything else with the default ModelAdmin.
admin.site.register(Recipe,RecipeAdmin)
admin.site.register(Request)
admin.site.register(Step)
admin.site.register(pHStep)
admin.site.register(SterilizeStep)
admin.site.register(ReagentStep)
<file_sep>/inventory/migrations/0001_initial.py
# Generated by Django 2.0.5 on 2018-11-15 22:57
#
# Initial schema for the inventory app: Chemical (catalog entry), Bottle
# (a physical container of a chemical), and Additive (a prepared solution
# drawn from a bottle). Bottles and additives track which user received/
# opened/discarded/made them via foreign keys to AUTH_USER_MODEL.
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        # Depend on whichever user model the project has configured.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Additive',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('concentration', models.DecimalField(decimal_places=3, max_digits=10)),
                ('date_made', models.DateTimeField()),
                ('batchid', models.CharField(max_length=256)),
                ('filtered', models.BooleanField()),
                ('volume', models.DecimalField(decimal_places=3, default=0.0, max_digits=10)),
            ],
            options={
                'ordering': ['bottle', 'concentration'],
            },
        ),
        migrations.CreateModel(
            name='Bottle',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('company', models.CharField(max_length=256)),
                ('catalog_number', models.CharField(max_length=256)),
                ('base_volume', models.DecimalField(decimal_places=3, default=0.0, max_digits=10)),
                ('lot_number', models.CharField(max_length=256)),
                ('price', models.DecimalField(decimal_places=2, max_digits=10)),
                ('current_volume', models.DecimalField(decimal_places=3, default=0.0, max_digits=10)),
                ('expiration', models.DateField(blank=True, null=True)),
                # NOTE(review): 'recieved' is misspelled but is the migrated
                # column name; renaming it requires a follow-up migration.
                ('recieved', models.DateField()),
                ('date_opened', models.DateTimeField(blank=True, null=True)),
                ('date_discarded', models.DateTimeField(blank=True, null=True)),
            ],
            options={
                'ordering': ['chemical'],
            },
        ),
        migrations.CreateModel(
            name='Chemical',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=256)),
                ('atomic_weight', models.DecimalField(decimal_places=3, max_digits=10)),
                ('cas_number', models.CharField(max_length=25, unique=True)),
                ('slug', models.SlugField(allow_unicode=True, unique=True)),
            ],
            options={
                'ordering': ['name'],
            },
        ),
        # A chemical is uniquely identified by its name/atomic-weight pair.
        migrations.AlterUniqueTogether(
            name='chemical',
            unique_together={('name', 'atomic_weight')},
        ),
        # Foreign keys are added after all models exist to avoid ordering issues.
        migrations.AddField(
            model_name='bottle',
            name='chemical',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='inventory.Chemical'),
        ),
        migrations.AddField(
            model_name='bottle',
            name='discarded_by',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='discarded_by_user', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='bottle',
            name='opened_by',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='opened_by_user', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='bottle',
            name='recieved_by',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='recieved_by_user', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='additive',
            name='bottle',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='inventory.Bottle'),
        ),
        migrations.AddField(
            model_name='additive',
            name='made_by',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
    ]
<file_sep>/mediarecipes/templates/mediarecipes/queue.html
{% extends "mediarecipes/mediarecipes_base.html" %}
{# Read-only table of pending media requests; REQID links to the detail view. #}
{% block content %}
<h1>Request Queue</h1>
<table class='table table-hover'>
<thead class='thead-light'>
<tr>
<th>REQID</th>
<th>Recipe</th>
<th>Volume</th>
<th>Number</th>
<th>Requested By</th>
<th>Needed By</th>
</tr>
</thead>
<tbody>
{# One row per Request object supplied by the queue ListView. #}
{% for request in object_list %}
<tr>
<td><a href="{% url 'mediarecipes:requestdet' pk=request.pk %}">REQ{{request.pk}}</a></td>
<td>{{request.media_recipe}}</td>
<td>{{request.volume}}</td>
<td>{{request.number_requested}}</td>
<td>{{request.requested_by}}</td>
<td>{{request.date_needed}}</td>
</tr>
{% endfor %}
</tbody>
</table>
{% endblock %}
<file_sep>/mediarecipes/templates/mediarecipes/recipe_list.html
{% extends "mediarecipes/mediarecipes_base.html" %}
{# Table of all recipes; the Details column links to each recipe's page. #}
{% block content %}
<h1>Media Recipes</h1>
<table class='table table-hover'>
<thead class='thead-light'>
<tr>
<th>ID</th>
<th>Title</th>
<th>Description</th>
<th>Recipe Details</th>
</tr>
</thead>
<tbody>
{% for recipe in object_list %}
{# Fix: removed a stray empty <a> that sat directly inside <tr>. Only
   <td>/<th> cells are valid children of a table row, and the anchor had
   no content, so it rendered nothing and produced invalid HTML. #}
<tr>
<td>{{recipe.recipe_id}}</td>
<td>{{recipe.name}}</td>
<td>{{recipe.comments}}</td>
<td><a href="{% url 'mediarecipes:recipedet' pk=recipe.pk %}">Details</a></td>
</tr>
{% endfor %}
</tbody>
</table>
{% endblock %}
<file_sep>/inventory/admin.py
from django.contrib import admin
from .models import Additive,Bottle,Chemical

# Register your models here.
# Inline admin wiring kept for reference; currently disabled.
# class BottlesInLine(admin.TabularInline):
#     model = Bottle
#
# class ChemicalAdmin(admin.ModelAdmin):
#     inlines = [
#         BottlesInLine,
#     ]

# Plain registrations: each model gets the default ModelAdmin.
admin.site.register(Additive)
admin.site.register(Bottle)
admin.site.register(Chemical)
<file_sep>/mediarecipes/urls.py
from django.urls import path
from . import views

# Namespace used by {% url 'mediarecipes:...' %} reversals in templates.
app_name = 'mediarecipes'

urlpatterns = [
    path('',views.MediaHome.as_view(),name='mediarecipeshome'),
    path('createnewrecipe/',views.RecipeCreateView.as_view(),name='newrecipe'),
    path('allrecipes/',views.MediaRecipeListView.as_view(),name='recipelist'),
    # Detail pages are keyed by primary key (untyped <pk> converter).
    path('allrecipes/<pk>',views.RecipeDetailView.as_view(),name='recipedet'),
    path('reciperequest/',views.RequestFormView.as_view(),name='request'),
    path('queue/',views.QueueListView.as_view(),name='queue'),
    path('queue/request/<pk>',views.RequestDetailView.as_view(),name='requestdet'),
]
<file_sep>/inventory/urls.py
from django.urls import path
from . import views

# Namespace used by {% url 'inventory:...' %} reversals in templates.
app_name = 'inventory'

urlpatterns = [
    path('',views.InventoryList.as_view(),name='inventoryhome'),
    path('newchemical/',views.CreateChemical.as_view(),name='createchem'),
    path('newbottle/',views.CreateBottle.as_view(),name='createbot'),
    path('newadditive/',views.CreateAdditive.as_view(),name='createadt'),
    # Chemicals are addressed by slug; bottles/additives by integer pk.
    path('chemical/<slug>',views.ChemicalDetail.as_view(),name='chemicaldet'),
    path('bottle/<int:pk>',views.BottleDetail.as_view(),name='bottledet'),
    path('additive/<int:pk>',views.AdditiveDetail.as_view(),name='additivedet'),
    path('bottle/<int:pk>/open',views.OpenBottle.as_view(),name='openbottle'),
]
<file_sep>/mediarecipes/forms.py
from django import forms
from .models import Recipe, Request, Step, pHStep, SterilizeStep, ReagentStep
from django.contrib.auth import get_user_model
from django.forms import formset_factory
from django.forms.models import ModelForm
import datetime

User = get_user_model()


class RecipeForm(forms.Form):
    # Value/label pairs for the container dropdown; the single-letter value
    # is what is submitted/stored.
    CONTAINER_CHOICES =(
        ('a','Split Plate'),
        ('b','Thin Plate'),
        ('c','Thick Plate'),
        ('d','Plant Con'),
        ('e','Sundae Cup'),
        ('f','other')
    )
    name = forms.CharField(label='Name',max_length=100)
    container = forms.CharField(
        label='Container',
        widget=forms.Select(choices=CONTAINER_CHOICES,attrs={'class':'manuallyadded'})
    )
    recipe_id = forms.CharField(label='Recipe ID',max_length=4)
    # max_length=None leaves the comments field unbounded.
    comments = forms.CharField(label='Comments',max_length=None)


class StepForm(ModelForm):
    # Reagent-addition step; `recipe` is excluded because the view attaches
    # the parent recipe after validation.
    class Meta:
        model = ReagentStep
        exclude = ['recipe']


class phStepForm(ModelForm):
    # NOTE(review): ACID_BASE_CHOICES is defined here but not referenced by
    # any field in this form -- confirm whether it should back a widget.
    ACID_BASE_CHOICES =(
        ('1','KOH'),
        ('2','NaOH'),
        ('3','HCl'),
    )
    class Meta:
        model = pHStep
        exclude = ['recipe']


class SterilizeStepForm(ModelForm):
    class Meta:
        model = SterilizeStep
        exclude = ['recipe']


class RequestForm(ModelForm):
    # User-facing request form; status and audit fields are set server-side.
    class Meta:
        model = Request
        fields = ['media_recipe','volume','number_requested','date_needed','initial_comments']


# Formsets used by the recipe-builder page to add repeatable step rows.
StepFormSet = formset_factory(StepForm)
pHStepFormSet = formset_factory(phStepForm)
SterilizeStepFormSet = formset_factory(SterilizeStepForm)
<file_sep>/static/js/recipe_page.js
// Run once on page load: normalize row styling, then capture the template
// rows used by the add-row buttons.
beautifyRows();
setGlobalNames();
/**
 * Apply Bootstrap form classes and the `required` flag to every <input>
 * and <select> inside the recipe table, skipping row 0 (the header row).
 */
function beautifyRows() {
    var recipeTable = document.getElementById('recipe_table');
    var rows = recipeTable.getElementsByTagName('tr');
    var rowCount = rows.length;
    // Start at 1: row 0 is the header row and contains no form controls.
    for (var i = 1; i < rowCount; i++) {
        var inputCells = rows[i].getElementsByTagName('input');
        for (var j = 0; j < inputCells.length; j++) {
            inputCells[j].className = 'form-control';
            // Fix: `required` is a boolean DOM property; the original
            // assigned the string 'True', which only worked because any
            // non-empty string is truthy.
            inputCells[j].required = true;
        }
        var selectCells = rows[i].getElementsByTagName('select');
        for (var k = 0; k < selectCells.length; k++) {
            selectCells[k].className = 'custom-select';
            selectCells[k].required = true;
        }
    }
}
/**
 * Clone the three template rows (step / pH / sterilize) and remove the pH
 * and sterilize templates from the DOM so addRow() can re-insert them on
 * demand. Also zeroes the Django formset TOTAL_FORMS counters.
 *
 * NOTE(review): phrow/steprow/sterilizerow are assigned without `var`,
 * making them implicit globals shared with addRow() -- confirm intended.
 */
function setGlobalNames() {
    var recipeTable = document.getElementById('recipe_table');
    var bodyOfTable = recipeTable.getElementsByTagName('tbody')[0];
    var ogsteprow = document.getElementById('steprow');
    var ogphsteprow = document.getElementById('phstep');
    var ogsteristep = document.getElementById('steristep');
    phrow = ogphsteprow.cloneNode(true);
    steprow = ogsteprow.cloneNode(true);
    sterilizerow = ogsteristep.cloneNode(true);
    // The original step row stays visible but is renamed so the next clone
    // can take over the 'steprow' id.
    ogsteprow.setAttribute('id','ogsteprow');
    bodyOfTable.removeChild(ogphsteprow);
    bodyOfTable.removeChild(ogsteristep);
    // No pH/sterilize forms exist yet from the formset's point of view.
    document.getElementById('id_ph-TOTAL_FORMS').setAttribute('value',0);
    document.getElementById('id_sterilize-TOTAL_FORMS').setAttribute('value',0);
};
/**
 * Append the given template row to the recipe table and update the
 * matching Django formset TOTAL_FORMS counter. pH and sterilize rows can
 * be added at most once, so their buttons are hidden after use.
 * @param rowtype - a previously cloned <tr> (steprow / phrow / sterilizerow)
 */
function addRow(rowtype) {
    var recipeTable = document.getElementById('recipe_table');
    var bodyOfTable = recipeTable.getElementsByTagName('tbody')[0];
    bodyOfTable.append(rowtype);
    if (rowtype.id=='steprow'){
        // Step rows are repeatable: renumber and prepare the next template.
        renameAndRecopyStepRow();
    } else if (rowtype.id == 'phstep') {
        document.getElementById('pHbutton').setAttribute('hidden',true);
        document.getElementById('id_ph-TOTAL_FORMS').setAttribute('value',1);
    } else if (rowtype.id == 'steristep') {
        document.getElementById('sterilizeButton').setAttribute('hidden',true);
        document.getElementById('id_sterilize-TOTAL_FORMS').setAttribute('value',1);
    }
};
/**
 * After a step row is inserted: clone a fresh template for the next
 * insertion, demote the inserted row's id to 'ogsteprow', and rewrite its
 * input ids/names to the Django formset scheme `steps-<index>-<field>`.
 */
function renameAndRecopyStepRow() {
    var ogsteprow = document.getElementById('steprow')
    steprow = ogsteprow.cloneNode(true);
    ogsteprow.setAttribute('id','ogsteprow');
    var totalogs = tellMeNumberOfOgStepRows();
    // Field suffixes, in the same order as the row's <td> children.
    var typeList = ['-number','-ingredient','-amount','-unit'];
    var newid = 'id_steps-'+(totalogs-1);
    var newName = 'steps-'+(totalogs-1);
    var children = ogsteprow.children;
    for (var i=0; i<children.length; i++){
        children[i].setAttribute('id','td_'+newid+typeList[i]);
        children[i].children[0].setAttribute('id',newid+typeList[i]);
        children[i].children[0].setAttribute('name',newName+typeList[i]);
    }
    // Tell the formset how many step forms now exist.
    document.getElementById('id_steps-TOTAL_FORMS').setAttribute('value',totalogs);
}
/**
 * Count how many table rows on the page carry the 'ogsteprow' id, i.e.
 * how many step rows have been inserted and renamed so far.
 * @returns {number} count of inserted step rows
 */
function tellMeNumberOfOgStepRows() {
    var allRows = document.getElementsByTagName('tr');
    var count = 0;
    for (var idx = allRows.length - 1; idx >= 0; idx--) {
        if (allRows[idx].id === 'ogsteprow') {
            count++;
        }
    }
    return count;
}
// Confirmation popups wired to the finalize / edit buttons.
function finalizingAlert(){
    alert('You are finalizing this project!')
};
function editAlert() {
    alert('You are about to re-edit this project!')
};
// BUTTON script: the zip button marks the contact wrapper as selected,
// the epk button clears that mark. Each guard makes the class toggle a no-op
// when the wrapper is already in the target state.
$('.zip-button').on('click', function() {
    var wrapper = $('.contact-button-wrapper');
    if (!wrapper.hasClass('selected')) {
        wrapper.addClass('selected');
    }
});
$('.epk-button-half').on('click', function() {
    var wrapper = $('.contact-button-wrapper');
    if (wrapper.hasClass('selected')) {
        wrapper.removeClass('selected');
    }
});
| 2127d5dcf5279898a351d0ff2acef14a25cf1fdf | [
"JavaScript",
"Python",
"HTML"
] | 19 | Python | i-r-b/wcicmedia | 1b914b3df83ba01361bf0d36daef674d77b7e29e | 77436ab38afaf8366dcbefe2534d68dba0d75448 |
refs/heads/master | <file_sep>package com.jaspersoft.jasperserver.jrsh.common;
import com.jaspersoft.jasperserver.jaxrs.client.core.AuthenticationCredentials;
import com.jaspersoft.jasperserver.jaxrs.client.core.JasperserverRestClient;
import com.jaspersoft.jasperserver.jaxrs.client.core.RestClientConfiguration;
import com.jaspersoft.jasperserver.jaxrs.client.core.Session;
import com.jaspersoft.jasperserver.jaxrs.client.core.SessionStorage;
import com.jaspersoft.jasperserver.jrsh.common.exception.NoActiveSessionAvailableException;
import static com.jaspersoft.jasperserver.jrsh.common.SessionFactory.updateSharedSession;
/**
* @author <NAME>
*/
/**
 * Session helpers: re-establishes the shared JasperReports Server session
 * using the credentials and configuration captured at first login.
 */
public abstract class SessionUtil {

    /**
     * Re-authenticates against the server with the credentials stored in
     * the current shared session and publishes the fresh session for all
     * callers via {@code updateSharedSession}.
     *
     * @throws NoActiveSessionAvailableException if no shared session exists yet
     */
    public static void reopenSession() {
        Session current = SessionFactory.getSharedSession();
        if (current == null) {
            throw new NoActiveSessionAvailableException();
        }
        SessionStorage state = current.getStorage();
        RestClientConfiguration configuration = state.getConfiguration();
        AuthenticationCredentials creds = state.getCredentials();
        Session refreshed = new JasperserverRestClient(configuration)
                .authenticate(creds.getUsername(), creds.getPassword());
        updateSharedSession(refreshed);
    }
}
<file_sep>package com.jaspersoft.jasperserver.jrsh;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
import org.junit.runners.Suite.SuiteClasses;
// Placeholder integration suite: @SuiteClasses({}) registers no tests, so
// running this suite currently executes nothing. Add integration test
// classes to the annotation to activate it.
@RunWith(Suite.class)
@SuiteClasses({})
public class IntegrationTestSuit {
}
<file_sep>class Jrsh < Formula
desc "A Command Line Interface for JasperReports Server"
homepage "https://github.com/Jaspersoft/jrsh"
url "https://github.com/Jaspersoft/jrsh/releases/download/v2.0.6/jrsh-2.0.6.zip"
sha256 "4132106cbaf121d43cd0bc53d0cc72be604500cf8f7829f963211b094f217f56"
# Drop the prebuilt jar into libexec and create a `jrsh` wrapper script on
# PATH that launches it with `java -jar`.
def install
  libexec.install "jrsh.jar"
  bin.write_jar_script libexec/"jrsh.jar", "jrsh"
end
test do
system "#{bin}/jrsh"
end
end<file_sep><?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.jaspersoft</groupId>
<artifactId>jrsh</artifactId>
<version>2.0.7</version>
<properties>
<!-- Default unit test suite run by surefire (override with -DrunSuite). -->
<runSuite>**/UnitTestSuite.class</runSuite>
</properties>
<repositories>
<!-- Binary artifact repositories -->
<repository>
<id>jrs-ce-releases</id>
<name>JasperReports Server CE releases repository</name>
<url>http://jaspersoft.artifactoryonline.com/jaspersoft/jaspersoft-clients-releases/</url>
</repository>
<repository>
<id>jaspersoft</id>
<name>jaspersoft-releases</name>
<url>http://jaspersoft.artifactoryonline.com/jaspersoft/simple/jrs-ce-releases/</url>
</repository>
</repositories>
<dependencies>
<!-- JasperReports Server REST client -->
<dependency>
<groupId>com.jaspersoft</groupId>
<artifactId>jrs-rest-java-client</artifactId>
<version>6.0.3-ALPHA</version>
<exclusions>
<exclusion>
<artifactId>jasperserver-dto</artifactId>
<groupId>com.jaspersoft.jasperserver</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.jaspersoft.jasperserver</groupId>
<artifactId>jasperserver-dto</artifactId>
<version>6.0.1</version>
</dependency>
<!-- guava -->
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>18.0</version>
</dependency>
<!-- jline -->
<dependency>
<groupId>jline</groupId>
<artifactId>jline</artifactId>
<version>2.12.1</version>
</dependency>
<!-- lombok -->
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.16.4</version>
<scope>provided</scope>
</dependency>
<!-- test -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.11</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<version>1.10.19</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-module-junit4</artifactId>
<version>1.6.2</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-api-mockito</artifactId>
<version>1.6.2</version>
<scope>test</scope>
</dependency>
<!-- log -->
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
</dependency>
<!-- graph -->
<dependency>
<groupId>org.jgrapht</groupId>
<artifactId>jgrapht-core</artifactId>
<version>0.9.1</version>
</dependency>
<!-- reflection -->
<dependency>
<groupId>org.reflections</groupId>
<artifactId>reflections</artifactId>
<version>0.9.10</version>
</dependency>
<!-- collections -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.4</version>
</dependency>
<!-- yaml -->
<dependency>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
<version>1.15</version>
</dependency>
<!-- joda -->
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
<version>2.8.1</version>
</dependency>
<!-- zip
util -->
<!--<dependency>-->
<!--<groupId>org.zeroturnaround</groupId>-->
<!--<artifactId>zt-zip</artifactId>-->
<!--<version>1.8</version>-->
<!--</dependency>-->
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.2</version>
<configuration>
<source>1.7</source>
<target>1.7</target>
</configuration>
</plugin>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
<configuration>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
<archive>
<manifest>
<mainClass>com.jaspersoft.jasperserver.jrsh.runner.App</mainClass>
</manifest>
</archive>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<includes>
<include>${runSuite}</include>
</includes>
</configuration>
</plugin>
</plugins>
</build>
</project>
<file_sep>/*
* Copyright (C) 2005 - 2015 TIBCO Software Inc. All rights reserved.
* http://www.jaspersoft.com.
*
* Unless you have purchased a commercial license agreement from Jaspersoft,
* the following license terms apply:
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.jaspersoft.jasperserver.jrsh.completion.completer;
import com.jaspersoft.jasperserver.dto.resources.ClientResourceListWrapper;
import com.jaspersoft.jasperserver.dto.resources.ClientResourceLookup;
import com.jaspersoft.jasperserver.jaxrs.client.core.exceptions.AuthenticationFailedException;
import com.jaspersoft.jasperserver.jaxrs.client.core.exceptions.IllegalParameterValueException;
import com.jaspersoft.jasperserver.jaxrs.client.core.exceptions.ResourceNotFoundException;
import com.jaspersoft.jasperserver.jrsh.common.SessionFactory;
import com.jaspersoft.jasperserver.jrsh.common.SessionUtil;
import jline.console.completer.Completer;
import lombok.extern.log4j.Log4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static com.jaspersoft.jasperserver.jaxrs.client.apiadapters.resources.ResourceSearchParameter.FOLDER_URI;
import static com.jaspersoft.jasperserver.jaxrs.client.apiadapters.resources.ResourceSearchParameter.RECURSIVE;
import static org.apache.commons.lang3.StringUtils.startsWith;
/**
 * JLine tab-completer for JasperReports Server repository URIs. Candidates
 * come from querying the resources service of the shared session for the
 * children of the folder the user has typed so far.
 *
 * NOTE(review): bufCandidates/uniqueId are static, so all instances share
 * one candidate cache; only the first instance to run complete() performs
 * live lookups while later instances replay the cache -- confirm intended.
 */
@Log4j
public class RepositoryNameCompleter implements Completer {
    // Candidates produced by the most recent live lookup (shared cache).
    private static List<CharSequence> bufCandidates = new ArrayList<>();
    // hashCode() of the first instance that ran complete(); 0 = unset.
    private static int uniqueId = 0;

    /**
     * Fills {@code candidates} with repository names matching {@code buffer}
     * and returns the buffer index completion should start from.
     */
    @Override
    public int complete(String buffer, int cursor, List<CharSequence> candidates) {
        // Cursor in the middle of the buffer: offer nothing useful.
        if (buffer != null && cursor < buffer.length()) {
            candidates.add("");
            return buffer.length();
        }
        // First instance to run complete() becomes the "live" completer.
        if (uniqueId == 0) {
            uniqueId = hashCode();
        }
        if (buffer == null) {
            // Empty prompt: suggest the repository root.
            candidates.add("/");
            return 0;
        } else {
            if (uniqueId == hashCode()) {
                if (buffer.isEmpty()) {
                    return 0;
                }
                List<Pair<String, Boolean>> resources;
                // NOTE(review): declared but never used in this method.
                List<String> filteredResources;
                try {
                    if (isResourceExist(buffer)) {
                        // Buffer names an existing resource: list its children.
                        resources = download(buffer);
                        if (!resources.isEmpty() && !buffer.equals("/")) {
                            return buffer.length() + 1;
                        }
                        fillResources(candidates, resources);
                    } else {
                        // Buffer is a partial name: list the parent folder and
                        // keep only entries that extend what was typed.
                        String root = getPreviousPath(buffer);
                        if (isResourceExist(root)) {
                            resources = download(root);
                            List<Pair<String, Boolean>> temp = new ArrayList<>();
                            for (Pair<String, Boolean> pair : resources) {
                                String resource = pair.getKey();
                                Boolean isFolder = pair.getRight();
                                if (startsWith(resource, buffer)) {
                                    ImmutablePair<String, Boolean> newPair = new ImmutablePair<>(resource, isFolder);
                                    temp.add(newPair);
                                }
                            }
                            fillResources(candidates, temp);
                        } else {
                            String lastInput = getLastInput(buffer);
                            if ("".equals(lastInput)) {
                                // Nothing after the last '/': emit a single
                                // empty candidate so JLine keeps the buffer.
                                List<Pair<String, Boolean>> temp = new ArrayList<>();
                                ImmutablePair<String, Boolean> newPair = new ImmutablePair<>("", false);
                                temp.add(newPair);
                                fillResources(candidates, temp);
                                return buffer.length();
                            }
                        }
                    }
                } catch (AuthenticationFailedException e3) {
                    // Session expired: re-login and retry the completion.
                    // NOTE(review): the recursive call's return value is
                    // discarded -- confirm the cursor position is still right.
                    SessionUtil.reopenSession();
                    complete(buffer, cursor, candidates);
                }
                if (candidates.size() == 1) {
                    return buffer.lastIndexOf("/") + 1;
                }
                if (candidates.size() > 1) {
                    String lastInput = getLastInput(buffer);
                    if (compareCandidatesWithLastInput(lastInput, candidates)) {
                        return buffer.length() - lastInput.length();
                    }
                }
                return buffer.length();
            } else {
                // Secondary instance: replay the cached candidates.
                candidates.addAll(bufCandidates);
                if (candidates.size() > 0) {
                    String lastInput = getLastInput(buffer);
                    if (compareCandidatesWithLastInput(lastInput, candidates)) {
                        return buffer.length() - lastInput.length();
                    }
                }
                return buffer.length();
            }
        }
    }

    // ---------------------------------------------------------------------
    // Helper methods
    // ---------------------------------------------------------------------

    // Converts lookups to display names, adds them to the live candidate
    // list, and refreshes the shared cache.
    private void fillResources(List<CharSequence> candidates, List<Pair<String, Boolean>> resources) {
        log.debug("size=" + resources.size());
        List<String> filteredResources;
        filteredResources = reformatResources(resources);
        if (resources.size() != 0) {
            candidates.addAll(filteredResources);
            bufCandidates.clear();
            bufCandidates.addAll(filteredResources);
        }
    }

    // Returns the text typed after the final '/', or "" if there is none.
    String getLastInput(String buffer) {
        int idx = buffer.lastIndexOf("/");
        if (idx == -1) {
            return "";
        }
        String s = buffer.substring(idx, buffer.length());
        if (s.equals("/")) s = "";
        if (s.startsWith("/") && s.length() > 1) s = s.substring(1, s.length());
        return s;
    }

    // True when every candidate extends the user's last path segment.
    private boolean compareCandidatesWithLastInput(String last, List<CharSequence> candidates) {
        for (CharSequence candidate : candidates) {
            if (!candidate.toString().startsWith(last)) {
                return false;
            }
        }
        return true;
    }

    // Maps (uri, isFolder) pairs to simple names; folders get a '/' suffix.
    private List<String> reformatResources(List<Pair<String, Boolean>> resources) {
        List<String> list = new ArrayList<>();
        for (Pair<String, Boolean> pair : resources) {
            String resource = pair.getLeft();
            Boolean isFolder = pair.getRight();
            String last;
            if (isFolder) {
                last = lastName(resource) + "/";
            } else {
                last = lastName(resource);
            }
            list.add(last);
        }
        return list;
    }

    // Parent path of the given path ("/a/b" -> "/a"; "/a" -> "/").
    private String getPreviousPath(String path) {
        int idx = StringUtils.lastIndexOf(path, "/");
        return idx > 0 ? path.substring(0, idx) : path.substring(0, idx + 1);
    }

    // Final path segment of a repository URI.
    private String lastName(String path) {
        return new File(path).getName();
    }

    /**
     * Lists the direct (non-recursive) children of {@code path} as
     * (uri, isFolder) pairs; empty when the path does not exist.
     */
    public List<Pair<String, Boolean>> download(String path) {
        List<Pair<String, Boolean>> list = new ArrayList<>();
        if (!isResourceExist(path)) {
            return list;
        }
        List<ClientResourceLookup> lookups;
        ClientResourceListWrapper entity = SessionFactory.getSharedSession()
                .resourcesService()
                .resources()
                .parameter(FOLDER_URI, path)
                .parameter(RECURSIVE, "false")
                .search()
                .getEntity();
        if (entity != null) {
            lookups = entity.getResourceLookups();
        } else {
            // empty folder
            lookups = Collections.emptyList();
        }
        for (ClientResourceLookup lookup : lookups) {
            String uri = lookup.getUri();
            String type = lookup.getResourceType();
            if ("folder".equals(type)) {
                list.add(new ImmutablePair<String, Boolean>(uri, true));
            } else {
                list.add(new ImmutablePair<String, Boolean>(uri, false));
            }
        }
        return list;
    }

    /**
     * Probes the repository for {@code path}; any lookup failure (not found,
     * no session, bad parameter) is treated as "does not exist".
     */
    public boolean isResourceExist(String path) {
        try {
            SessionFactory.getSharedSession()
                    .resourcesService()
                    .resources()
                    .parameter(FOLDER_URI, path)
                    .parameter(RECURSIVE, "false")
                    .search();
        } catch (ResourceNotFoundException e2) {
            log.debug("resource not found");
            return false;
        } catch (NullPointerException e) {
            log.debug("NPE, returning empty list");
            return false;
        } catch (IllegalParameterValueException e) {
            // issue #164 fix
            return false;
        }
        return true;
    }
}
<file_sep>package com.jaspersoft.jasperserver.jrsh.evaluation.strategy.impl;
import com.jaspersoft.jasperserver.jaxrs.client.core.Session;
import com.jaspersoft.jasperserver.jrsh.common.SessionFactory;
import com.jaspersoft.jasperserver.jrsh.operation.impl.ExportOperation;
import com.jaspersoft.jasperserver.jrsh.operation.impl.LoginOperation;
import com.jaspersoft.jasperserver.jrsh.operation.parser.OperationParser;
import com.jaspersoft.jasperserver.jrsh.operation.result.OperationResult;
import com.jaspersoft.jasperserver.jrsh.operation.result.ResultCode;
import jline.console.ConsoleReader;
import jline.console.UserInterruptException;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import org.mockito.Spy;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
/**
* @author <NAME>
*/
public class ShellEvaluationStrategyTest {
// --- Mocked collaborators -----------------------------------------------
// Everything the strategy touches is mocked so tests need no live server
// or real console.
@Mock
private OperationParser operationParserMock;
@Mock
private LoginOperation loginOperationMock;
@Mock
private ExportOperation exportOperationMock;
@Mock
private OperationResult loginOperationResultMock;
@Mock
private OperationResult failedLoginOperationResultMock;
@Mock
private OperationResult exportOperationResultMock;
@Mock
private Session sessionMock;
@Mock
private ConsoleReader consoleReaderMock;
// Real strategy under test; spied so print()/logout() can be stubbed and verified.
@Spy
private ShellEvaluationStrategy strategySpy = new ShellEvaluationStrategy();
/** Wires the mocks into the strategy and installs a mock shared session. */
@Before
public void before() {
    MockitoAnnotations.initMocks(this);
    strategySpy.setParser(operationParserMock);
    strategySpy.setConsole(consoleReaderMock);
    SessionFactory.updateSharedSession(sessionMock);
}
/**
 * Happy path: a scripted login succeeds, one interactive command
 * ("export all") is read and executed, then a simulated Ctrl+C ends the
 * shell loop. The strategy must report INTERRUPTED and log out once.
 */
@Test
public void shouldExecuteTwoOperationsInShellModeAndInterruptItDueToTheExitKeyBeenPressed() throws Exception {
    // Given
    List<String> script = Collections.singletonList("login superuser%superuser@localhost:8080/jrs-test");
    // First readLine() yields a command; the second simulates Ctrl+C.
    Mockito.doReturn("export all")
            .doThrow(new UserInterruptException("Let's pretend that we've pressed `Ctrl+C` key"))
            .when(consoleReaderMock)
            .readLine();
    Mockito.doReturn(loginOperationMock).when(operationParserMock).parseOperation("login superuser%superuser@localhost:8080/jrs-test");
    Mockito.doReturn(exportOperationMock).when(operationParserMock).parseOperation("export all");
    Mockito.doReturn(loginOperationResultMock).when(loginOperationMock).execute(sessionMock);
    Mockito.doReturn(exportOperationResultMock).when(exportOperationMock).execute(sessionMock);
    Mockito.doReturn("Message1").when(loginOperationResultMock).getResultMessage();
    Mockito.doReturn("Message2").when(exportOperationResultMock).getResultMessage();
    Mockito.doReturn(ResultCode.SUCCESS).when(loginOperationResultMock).getResultCode();
    Mockito.doReturn(ResultCode.SUCCESS).when(exportOperationResultMock).getResultCode();
    Mockito.doNothing().when(strategySpy).print("Message1");
    Mockito.doNothing().when(strategySpy).print("Message2");
    // When
    OperationResult result = strategySpy.eval(script);
    // Then
    Assert.assertEquals(result.getResultCode(), ResultCode.INTERRUPTED);
    Mockito.verify(loginOperationMock, Mockito.times(1)).execute(sessionMock);
    Mockito.verify(exportOperationMock, Mockito.times(1)).execute(sessionMock);
    Mockito.verify(loginOperationResultMock, Mockito.times(1)).getResultCode();
    Mockito.verify(loginOperationResultMock, Mockito.times(1)).getResultMessage();
    Mockito.verify(exportOperationResultMock, Mockito.times(1)).getResultCode();
    Mockito.verify(exportOperationResultMock, Mockito.times(1)).getResultMessage();
    Mockito.verify(strategySpy, Mockito.times(1)).print("Message1");
    Mockito.verify(strategySpy, Mockito.times(1)).print("Message2");
    Mockito.verify(strategySpy, Mockito.times(1)).logout();
    Mockito.verifyNoMoreInteractions(loginOperationMock);
    Mockito.verifyNoMoreInteractions(exportOperationMock);
}
/**
 * Failure path: when the scripted login fails, the shell must not enter
 * the interactive loop and must surface the failed login result (message,
 * FAILED code, and the login operation as context).
 */
@Test
public void shouldExitShellModeIfLoginFailed() throws IOException {
    // Given
    List<String> script = Collections.singletonList("login wrong%credentials@localhost:8080/jrs-test");
    Mockito.doReturn(loginOperationMock).when(operationParserMock).parseOperation("login wrong%credentials@localhost:8080/jrs-test");
    Mockito.doReturn(failedLoginOperationResultMock).when(loginOperationMock).execute(sessionMock);
    Mockito.doReturn(ResultCode.FAILED).when(failedLoginOperationResultMock).getResultCode();
    Mockito.doReturn("Failed").when(failedLoginOperationResultMock).getResultMessage();
    Mockito.doNothing().when(strategySpy).print("Failed");
    // When
    OperationResult result = strategySpy.eval(script);
    // Then
    Assert.assertEquals(result.getResultCode(), ResultCode.FAILED);
    Assert.assertEquals(result.getResultMessage(), "Failed");
    Assert.assertEquals(result.getContext(), loginOperationMock);
    Mockito.verify(loginOperationMock, Mockito.times(1)).execute(sessionMock);
    Mockito.verify(operationParserMock, Mockito.times(1)).parseOperation("login wrong%credentials@localhost:8080/jrs-test");
    Mockito.verify(failedLoginOperationResultMock, Mockito.times(2)).getResultMessage();
    Mockito.verify(strategySpy, Mockito.times(1)).print("Failed");
}
/** Resets every mock so stubbing/verification never leaks between tests. */
@After
public void after() {
    Mockito.reset(
            operationParserMock,
            loginOperationMock,
            sessionMock,
            loginOperationResultMock,
            exportOperationResultMock,
            consoleReaderMock,
            exportOperationMock,
            failedLoginOperationResultMock
    );
}
} | e09e6f27109df7da7a4b7a007e09e2630b217a1a | [
"Java",
"Ruby",
"Maven POM"
] | 6 | Java | Jaspersoft/jrsh | b56314e65b7d5cfd4c479aec22aa9a425a1aeea6 | 4611dbb0e1962dd93329e43aa75b8184ad50e5e3 |
refs/heads/master | <file_sep>const nock = require('nock');
const currency = require('./');
// Stub both upstream HTTP APIs before every test so conversions are
// deterministic and no real network traffic happens (nock intercepts).
beforeEach(() => {
  // Fiat exchange rates with USD as the base currency.
  nock('https://api.exchangeratesapi.io')
    .get('/latest?base=USD')
    .reply(200, {
      'base': 'USD',
      'rates': {
        'EUR': 0.899
      }
    });
  // Fiat exchange rates with EUR as the base currency.
  nock('https://api.exchangeratesapi.io')
    .get('/latest?base=EUR')
    .reply(200, {
      'base': 'EUR',
      'rates': {
        'USD': 1.1122
      }
    });
  // Bitcoin ticker: price of 1 BTC quoted in USD and EUR.
  nock('https://blockchain.info')
    .get('/ticker')
    .reply(200, {
      'USD': {
        '15m': 8944.49,
        'last': 8944.49,
        'buy': 8944.49,
        'sell': 8944.49,
        'symbol': '$'
      },
      'EUR': {
        '15m': 8048.11,
        'last': 8048.11,
        'buy': 8048.11,
        'sell': 8048.11,
        'symbol': '€'
      }
    });
});
// Exercises the conversion entry point against the stubbed rates above.
describe('currency', () => {
  test('should convert 1 USD to EUR', async () => {
    const data = await currency({amount:1,from:'USD',to:'EUR'});
    expect(data).toBe(0.899);
  });
  test('should convert 1 USD to USD', async () => {
    const data = await currency({amount:1,from:'USD',to:'USD'});
    expect(data).toBe(1);
  });
  test('should convert 1 EUR to USD', async () => {
    const data = await currency({amount:1,from:'EUR',to:'USD'});
    expect(data).toBe(1.1122);
  });
  test('should convert 1 BTC to USD', async () => {
    const data = await currency({amount:1,from:'BTC',to:'USD'});
    expect(data).toBe(8944.49);
  });
  test('should convert 1 BTC to EUR', async () => {
    const data = await currency({amount:1,from:'BTC',to:'EUR'});
    expect(data).toBe(8048.11);
  });
  // Defaults are amount=1, from='USD', to='BTC', so the result is 1/price.
  test('should convert (with default values) without arguments', async () => {
    const data = await currency({});
    expect(data).toBe(1/8944.49);
  });
  test('should convert with amount only as argument', async () => {
    const data = await currency({amount:100});
    expect(data).toBe(100/8944.49);
  });
  test('should convert with amount and (from) currency only as arguments', async () => {
    const data = await currency({amount:100,from:'EUR'});
    expect(data).toBe(100/8048.11);
  });
  // Unknown currency codes must surface as one friendly error.
  test('should return errors message for unknown `from` or `to` currency value', async () => {
    expect.assertions(1);
    try {
      await currency({amount:100,from:'EUR', to:'GTO'});
    } catch (e) {
      expect(e).toStrictEqual(Error('💵 Please specify a valid `from` and/or `to` currency value!'));
    }
  });
});
<file_sep>const axios = require('axios');
const money = require('money');
// Foreign-exchange rates for a given base currency (exchangeratesapi.io).
const RATES_URL = 'https://api.exchangeratesapi.io/latest';
// Current Bitcoin price quoted in the major fiat currencies (blockchain.info).
const BLOCKCHAIN_URL = 'https://blockchain.info/ticker';
const CURRENCY_BITCOIN = 'BTC';
/**
 * Tell whether Bitcoin is involved on either side of the conversion.
 *
 * @param {String} from - source currency code (e.g. 'EUR')
 * @param {String} to - target currency code (e.g. 'USD')
 * @returns {Boolean} true when `from` or `to` is Bitcoin (BTC)
 */
const isAnyBTC = (from, to) => from === CURRENCY_BITCOIN || to === CURRENCY_BITCOIN;
module.exports = async opts => {
const {amount = 1, from = 'USD', to = CURRENCY_BITCOIN} = opts;
const promises = [];
let base = from;
const anyBTC = isAnyBTC(from, to);
/**
* if one of the currency is Bitcoin,
* check if Bitcoin is the "from" currency
* if not, base = from
* otherwise, base = to
*/
if (anyBTC) {
base = from === CURRENCY_BITCOIN ? to : from;
//add a GET request from BLOCKCHAIN_URL to the promises array
promises.push(axios(BLOCKCHAIN_URL));
}
// add at the beginning of the promises array a GET request to RATES_URL with the correct base
promises.unshift(axios(`${RATES_URL}?base=${base}`));
try {
// start all the promises of the array
const responses = await Promise.all(promises);
const [rates] = responses;
// set the params of money module (values of RATES_URL)
money.base = rates.data.base;
money.rates = rates.data.rates;
const conversionOpts = {
from,
to
};
/**
* if one of the currencies is Bitcoin
* checks if the response has a property with the base currency
* returns the data of BLOCKCHAIN_URL in blockchain
*/
if (anyBTC) {
const blockchain = responses.find(response =>
response.data.hasOwnProperty(base)
);
//copy the data of 'BTC' (the last rate of the Bitcoin) in money.rates (add a key/value 'BTC' with the actual rate to the rates array)
Object.assign(money.rates, {
'BTC': blockchain.data[base].last
});
}
/**
* if one of the currencies is Bitcoin
* reverse the values of from and to since all the values in money.rates are from Bitcoin and not to Bitcoin
*/
if (anyBTC) {
Object.assign(conversionOpts, {
'from': to,
'to': from
});
}
return money.convert(amount, conversionOpts);
} catch (error) {
throw new Error (
'💵 Please specify a valid `from` and/or `to` currency value!'
);
}
};
<file_sep># <center> Currency Library Guide </center>
## What is it ?
*Currency* is a library to convert amount from a chosen currency to another one.
To do so, we use the library *money* that makes the conversion depending on the given rate.
## Installing
First of all, fork the project.
Then you can clone your forked project using
````
$ cd "/path/to/workspace"
$ git clone https://github.com/YOUR_USERNAME/3-musketeers.git
````
Then, you must install all the packages
using npm:
````
$ npm install
````
or using yarn:
````
$ yarn install
````
___
Congratulations !
Now you can run *currency* using
````
$ node cli.js
````
## How can I use it ?
Once all the packages are installed, you can start using *currency* library.
By default, you'll see that it converts 1USD to BTC (Bitcoin).
> $ node cli.js
1 USD = 0.0001149659183535041 BTC
If you want to change these parameters, you just have to run cli.js this way :
````
$ node cli.js amount 'currency to convert' 'converted currency'
````
Another example of usage is given if you run
````
$ node cli.js --help
````
You can use most of the physical currencies + Bitcoin in both way.
## Example
>$ node cli.js 100 EUR USD
100 EUR = 111.16999999999999 USD
Here we converted 100 EUR in USD.
## Tests
You will find in this project the file *index.test.js* in which there are several unit tests.
These tests are here to exercise the capabilities of the library and to make sure every situation is handled.
To run these tests :
````
$ npm test
````
| 19a07cecb30f16daa8983b2e483763183ca188a5 | [
"JavaScript",
"Markdown"
] | 3 | JavaScript | leawong01/3-musketeers | 1f215738f49411856a960346a51ec80c460b309d | 0ca4d4f3469eb3f610c6ef1ddfb873f0ecf81835 |
refs/heads/main | <repo_name>hbeltrao/Twitter-Sentiment-Analysis<file_sep>/ETL/README.md
# Project to extract data from Twitter and present in Tableau
Folder containing the ETL script and the final csv files
<file_sep>/Database/README.md
# Project to extract data from Twitter and present in Tableau
Folder containing all items and configurations for the MongoDB database hosted in ".------------"
<file_sep>/Readme.md
# Project to extract data from Twitter and present in Tableau
This repository contains the tools I used to connect to the Twitter API and extract data from tweets related to the Brazilian president <NAME>, save that data into a Mongo database hosted on the internet, a script to access this database and perform ETL before feeding the data into Tableau, and the resulting Tableau dashboard.
<file_sep>/Tableau/README.md
# Project to extract data from Twitter and present in Tableau
Folder containing all tableau files and configurations for online publication of the dashboard
<file_sep>/TwitterScrapper/TwitterScrapper.py
from Credentials import *
import tweepy
import datetime
import pandas as pd
import psycopg2
def create_API():
    """Authenticate against the Twitter API and return a ready tweepy client.

    Credentials come from the module-level names imported from Credentials.
    ``wait_on_rate_limit=True`` makes tweepy sleep through rate limits
    instead of raising.
    """
    auth_handler = tweepy.OAuthHandler(TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET)
    auth_handler.set_access_token(TWITTER_ACCESS_TOKEN, TWITTER_ACCESS_TOKEN_SECRET)
    return tweepy.API(auth_handler, wait_on_rate_limit=True)
def execute_query(method, start_time, end_time, query=""):
    """Run a tweepy cursor query restricted to a date window.

    :param method: a tweepy query method such as ``api.search``
    :param start_time: window start (anything with a date-like ``str()``)
    :param end_time: window end (exclusive, per Twitter's ``until:``)
    :param query: base query string; the date window is appended to it
    :return: a tweepy cursor item iterator over the matching tweets
    """
    # Append the date window to the caller's query string.
    query2 = query + " since:" + str(start_time) + " until:" + str(end_time)
    # Fix: the status message was misspelled ("Exectuting").
    print("Executing query")
    tweet_list = tweepy.Cursor(method, q=query2, tweet_mode='extended', place_country='BR').items()
    print("Query complete")
    return tweet_list
def organize_query(query):
    """Flatten raw tweet objects into plain dicts.

    The Twitter API returns rich objects; only the fields needed
    downstream (id, text, counts, timestamp, author name) are kept.

    :param query: iterable of tweet objects
    :return: list of dicts, one per tweet, in input order
    """
    return [
        {
            'id': tweet.id,
            'text': tweet.full_text,
            'favorite_count': tweet.favorite_count,
            'retweet_count': tweet.retweet_count,
            'created_at': tweet.created_at,
            'author_id': tweet.author.name,
        }
        for tweet in query
    ]
def update_into_csv(data, path):
    """Append the scraped tweet records to a CSV file.

    Rows are indexed by the ``id`` field and written with ``mode='a'``
    and no header, so repeated calls accumulate rows in the same file.

    :param data: list of dicts as produced by ``organize_query``
    :param path: destination CSV path
    """
    print("\nUpdating data into csv")
    frame = pd.DataFrame(data).set_index("id")
    frame.to_csv(path, mode='a', header=False)
    print("\nUpdate Complete")
def save_into_db(path='TwitterScrapper/data.csv', table='twitter_data'):
    """Bulk-load the scraped CSV into a PostgreSQL table.

    Truncates the table first so reruns do not duplicate rows, then
    streams the CSV via COPY.  Defaults preserve the original hard-coded
    behavior, so existing ``save_into_db()`` calls are unchanged.

    Fixes: the connection and cursor were never closed (a leak on every
    call); both are now released in all cases.

    :param path: CSV file to load (header row expected)
    :param table: destination table name.  NOTE: interpolated into SQL —
        only pass trusted, code-controlled identifiers here.
    """
    print("\nConnecting to database")
    conn = psycopg2.connect(host=SQL_HOST, database=SQL_DATABASE,
                            user=SQL_USER, password=SQL_PASSWORD)
    try:
        print("\nConnection established")
        with conn.cursor() as cur:
            sql = "COPY %s FROM STDIN WITH CSV HEADER DELIMITER AS ','"
            print("\nExecuting Query")
            with open(path, 'r', encoding='utf8') as file:
                # Replace the previous contents so reruns stay idempotent.
                cur.execute("truncate " + table + ";")
                cur.copy_expert(sql=sql % table, file=file)
            conn.commit()
        print("\nQuery finished")
    finally:
        conn.close()
def main():
    """Pipeline driver: scrape recent tweets, append them to the CSV,
    then reload the CSV into the database."""
    api = create_API()
    today = datetime.date.today()
    # Query window: from five days ago up to (but not including) two days ago.
    window_end = today - datetime.timedelta(days=2)
    window_start = today - datetime.timedelta(days=5)
    csv_path = 'TwitterScrapper/data.csv'
    raw_tweets = execute_query(api.search, window_start, window_end, "from:jairbolsonaro")
    update_into_csv(organize_query(raw_tweets), csv_path)
    save_into_db()


if __name__ == '__main__':
    main()
<file_sep>/TwitterScrapper/README.md
# Project to extract data from Twitter and present in Tableau
Folder containing the script for fetching the data from Twitter API and saving it into the database as well as a copy into this folder
| 65433c26454ca16bac6ba301baedaa186e4eec7e | [
"Markdown",
"Python"
] | 6 | Markdown | hbeltrao/Twitter-Sentiment-Analysis | ce39a9f5925354715a54d438249a55c4bb0a4d3e | 9bd18f8b91f38628a14ba7384c06ec01d2cf6edf |
refs/heads/master | <file_sep># efcompanies_analysis
Analysis of Entrepreneur First (EF) Funded Companies
<file_sep>import scrapy
class EFCompaniesSpider(scrapy.Spider):
    """Crawl the EF portfolio pages per location and yield
    (company_name, company_location) records, following pagination."""

    name = "efcompanies_location"
    start_urls = [
        "https://www.joinef.com/companies/location/berlin/",
        "https://www.joinef.com/companies/location/hong-kong/",
        "https://www.joinef.com/companies/location/london/",
        "https://www.joinef.com/companies/location/new-york/",
        "https://www.joinef.com/companies/location/paris/",
        "https://www.joinef.com/companies/location/singapore/",
    ]

    def parse(self, response):
        # Paginated URLs look like .../location/<loc>/page/<n>/, so the
        # location segment sits four slots from the end instead of two.
        segment = -4 if "/page/" in response.url else -2
        location = response.url.split("/")[segment]
        for company in response.css("div.company"):
            yield {
                "company_name": company.css("div.company__name span::text").get(),
                "company_location": location,
            }
        next_page = response.css("span.paging__link--next a::attr(href)").get()
        if next_page is not None:
            yield response.follow(next_page, callback=self.parse)
<file_sep>import json
import pandas as pd
# Post-process the scraped company records: drop founder *names* (short
# strings) from `founders_info`, record a `total_founders` count, then
# flatten everything to CSV.
with open("../data/companies.json", "r") as f:
    companies = json.load(f)

for company in companies:
    # Keep only long entries (founder bios); short entries are bare
    # founder names that were scraped into the same list.
    # Fix: the unused local `before` has been removed.
    cleaned = [finfo for finfo in company["founders_info"] if len(finfo) > 30]
    # Prefer the explicit founders list; fall back to the bio count.
    company["total_founders"] = len(company["founders"]) or len(cleaned)
    company["founders_info"] = cleaned

# The first record's keys define the CSV columns (it already carries
# `total_founders` because the loop above ran first).
df = pd.DataFrame(data=companies, columns=list(companies[0].keys()))
df.to_csv("../data/companies.csv", index=False)
<file_sep>import scrapy
import unicodedata
class EFCompaniesSpider(scrapy.Spider):
    # Scrapes the full EF portfolio listing: per-company metadata plus the
    # free-text company/founder bios embedded in each card.
    name = "efcompanies"
    start_urls = [
        "https://www.joinef.com/companies/"
    ]

    def parse(self, response):
        companies = response.css("div.company")
        for company in companies:
            company_name = company.css("div.company__name span::text").get()
            company_short_desc = company.css("div.company__description span::text").get()
            company_cat = company.css("div.company__cat span::text").get()
            detailbio = company.css("div.detailbio")
            company_url = detailbio.css("div.detailbio__website a::attr(href)").get()
            founders = detailbio.css("span.detailbio__founders::text").getall()
            # parse company and founders info
            # A paragraph starting with <strong>/<b> begins a new section;
            # plain paragraphs continue the current one.  `txt` accumulates
            # the section in progress and `info` collects finished sections
            # in order: info[0] is the company description, info[1:] are the
            # founder bios.
            txt = ""
            info = []
            for p in detailbio.css("div.margin--top p"):
                if p.css("strong").get() or p.css("b").get():
                    if len(txt) != 0:
                        info.append(txt)  # append previous
                        txt = ""
                txt += " ".join(p.css("::text").getall())
            # Flush the final section (always appended, even if empty).
            info.append(txt)
            yield {
                "company_name": company_name,
                "company_cat": company_cat,
                "company_url": company_url,
                "company_short_desc": company_short_desc,
                "company_long_desc": info[0],
                "founders": founders,
                "founders_info": info[1:]
            }
        next_page = response.css("span.paging__link--next a::attr(href)").get()
        if next_page is not None:
            yield response.follow(next_page, callback=self.parse)
<file_sep>import json
# Merge the per-company location and cohort lookups into the main company
# records, then write the combined dataset back out.
with open("../data/companies_info.json", "r") as f:
    companies = json.load(f)
with open("../data/companies_location.json", "r") as f:
    locations = json.load(f)
with open("../data/companies_cohort.json", "r") as f:
    cohorts = json.load(f)

# Index the lookup tables by company name once (O(n+m)) instead of
# rescanning both lists for every company (the original was O(n*m)).
# Like the original nested loops, a duplicated name keeps the LAST entry.
location_by_name = {loc["company_name"]: loc["company_location"] for loc in locations}
cohort_by_name = {c["company_name"]: c["company_cohort"] for c in cohorts}

for company in companies:
    cname = company["company_name"]
    # add location / cohort only when a matching record exists, matching
    # the original behavior of leaving unmatched companies untouched.
    if cname in location_by_name:
        company["company_location"] = location_by_name[cname]
    if cname in cohort_by_name:
        company["company_cohort"] = cohort_by_name[cname]

with open("../data/companies.json", "w") as f:
    json.dump(companies, f)
<file_sep>import scrapy
class EFCompaniesSpider(scrapy.Spider):
    """Crawl the EF portfolio pages per cohort and yield
    (company_name, company_cohort) records, following pagination."""

    name = "efcompanies_cohort"
    start_urls = [
        'https://www.joinef.com/companies/cohort/asia-4/',
        'https://www.joinef.com/companies/cohort/ef1-london/',
        'https://www.joinef.com/companies/cohort/ef2-london/',
        'https://www.joinef.com/companies/cohort/ef3-london/',
        'https://www.joinef.com/companies/cohort/ef4-london/',
        'https://www.joinef.com/companies/cohort/ef5-london/',
        'https://www.joinef.com/companies/cohort/ef6-london/',
        'https://www.joinef.com/companies/cohort/ef7-london/',
        'https://www.joinef.com/companies/cohort/ef8-london/',
        'https://www.joinef.com/companies/cohort/ef9-london/',
        'https://www.joinef.com/companies/cohort/efsg1-singapore/',
        'https://www.joinef.com/companies/cohort/efsg2-singapore/',
        'https://www.joinef.com/companies/cohort/efsg3-singapore/',
        'https://www.joinef.com/companies/cohort/europe-10/',
        'https://www.joinef.com/companies/cohort/europe-11/',
    ]

    def parse(self, response):
        # Paginated URLs look like .../cohort/<cohort>/page/<n>/, so the
        # cohort segment sits four slots from the end instead of two.
        segment = -4 if "/page/" in response.url else -2
        cohort = response.url.split("/")[segment]
        for company in response.css("div.company"):
            yield {
                "company_name": company.css("div.company__name span::text").get(),
                "company_cohort": cohort,
            }
        next_page = response.css("span.paging__link--next a::attr(href)").get()
        if next_page is not None:
            yield response.follow(next_page, callback=self.parse)
| 97e2347e2d85b6880d535bb0d460fdbae6248554 | [
"Markdown",
"Python"
] | 6 | Markdown | torayeff/efcompanies_analysis | 000120518477bbb1ce1393d9852cc8263d9512df | 5b7ee5baeb88a34b843dce5836aa1c44f906a236 |
refs/heads/master | <file_sep>import java.lang.Comparable;
import java.util.Iterator;
import java.util.Random;
/**
 * A doubly linked skip list over mutually comparable elements.
 *
 * The list keeps sentinel head/tail nodes whose data are caller-supplied
 * extreme values, so every search can compare against node data without
 * null checks.  Node heights are chosen by coin flips; when the size
 * reaches {@code limit} the whole structure is rebuilt into a perfectly
 * balanced skip list (see {@link #rebuild()}).
 */
public class SkipList<T extends Comparable<? super T>> {
    /*
     * All the Functions which are implemented here have followed the Exact
     * similar approach of the Lecture 13 Class Notes PseudoCode -> SkipLists
     */
    // current maximum level of any node (levels are 0..skipListPeakSize)
    int skipListPeakSize;
    // size threshold that triggers a rebuild
    int limit;
    // pointer to first node - head (sentinel)
    SkipListNode<T> firstNode;
    // number of real (non-sentinel) elements
    int skipListSize;
    // pointer to last node - Tail (sentinel)
    SkipListNode<T> lastNode;

    /**
     * @param skipListFirstNode sentinel value smaller than all real keys
     * @param skipListLastNode  sentinel value larger than all real keys
     */
    SkipList(int skipSize, T skipListFirstNode, T skipListLastNode) {
        skipListSize = 0;
        // peak level ~= log2(expected size)
        skipListPeakSize = (int) Math
                .ceil(Math.log10(skipSize) / Math.log10(2));
        limit = skipSize;
        firstNode = new SkipListNode<>(skipListFirstNode);
        lastNode = new SkipListNode<>(skipListLastNode);
        // for i->0 to maxlevel: head points to tail on every level
        for (int i = 0; i <= skipListPeakSize; i++) {
            firstNode.nextPointer[i] = lastNode;
            lastNode.nextPointer[i] = null;
        }
        lastNode.previousPointer = firstNode;
    }

    /** Internal node: forward pointers per level plus one level-0 back link. */
    @SuppressWarnings("hiding")
    class SkipListNode<T> {
        SkipListNode<T>[] nextPointer;
        SkipListNode<T> previousPointer;
        T data;

        // full-height node (used for the sentinels)
        @SuppressWarnings("unchecked")
        SkipListNode(T x) {
            nextPointer = new SkipListNode[skipListPeakSize + 1];
            previousPointer = null;
            data = x;
        }

        // node with an explicit height (howManyLevels + 1 forward slots)
        @SuppressWarnings("unchecked")
        SkipListNode(T x, int howManyLevels) {
            nextPointer = new SkipListNode[howManyLevels + 1];
            previousPointer = null;
            data = x;
            for (int i = 0; i < howManyLevels; i++) {
                nextPointer[i] = null;
            }
        }
    }

    /**
     * Helper function to locate x.
     *
     * Returns, for each level, the last node whose data is strictly less
     * than x — i.e. the predecessors that an insert/remove must relink.
     */
    @SuppressWarnings("unchecked")
    SkipListNode<T>[] find(T x) {
        SkipListNode<T>[] previous = new SkipListNode[skipListPeakSize + 1];
        SkipListNode<T> currentNode;
        // here p points to header
        currentNode = firstNode;
        // for i<-maxlevel down to zero travelling along level i
        for (int i = skipListPeakSize; i >= 0; i--) {
            while (currentNode.nextPointer[i].data.compareTo(x) < 0) {
                currentNode = currentNode.nextPointer[i];
            }
            previous[i] = currentNode;
        }
        // Here previous is ->The nodes where algorithm went down one level.
        return previous;
    }

    /*
     * If X exists replace it else return true if new node is added
     */
    boolean add(T x) {
        SkipListNode<T>[] previous;
        previous = find(x);
        // if the element already exists don't add — replace its data and
        // report that no new node was created.
        if (previous[0].nextPointer[0].data.compareTo(x) == 0) {
            previous[0].nextPointer[0].data = x;
            return false;
        }
        else {
            // pick a random height for the new node
            int level = probabilityOfFlippingCoin(skipListPeakSize);
            // new node is created and added to skiplists
            SkipListNode<T> newNode = new SkipListNode<>(x, level);
            for (int i = 0; i <= level; i++) {
                newNode.nextPointer[i] = previous[i].nextPointer[i];
                previous[i].nextPointer[i] = newNode;
            }
            // maintain the level-0 back links
            newNode.previousPointer = newNode.nextPointer[0].previousPointer;
            newNode.nextPointer[0].previousPointer = newNode;
            skipListSize++;
            // if the size of the skip list exceeds the limit then call rebuild
            // function i,e basically when we are not able to maintain
            // the balance equally between all the levels we may approach o(n)
            // time so in order to maintain proper balance
            // between all the levels we will call rebuild function.
            if (skipListSize >= limit) {
                rebuild();
            }
            return true;
        }
    }

    /** Remove x; returns false when the list is empty or x is absent. */
    boolean remove(T x) {
        // return false since x is not found
        if (skipListSize == 0)
            return false;
        SkipListNode<T>[] previousPointer;
        SkipListNode<T> elementToBeCompared;
        previousPointer = find(x);
        elementToBeCompared = previousPointer[0].nextPointer[0];
        // Here if the element does not exist in the skip list return false
        if (elementToBeCompared.data.compareTo(x) != 0)
            return false;
        else {
            // for i->0 to maxlevel do: unlink x on every level it appears;
            // once a level's predecessor no longer points at x, higher
            // levels cannot either, so we can stop.
            for (int i = 0; i <= skipListPeakSize; i++) {
                if (previousPointer[i].nextPointer[i] == elementToBeCompared)
                    previousPointer[i].nextPointer[i] = elementToBeCompared.nextPointer[i];
                else
                    break;
            }
            // if the element x that you are trying to remove is present then
            // remove that element and decrease the skiplist size
            // finally return true.
            elementToBeCompared.nextPointer[0].previousPointer = previousPointer[0];
            skipListSize--;
            return true;
        }
    }

    /**
     * Here we will flip a coin say if we encounter head we will go to Next
     * Level else we will stay in that base level itself probability of choosing
     * a level i = 1/2 probability {choosing level i-1}
     */
    int probabilityOfFlippingCoin(int level) {
        int currentLevel = 0;
        Random flipCoin = new Random();
        while (currentLevel < skipListPeakSize) {
            // The nextBoolean() method is used to get the next pseudorandom,
            // uniformly distributed boolean value from this random number
            // generator's sequence.
            if (flipCoin.nextBoolean())
                currentLevel++;
            else
                break;
        }
        return currentLevel;
    }

    /*
     * Here this function will return Least element that is >= x, or null if no
     * such element
     */
    T ceiling(T x) {
        if (skipListSize == 0)
            return null;
        SkipListNode<T>[] previousPointer;
        previousPointer = find(x);
        if (previousPointer[0].nextPointer[0].data.compareTo(x) == 0)
            return x;
        else {
            // the successor is the ceiling unless it is the tail sentinel
            if (previousPointer[0].nextPointer[0] != lastNode)
                return previousPointer[0].nextPointer[0].data;
            else
                return null;
        }
    }

    /*
     * Here this function will check whether x in the list?
     */
    boolean contains(T x) {
        if (skipListSize == 0)
            return false;
        SkipListNode<T>[] previousPointer;
        previousPointer = find(x);
        return previousPointer[0].nextPointer[0].data.compareTo(x) == 0;
    }

    /*
     * Here this function will Return the element at index n in the list
     * (0-based, by walking level 0 — O(n)).
     */
    T findIndex(int n) {
        // If the element is not there in the list return null else return the
        // index
        if (n >= skipListSize || n < 0)
            return null;
        SkipListNode<T> pointer = firstNode.nextPointer[0];
        for (int i = 0; i < n; i++)
            pointer = pointer.nextPointer[0];
        return pointer.data;
    }

    /*
     * Returns the first element of the list (null when empty)
     */
    T first() {
        return firstNode.nextPointer[0] != lastNode ? firstNode.nextPointer[0].data
                : null;
    }

    /*
     * Greatest element that is <= x, or null if no such element As usual first
     * get the index of where the element is located using find(x) and then
     * search in the skip list ie Greatest element that is <= x
     */
    T floor(T x) {
        if (skipListSize == 0)
            return null;
        SkipListNode<T>[] previousPointer;
        previousPointer = find(x);
        if (previousPointer[0].nextPointer[0].data.compareTo(x) == 0)
            return x;
        else {
            // the predecessor is the floor unless it is the head sentinel
            if (previousPointer[0] == firstNode)
                return null;
            else
                return previousPointer[0].data;
        }
    }

    /*
     * Is the list empty? or Not ?
     */
    boolean isEmpty() {
        if (skipListSize == 0) {
            return true;
        } else {
            return false;
        }
    }

    /*
     * Iterator for SkipList (in ascending order, along level 0)
     */
    Iterator<T> iterator() {
        return new SLIterator();
    }

    /** Level-0 forward iterator; skips both sentinels. */
    public class SLIterator implements Iterator<T> {
        private SkipListNode<T> nodeBeingProcessed;

        // Points to the first Node in skip List
        SLIterator() {
            nodeBeingProcessed = firstNode;
        }

        @Override
        public boolean hasNext() {
            return nodeBeingProcessed.nextPointer[0] != lastNode;
        }

        @Override
        public T next() {
            T data = nodeBeingProcessed.nextPointer[0].data;
            nodeBeingProcessed = nodeBeingProcessed.nextPointer[0];
            return data;
        }
    }

    /*
     * Returns the last element of the list (null when empty)
     */
    T last() {
        return lastNode.previousPointer != firstNode ? lastNode.previousPointer.data
                : null;
    }

    /*
     * Rebuilding to a Perfect SkipList Here the mail goal is to maintain the
     * balance between all the levels we do that by flipping a coin say when we
     * get head we add the element and extend the base lane to express lane then
     * if we get head again we go level up else we keep on adding to base level
     * itself. so by flipping a coin if we are able to maintain perfect balance
     * say half elements in level 1 and quarter elements in level 2 and so on
     * proper balance will be maintained between all the levels so we can
     * achieve the wanted log(n) time.
     */
    @SuppressWarnings("unchecked")
    void rebuild() {
        // double the height and raise the rebuild threshold accordingly
        skipListPeakSize = skipListPeakSize * 2;
        limit = (int) Math.pow(2, skipListPeakSize);
        int currentLevel = 0;
        SkipListNode<T>[] previousPointer = new SkipListNode[skipListPeakSize + 1];
        SkipListNode<T> firstNodeAfterAdding = firstNode;
        SkipListNode<T> modifiedLastNode = lastNode;
        SkipListNode<T> pointerToTheNextNode = firstNodeAfterAdding.nextPointer[0];
        // fresh full-height sentinels
        firstNode = new SkipListNode<>(firstNodeAfterAdding.data,
                skipListPeakSize);
        lastNode = new SkipListNode<>(modifiedLastNode.data, skipListPeakSize);
        for (int i = 0; i <= skipListPeakSize; i++) {
            firstNode.nextPointer[i] = lastNode;
            lastNode.nextPointer[i] = null;
            previousPointer[i] = firstNode;
        }
        lastNode.previousPointer = firstNode;
        // re-insert every element, cycling the node heights deterministically
        for (int i = 0; i < skipListSize; i++) {
            SkipListNode<T> numberOfElements = new SkipListNode<>(
                    pointerToTheNextNode.data, currentLevel);
            for (int j = 0; j <= currentLevel; j++) {
                numberOfElements.nextPointer[j] = previousPointer[j].nextPointer[j];
                previousPointer[j].nextPointer[j] = numberOfElements;
                previousPointer[j] = numberOfElements;
            }
            /*
             * Here the pointers which are in each level will be pointing
             * exactly 2 to the power level from it.
             */
            currentLevel = (currentLevel + 1) % skipListPeakSize;
            numberOfElements.nextPointer[0].previousPointer = numberOfElements;
            pointerToTheNextNode = pointerToTheNextNode.nextPointer[0];
        }
    }

    /** @return the number of real elements currently stored */
    int size() {
        return skipListSize;
    }
}
<file_sep># SkipLists
Implemented Skip-list data-structure.Time complexity of search, insert and delete can become O(Logn) in average case.
<file_sep>import java.io.File;
import java.io.FileNotFoundException;
import java.util.Scanner;
import java.util.TreeSet;
/**
 * Driver that runs an operation script (Insert/Find/Delete ... End)
 * against java.util.TreeSet and reports a checksum plus the elapsed time.
 *
 * Input comes from the file named by args[0], or stdin when no argument
 * is given.  Every successful operation bumps a counter mod 9907.
 *
 * Fixes: the Scanner (and its underlying file handle) was never closed,
 * and a missing input file left {@code sc} null, causing a
 * NullPointerException at the first read — we now fail fast instead.
 */
public class TreeMap {
    public static void main(String[] args) {
        Scanner sc = null;
        String operation = "";
        long operand = 0;
        int modValue = 9907;
        long result = 0;
        if (args.length > 0) {
            File file = new File(args[0]);
            try {
                sc = new Scanner(file);
            } catch (FileNotFoundException e) {
                // Fail fast instead of hitting a NullPointerException below.
                e.printStackTrace();
                return;
            }
        } else {
            sc = new Scanner(System.in);
        }
        TreeSet<Long> tree = new TreeSet<Long>();
        Timer timer = new Timer();
        try {
            // Process operations until the "End" token.
            while (!((operation = sc.next()).equalsIgnoreCase("End"))) {
                switch (operation) {
                case "Insert":
                case "Add":
                case "add":
                    operand = sc.nextLong();
                    // note: counted even for duplicates, matching the
                    // original scoring scheme
                    tree.add(operand);
                    result = (result + 1) % modValue;
                    break;
                case "Find":
                case "Contains":
                    operand = sc.nextLong();
                    if (tree.contains(operand)) {
                        result = (result + 1) % modValue;
                    }
                    break;
                case "Delete":
                case "Remove":
                    operand = sc.nextLong();
                    if (tree.remove(operand)) {
                        result = (result + 1) % modValue;
                    }
                    break;
                }
            }
        } finally {
            // Fix: release the scanner/file handle in every case.
            sc.close();
        }
        timer.end();
        System.out.println(result);
        System.out.println(timer);
    }
}
| 69cff98296b13f2bd8687f978cf367f16072b6e7 | [
"Markdown",
"Java"
] | 3 | Java | supreeth18/SkipLists | 91f28be688732f059c6aad67f966efea43d2a2c2 | e1f27d188acd48f741ae3117ffcd3f55c97d46cc |
refs/heads/main | <file_sep>import React, { Component } from "react";
// Bank-balance form component: lets the user grow a displayed savings
// figure and track overdraft withdrawals, with validation messages.
class Index extends Component {
  state = {
    savings:'',            // current deposit input (string from the form)
    mySaving: 1000,        // animated savings figure shown to the user
    overdraft: '',         // current withdrawal input (string from the form)
    error: '',             // validation message, empty when valid
    sum: 0,                // running total of recorded overdrafts
    overdraftSum: 0,       // NOTE(review): appears unused — confirm
    savingsSum:[1000],     // history of deposits (starts at the base 1000)
    story: [{ item: 0, id: 0 }],  // overdraft entries rendered as alerts
  };
  // Generic controlled-input handler; clears any previous error first.
  handleChange(e) {
    this.setState({error:''})
    this.setState({ [e.target.name]: e.target.value });
  }
  // Validates the inputs, records an overdraft entry, and animates the
  // savings counter up to the new total.
  // NOTE(review): this.state.story / savingsSum are mutated in place with
  // push(), and the setInterval below is never cleared — both are React
  // anti-patterns to confirm and clean up separately.
  hendleOnclick = async () => {
    if (Number(this.state.savings) + 101000 - Number(this.state.overdraft) < -101000) {
      await this.setState({
        error:
          "One cannot withdraw more than the balance on a savings account. (balance + overdraft limit)",
      });
    } else if (this.state.savings < 0 || this.state.overdraft < 0) {
      await this.setState({
        error:
          "Only positive numeric values are allowed when specifying the deposit/withdraw amount.",
      });
    } else if (this.state.savings === "" && this.state.overdraft === "") {
      await this.setState({ error: " you didnt write input value " });
    } else if(this.state.overdraft > 0){
      // Record the withdrawal and recompute the overdraft total.
      this.state.story.push({
        item: this.state.overdraft,
        id: Math.random() * 1000,
      });
      this.setState({ error: "" });
      let num = this.state.story.reduce((sum, caunt) => Number(sum) + Number(caunt.item), 0);
      this.setState({ sum: num });
    }
    // Accumulate the deposit and tick mySaving upward until it reaches
    // the new total.
    this.state.savingsSum.push(this.state.savings)
    let savingsNum = this.state.savingsSum.reduce((acc,col)=>acc + Number(col))
    setInterval(()=>{
      if(this.state.mySaving < savingsNum){
        this.setState({mySaving:this.state.mySaving + 1})
      }
    })
  };
  // Remove one overdraft entry (alert) by its id.
  onclickDilete = (id) => {
    const filter = this.state.story.filter((item) => {
      return item.id !== id;
    });
    this.setState({ story: filter });
  };
  // Clear both inputs after submit (the click handler does the real work).
  hendleSubmit = (e) => {
    e.preventDefault();
    this.setState({ savings: '', overdraft: '' });
  };
  render() {
    // NOTE(review): debug logging left in — remove before release.
    console.log(this.state);
    return (
      <div className="container">
        <div className="row mt-5">
          <div className="col-md-6 m-auto">
            <div className="card card-body">
              <h1 className="text-center mb-3">
                <i className="fas fa-user-plus"></i>Your balance{" "}
              </h1>
              <form onSubmit={this.hendleSubmit}>
                <div className="form-group">
                  <label className="alert alert-warning alert-dismissible fade show"
                    role="alert">your savings {this.state.mySaving} $</label>
                  <br />
                  <br />
                  <label htmlFor="name">will add your savings : $ </label>
                  <input
                    className="form-control"
                    type="number"
                    name="savings"
                    onChange={(e) => this.handleChange(e)}
                    value={this.state.savings}
                  />
                </div>
                <div className="form-group">
                  <label className="textColor">{this.state.error}</label>
                  <br />
                  <br />
                  <label>to take your overdraft : $ </label>
                  <input
                    className="form-control"
                    type="number"
                    name="overdraft"
                    onChange={(e) => this.handleChange(e)}
                    value={this.state.overdraft}
                  />
                </div>
                <div className="form-group">
                  <label> your overdraft : {this.state.sum} $</label>
                  <br />
                  <br />
                  <input
                    type="submit"
                    value="submit"
                    className="btn btn-primary btn-block"
                    onClick={this.hendleOnclick}
                  />
                  <br />
                  <br />
                </div>
              </form>
              <div className = "hidOver">
                {this.state.story.filter(item=>item.item !== 0)
                  .map((item) => {
                    return (
                      <div
                        className="alert alert-warning alert-dismissible fade show"
                        role="alert"
                        key={item.id}
                      >
                        {item.item}
                        <button
                          type="button"
                          className="close"
                          data-dismiss="alert"
                          aria-label="Close"
                          onClick={() => this.onclickDilete(item.id)}
                        >
                          <span aria-hidden="true">×</span>
                        </button>
                      </div>
                    );
                  })}
              </div>
            </div>
          </div>
        </div>
      </div>
    );
  }
}
export default Index; | d95bdbf005b7bce35ed64c30c4469504585aa087 | [
"JavaScript"
] | 1 | JavaScript | kmartirosyann/bank_balanse | c74d624fd7d141cbab04313521afd77b5414f963 | 01e776c786d1ccabdc4dd6e8f80fddcfe937736e |
refs/heads/main | <file_sep>import zmq, time,sys
# ZeroMQ PUB "channel" process: binds one of five fixed ports (one per
# topic) and broadcasts every line the operator types to all subscribers.
context = zmq.Context()
s = context.socket(zmq.PUB)
# Which channel this process is (1..5).  Assumes the script is always
# launched with an index argument — crashes otherwise (TODO confirm).
server_number = int(sys.argv[1])
HOST="127.0.0.1"
PORT={1:"1618",2:"1619",3:"1620",4:"1621",5:"1622"}
Publishers=["Entertainment","News","Sports","Politics","Facts"]
p = "tcp://"+ HOST +":"+ PORT[server_number]
s.bind(p)
print("Hey! Your channel name is ",Publishers[server_number-1])
# Forward operator input forever; each line becomes one published message.
while True:
    message=input("Enter the message: ")
    s.send_string(message)
<file_sep>import zmq,sys,time
# ZeroMQ PAIR "customer care executive" endpoint.  Each executive binds
# its own fixed port (1618 for #1, 1619 for #2) and chats 1:1 with a
# single customer client until the client sends the literal "STOP".
context = zmq.Context()
server_number=1
try:
    server_number = int(sys.argv[1])
except:
    # No/invalid CLI argument: default to executive 1.
    server_number=1
p="tcp://127.0.0.1:1618"
if(server_number==1):
    print("\n Initiated Customer Care Executive 1 !!")
    print("_________________________________________________________")
    p = "tcp://127.0.0.1:1618" # how and where to connect
else:
    print("\n Initiated Customer Care Executive 2 !!")
    print("_________________________________________________________")
    p = "tcp://127.0.0.1:1619" # how and where to connect
# create the PAIR socket and bind it to the chosen address
s = context.socket(zmq.PAIR)
s.bind(p)
# Chat loop: block for the customer's message, show it, and send back the
# reply typed by the executive.  "STOP" ends the session.
while True:
    Req = s.recv_string()
    if Req != "STOP":
        print("\nCustomer:",Req)
        Rep=input("Me: ")
        s.send_string(Rep)
    else:
        break
<file_sep>import zmq,sys,time
# ZeroMQ REQ/REP article repository.  Repository N binds port 1617+N and
# serves ten canned articles; a request is the article number as a string.
context = zmq.Context()
server_number=1
try:
    server_number = int(sys.argv[1])
except:
    # No/invalid CLI argument: default to repository 1.
    server_number=1
p="tcp://127.0.0.1:1618"
articles={}
if(server_number==1):
    print("\n Initiated Repository 1 !!")
    print("______________________________________________")
    p = "tcp://127.0.0.1:1618" # how and where to connect
    for i in range(10):
        articles[i+1]="Hello! This is article "+str(i+1)+" of Repository 1. Thanks for requesting it!"
else:
    print("\n Initiated Repository 2 !!")
    print("______________________________________________")
    p = "tcp://127.0.0.1:1619" # how and where to connect
    for i in range(10):
        articles[i+1]="Hello! This is article "+str(i+1)+" of Repository 2. Thanks for requesting it!"
# create the reply socket and bind it to the chosen address
s = context.socket(zmq.REP)
s.bind(p)
# Serve forever.  -1 is reserved as a shutdown sentinel, but its handling
# is commented out below — NOTE(review): as written, a -1 request gets no
# reply at all, which violates the REQ/REP lockstep and would leave the
# client blocked; confirm whether this is intended.
while True:
    Req = int(s.recv_string())
    if Req != -1:
        print("\nRepository",server_number,"recieved a request for article",Req)
        print("Replying....")
        # Fallback reply, overwritten unless the lookup below fails.
        Rep=articles[1]
        try:
            Rep=articles[Req]
        except:
            Rep="Repository Not Found!!"
            print("Repository Not found!")
        s.send_string(Rep)
    # else:
    # s.send_string("Closed Repository "+str(server_number))
    # time.sleep(3)
    # break
<file_sep>import zmq,time
import threading
Subscribed=[""]*5
Publishers=["Entertainment","News","Sports","Politics","Facts"]
print("WELCOME TO OUR TEXTUAL YOUTUBE APPLICATION")
print("\nHere, we deliver quality messages from our recognised publishers.")
print("You can subscribe to as many subscribers tou want.")
print("Here's the list of our publishers:")
print("______________________________________________________________________")
print("| PUBLISHERS:- | Wanna Subscribe?")
print("|____________________________________________________________________|(yes/no)")
Subscribed[0]=input("| 1). | Entertainment | |")
Subscribed[1]=input("| 2). | News | |")
Subscribed[2]=input("| 3). | Sports | |")
Subscribed[3]=input("| 4). | Politics | |")
Subscribed[4]=input("| 5). | Facts | |")
print("|__________|_______________|_________________________________________|")
print("\n Great setting things up for you!!")
print(" ALL DONE!! ")
print("\n___________________________Your Feed____________________________________")
for i in range(5):
if(Subscribed[i]=="Yes" or Subscribed[i]=="yes" or Subscribed[i]=="YES"):
Subscribed[i]=1
else:
Subscribed[i]=0
def listener(publisher):
    """Subscribe to one publisher's feed and print every message it sends.

    publisher: index into the global Publishers list (0-4); publisher i is
    reached on TCP port 1618 + i. Runs forever, so it is meant to be
    started on its own thread.
    """
    ctx = zmq.Context()
    sock = ctx.socket(zmq.SUB)
    endpoint = "tcp://127.0.0.1:" + str(1618 + publisher)
    sock.connect(endpoint)
    # Empty topic prefix -> receive every message this publisher sends.
    sock.setsockopt_string(zmq.SUBSCRIBE, "")
    while True:
        message = sock.recv_string()
        print("\n"+Publishers[publisher]+" published something : ")
        print(message)
# Start one background listener thread per subscribed publisher.  The
# threads are non-daemon (threading default), so the process keeps running
# and printing feeds after the main script falls off the end.
for i in range(5):
    if(Subscribed[i]==1):
        t1 = threading.Thread(target=listener, args=(i,))
        t1.start()
<file_sep>import zmq, time, pickle, sys, random
print("WELCOME TO OUR ZOMATO FOOD DELIVERY APPLICATION")
print("\nHere, we deliver quality food to our customers")

# Restaurant number comes from the command line and only selects the port
# this producer binds (3015 + server_number); it defaults to 1.
server_number=1
try:
    server_number = int(sys.argv[1])
except (IndexError, ValueError):
    # FIX: narrowed from a bare 'except:' so Ctrl-C / SystemExit are not
    # swallowed; a missing or non-integer argument keeps the default.
    server_number=1

Name = input("Enter the Name of Restaurant: ")
Num = int(input("Enter the number of orders to be delivered: "))

context = zmq.Context()
SRC = '127.0.0.1'
PORT = str(3015 + server_number)

# PUSH socket: queued orders are load-balanced across all connected
# delivery workers (PULL sockets).
s = context.socket(zmq.PUSH)
p = "tcp://"+ SRC +":"+ PORT
s.bind(p)

# Emit one pickled order every half second; 'workload' is the number of
# seconds the delivery will take on the worker side.
for i in range(Num):
    workload = random.randint(2,5)
    s.send(pickle.dumps({'name':Name, 'order_id':i,'workload':workload}))
    time.sleep(0.5)
<file_sep>import zmq,sys,time
context = zmq.Context()

# --- greeting banner ---
print("\n Welcome to our Helpdesk paltform!!")
print("______________________________________________________________________________")
print(" _________________________________________ ")
print("We'll be creating your account in a minute.")
print("We have two Customer Care Executives. You can connect to either of them :)")

# PAIR socket: an exclusive one-to-one chat channel with a single executive.
s = context.socket(zmq.PAIR)

con=int(input("\nConnect to Customer Care Executive 1/2 ? (1/2): "))
if(con==1):
    print("Connecting to the Executive 1.......")
    PORT="1618"
    php = "tcp://127.0.0.1:"+ PORT # how and where to connect
    s.connect(php)
elif(con==2):
    print("Connecting to the Executive 2.......")
    PORT="1619"
    php = "tcp://127.0.0.1:"+ PORT # how and where to connect
    # BUG FIX: this connect was missing, so choosing executive 2 left the
    # socket unconnected and the first send_string() blocked forever.
    s.connect(php)
# NOTE(review): any answer other than 1 or 2 still leaves the socket
# unconnected; send_string() below would then block indefinitely.

print("\nConnection Successful !!")
print("You can now request any help :)")

# Chat loop: strict send/recv alternation with a peer that answers every
# message.  User can bail out after each exchange.
while(True):
    req=input("\nMe: ")
    s.send_string(str(req))
    Rep = s.recv_string()
    print("Executive: "+Rep)
    con=input("STOP ? (Yes/No) : ")
    if con in ("yes", "YES", "Yes"):
        time.sleep(1)
        break
<file_sep>import zmq,sys,time
context = zmq.Context()

# --- greeting banner ---
print("\n Welcome to our repositries paltform!!")
print("______________________________________________________________________________")
print(" _________________________________________ ")
print("We'll be creating your account in a minute.")
print("We have two repositories. You can connect to either/both of them :)")

# One REQ socket may connect to several REP servers; ZeroMQ then
# load-balances successive requests round-robin across the connected
# repositories.
s = context.socket(zmq.REQ)
connections=0
con=input("\nConnect to Repository 1? (Yes/No): ")
if(con=="Yes" or con=="YES" or con=="yes"):
    PORT="1618"
    php = "tcp://127.0.0.1:"+ PORT # how and where to connect
    s.connect(php)
    connections+=1
con=input("Connect to Repository 2? (Yes/No): ")
if(con=="Yes" or con=="YES" or con=="yes"):
    PORT="1619"
    php = "tcp://127.0.0.1:"+ PORT # how and where to connect
    s.connect(php)
    connections+=1
# NOTE(review): if the user declines both repositories (connections == 0),
# send_string() below blocks forever on the unconnected socket.
print("\nEach repository has 10 articles.")

# Request loop: strict send/recv alternation required by the REQ socket;
# requests go to whichever connected repository is next in the rotation.
while(True):
    req=int(input("\nEnter the article number you want to request: "))
    s.send_string(str(req))
    Rep = s.recv_string()
    print(Rep)
    con="No"
    con=input("Want to Stop? (Yes/No) : ")
    if(con=="Yes" or con=="YES" or con=="yes"):
        # Disabled shutdown handshake (would send -1 to tell each
        # connected server to close); kept here for reference.
        # s.send_string("-1")
        # temp=s.recv_string()
        # # time.sleep(2)
        # # print(temp)
        # if(connections==2):
        #     s.send_string("-1")
        time.sleep(1)
        break
<file_sep>import zmq, time, pickle, sys
import threading
context = zmq.Context()  # process-wide ZMQ context shared by both sockets in func()
def func(id):
    """Pull orders from one restaurant, "deliver" them, and report each
    completed delivery to the results collector on port 3031.

    id: numeric label for this delivery person, forwarded in every report.
    Runs forever; terminate the process to stop.
    """
    puller = context.socket(zmq.PULL)
    con = input("\nConnect to Restaurant 1/2? (1/2): ")
    if con == "1":
        puller.connect("tcp://127.0.0.1:" + "3016")
    elif con == "2":
        puller.connect("tcp://127.0.0.1:" + "3017")
    # Any other answer leaves the PULL socket unconnected (same as the
    # original code): recv() below will simply block forever.
    worker_id = id
    reporter = context.socket(zmq.PUSH)
    reporter.connect("tcp://" + '127.0.0.1' + ":" + '3031')
    while True:
        work = pickle.loads(puller.recv())
        print("Order Id: ", work['order_id'])
        print("Restaurant Name: ",work['name'])
        print("Order picked by Delivery person: ",worker_id)
        # Simulate the delivery taking 'workload' seconds.
        time.sleep(work['workload'])
        reporter.send(pickle.dumps({'Order_Id': work['order_id'],'worker_id':worker_id,'name':work['name']}))
# Delivery-person id must be supplied on the command line; this raises
# IndexError/ValueError if the argument is missing or not an integer.
number = int(sys.argv[1])
func(number)
<file_sep>import zmq, time, pickle, sys
context = zmq.Context()

# PULL sink: every delivery worker PUSHes one completion report per order
# to this port; reports are pickled dicts with 'Order_Id', 'worker_id'
# and 'name' keys.
results_receiver = context.socket(zmq.PULL)
SRC = '127.0.0.1'
PORT = '3031'
p = "tcp://"+ SRC +":"+ PORT
results_receiver.bind(p)

# Running totals of completed deliveries, keyed by worker id and by
# restaurant name respectively.
worker_data = {}
restaurant_data = {}

while True:
    # NOTE(review): pickle.loads on data received from the network is
    # unsafe if peers are untrusted; fine for this local demo.
    result = pickle.loads(results_receiver.recv())
    # dict.get(key, 0) + 1 replaces the original -1 sentinel check;
    # counts start at 1 either way, so behavior is identical but simpler.
    worker_data[result['worker_id']] = worker_data.get(result['worker_id'], 0) + 1
    restaurant_data[result['name']] = restaurant_data.get(result['name'], 0) + 1
    print(worker_data)
    print(restaurant_data)
| 8dbab74705c0c4f1b5d679b7d336e00c4236389c | [
"Python"
] | 9 | Python | sphinx1618/Python-implementation-of-Connection-Patterns | dd8e33fafe404782ad45f087f53ac3cc707ed423 | edb15a78c4a43811f592d463b6b972d1f3ab6fc0 |
refs/heads/master | <repo_name>dohoan259/MovieGuide<file_sep>/app/src/main/java/com/example/hoanbk/movieguide/listing/sorting/SortType.java
package com.example.hoanbk.movieguide.listing.sorting;
/**
* Created by hoanbk on 05/02/2018.
*/
public enum SortType {
MOST_POPULAR(0), HIGHEST_RATED(1), FAVORITES(2);
private final int vale;
SortType(int value)
{
this.vale = value;
}
public int getVale() {
return vale;
}
}
<file_sep>/app/src/main/java/com/example/hoanbk/movieguide/listing/MoviesListingPresenter.java
package com.example.hoanbk.movieguide.listing;
import com.example.hoanbk.movieguide.data_manager.model.Movie;
import java.util.List;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.disposables.Disposable;
import io.reactivex.schedulers.Schedulers;
/**
* Created by hoanbk on 02/02/2018.
*/
public class MoviesListingPresenter implements MoviesContract.IMoviesListingPresenter{
private MoviesContract.IMoviesListingView mView;
private IMoviesListingInteractor mMoviesInteractor;
private Disposable mFetchSubscription;
public MoviesListingPresenter(IMoviesListingInteractor moviesInteractor) {
mMoviesInteractor = moviesInteractor;
}
@Override
public void displayMovies() {
showLoading();
mFetchSubscription = mMoviesInteractor.fetchMovies()
.subscribeOn(Schedulers.io())
.observeOn(AndroidSchedulers.mainThread())
.subscribe(this::onMoviesFetchSuccess, this::onMoviesFetchFailed);
}
private void showLoading() {
if (isViewAttached()) {
mView.loadingStarted();
}
}
private boolean isViewAttached() {
return mView != null;
}
@Override
public void setView(MoviesContract.IMoviesListingView view) {
mView = view;
displayMovies();
}
@Override
public void destroy() {
mView = null;
// unsubscribe subscription
}
private void onMoviesFetchSuccess(List<Movie> movies) {
if (isViewAttached()) {
mView.showMovies(movies);
}
}
private void onMoviesFetchFailed(Throwable e) {
mView.loadingFailed(e.getMessage());
}
}
<file_sep>/app/src/main/java/com/example/hoanbk/movieguide/Constants.java
package com.example.hoanbk.movieguide;
/**
* Created by hoanbk on 31/01/2018.
*/
public class Constants {
}
<file_sep>/app/src/main/java/com/example/hoanbk/movieguide/favorites/FavoritesInteractor.java
package com.example.hoanbk.movieguide.favorites;
import com.example.hoanbk.movieguide.data_manager.model.Movie;
import com.example.hoanbk.movieguide.data_manager.movies.IMoviesRepository;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* Created by hoanbk on 05/02/2018.
*/
public class FavoritesInteractor implements IFavoritesInteractor{
private IMoviesRepository mMoviesRepository;
public FavoritesInteractor(IMoviesRepository moviesRepository) {
mMoviesRepository = moviesRepository;
}
@Override
public void setFavorite(Movie movie) {
mMoviesRepository.setFavorite(movie);
}
@Override
public boolean isFavorite(String id) {
return mMoviesRepository.isFavorite(id);
}
@Override
public List<Movie> getFavorites() {
try{
return mMoviesRepository.getFavorites();
} catch (IOException e) {
return new ArrayList<>(0);
}
}
@Override
public void unFavorite(String id) {
mMoviesRepository.unFavorite(id);
}
}
| 7fea3b50b8fcdb836d07c6a59951773dec2e0ee0 | [
"Java"
] | 4 | Java | dohoan259/MovieGuide | 72a924b8d32a6948c1fcb0e28ac7fcc09cc84a62 | 6c353a7e65e423c832f33d52b2def477ece9a3ff |
refs/heads/master | <repo_name>norlight/HiPDA-iOS<file_sep>/README.md
# HiPDA iOS
*基于MVVM+RAC的一次实践。*
## 简介
采用MVVM,Model层为瘦Model,只包含属性和少许可以自管理的解析转换逻辑。
大多情况下,每个Controller会有一个对应的mainView(可在loadView处直接赋值的view),以ViewModel初始化Controller,Controller内再用该ViewModel初始化mainView,两者共用一个ViewModel。
每个View会有一个ViewModel与之适配,View内一般会有setupViews初始化视图和bindViewModel两个阶段。
上述流程各方法使用protocol进行约定。
除了侧滑抽屉部分,应用内所有的跳转都基于ViewModel。思路来自雷纯锋大神,做了少许改进,hook了ViewModel层,避开对服务总线的依赖,可在任意ViewModel内类似`[self pushViewModel:anotherVM]`这样直接进行跳转。
参考:[MVVM With ReactiveCocoa](http://blog.leichunfeng.com/blog/2016/02/27/mvvm-with-reactivecocoa/)
## 基本思路
爬回HTML,生成DOM,结合CSS Selector、XPath、正则等进行解析、清理、修改。
帖子详情楼层内容先进行清理,再使用DTCoreText进行原生显示。诸如投票贴、屏蔽贴、引用等特殊内容,会插入object标签,计算好尺寸,显示时用原生视图替代。
**注意:使用DTCoreText渲染含有图片的内容时在模拟器下有可能会 crash,需到`安全性与隐私-隐私-辅助功能`将所有使用辅助功能的程序先勾掉。**
该 Bug 见:[Radar: Accessibility-based Tools crash app running in iOS 7 Simulator
](https://www.cocoanetics.com/2013/11/radar-accessibility-based-tools-crash-app-running-in-ios-7-simulator/)
## 目录结构
.
└── HiPDA
├── Model:数据层
├── View:视图层
├── ViewModel:VM层
├── Controller:控制器层
├── Resource:资源文件,图片、JSON、HTML、PLIST等
│ └── Image:图片
├── Util:各种工具、通用类
│ ├── Base:基类
│ ├── Category:分类
│ ├── Common:一些共有类
│ └── Manager:单例
└── Vendor:没使用CocoaPods管理的第三方库
## 第三方类库
.
└── Pods
├── AFNetworking:网络库,必备,不多说
├── Aspects:AOP面向切面编程
├── BlocksKit:各式callback的block化
├── DTCoreText:HTML原生渲染
├── HTMLKit:HTML解析、修改,部分DOM API在Cocoa上的实现,支持CSS Selector
├── JDStatusBarNotification:状态栏提示
├── Kiwi:单元测试
├── MBProgressHUD:HUD提示
├── MJRefresh:下拉、上拉刷新
├── Masonry:自动布局,必备,不多说
├── Ono:HTML解析,支持CSS Selector、XPath
├── RETableViewManager:表单UI库
├── ReactiveCocoa:Cocoa上的函数响应式编程,重型武器,炒鸡好使
├── ReactiveViewModel:RAC的MVVM辅助类
├── RegexKitLite:正则表达式
├── SAMKeychain:keychain封装
├── SWRevealViewController:侧滑抽屉
├── UITableView+FDTemplateLayoutCell:几行代码解决不定行高计算
└── YYKit:全能库,优酷大神作品<file_sep>/Podfile
# Uncomment this line to define a global platform for your project
platform :ios, '8.0'
target 'HiPDA' do
# Uncomment this line if you're using Swift or would like to use dynamic frameworks
# use_frameworks!
# Pods for HiPDA
pod 'Ono', '~> 1.2.2'
pod 'YYKit', '~> 1.0.9'
pod 'HTMLKit', '~> 1.1.0'
pod 'Masonry', '~> 1.0.2'
pod 'BlocksKit', '~> 2.2.5'
pod 'MJRefresh', '~> 3.1.12'
pod 'DTCoreText', '~> 1.6.19'
pod 'SAMKeychain', '~> 1.5.2'
pod 'ReactiveCocoa', '~> 2.5'
pod 'AFNetworking', '~> 3.1.0'
pod 'AspectsV1.4.2', '~> 1.4.2'
pod 'MBProgressHUD', '~> 1.0.0'
pod 'ReactiveViewModel', '~> 0.3'
pod 'RETableViewManager', '~> 1.7'
#pod 'HYBUnicodeReadable', '~> 1.3'
pod 'JDStatusBarNotification', '1.5.4'
pod 'RegexKitLite-NoWarning', '~> 1.1.0'
pod 'SWRevealViewController', '~> 2.3.0'
pod 'UITableView+FDTemplateLayoutCell', '~> 1.5.beta'
target 'HiPDATests' do
inherit! :search_paths
# Pods for testing
pod 'Kiwi', '2.4.0'
end
target 'HiPDAUITests' do
inherit! :search_paths
# Pods for testing
end
end
| 4a032fdc10288853ed26500677afc8ca3143a436 | [
"Markdown",
"Ruby"
] | 2 | Markdown | norlight/HiPDA-iOS | b6f91eac2a6b12dfbdcf89be04caac8390aa8e0e | 012e12c14b8331d9b871c436c379027ac0d81620 |
refs/heads/master | <repo_name>jonsjoberg/ddp_project<file_sep>/server.R
# Project server.R
require(shiny)
require(ggplot2)
data("mtcars")
mtcars_model <- lm(mpg ~ qsec + wt, data = mtcars)
shinyServer(
function(input, output){
predicted <<- reactive({predict(mtcars_model, data.frame('qsec' = input$qsec, 'wt' = input$wt))})
output$chart <- renderPlot({
i_wt <<- input$wt
i_qsec <<- input$qsec
plot <- ggplot(data = mtcars) +
geom_point(aes(x = i_wt, y = i_qsec, size = predicted()[[1]]), color = 'red', alpha= 1 ) +
geom_point(aes(x = wt, y = qsec, size = mpg), color = 'steelblue', alpha = 0.75) +
ylab('1/4 Mile time') +
xlab('Weight') +
scale_size(name="MPG")
plot
})
output$wt_out <- renderText({input$wt})
output$qsec_out <- renderText({input$qsec})
output$predicted_out <-renderText({predicted()})
})<file_sep>/index.md
---
title : The MPG predictor
subtitle : Developing Data Products Project
author : <NAME>
job :
framework : io2012 # {io2012, html5slides, shower, dzslides, ...}
highlighter : highlight.js # {highlight.js, prettify, highlight}
hitheme : tomorrow #
widgets : [] # {mathjax, quiz, bootstrap}
mode : selfcontained # {standalone, draft}
knit : slidify::knit2slides
ext_widgets : {rCharts: libraries/nvd3}
---
## The data
Every model needs to be buld on data, the we're going to use is the well known mtcars data set, esp the variables, weight, 1/4 mile time and the MPG. Here's what the data looks like plotted:

--- .class #id
## The model
To create the predictor a linear model is fitted to the data
```r
model <- lm(mpg ~ qsec + wt, data = mtcars)
summary(model)
```
```
##
## Call:
## lm(formula = mpg ~ qsec + wt, data = mtcars)
##
## Residuals:
## Min 1Q Median 3Q Max
## -4.3962 -2.1431 -0.2129 1.4915 5.7486
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 19.7462 5.2521 3.760 0.000765 ***
## qsec 0.9292 0.2650 3.506 0.001500 **
## wt -5.0480 0.4840 -10.430 2.52e-11 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 2.596 on 29 degrees of freedom
## Multiple R-squared: 0.8264, Adjusted R-squared: 0.8144
## F-statistic: 69.03 on 2 and 29 DF, p-value: 9.395e-12
```
--- .class #id
## Predictions
The fitted model can be used for predictions of MPG's, like this:
```r
predict(model, newdata = data.frame('qsec' = 20, 'wt'=2.5))
```
```
## 1
## 25.71023
```
or
```r
predict(model, newdata = data.frame('qsec' = 15, 'wt'=1.5))
```
```
## 1
## 26.11222
```
---. class #id
## Predictions plotted

<file_sep>/ui.R
# Project ui.R
require(shiny)
shinyUI(fluidPage(
title = 'MPG prediction',
plotOutput('chart'),
hr(),
fluidRow(
column(3,
p("This shiny app predicts the MPG of a car using a linear model built on the mtcars data set, the red point the plot is the predicted one. The blue dots are the cars that is included in the dataset.
Enter the weight and 1/4 mile time to predict the MPG")
),
column(3,
numericInput(inputId = 'wt', label = 'Weight (lb/1000)', min = 1.5, max = 5.5, step = 0.5, value = 3)
),
column(3,
numericInput(inputId = 'qsec', label = "1/4 mile time (seconds)", min = 14, max = 25, step = 0.5, value = 18)
),
column(3,
submitButton(text = 'Submit'),
h5('Entered values:'),
p('Weight:'),
textOutput('wt_out'),
p('1/4 Mile time:'),
textOutput('qsec_out'),
h5('Predicted MPG:'),
textOutput('predicted_out')
)
)
)
)<file_sep>/index.Rmd
---
title : The MPG predictor
subtitle : Developing Data Products Project
author : <NAME>
job :
framework : io2012 # {io2012, html5slides, shower, dzslides, ...}
highlighter : highlight.js # {highlight.js, prettify, highlight}
hitheme : tomorrow #
widgets : [] # {mathjax, quiz, bootstrap}
mode : selfcontained # {standalone, draft}
knit : slidify::knit2slides
ext_widgets : {rCharts: libraries/nvd3}
---
## The data
Every model needs to be buld on data, the we're going to use is the well known mtcars data set, esp the variables, weight, 1/4 mile time and the MPG. Here's what the data looks like plotted:
```{r, comment = NA, message = F, echo = F, fig.height=6, fig.width=12}
require(ggplot2)
plot <- ggplot(data = mtcars) +
geom_point(aes(x = wt, y = qsec, size = mpg), color = 'steelblue', alpha = 0.75) +
ylab('1/4 Mile time') +
xlab('Weight') +
scale_size(name="MPG")
plot
```
--- .class #id
## The model
To create the predictor a linear model is fitted to the data, this is the characteristics of the model:
```{r}
model <- lm(mpg ~ qsec + wt, data = mtcars)
summary(model)
```
--- .class #id
## Predictions
The fitted model can be used for predictions of MPG's, like this:
```{r}
predict(model, newdata = data.frame('qsec' = 20, 'wt'=2.5))
```
or
```{r}
predict(model, newdata = data.frame('qsec' = 15, 'wt'=1.5))
```
---. class #id
## Predictions plotted
And to make it easier to visualize the results here are those predictions plotted:
```{r, comment = NA, message = F, echo = F, fig.height=6, fig.width=12}
new_data = predict(model, newdata = data.frame('qsec' = c(20,15), 'wt'=c(2.5, 1.5)))
new_data_df = data.frame('mpg'= new_data,'qsec' = c(20,15), 'wt'=c(2.5, 1.5))
plot <- ggplot(data = mtcars) +
geom_point(data = new_data_df, aes(x = wt, y = qsec, size=mpg), color = 'red') +
geom_point(aes(x = wt, y = qsec, size = mpg), color = 'steelblue', alpha = 0.5) +
ylab('1/4 Mile time') +
xlab('Weight') +
scale_size(name="MPG")
plot
```
| 8df1334daee1bb9004e4d028da5746e39654c8a6 | [
"Markdown",
"R",
"RMarkdown"
] | 4 | R | jonsjoberg/ddp_project | e290b12f6347e03e08f1c85397c31d8d6da6212b | b4439bc7b1d710f128346acf2d507d595a158453 |
refs/heads/master | <repo_name>giovanny-c/be-the-hero<file_sep>/backend/tests/integration/ong.spec.js
const request = require('supertest')
const app = require('../../src/app')
const connection = require('../../src/database/connection')
describe('ONG', () => {//nome do test
let ongId = ""
beforeAll(async () => {//antes do test vai
//await connection.migrate.rollback('create_ongs', true)
//await connection.migrate.rollback('create_incidents', true)//faz um rollback para nao acumular tabelas no banco de dados
//NAO ESTA FUNCIONANDO, deletar o banco apos fazer o teste
//PARA CRIAR UM NOVO RODAR COM ESSE CODIGO
await connection.migrate.latest()//executar as migrations de criação do banco de dados de teste
})
afterAll(async () => {//depois do teste vai
await connection.destroy()//terminar a conexao com o banco de dados
})
it('Should be able to create a new ONG', async () => {//descrição da funcionalidade do teste
const response = await request(app) //chamando o app, que configura o server e usa as rotas
.post('/ongs')//usando a rota '/ongs' pelo metodo post
.send({//enviando os dados
name: "APAD2",
email: "<EMAIL>",
whatsapp: "74000000000",
city: "Rio do SUl",
uf: "SC"
})
ongId = response.body.id
console.log(ongId)
expect(response.body).toHaveProperty('id')//espera que retorne um id
expect(response.body.id).toHaveLength(8)//espera esse id tenha 8 caracteres
}),
it('Should be able to create a new incident based on an determined ONG', async() => {
const response = await request(app)
.post('/incidents')
.set('authorization', ongId )
.send({
title: "caso1",
description: "asdasdasd",
value: "200",
})
console.log(response.header.authorization)
console.log(response.body)
//expect(req.headers.authorization).toBe(ongId)
expect(response.body).toHaveProperty('id')
})
})<file_sep>/frontend/src/pages/Profile/index.js
import React, {useState, useEffect} from'react'
import {Link, useHistory} from 'react-router-dom'
import {FiPower, FiTrash2} from 'react-icons/fi'
import api from '../../services/api'
import './style.css'
import logoImg from '../../assets/logo.svg'
export default function Profile(){
const [incidents, setIncidents] = useState([])//vai retornar um array de resultados
const history = useHistory()
const ongName = localStorage.getItem('ongName')
const ongId = localStorage.getItem('ongId')
//pegando o nome no storge, colocado la na hora do login
useEffect(() => {//vai executar a funçao de listagem de casos
api.get('/profile', {//vai executar a rota /profile por GET
headers: {//colocando o id da ong no header da pagina, meio que o backend usa para procurar os casos de uma ong
Authorization: ongId, //nome desse header
}
}).then(res => {//usando o .then()
setIncidents(res.data)//colocando o reultado da query no incidents, pelo useState()
})
}, [ongId])//toda vez que o id da ong mudar, como ele nao muda a nao ser que troque de user, mudara só uma vez
async function handleDeleteIncident(id){
try{
await api.delete(`incidents/${id}`, {
headers: {
Authorization: ongId,
}
})
setIncidents(incidents.filter(incident => incident.id !== id))
//vai remover da page os casos que sao deletados, vai manter todos os casos tiverem o id diferente do id do caso deletado
}
catch(err){
alert('erro ao deletar')
}
}
function handleLogout(){
localStorage.clear()
history.push('/')
}
return(
<div className="profile-container">
<header>
<img src={logoImg} alt="Be the Hero"/>
<span>Bem vinda, {ongName}</span>
<Link className="button" to="/incidents/new">
Cadastrar novo caso
</Link>
<button onClick={handleLogout}
type="button">
<FiPower size={18} color="#e02041"/>
</button>
</header>
<h1>Casos Cadastrados</h1>
<div className="list">
<ul>
{incidents.map(incident => (
<li key={incident.id}>
<p className="title">Caso:</p>
<p>{incident.title}</p>
<p className="title">Descriçao</p>
<p>{incident.description}</p>
<p className="title">Valor:</p>
<p>{Intl.NumberFormat('pr-BR', { style: 'currency', currency: 'BRL'}).format(incident.value)}</p>
<button onClick={() => handleDeleteIncident(incident.id)}//quando passar uma func em
type="button"
>
<FiTrash2 size={20} color="#a8a8b3"/>
</button>
</li>
))}
</ul>
</div>
</div>
)
}<file_sep>/frontend/src/App.js
import React, {useState} from 'react';
//chama o useState para poder usar a variavel
//como um estado
import './global.css'//configs de css globais
import Routes from './routes'
function App() {
return (
<div>
<Routes />
</div>
);
}
export default App;
| b181dcb4e1b4817e6aaa61e2beefb742b568bfa5 | [
"JavaScript"
] | 3 | JavaScript | giovanny-c/be-the-hero | caa15d1fd67ed9dceb5d37beb7b28bf51f5e9ae2 | 912b142d5661a491fb77d36aadca4b0284742254 |
refs/heads/master | <file_sep>//
// ViewController.swift
// HelloWorld
//
// Created by ARTEAGA,KA KEVIN on 9/17/18.
// Copyright © 2018 ARTEAGA,KA KEVIN. All rights reserved.
//
import UIKit
class ViewController: UIViewController {
@IBOutlet weak var helloWorldLabel: UILabel!
@IBOutlet weak var favoriteBandLabel: UILabel!
@IBOutlet weak var imageView1: UIImageView!
@IBOutlet weak var imageView2: UIImageView!
@IBOutlet weak var yellowButton: UIButton!
@IBOutlet weak var favoriteClassLabel: UILabel!
@IBOutlet weak var nameTextField: UITextField!
@IBOutlet weak var whatFunTextField: UITextField!
@IBOutlet weak var greenSlider: UISlider!
var nameOfBand :String = "Banda MS"
var numberOfMembers :Int = 16
override func viewDidLoad() {
super.viewDidLoad()
favoriteBandLabel.text = "\(nameOfBand) has \(numberOfMembers) members"
}
@IBAction func clickMeButton(_ sender: Any) { view.backgroundColor = .blue
}
}
| ce8c297ce7b2db6d6632d93fe57cb857f805e23a | [
"Swift"
] | 1 | Swift | kevin106856/HelloWorld | 8a48d3755355d138e12ca7923dd61896d7ef4e82 | 1044098f7564f671c621abb135e8272345bb92d3 |
refs/heads/master | <file_sep># Commang-line-games
Repository for storing command line games
This repository isn for all those wg\ho are intrested in building command line games
<file_sep>
import pyfiglet
import time
import random
def intro():
print("Welcome To")
w=pyfiglet.figlet_format("Rock\n Paper\n &scissors")
print(w)
print(" BY The <NAME>")
intro()
print("Please Enter Your Name")
player_name=input()
print("Welcome To The Game")
print(pyfiglet.figlet_format(player_name))
gameloop=0
player_score=0
computer_score=0
player_choice="null"
computer_coice="null"
def pwin():
global player_score
print("\nYou win")
player_score+=1
def cwin():
global computer_score
print("\ncomputer wins")
computer_score+=1
def tie():
print("\nits a TIE")
while gameloop<5: # 1=Scissor 2=paper 3=rock
print("Your score is")
print(player_score)
print("Computer score is ")
print(computer_score)
player_choice=int(input("Enter Your Choice \n1.Scissor \n2.PAPER \n3.ROCK"))
computer_choice=int(random.randint(1,3))
if computer_choice==player_choice:
tie()
elif(computer_choice==1 and player_choice)==2:
print("\n\ncomputer choose Scissor")
cwin()
elif computer_choice==1 and player_choice==3:
print("\n\ncomputer choose Scissor")
pwin()
elif computer_choice==2 and player_choice==1:
print("\n\ncomputer choose paper")
pwin()
elif computer_choice==2 and player_choice==3:
print("\n\ncomputer choose paper")
cwin()
elif computer_choice==3 and player_choice==1:
print("\n\ncomputer choose Rock")
cwin()
elif computer_choice==3 and player_choice==2:
print("\n\ncomputer choose Rock")
pwin()
gameloop=gameloop+1
print("\n\n\n\nyour score is ")
print(player_score)
print("\n\n\ncomputer score is")
print(computer_score)
if computer_score>player_score:
print(pyfiglet.figlet_format("Congrats Computer WINs"))
if player_score>computer_score:
print(pyfiglet.figlet_format("Congrats YOU Win"))
if computer_score==player_score:
print(pyfiglet.figlet_format("ITS a TIE"))
time.sleep(3)
| 63a3e105ba8f26f61816dad1685c9e50ac18e64e | [
"Markdown",
"Python"
] | 2 | Markdown | Asher-MS/Commang-line-games | f9584990ed78164d031507bfbcab2d0911c1b99e | b7308dfa4d2032aa49a722e36dcbf008e5095a7e |
refs/heads/master | <file_sep><?php
namespace Hamlet\Database\MySQLSwoole;
use Hamlet\Database\Database;
use Hamlet\Database\Procedure;
use Hamlet\Database\Session;
use PHPUnit\Framework\Assert;
use PHPUnit\Framework\TestCase;
class MySQLSwooleDatabaseTest extends TestCase
{
/** @var Database */
private $database;
/** @var Procedure */
private $procedure;
/** @var int */
private $userId;
/**
* @before
*/
public function _setUp()
{
$this->database = new MySQLSwooleDatabase('0.0.0.0', 'root', '', 'test', 4);
$this->database->init();
$this->database->withSession(function (Session $session) {
$procedure = $session->prepare("INSERT INTO users (name) VALUES ('Vladimir')");
$this->userId = $procedure->insert();
$procedure = $session->prepare("INSERT INTO addresses (user_id, address) VALUES (?, 'Moskva')");
$procedure->bindInteger($this->userId);
$procedure->execute();
$procedure = $session->prepare("INSERT INTO addresses (user_id, address) VALUES (?, 'Vladivostok')");
$procedure->bindInteger($this->userId);
$procedure->execute();
$this->procedure = $session->prepare('
SELECT users.id,
name,
address
FROM users
JOIN addresses
ON users.id = addresses.user_id
');
});
}
/**
* @after
*/
public function _tearDown()
{
$this->database->withSession(function (Session $session) {
$session->prepare('DELETE FROM addresses WHERE 1')->execute();
$session->prepare('DELETE FROM users WHERE 1')->execute();
});
}
public function testProcessOne()
{
$result = $this->procedure->processOne()
->coalesceAll()
->collectAll();
Assert::assertEquals([$this->userId], $result);
}
public function testProcessAll()
{
$result = $this->procedure->processAll()
->selectValue('address')->groupInto('addresses')
->selectFields('name', 'addresses')->name('user')
->map('id', 'user')->flatten()
->collectAll();
Assert::assertCount(1, $result);
Assert::assertArrayHasKey($this->userId, $result);
Assert::assertEquals('Vladimir', $result[$this->userId]['name']);
Assert::assertCount(2, $result[$this->userId]['addresses']);
}
public function testFetchOne()
{
Assert::assertEquals(['id' => $this->userId, 'name' => 'Vladimir', 'address' => 'Moskva'], $this->procedure->fetchOne());
}
public function testFetchAll()
{
Assert::assertEquals([
['id' => $this->userId, 'name' => 'Vladimir', 'address' => 'Moskva'],
['id' => $this->userId, 'name' => 'Vladimir', 'address' => 'Vladivostok']
], $this->procedure->fetchAll());
}
public function testStream()
{
$iterator = $this->procedure->stream()
->selectValue('address')->groupInto('addresses')
->selectFields('name', 'addresses')->name('user')
->map('id', 'user')->flatten()
->iterator();
foreach ($iterator as $id => $user) {
Assert::assertEquals($this->userId, $id);
Assert::assertEquals(['Moskva', 'Vladivostok'], $user['addresses']);
}
}
public function testInsert()
{
$this->database->withSession(function (Session $session) {
$procedure = $session->prepare("INSERT INTO users (name) VALUES ('Anatoly')");
Assert::assertGreaterThan($this->userId, $procedure->insert());
});
}
public function testUpdate()
{
$this->database->withSession(function (Session $session) {
$procedure = $session->prepare("UPDATE users SET name = 'Vasily' WHERE name = 'Vladimir'");
$procedure->execute();
Assert::assertEquals(1, $procedure->affectedRows());
$procedure = $session->prepare("UPDATE users SET name = 'Nikolay' WHERE name = 'Evgeniy'");
$procedure->execute();
Assert::assertEquals(0, $procedure->affectedRows());
});
}
}
<file_sep>#!/usr/bin/env bash
php phpunit-coroutine.php --bootstrap `pwd`/../vendor/autoload.php --verbose ../tests
<file_sep><?php
go(function () {
require __DIR__ . '/../vendor/bin/phpunit';
});
<file_sep><?php
namespace Hamlet\Database\MySQLSwoole;
use Hamlet\Database\{Procedure, Session};
use Swoole\Coroutine\MySQL;
/**
* @extends Session<MySQL>
*/
class MySQLSwooleSession extends Session
{
/**
* @param MySQL $handle
*/
public function __construct(MySQL $handle)
{
parent::__construct($handle);
}
/**
* @param string $query
* @return Procedure
*/
public function prepare(string $query): Procedure
{
$procedure = new MySQLSwooleProcedure($this->handle, $query);
$procedure->setLogger($this->logger);
return $procedure;
}
/**
* @param MySQL $connection
* @return void
*/
protected function startTransaction($connection)
{
$this->logger->debug('Starting transaction');
$connection->begin();
}
/**
* @param MySQL $connection
* @return void
*/
protected function commit($connection)
{
$this->logger->debug('Committing transaction');
$connection->commit();
}
/**
* @param MySQL $connection
* @return void
*/
protected function rollback($connection)
{
$this->logger->debug('Rolling back transaction');
$connection->rollback();
}
}
<file_sep><?php
namespace Hamlet\Database\MySQLSwoole;
use Exception;
use Hamlet\Database\{Database, DatabaseException, Session};
use Hamlet\Http\Swoole\Bootstraps\WorkerInitializable;
use Swoole\Coroutine;
use Swoole\Coroutine\{Channel, MySQL};
use function gethostbyname;
/**
* @extends Database<MySQL>
* @psalm-suppress PropertyNotSetInConstructor
*/
class MySQLSwooleDatabase extends Database implements WorkerInitializable
{
/**
* @var array<string,string>
*/
private $hosts = [];
public function __construct(string $host, string $user, string $password, string $databaseName = null, int $poolCapacity = 512)
{
$connector = function () use ($host, $user, $password, $databaseName): MySQL {
$connection = new MySQL();
if (!isset($this->hosts[$host])) {
$this->hosts[$host] = gethostbyname($host);
}
$params = [
'host' => $this->hosts[$host],
'user' => $user,
'password' => $<PASSWORD>
];
if ($databaseName) {
$params['database'] = $databaseName;
}
/**
* @psalm-suppress TooManyArguments
*/
$connection->connect($params);
return $connection;
};
$pool = new MySQLSwooleConnectionPool($connector, $poolCapacity);
return parent::__construct($pool);
}
public function init()
{
assert($this->pool instanceof MySQLSwooleConnectionPool);
$this->pool->init();
}
public function withSession(callable $callable)
{
$handle = $this->pool->pop();
Coroutine::defer(function () use ($handle) {
$this->pool->push($handle);
});
$session = $this->createSession($handle);
try {
return $callable($session);
} catch (DatabaseException $e) {
throw $e;
} catch (Exception $e) {
throw new DatabaseException('Failed to execute statement', 0, $e);
}
}
/**
* @template K as array-key
* @template Q
* @param array<K,callable(Session):Q> $callables
* @return array<K,Q>
* @psalm-suppress InvalidReturnStatement
* @psalm-suppress InvalidReturnType
* @psalm-suppress MixedArrayAccess
* @psalm-suppress MixedArrayOffset
* @psalm-suppress MixedAssignment
*/
public function withSessions(array $callables): array
{
$channel = new Channel(count($callables));
$result = [];
foreach ($callables as $key => $callable) {
/**
* @psalm-suppress UnusedFunctionCall
*/
go(function () use ($channel, $callable, $key) {
$channel->push(
$this->withSession(
function (Session $session) use ($callable, $key) {
return [$key, $callable($session)];
}
)
);
});
$result[$key] = -1;
}
foreach ($callables as $_) {
list($key, $item) = $channel->pop();
$result[$key] = $item;
}
return $result;
}
protected function createSession($handle): Session
{
$session = new MySQLSwooleSession($handle);
$session->setLogger($this->logger);
return $session;
}
/**
 * Build a DatabaseException from the error state of a Swoole MySQL handle.
 */
public static function exception(MySQL $connection): DatabaseException
{
    $message = (string) ($connection->error ?? 'Unknown error');
    $code = (int) ($connection->errno ?? -1);
    return new DatabaseException($message, $code);
}
}
<file_sep>CREATE TABLE users (
id INTEGER(11) AUTO_INCREMENT PRIMARY KEY,
name VARCHAR(255) NOT NULL
);
CREATE TABLE addresses (
user_id INTEGER(11) NOT NULL,
address VARCHAR(255) NOT NULL,
FOREIGN KEY user_fk (user_id)
REFERENCES users (id)
ON DELETE CASCADE
ON UPDATE CASCADE
);
<file_sep><?php
namespace Hamlet\Database\MySQLSwoole;
use DomainException;
use Exception;
use Hamlet\Database\ConnectionPool;
use Hamlet\Http\Swoole\Bootstraps\WorkerInitializable;
use Psr\Log\{LoggerInterface, NullLogger};
use Swoole\Coroutine\{Channel, MySQL};
use function Hamlet\Cast\_class;
/**
 * Fixed-capacity pool of Swoole coroutine MySQL connections.
 *
 * Connections are created eagerly in init() and exchanged through a
 * coroutine channel: pop() suspends the current coroutine until a
 * connection becomes available.
 *
 * @implements ConnectionPool<MySQL>
 */
class MySQLSwooleConnectionPool implements ConnectionPool, WorkerInitializable
{
    /**
     * Factory producing a connected MySQL handle, or false on failure.
     *
     * @var callable():(MySQL|false)
     */
    private $connector;

    /**
     * @var LoggerInterface
     */
    private $logger;

    /**
     * Channel holding idle connections; null until init() has been called.
     *
     * @var Channel|null
     */
    private $pool = null;

    /**
     * @var int
     */
    private $capacity;

    /**
     * @param callable():(MySQL|false) $connector
     * @param int $capacity
     */
    public function __construct(callable $connector, int $capacity)
    {
        $this->connector = $connector;
        $this->logger = new NullLogger;
        $this->capacity = $capacity;
    }

    /**
     * @param LoggerInterface $logger
     */
    public function setLogger(LoggerInterface $logger)
    {
        $this->logger = $logger;
    }

    /**
     * Eagerly establish all pooled connections.
     *
     * NOTE(review): retries forever without backoff when the connector keeps
     * failing or throwing -- confirm this is acceptable during worker start-up.
     */
    public function init()
    {
        $this->pool = new Channel($this->capacity);
        $i = $this->capacity;
        while ($i > 0) {
            try {
                $connection = ($this->connector)();
                if ($connection !== false) {
                    $this->pool->push($connection);
                    $i--;
                }
            } catch (Exception $e) {
                $this->logger->warning('Failed to establish connection', ['exception' => $e]);
            }
        }
    }

    /**
     * Take a connection from the pool; suspends the current coroutine until
     * one is available.
     *
     * @return MySQL
     */
    public function pop()
    {
        if ($this->pool === null) {
            throw new DomainException('Pool not initialized');
        }
        return _class(MySQL::class)->assert($this->pool->pop());
    }

    /**
     * Return a connection to the pool.
     *
     * @param MySQL $connection
     * @return void
     */
    public function push($connection)
    {
        if ($this->pool === null) {
            throw new DomainException('Pool not initialized');
        }
        $this->pool->push($connection);
    }
}
<file_sep><?php
namespace Hamlet\Database\MySQLSwoole;
use Generator;
use Hamlet\Database\DatabaseException;
use Hamlet\Database\Procedure;
use Hamlet\Database\Traits\QueryExpanderTrait;
use Swoole\Coroutine\MySQL;
use Swoole\Coroutine\MySQL\Statement;
use function Hamlet\Cast\_float;
use function Hamlet\Cast\_int;
use function Hamlet\Cast\_map;
use function Hamlet\Cast\_null;
use function Hamlet\Cast\_string;
use function Hamlet\Cast\_union;
/**
 * Procedure implementation on top of a Swoole coroutine MySQL connection.
 *
 * Prepared statements are cached as dynamic properties on the connection
 * handle, keyed by an md5 of the expanded query, so each pooled connection
 * prepares a given query at most once.
 */
class MySQLSwooleProcedure extends Procedure
{
    use QueryExpanderTrait;

    /**
     * @var MySQL
     */
    private $handle;

    /**
     * @var string
     */
    private $query;

    /**
     * ID generated by the last insert(), if any.
     *
     * @var int|null
     */
    private $lastInsertId = null;

    /**
     * Rows affected by the last execute(), if any.
     *
     * @var int|null
     */
    private $affectedRows = null;

    public function __construct(MySQL $handle, string $query)
    {
        $this->handle = $handle;
        $this->query = $query;
    }

    public function execute()
    {
        list($statement) = $this->bindParametersAndExecute($this->handle);
        $this->affectedRows = _int()->assert($statement->affected_rows);
    }

    public function insert(): int
    {
        list($statement) = $this->bindParametersAndExecute($this->handle);
        $this->lastInsertId = _int()->assert($statement->insert_id);
        return $this->lastInsertId;
    }

    /**
     * Yield the result rows of the query.
     *
     * NOTE(review): when the statement execution fails, $result is false and
     * the yield below would break -- confirm whether failures should raise a
     * DatabaseException here instead.
     *
     * @return Generator<int,array<string,int|string|float|null>>
     * @psalm-suppress MixedReturnTypeCoercion
     */
    public function fetch(): Generator
    {
        list($_, $result) = $this->bindParametersAndExecute($this->handle);
        assert(($type = _map(_int(), _map(_string(), _union(_int(), _string(), _null(), _float())))) && $type->matches($result), var_export($result, true));
        yield from $result;
    }

    public function affectedRows(): int
    {
        // -1 signals that no statement has been executed yet
        return $this->affectedRows ?? -1;
    }

    /**
     * Expand the query, fetch (or create and cache) its prepared statement
     * and execute it with the bound parameter values.
     *
     * @param MySQL $handle
     * @return array{Statement,mixed}
     * @psalm-suppress MixedAssignment
     */
    private function bindParametersAndExecute(MySQL $handle): array
    {
        list($query, $parameters) = $this->unwrapQueryAndParameters($this->query, $this->parameters);
        // Parameters are consumed by a single execution
        $this->parameters = [];
        $key = 'statement_' . md5($query);
        $statement = property_exists($this->handle, $key) ? $this->handle->{$key} : false;
        if (!$statement) {
            $statement = $handle->prepare($query);
            if ($statement) {
                $this->handle->{$key} = $statement;
            } else {
                throw new DatabaseException(sprintf('Cannot prepare statement %s', $query));
            }
        }
        assert($statement instanceof Statement);
        $values = [];
        foreach ($parameters as list($_, $value)) {
            $values[] = $value;
        }
        $result = $statement->execute($values);
        return [$statement, $result];
    }

    public function __destruct()
    {
        // Drop our reference to the pooled connection; the pool owns it
        $this->handle = null;
    }
}
<file_sep>Hamlet Framework / DB / MySQL
===
## ToDo
- Add docker for simple testing
- Add realistic test case for AMP and Swoole
<file_sep>#!/usr/bin/env bash
# Start a disposable MySQL 5.7 container for local testing:
#  - empty root password, database "test" pre-created
#  - schema.sql from the current directory is applied on first start
# Uses $(pwd) with quoting instead of deprecated backticks so paths
# containing spaces work correctly.
docker run --name mysql \
    --rm -e MYSQL_ALLOW_EMPTY_PASSWORD=1 \
    -e MYSQL_DATABASE=test \
    -p 3306:3306 \
    -v "$(pwd)/schema.sql:/docker-entrypoint-initdb.d/schema.sql" mysql:5.7
| 19171c41dff4a1fd90b0b328bbb8bf7b63b2b05b | [
"Markdown",
"SQL",
"PHP",
"Shell"
] | 10 | PHP | hamlet-framework/db-mysql-swoole | 6d31c3c4111ede4692076aefec8e1fd34dd5c2fc | a2c5fdcaf5a0df0c3a7e1b91137923a915df19ff |
refs/heads/master | <file_sep>#ifndef LOGGER_INCLUDE
#define LOGGER_INCLUDE

#include <gsl/string_span>

#include "log/log.h"

// Global Boost.Log logger dedicated to the plugins module
BOOST_LOG_GLOBAL_LOGGER(
    exec_helper_plugins_logger,
    execHelper::log::LoggerType); // NOLINT(modernize-use-using)

// Channel name under which all plugin log records are emitted
static const gsl::czstring<> LOG_CHANNEL = "plugins";

// Log to the plugins channel at severity x, annotating the record with the
// originating source file and line for traceability
#define LOG(x)                                                                 \
    BOOST_LOG_STREAM_CHANNEL_SEV(exec_helper_plugins_logger::get(),            \
                                 LOG_CHANNEL, execHelper::log::x)              \
        << boost::log::add_value(fileLog, __FILE__)                            \
        << boost::log::add_value(lineLog, __LINE__)

#endif /* LOGGER_INCLUDE */
<file_sep>Command line arguments
**********************
.. highlight:: gherkin
.. literalinclude:: no-args.feature
.. literalinclude:: invalid-args.feature
.. literalinclude:: help-option.feature
.. literalinclude:: version-option.feature
.. literalinclude:: dry-run.feature
.. literalinclude:: keep-going.feature
.. literalinclude:: list-plugins-option.feature
<file_sep>#include "executionContent.h"
#include <fstream>
#include <iostream>
#include <string>
#include <utility>
#include <vector>
#include <boost/archive/archive_exception.hpp>
#include <boost/archive/text_iarchive.hpp>
#include <boost/archive/text_oarchive.hpp>
#include <boost/core/swap.hpp>
#include "base-utils/yaml.h"
using std::cerr;
using std::endl;
using std::make_shared;
using std::move;
using std::string;
using std::terminate;
using std::thread;
using boost::asio::buffer;
using boost::asio::io_service;
using boost::asio::socket_base;
using boost::asio::local::stream_protocol;
using boost::system::error_code;
using boost::system::system_error;
using gsl::not_null;
using execHelper::test::baseUtils::ExecutionContentData;
using execHelper::test::baseUtils::ExecutionContentDataReply;
using execHelper::test::baseUtils::ReturnCode;
using execHelper::test::baseUtils::YamlReader;
namespace {
constexpr uint8_t HEADER_LENGTH = 8;
using ExecuteContentMessageCallback =
std::function<ExecutionContentDataReply(const ExecutionContentData&)>;
/**
 * Handles a single client connection on the server side.
 *
 * Wire protocol: an 8-character ASCII-hex length header followed by a
 * Boost.Serialization text archive of ExecutionContentData. The registered
 * callback processes the data and the resulting ExecutionContentDataReply
 * struct is written back verbatim, after which the socket is shut down.
 *
 * The session keeps itself alive across async operations by capturing
 * shared_from_this() in every completion handler. Protocol violations
 * terminate the process: this is test infrastructure, so failing fast is
 * preferred over error recovery.
 */
class ExecutionSession : public std::enable_shared_from_this<ExecutionSession> {
  public:
    explicit ExecutionSession(stream_protocol::socket socket,
                              ExecuteContentMessageCallback callback)
        : m_callback(move(callback)), m_socket(move(socket)) {
        ;
    }

    // Kick off the receive state machine: header first, then payload
    void start() { readHeader(); }

  private:
    // Read the fixed-size hex header announcing the payload length
    void readHeader() noexcept {
        auto self(shared_from_this());
        boost::asio::async_read(
            m_socket, buffer(&messageLength, HEADER_LENGTH),
            [this, self](error_code ec, std::size_t length) {
                if(ec) {
                    cerr << __FILE__ << ":" << __LINE__ << " " << this << " "
                         << "Unexpected error occurred: " << ec << ": "
                         << ec.message() << std::endl;
                    terminate();
                }
                if(length != HEADER_LENGTH) {
                    cerr << __FILE__ << ":" << __LINE__ << " " << this << " "
                         << "Partial message received. Received length = "
                         << length
                         << " expected length = " << int(HEADER_LENGTH)
                         << std::endl;
                    terminate();
                }
                // Parse the hex header into the payload byte count
                std::istringstream iss(
                    std::string(messageLength, HEADER_LENGTH));
                std::size_t headerLength = 0;
                if(!(iss >> std::hex >> headerLength)) {
                    cerr << __FILE__ << ":" << __LINE__ << " " << this << " "
                         << "Could not stream data to length" << endl;
                    terminate();
                }
                readMessage(headerLength);
            });
    }

    // Read and deserialize the payload, hand it to the callback and send
    // the callback's reply back to the client
    void readMessage(uint32_t length) noexcept {
        auto self(shared_from_this());
        m_data.resize(length);
        boost::asio::async_read(
            m_socket, buffer(m_data),
            [this, self](error_code ec, std::size_t length) {
                if(ec) {
                    cerr << __FILE__ << ":" << __LINE__ << " " << this << " "
                         << "Unexpected error occurred: " << ec << ": "
                         << ec.message() << std::endl;
                    terminate();
                }

                // Deserialize the received data
                std::string inbound_data(&m_data[0], length);
                ExecutionContentData content;
                std::istringstream deserialized_stream(inbound_data);
                try {
                    boost::archive::text_iarchive deserialized(
                        deserialized_stream);
                    deserialized >> content;
                } catch(const boost::archive::archive_exception& e) {
                    cerr << __FILE__ << ":" << __LINE__ << " " << this << " "
                         << e.what() << endl;
                    terminate();
                }

                // Process data
                auto reply = m_callback(content);
                sendReply(reply);
            });
    }

    // Write the reply struct as raw bytes, then shut the connection down.
    // NOTE(review): the asio buffer references the caller's reply object,
    // which may go out of scope before the async write completes -- consider
    // copying the reply into the session to avoid a dangling buffer.
    void sendReply(const ExecutionContentDataReply& reply) {
        auto self(shared_from_this());
        boost::asio::async_write(
            m_socket, buffer(&reply, sizeof(reply)),
            [this, self](boost::system::error_code ec, std::size_t /*length*/) {
                if(ec) {
                    cerr << __FILE__ << ":" << __LINE__ << " " << this << " "
                         << "Unexpected error occurred: " << ec << ": "
                         << ec.message() << std::endl;
                    terminate();
                }
                m_socket.shutdown(
                    boost::asio::local::stream_protocol::socket::shutdown_both);
                m_socket.close();
            });
    }

    char messageLength[HEADER_LENGTH]; // raw hex header buffer (not NUL-terminated)
    std::vector<char> m_data;          // payload buffer
    ExecuteContentMessageCallback m_callback;
    stream_protocol::socket m_socket;
};
} // namespace
namespace execHelper {
namespace test {
namespace baseUtils {
IoService::~IoService() noexcept { stop(); }

/**
 * Blocking entry point executed on the worker thread: runs the io_service
 * event loop until it runs out of work or is stopped.
 */
void IoService::start() noexcept {
    m_isRunning = true;
    error_code ec;
    m_service.run(ec);
    if(ec) {
        cerr << "Received an unexpected system error: " << ec << ":"
             << ec.message() << endl;
        assert(false);
    }
    m_isRunning = false;
    m_service.stop();
}

/**
 * Stop the event loop (if running) and join the worker thread.
 *
 * Fixed: joining is now guarded by joinable(), so calling stop() -- e.g.
 * from the destructor -- when run() was never invoked no longer makes
 * std::thread::join() throw inside this noexcept function.
 */
void IoService::stop() noexcept {
    if(m_isRunning) {
        m_service.stop();
    }
    if(m_thread.joinable()) {
        m_thread.join();
    }
}

/**
 * Launch the event loop on a background thread. No-op when already running.
 */
void IoService::run() noexcept {
    if(!m_isRunning) {
        m_thread = std::thread([this]() { this->start(); });
    }
}

/// Access the underlying io_service, e.g. to construct sockets or acceptors
boost::asio::io_service& IoService::get() noexcept { return m_service; }
// Shared io_service all servers register against; set via registerIoService
IoService* ExecutionContentServer::m_ioService = nullptr;

/**
 * Creates a unix-domain-socket server that records each execution reported
 * by clients and answers with the configured return code.
 */
ExecutionContentServer::ExecutionContentServer(ReturnCode returnCode) noexcept
    : m_returnCode(returnCode),
      m_file("exec-helper.unix-socket.%%%%%%%%"),
      m_endpoint(m_file.getPath().string()),
      m_socket(m_ioService->get()),
      m_acceptor(m_ioService->get()) {
    try {
        // Explicitly open the acceptor in the constructor body: exceptions will leak out of the constructor otherwise
        openAcceptor();
    } catch(const system_error& e) {
        cerr << "Unexpected exception caught: '" << e.what()
             << "'. The execution content server will not work" << endl;
        assert(false);
    }
    init();
}

// NOTE(review): m_file and m_receivedData are not transferred here, so the
// moved-to instance owns a fresh temp file while m_endpoint still refers to
// the source's socket path -- confirm this is the intended move semantics.
ExecutionContentServer::ExecutionContentServer(
    ExecutionContentServer&& other) noexcept
    : m_numberOfExecutions(other.m_numberOfExecutions),
      m_returnCode(other.m_returnCode),
      m_endpoint(move(other.m_endpoint)),
      m_socket(move(other.m_socket)),
#if BOOST_VERSION < 107000
      m_acceptor(other.m_acceptor.get_io_service()) {
#else
      m_acceptor(other.m_acceptor.get_executor()) {
#endif
    try {
        openAcceptor();
    } catch(const system_error& e) {
        cerr << "Unexpected exception caught: " << e.what() << endl;
        assert(false);
    }
}

ExecutionContentServer::~ExecutionContentServer() noexcept {
    m_acceptor.close();
    // Remove the socket file backing the endpoint
    ::unlink(m_file.getPath().native().c_str());
}

// Bind and listen on the unix-domain endpoint
void ExecutionContentServer::openAcceptor() {
    m_acceptor.open(m_endpoint.protocol());
    m_acceptor.set_option(socket_base::reuse_address(true));
    m_acceptor.bind(m_endpoint);
    m_acceptor.listen(socket_base::max_connections);
}

ExecutionContentServer&
ExecutionContentServer::operator=(ExecutionContentServer&& other) noexcept {
    swap(other);
    return *this;
}

void ExecutionContentServer::swap(ExecutionContentServer& other) noexcept {
    std::swap(m_numberOfExecutions, other.m_numberOfExecutions);
    std::swap(m_returnCode, other.m_returnCode);
    try {
        boost::swap(m_endpoint, other.m_endpoint);
        boost::swap(m_acceptor, other.m_acceptor);
    } catch(const system_error& e) {
        cerr << "Unexpected exception caught: " << e.what() << endl;
        assert(false);
    }
}

// Must be called once before constructing any server instance
void ExecutionContentServer::registerIoService(
    gsl::not_null<IoService*> ioService) noexcept {
    m_ioService = ioService;
}

void ExecutionContentServer::init() noexcept { accept(); }

// Asynchronously accept one connection, hand it to an ExecutionSession and
// re-arm the acceptor; operation_canceled is the expected shutdown signal
void ExecutionContentServer::accept() noexcept {
    m_acceptor.async_accept(m_socket, [this](error_code ec) {
        switch(ec.value()) {
        case boost::system::errc::success: {
            ExecuteContentMessageCallback callback(
                [this](const ExecutionContentData& data) {
                    return addData(data);
                });
            make_shared<ExecutionSession>(std::move(m_socket), callback)
                ->start();
            accept();
        } break;
        case boost::system::errc::operation_canceled:
            break;
        default:
            cerr << __FILE__ << ":" << __LINE__ << " "
                 << "Unexpected error occurred: " << ec << ": " << ec.message()
                 << endl;
            terminate();
            break;
        }
    });
}

// Command name plus socket path that clients should be configured with
ExecutionContentServer::ConfigCommand
ExecutionContentServer::getConfigCommand() const noexcept {
    return {
        "execution-content",
        m_endpoint.path()}; // Use the execution-content that is in your path
}

// Record one reported execution and answer with the configured return code
ExecutionContentDataReply
ExecutionContentServer::addData(ExecutionContentData data) noexcept {
    m_receivedData.emplace_back(data);
    ++m_numberOfExecutions;
    return {m_returnCode};
}

const std::vector<ExecutionContentData>&
ExecutionContentServer::getReceivedData() const noexcept {
    return m_receivedData;
}

unsigned int ExecutionContentServer::getNumberOfExecutions() const noexcept {
    return m_numberOfExecutions;
}

// Reset only the execution counter; previously received data is kept
void ExecutionContentServer::clear() noexcept { m_numberOfExecutions = 0; }
ExecutionContentClient::ExecutionContentClient(const Path& file)
    : m_endpoint(file.native()) {
    ;
}

/**
 * Report one execution to the server synchronously.
 *
 * Serializes data with Boost.Serialization, prefixes it with the 8-char hex
 * length header the server expects, and reads the raw reply struct back.
 *
 * @return the server-provided return code, or RUNTIME_ERROR on any
 *         communication or serialization failure
 */
ReturnCode
ExecutionContentClient::addExecution(const ExecutionContentData& data) {
    io_service ioService;
    stream_protocol::socket socket(ioService);
    std::thread t([&ioService]() { ioService.run(); });

    error_code ec;
    socket.connect(m_endpoint, ec);
    if(ec) {
        cerr << __FILE__ << ":" << __LINE__ << " "
             << "Client: Unexpected error occurred: " << ec << ": "
             << ec.message() << endl;
        return RUNTIME_ERROR;
    }

    // Serialize the struct before sending
    std::ostringstream serialized_stream;
    try {
        boost::archive::text_oarchive serialized(serialized_stream);
        serialized << data;
    } catch(const boost::archive::archive_exception& e) {
        cerr << __FILE__ << ":" << __LINE__ << e.what() << endl;
        return RUNTIME_ERROR;
    }
    auto outboundData = serialized_stream.str();

    // Build the fixed-width hex header announcing the payload size
    std::ostringstream header_stream;
    header_stream << std::setw(HEADER_LENGTH) << std::hex
                  << outboundData.size();
    auto headerData = header_stream.str();
    if(!header_stream || header_stream.str().size() != HEADER_LENGTH) {
        cerr << __FILE__ << ":" << __LINE__
             << "Could not properly construct header stream" << endl;
        terminate();
    }

    // Send header and payload in a single gathered write
    std::vector<boost::asio::const_buffer> buffers;
    buffers.emplace_back(buffer(headerData));
    buffers.emplace_back(buffer(outboundData));
    boost::asio::write(socket, buffers, ec);
    if(ec) {
        cerr << __FILE__ << ":" << __LINE__ << " "
             << "Client: Unexpected error occurred: " << ec << ": "
             << ec.message() << endl;
        return RUNTIME_ERROR;
    }

    // The reply is the raw struct bytes, mirroring the server's write
    ExecutionContentDataReply reply(RUNTIME_ERROR);
    size_t replyLength =
        boost::asio::read(socket, buffer(&reply, sizeof(reply)), ec);
    if(ec) {
        cerr << __FILE__ << ":" << __LINE__ << " "
             << "Client: Unexpected error occurred: " << ec << ": "
             << ec.message() << endl;
        return RUNTIME_ERROR;
    }
    if(replyLength != sizeof(reply)) {
        cerr << __FILE__ << ":" << __LINE__ << " "
             << "Client: Mismatch in reply length detected." << endl;
        return RUNTIME_ERROR;
    }
    socket.close();
    t.join(); // Note: since it will have no more work, io_service.run should automatically return and end the thread. So a join on the thread should suffice.
    return reply.returnCode;
}
} // namespace baseUtils
} // namespace test
} // namespace execHelper
<file_sep>#ifndef TEST_PATH_INCLUDE
#define TEST_PATH_INCLUDE

#include <filesystem>

namespace execHelper {
namespace test {
namespace baseUtils {
// Alias for filesystem paths used throughout the base test utilities
using Path = std::filesystem::path;
} // namespace baseUtils
} // namespace test
} // namespace execHelper

#endif /* TEST_PATH_INCLUDE */
<file_sep>#ifndef LOG_GENERATORS_INCLUDE
#define LOG_GENERATORS_INCLUDE

#include "log/logLevel.h"

#include "rapidcheck.h"

// NOTE: Must be in rc namespace!
namespace rc {
// Teaches rapidcheck how to generate arbitrary LogLevel values: picks
// uniformly from the levels reported by getLogLevels()
template <> struct Arbitrary<execHelper::log::LogLevel> {
    static Gen<execHelper::log::LogLevel> arbitrary() {
        return gen::elementOf(execHelper::log::getLogLevels());
    };
};
} // namespace rc

#endif /* LOG_GENERATORS_INCLUDE */
<file_sep>#include "logLevel.h"
#include <algorithm>
#include <vector>
#include "log/assertions.h"
using std::distance;
using std::find;
using std::string_view;
using std::vector;
namespace {
// Ordered names for each log level; indices correspond to LogLevel values.
inline auto getLogLevelStrings() noexcept
    -> const std::vector<std::string_view>& {
    static const std::vector<std::string_view> levelNames = {
        "all",     "test",  "trace", "debug", "info",
        "warning", "error", "fatal", "none"};
    return levelNames;
}
} // namespace
namespace execHelper::log {
/**
 * Parse a textual log level into its LogLevel value.
 *
 * Relies on the index of the name in getLogLevelStrings() matching the
 * numeric value of the corresponding enum constant.
 *
 * @throws InvalidLogLevel when the string names no known level
 */
auto toLogLevel(std::string_view level) -> LogLevel {
    const auto& logLevelStrings = getLogLevelStrings();
    const auto& element =
        find(logLevelStrings.begin(), logLevelStrings.end(), level);
    if(element == logLevelStrings.end()) {
        throw InvalidLogLevel();
    }
    auto index = distance(logLevelStrings.begin(), element);
    return static_cast<LogLevel>(index);
}

// All selectable log levels, from most quiet to most verbose.
// NOTE(review): the "test" level present in getLogLevelStrings() is absent
// here -- confirm whether that omission is intentional.
auto getLogLevels() -> const vector<LogLevel>& {
    static const vector<LogLevel> LOG_LEVELS(
        {none, fatal, error, warning, info, debug, trace, all});
    return LOG_LEVELS;
}

// Stream the human-readable name of a log level
auto operator<<(std::ostream& os, LogLevel level) noexcept -> std::ostream& {
    os << toString(level);
    return os;
}

// Map a log level to its textual name; level must be within range
auto toString(LogLevel level) noexcept -> string_view {
    expectsMessage(level < getLogLevelStrings().size(),
                   "Level must be a log level value");
    return getLogLevelStrings()[level];
}
} // namespace execHelper::log
<file_sep>FROM archlinux/base:latest
LABEL maintainer="<EMAIL>"
RUN pacman -Sy --needed --noconfirm archlinux-keyring && pacman -Scc --noconfirm && rm -rf /var/lib/pacman/sync/* # Fixes some pacman keyring issues
RUN pacman -Syu --needed --noconfirm boost boost-libs yaml-cpp ninja cmake sudo && pacman -Scc --noconfirm && rm -rf /var/lib/pacman/sync/*
<file_sep>pyyaml
pytest-bdd
pytest-html
<file_sep>include(CMakeDependentOption)
CMAKE_DEPENDENT_OPTION(BUILD_USAGE_MAN_DOCUMENTATION "Create and install the MAN usage documentation (requires Sphinx)" ON "BUILD_USAGE_DOCUMENTATION" OFF)
CMAKE_DEPENDENT_OPTION(BUILD_USAGE_HTML_DOCUMENTATION "Create and install the HTML usage documentation (requires Sphinx)" ON "BUILD_USAGE_DOCUMENTATION" OFF)
find_package(Sphinx REQUIRED)
if(BUILD_USAGE_HTML_DOCUMENTATION)
include(html.CMakeLists.txt)
endif()
if(BUILD_USAGE_MAN_DOCUMENTATION)
include(man.CMakeLists.txt)
endif()
<file_sep>#include "pathManipulation.h"
#include <string>
using std::string;
namespace execHelper::config {
/**
 * List all parent directories of the given path, ordered from the deepest
 * directory up to (and including) the filesystem root.
 *
 * Relative paths are first made absolute; when the path denotes an
 * existing regular file, its containing directory is used instead.
 * NOTE(review): a nonexistent path is treated like a directory -- confirm
 * callers rely on this.
 */
auto getAllParentDirectories(Path path) noexcept -> Paths {
    if(path.is_relative()) {
        path = absolute(path);
    }
    if(is_regular_file(path)) {
        path = path.parent_path();
    }
    Paths parentPaths;
    // The root is its own parent: stop once climbing no longer changes path
    while(path != path.parent_path()) {
        parentPaths.push_back(path);
        path = path.parent_path();
    }
    parentPaths.push_back(path.parent_path());
    return parentPaths;
}

/**
 * Look up the current user's home directory in the given environment.
 *
 * @return the value of HOME, or std::nullopt when HOME is not set
 */
auto getHomeDirectory(const EnvironmentCollection& env) noexcept
    -> std::optional<Path> {
    const string HOME_DIR_KEY("HOME");
    if(env.count(HOME_DIR_KEY) == 0) {
        return std::nullopt;
    }
    return Path(env.at(HOME_DIR_KEY));
}
} // namespace execHelper::config
<file_sep>Sources taken from https://github.com/ahupowerdns/luawrapper
<file_sep>.. _exec-helper-plugins-command-line-command:
Command-line-command plugin
*******************************
Description
===========
The command-line-command plugin is used for executing arbitrary command lines. This plugin can be used for constructing the command line for commands that do not have a corresponding plugin available.
Mandatory settings
==================
The configuration of the command-line-command must contain the following settings:
.. program:: exec-helper-plugins-command-line-command
.. describe:: command-line
The command-line to execute. There are two different usages:
* **No identification key**: Set one command line as a list of separate arguments. This form is only usable if only one line needs to be executed.
* **With identification key**: Make a map with arbitrary keys, where each associated value is one command line, described as a list of separate arguments. This form is usable if one or more lines need to be executed. Multiple commands are executed in the order the identification keys are defined.
**Note**: see the documentation of **wordexp** (3) for the limitations on which characters are allowed in the command-line command.
Optional settings
=================
The configuration of the command-line-command plugin may contain the following settings:
.. program:: exec-helper-plugins-command-line-command
.. include:: patterns.rst
.. include:: environment.rst
.. include:: working-dir.rst
Example
=======
Configuration
-------------
.. literalinclude:: ../examples/command-line-command.example
:language: yaml
Usage
-----
Save the example to an :program:`exec-helper` configuration file and execute in the same directory:
.. code-block:: bash
eh example
See also
========
See :ref:`exec-helper` (1) for information about the usage of :program:`exec-helper`.
See :ref:`exec-helper-config` (5) for information about the configuration file format.
See :ref:`exec-helper-plugins` (5) for information about the configuration file format.
<file_sep>task:add_args({'scan-build'})
-- Run the clang static analyzer: wrap the configured build command(s) in
-- scan-build, forwarding verbosity and any extra command-line arguments.
task:add_args(get_verbose('-v'))
task:add_args(get_commandline())

-- 'build-command' is mandatory: abort when it is missing entirely ...
local build_commands = list(config['build-command'])
if type(build_commands) == 'nil' then
    input_error('Clang-static-analyzer: one must define at least one build command')
end
-- ... or defined but empty. NOTE(review): unlike the branch above, this one
-- also emits user feedback before raising the input error -- confirm whether
-- the missing user_feedback_error above is intentional.
if type(next(build_commands)) == 'nil' then
    user_feedback_error('Clang-static-analyzer: one must define at least one build command')
    input_error('Clang-static-analyzer: one must define at least one build command')
end
run_target(task, build_commands)
<file_sep>#include "argv.h"
#include <cstring>
#include <iostream>
#include <string_view>
#include <gsl/span>
#include <gsl/string_span>
#include "log/assertions.h"
using std::string;
using std::vector;
using gsl::czstring;
using gsl::span;
using namespace std::literals;
namespace execHelper::config {
/**
 * Deep-copy a C-style argument vector.
 *
 * Every argument is duplicated into freshly allocated storage and the array
 * is terminated with a nullptr sentinel, matching the execv(3) convention.
 *
 * Fixed: the previous strnlen(arg, 256)-based copy silently truncated
 * arguments longer than 255 characters and could leave the copy without a
 * terminating NUL; strlen + memcpy copies each argument in full, including
 * its terminator.
 */
Argv::Argv(int argc, const char* const* argv) noexcept {
    span<const czstring<>> spanArgv(argv, argc);
    m_argv.reserve(argc + 1UL);
    for(const auto& arg : spanArgv) {
        const auto argLength = strlen(arg) + 1U; // include terminating NUL
        auto* newArg = // NOLINT(cppcoreguidelines-owning-memory)
            new char[argLength];
        memcpy(newArg, arg, argLength);
        m_argv.emplace_back(newArg);
    }
    m_argv.emplace_back(nullptr);
}
/**
 * Build an argument vector from a list of strings, terminating the array
 * with a nullptr sentinel as required by the execv(3) convention.
 */
Argv::Argv(const vector<string>& task) noexcept {
    m_argv.reserve(task.size() + 1U);
    for(const auto& word : task) {
        const auto byteCount = word.size() + 1U; // include terminating NUL
        auto* duplicate = // NOLINT(cppcoreguidelines-owning-memory)
            new char[byteCount];
        strncpy(duplicate, word.c_str(), byteCount);
        m_argv.emplace_back(duplicate);
    }
    m_argv.emplace_back(nullptr);
}
// Copy construction performs a deep copy of every argument string
Argv::Argv(const Argv& other) noexcept {
    m_argv.reserve(other.m_argv.size());
    deepCopy(other);
}

// NOTE(review): the moved-from object is left without the trailing nullptr
// sentinel; using it afterwards (other than destruction or assignment) is
// unsafe -- confirm callers never touch moved-from instances.
Argv::Argv(Argv&& other) noexcept { swap(other); }

Argv::~Argv() noexcept { clear(); }

// Release our own storage, duplicate every argument of other up to its
// nullptr sentinel, then re-append our own sentinel
void Argv::deepCopy(const Argv& other) noexcept {
    clear();
    for(const auto& otherElement : other.m_argv) {
        if(otherElement == nullptr) {
            break;
        }
        size_t length = strlen(otherElement) + 1U;
        auto* newArg = // NOLINT(cppcoreguidelines-owning-memory)
            new char[length];
        strncpy(newArg, otherElement, length);
        m_argv.emplace_back(newArg);
    }
    m_argv.emplace_back(nullptr);
}

auto Argv::operator=(const Argv& other) noexcept -> Argv& {
    if(this != &other) {
        m_argv.reserve(other.m_argv.size());
        deepCopy(other);
    }
    return *this;
}

auto Argv::operator=(Argv&& other) noexcept -> Argv& {
    swap(other);
    return *this;
}

// NOTE(review): compares the stored char* values element-wise, so two
// distinct deep copies of the same arguments compare unequal -- confirm
// pointer identity (rather than string equality) is intended here.
auto Argv::operator==(const Argv& other) const noexcept -> bool {
    return m_argv == other.m_argv;
}

auto Argv::operator!=(const Argv& other) const noexcept -> bool {
    return !(*this == other);
}

// Bounds-checked element access; out-of-range indices (including the
// trailing sentinel position) yield nullptr
auto Argv::operator[](size_t index) const noexcept -> char* {
    if(index >=
       m_argv.size() -
           1U) { // Accessing the last nullptr element is considered an error
        return nullptr;
    }
    return m_argv[index];
}

void Argv::swap(Argv& other) noexcept { m_argv.swap(other.m_argv); }

// Free every owned argument string and empty the vector, sentinel included
void Argv::clear() noexcept {
    for(const auto& arg : m_argv) {
        delete[] arg; // NOLINT(cppcoreguidelines-owning-memory)
    }
    m_argv.clear();
}

// Number of arguments, excluding the nullptr sentinel
auto Argv::getArgc() const noexcept -> size_t { return m_argv.size() - 1U; }

auto Argv::getArgv() noexcept -> char** { return &m_argv.at(0); }

auto Argv::getArgv() const noexcept -> const char* const* {
    return &m_argv.at(0);
}

// Print the arguments as a comma-separated list
auto operator<<(std::ostream& os, const Argv& argv) noexcept -> std::ostream& {
    const span<const czstring<>> args(argv.getArgv(), argv.getArgc());
    bool firstIteration = true;
    for(const auto& arg : args) {
        if(!firstIteration) {
            os << ", "sv;
        } else {
            firstIteration = false;
        }
        os << arg;
    }
    return os;
}
} // namespace execHelper::config
<file_sep>#include <algorithm>
#include <filesystem>
#include <map>
#include <memory>
#include <optional>
#include <string>
#include <string_view>
#include <vector>
#include "config/commandLineOptions.h"
#include "config/environment.h"
#include "config/pattern.h"
#include "config/variablesMap.h"
#include "core/task.h"
#include "plugins/executePlugin.h"
#include "plugins/logger.h"
#include "plugins/luaPlugin.h"
#include "plugins/memory.h"
#include "base-utils/nonEmptyString.h"
#include "config/generators.h"
#include "core/coreGenerators.h"
#include "unittest/catch.h"
#include "unittest/config.h"
#include "unittest/rapidcheck.h"
#include "utils/addToConfig.h"
#include "utils/addToTask.h"
#include "utils/commonGenerators.h"
#include "executorStub.h"
#include "fleetingOptionsStub.h"
#include "handlers.h"
using std::count;
using std::inserter;
using std::make_pair;
using std::make_shared;
using std::map;
using std::move;
using std::optional;
using std::shared_ptr;
using std::static_pointer_cast;
using std::string;
using std::string_view;
using std::transform;
using execHelper::config::Command;
using execHelper::config::EnvironmentCollection;
using execHelper::config::Pattern;
using execHelper::config::PatternKey;
using execHelper::config::Patterns;
using execHelper::config::PatternValues;
using execHelper::config::SettingsNode;
using execHelper::config::VariablesMap;
using execHelper::core::Task;
using execHelper::plugins::ExecutePlugin;
using execHelper::test::addToConfig;
using execHelper::test::FleetingOptionsStub;
using execHelper::test::propertyTest;
namespace filesystem = std::filesystem;
namespace {
// Absolute path of the selector Lua plugin script under test
auto scriptPath() noexcept -> std::string {
    return string(PLUGINS_INSTALL_PATH) + "/selector.lua";
}
} // namespace
namespace execHelper::plugins::test {
SCENARIO("Testing the configuration settings of the selector plugin",
         "[selector]") {
    propertyTest("", [](const optional<filesystem::path>& workingDir,
                        const optional<EnvironmentCollection>& environment,
                        const Pattern& pattern, Task task) {
        Patterns patterns = {pattern};

        // One memory plugin per distinct pattern value: the selector is
        // expected to dispatch each target value to the command of the
        // same name
        map<std::string, shared_ptr<SpecialMemory>> memories;
        const auto& patternValues = pattern.getValues();
        transform(patternValues.begin(), patternValues.end(),
                  inserter(memories, memories.end()), [](const auto& value) {
                      return make_pair(value, make_shared<SpecialMemory>());
                  });

        // The configured target expands through the pattern, e.g. "{KEY}"
        VariablesMap config("selector-test");
        auto target = string("{").append(pattern.getKey()).append("}");
        LuaPlugin plugin(scriptPath());
        addToConfig("targets", target, &config);
        if(workingDir) {
            handleWorkingDirectory(*workingDir, config, task);
        }
        if(environment) {
            handleEnvironment(*environment, config, task);
        }

        FleetingOptionsStub fleetingOptions;

        // Register each memories mapping as the endpoint for every target command
        Plugins plugins;
        transform(memories.begin(), memories.end(),
                  inserter(plugins, plugins.end()), [](const auto& memory) {
                      return make_pair(
                          memory.first,
                          static_pointer_cast<Plugin>(memory.second));
                  });
        ExecutePlugin::push(move(plugins));
        ExecutePlugin::push(
            gsl::not_null<config::FleetingOptionsInterface*>(&fleetingOptions));
        ExecutePlugin::push(SettingsNode("selector-test"));
        ExecutePlugin::push(Patterns(patterns));

        THEN_WHEN("We apply the plugin") {
            bool returnCode = plugin.apply(task, config, patterns);

            THEN_CHECK("It should succeed") { REQUIRE(returnCode); }

            THEN_CHECK(
                "The memories were called the expected number of times") {
                for(const auto& memory : memories) {
                    // Expected executions is 1 per memory, multiplied by the number of occurences of the pattern value in all the pattern values
                    size_t nbOfExpectedExecutions =
                        count(patternValues.begin(), patternValues.end(),
                              memory.first);
                    REQUIRE(memory.second->getExecutions().size() ==
                            nbOfExpectedExecutions);
                }
            }

            THEN_CHECK("It called the right commands") {
                for(const auto& memory : memories) {
                    auto executions = memory.second->getExecutions();
                    for(const auto& execution : executions) {
                        REQUIRE(execution.task == task);
                        REQUIRE(execution.patterns.empty());
                    }
                }
            }
        }

        // Restore the global ExecutePlugin state pushed above
        ExecutePlugin::popFleetingOptions();
        ExecutePlugin::popSettingsNode();
        ExecutePlugin::popPatterns();
        ExecutePlugin::popPlugins();
    });
}
// The selector plugin must fail cleanly when no target is configured
SCENARIO("Unconfigured target in selector", "[selector]") {
    GIVEN("A config without a defined target") {
        LuaPlugin plugin(scriptPath());

        FleetingOptionsStub fleetingOptions;
        ExecutePlugin::push(
            gsl::not_null<config::FleetingOptionsInterface*>(&fleetingOptions));
        ExecutePlugin::push(SettingsNode("selector-test"));
        ExecutePlugin::push(Patterns());

        WHEN("We call the plugin") {
            bool returnCode =
                plugin.apply(Task(), VariablesMap("selector-test"), Patterns());

            THEN("It should fail") { REQUIRE_FALSE(returnCode); }
        }

        // Restore the global ExecutePlugin state pushed above
        ExecutePlugin::popFleetingOptions();
        ExecutePlugin::popSettingsNode();
        ExecutePlugin::popPatterns();
    }
}
<file_sep>import subprocess
from config import Config
from pattern import Pattern
class RunEnvironment(object):
    """Sandboxed environment for running exec-helper in integration tests.

    Owns a root directory (created on construction), an optional Config and
    the accumulated command line used for subsequent application runs.
    """

    def __init__(self, root_dir):
        self._config = None
        self._root_dir = root_dir
        self._working_dir = self._root_dir
        self._args = ['exec-helper']
        # Result of the most recent run_application(); None until a run
        # happens, so last_run no longer raises AttributeError before that.
        self._last_run = None
        self._root_dir.mkdir(parents=True, exist_ok=False)
        print(f"Creating simulation environment in directory '{self._root_dir}'")

    def __del__(self):
        # Cleanup intentionally disabled so failed runs can be inspected
        # self.remove()
        pass

    @property
    def config(self):
        return self._config

    @config.setter
    def config(self, value):
        self._config = value

    @property
    def last_run(self):
        # CompletedProcess of the latest run, or None when nothing ran yet
        return self._last_run

    @property
    def working_dir(self):
        return self._working_dir

    @property
    def root_dir(self):
        return self._root_dir

    def set_working_dir(self, newWorkingDir):
        self._working_dir = newWorkingDir

    def no_config(self):
        """Remove the configuration file so runs happen without one."""
        self._config.remove()

    def config_is_external(self):
        """Pass the config explicitly via --settings-file on the command line."""
        self._args.append('--settings-file')
        self._args.append(str(self._config.file))

    def add_pattern(self, command, pattern_string):
        """ Add the list of patterns to the given command """
        parts = pattern_string.split(":")
        if len(parts) != 2:
            # Fixed: previously formatted with an undefined name, raising
            # NameError instead of this readable AssertionError
            raise AssertionError("Cannot parse '{value}' to pattern".format(value = pattern_string))
        pattern_id = parts[0]
        pattern_values = parts[1].split(',')
        pattern = Pattern(pattern_id, pattern_values)
        self._config.add_pattern(command, pattern)

    def add_commandline(self, arg_list):
        """Append extra arguments used by every subsequent application run."""
        self._args.extend(arg_list)

    def run_application(self, arg_list=None):
        """Run exec-helper with the accumulated plus the given arguments.

        Fixed: the previous version aliased self._args and extended it in
        place, so per-run arguments leaked into every later run; a mutable
        default argument ([]) amplified the problem.
        """
        if self._config:
            self._config.write()
        args = list(self._args)
        if arg_list:
            args.extend(arg_list)
        print("Executing '" + ' '.join(args) + "'")
        self._last_run = subprocess.run(args, cwd = self._working_dir, capture_output = True, check = False)

    def remove(self):
        """Delete the simulation environment directory, if it exists."""
        # Local imports: os/shutil were never imported at module level, and
        # os.path.is_directory does not exist (fixed to os.path.isdir)
        import os
        import shutil
        if os.path.isdir(self._root_dir):
            shutil.rmtree(self._root_dir)
<file_sep>#ifndef PATH_INCLUDE
#define PATH_INCLUDE

#include <filesystem>
#include <vector>

namespace execHelper::config {
/**
 * \brief Alias for a single filesystem path
 */
using Path = std::filesystem::path;

/**
 * \brief An ordered collection of filesystem paths
 */
using Paths = std::vector<Path>;
} // namespace execHelper::config

#endif /* PATH_INCLUDE */
<file_sep>from pathlib import Path
import os
import yaml
import command
class Config(object):
    """In-memory model of an exec-helper configuration file.

    Collects commands, patterns and plugin search paths and serializes them
    to a YAML settings file on write().
    """

    def __init__(self, directory, filename = '.exec-helper'):
        """Prepare a configuration rooted in the given directory.

        Raises AssertionError when the target settings file already exists.
        """
        self._directory = directory
        self._settings_file = Path(directory).joinpath(filename)
        self._commands = dict()
        self._patterns = set()
        self._plugin_search_path = []
        if os.path.exists(self._settings_file):
            raise AssertionError("Temporary file '{file}' already exists!".format(file = self._settings_file))

    def __del__(self):
        # Intentionally kept on disk for post-mortem inspection.
        # self.remove()
        pass

    @property
    def directory(self):
        """Directory containing the settings file."""
        return self._directory

    @property
    def file(self):
        """Full path of the settings file."""
        return self._settings_file

    @property
    def commands(self):
        """Mapping of command id to command object."""
        return self._commands

    def create_command(self, command_id):
        """ Creates a command for the given command id using an implementation-specific plugin
        """
        self._commands[command_id] = command.Command(command_id, 'command-line-command', self._directory)

    def add_command(self, command):
        """ Adds the given command as a command associated with the command id to the configuration """
        self._commands[command.id] = command

    def set_environment(self, cmd, envs):
        """Attach the given environment variables to the given command."""
        self._commands[cmd].set_environment(envs)

    def add_pattern(self, pattern):
        """Register a (global) pattern for this configuration."""
        self._patterns.add(pattern)

    def add_plugin_search_path(self, path):
        """Register an additional plugin search path."""
        self._plugin_search_path.append(str(path))

    def write(self):
        """Serialize the configuration to the settings file as YAML."""
        content = dict()
        # Make sure the config file is not empty
        content['blaat'] = []
        if self._plugin_search_path:
            content['additional-search-paths'] = self._plugin_search_path
        if self._patterns:
            pattern_section = {}
            for pattern in self._patterns:
                entry = {
                    'default-values': pattern.default_values
                }
                if pattern.long_options:
                    entry['long-option'] = pattern.long_options
                pattern_section[pattern.id] = entry
            content['patterns'] = pattern_section
        if self._commands:
            content['commands'] = []
            for cmd_id, cmd in self._commands.items():
                content['commands'].append(cmd_id)
                content.update(cmd.to_dict())
                cmd.write_binary()
        with open(self._settings_file, 'w') as handle:
            yaml.dump(content, handle)

    def remove(self):
        """Delete the settings file from disk, if present."""
        if os.path.exists(self._settings_file):
            os.remove(self._settings_file)
<file_sep>#include "commandLineCommand.h"
#include <string>
#include <gsl/string_span>
#include "config/environment.h"
#include "config/variablesMap.h"
#include "core/patterns.h"
#include "core/task.h"
#include "plugins/workingDirectory.h"
#include "commandLine.h"
#include "logger.h"
#include "pluginUtils.h"
using std::move;
using std::string;
using gsl::czstring;
using execHelper::config::Command;
using execHelper::config::EnvArgs;
using execHelper::config::ENVIRONMENT_KEY;
using execHelper::config::EnvironmentCollection;
using execHelper::config::FleetingOptionsInterface;
using execHelper::config::Path;
using execHelper::config::Patterns;
using execHelper::config::SettingsKeys;
using execHelper::config::SettingsValues;
using execHelper::config::VariablesMap;
using execHelper::core::Task;
using execHelper::core::Tasks;
namespace {
// Configuration key under which this plugin's settings are stored.
const czstring<> PLUGIN_NAME = "command-line-command";
} // namespace
namespace execHelper::plugins {
/**
 * Builds the default variables map for this plugin: an empty command line
 * and an empty environment. Failures to add a key are logged as errors.
 */
auto CommandLineCommand::getVariablesMap(
    const FleetingOptionsInterface& /*fleetingOptions*/) const noexcept
    -> VariablesMap {
    VariablesMap defaults(PLUGIN_NAME);
    const auto addDefault = [&defaults](const auto& key, const auto& value) {
        if(!defaults.add(key, value)) {
            LOG(error) << "Failed to add key '" << key << "'";
        }
    };
    addDefault(COMMAND_LINE_KEY, CommandLineArgs());
    addDefault(ENVIRONMENT_KEY, EnvArgs());
    return defaults;
}
// Expands the configured command line(s) into tasks and registers one task
// per (command line, pattern combination) pair. Returns false as soon as a
// task fails to register or when no command line is configured.
auto CommandLineCommand::apply(Task task, const VariablesMap& variables,
                               const Patterns& patterns) const noexcept
    -> bool {
    // Apply the configured environment and optional working directory first
    task.appendToEnvironment(getEnvironment(variables));
    auto workingDir = variables.get<WorkingDir>(WORKING_DIR_KEY);
    if(workingDir) {
        task.setWorkingDirectory(*(workingDir));
    }
    auto commandLine = *(variables.get<CommandLineArgs>(COMMAND_LINE_KEY));
    if(commandLine.empty()) {
        user_feedback_error("Could not find the '"
                            << COMMAND_LINE_KEY << "' setting in the '"
                            << PLUGIN_NAME << "' settings");
        return false;
    }
    Tasks tasks;
    // Two configuration shapes are supported: a single flat command line, or
    // a map of named command lines nested under COMMAND_LINE_KEY. The nested
    // shape is detected by checking whether the first command-line entry has
    // child settings of its own.
    if(variables
           .get<SettingsValues>(
               SettingsKeys({COMMAND_LINE_KEY, commandLine.front()}),
               SettingsValues())
           .empty()) {
        // Flat shape: one task for the single configured command line
        task.append(move(commandLine));
        tasks.emplace_back(move(task));
    } else {
        // Nested shape: one task per named sub-command line
        SettingsKeys keys({COMMAND_LINE_KEY});
        for(const auto& commandKey :
            variables.get<SettingsValues>(COMMAND_LINE_KEY, SettingsValues())) {
            SettingsKeys tmpKey = keys;
            tmpKey.emplace_back(commandKey);
            Task newTask = task;
            newTask.append(move(*(variables.get<CommandLineArgs>(tmpKey))));
            tasks.emplace_back(newTask);
        }
    }
    // Register every task once per pattern combination, with the pattern
    // placeholders substituted into the task.
    for(const auto& combination : makePatternPermutator(patterns)) {
        for(const auto& executeTask : tasks) {
            Task newTask = replacePatternCombinations(executeTask, combination);
            if(!registerTask(newTask)) {
                return false;
            }
        }
    }
    return true;
}
/**
 * Human-readable one-line description of this plugin.
 */
auto CommandLineCommand::summary() const noexcept -> std::string {
    return std::string{"Command-line-command (internal)"};
}
} // namespace execHelper::plugins
<file_sep>#include <iostream>
#include <sstream>
#include <string>
#include <vector>
#include <gsl/string_span>
#include "config/envp.h"
#include "unittest/catch.h"
using gsl::czstring;
using std::move;
using std::string;
using std::stringstream;
using std::vector;
namespace execHelper::config::test {
// Verifies that an Envp built from an EnvironmentCollection exposes the
// expected null-terminated "key=value" entries through both the non-const
// and the const getEnvp() accessor.
SCENARIO("Test envp construction", "[config][envp]") {
    GIVEN("A taskcollection to take the arguments from") {
        const EnvironmentCollection env(
            {{"key1", "value1"}, {"key2", "value2"}, {"key3", "value3"}});
        // Expected entries in "key=value" form, in collection order
        vector<string> expectedEnv;
        expectedEnv.reserve(env.size());
        for(const auto& element : env) {
            expectedEnv.emplace_back(
                string(element.first).append("=").append(element.second));
        }
        WHEN("We create the envp") {
            Envp envp(env);
            THEN("The number of arguments should match") {
                REQUIRE(env.size() == envp.size());
            }
            THEN("The associated envp char array should be returned") {
                char** returnedEnvp = envp.getEnvp();
                size_t index = 0U;
                while(
                    returnedEnvp // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                    [index] != nullptr) {
                    REQUIRE(index < env.size());
                    REQUIRE(
                        returnedEnvp // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                        [index] == expectedEnv[index]);
                    ++index;
                }
            }
        }
        WHEN("We create the const envp") {
            const Envp envp(env);
            THEN("The number of arguments should match") {
                REQUIRE(expectedEnv.size() == envp.size());
            }
            THEN("The associated envp char array should be returned") {
                const char* const* returnedEnvp = envp.getEnvp();
                size_t index = 0U;
                while(
                    returnedEnvp // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                    [index] != nullptr) {
                    REQUIRE(index < expectedEnv.size());
                    REQUIRE(
                        returnedEnvp // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                        [index] == expectedEnv[index]);
                    ++index;
                }
            }
        }
    }
}
// Exercises Envp's rule-of-five behavior: copy construction/assignment must
// perform a deep copy (same values, different pointers), move
// construction/assignment must preserve the content, and swap must exchange
// the content of two Envp objects.
SCENARIO("Test the envp copy and move constructor, assignment operators and "
         "the swap operator",
         "[config][envp][check]") {
    GIVEN("An envp object to copy") {
        const EnvironmentCollection env(
            {{"key1", "value1"}, {"key2", "value2"}, {"key3", "value3"}});
        // Expected entries in "key=value" form, in collection order
        vector<string> expectedEnv;
        expectedEnv.reserve(env.size());
        for(const auto& element : env) {
            expectedEnv.emplace_back(
                string(element.first).append("=").append(element.second));
        }
        Envp envp(env);
        WHEN("We copy the given object") {
            Envp copy(envp);
            // Save the pointers to the arguments
            vector<const char*> envpAddresses;
            const char* const* returnedEnvp = envp.getEnvp();
            size_t index = 0U;
            while(
                returnedEnvp // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                [index] != nullptr) {
                envpAddresses.push_back(
                    returnedEnvp // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                    [index]);
                ++index;
            }
            THEN("The values of the pointers must be equal") {
                const char* const* copiedEnvp = copy.getEnvp();
                size_t index = 0U;
                while(
                    copiedEnvp // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                    [index] != nullptr) {
                    REQUIRE(
                        expectedEnv[index] ==
                        copiedEnvp // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                        [index]);
                    ++index;
                }
                REQUIRE(index == expectedEnv.size());
            }
            THEN("It must have been a deep copy/the actual pointer must be "
                 "different") {
                const char* const* copiedEnvp = copy.getEnvp();
                size_t index = 0U;
                while(
                    copiedEnvp // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                    [index] != nullptr) {
                    REQUIRE(
                        envpAddresses[index] !=
                        copiedEnvp // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                        [index]);
                    ++index;
                }
                REQUIRE(index == expectedEnv.size());
            }
        }
        WHEN("We copy assign the given object") {
            // Start from an Envp with different content so the assignment
            // actually has to replace it
            Envp assign(
                EnvironmentCollection({{"copy-assign-key1", "copy-assign1"},
                                       {"copy-assign-key2", "copy-assign2"}}));
            // Save the pointers to the arguments
            vector<const char*> envpAddresses;
            const char* const* returnedEnvp = envp.getEnvp();
            size_t index = 0U;
            while(
                returnedEnvp // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                [index] != nullptr) {
                envpAddresses.push_back(
                    returnedEnvp // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                    [index]);
                ++index;
            }
            assign = envp;
            THEN("The values of the pointers must be equal") {
                const auto* copiedEnvp = assign.getEnvp();
                size_t index = 0U;
                while(
                    copiedEnvp // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                    [index] !=
                    nullptr) { // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                    REQUIRE(
                        expectedEnv[index] ==
                        copiedEnvp // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                        [index]); // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                    ++index;
                }
                REQUIRE(index == expectedEnv.size());
            }
            THEN("It must have been a deep copy/the actual pointer must be "
                 "different") {
                const auto* copiedEnvp = assign.getEnvp();
                size_t index = 0U;
                while(
                    copiedEnvp // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                    [index] !=
                    nullptr) { // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                    REQUIRE(
                        envpAddresses[index] !=
                        copiedEnvp // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                        [index]); // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                    ++index;
                }
                REQUIRE(index == expectedEnv.size());
            }
        }
        WHEN("We move the given object") {
            Envp copied(envp); // Copy so we can move the copy
            Envp moved(move(copied));
            THEN("We must find the expected content") {
                const char* const* movedEnvp = moved.getEnvp();
                size_t index = 0U;
                while(
                    movedEnvp // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                    [index] != nullptr) {
                    REQUIRE(
                        expectedEnv[index] ==
                        movedEnvp // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                        [index]);
                    ++index;
                }
                REQUIRE(index == expectedEnv.size());
            }
        }
        WHEN("We move assign the given object") {
            Envp copied(envp); // Copy so we can move the copy
            Envp assign(
                EnvironmentCollection({{"move-assign-key1", "move-assign1"},
                                       {"move-assign-key2", "move-assign2"}}));
            assign = move(copied); // NOLINT(hicpp-invalid-access-moved)
            THEN("We must find the expected content") {
                const char* const* movedEnvp = assign.getEnvp();
                size_t index = 0U;
                while(
                    movedEnvp // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                    [index] != nullptr) {
                    REQUIRE(
                        expectedEnv[index] ==
                        movedEnvp // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                        [index]);
                    ++index;
                }
                REQUIRE(index == expectedEnv.size());
            }
        }
        WHEN("We swap the given object") {
            const EnvironmentCollection swappedContent(
                {{"swap-key1", "swap-value1"},
                 {"swap-key2", "swap-value2"},
                 {"swap-key3", "swap-value3"}});
            vector<string> expectedSwapEnv;
            expectedSwapEnv.reserve(swappedContent.size());
            for(const auto& element : swappedContent) {
                expectedSwapEnv.emplace_back(
                    string(element.first).append("=").append(element.second));
            }
            Envp swapped(swappedContent);
            envp.swap(swapped); // NOLINT(hicpp-invalid-access-moved)
            THEN("We must find the expected content for the first object") {
                const char* const* swappedEnvp = envp.getEnvp();
                size_t index = 0U;
                while(
                    swappedEnvp // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                    [index] != nullptr) {
                    REQUIRE(
                        swappedEnvp // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                        [index] == expectedSwapEnv[index]);
                    ++index;
                }
                REQUIRE(index == expectedSwapEnv.size());
            }
            THEN("We must find the swapped content for the second object") {
                const char* const* swappedEnvp = swapped.getEnvp();
                size_t index = 0U;
                while(
                    swappedEnvp // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                    [index] != nullptr) {
                    REQUIRE(
                        swappedEnvp // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                        [index] == expectedEnv[index]);
                    ++index;
                }
                REQUIRE(index == expectedEnv.size());
            }
        }
    }
}
// Verifies that streaming an Envp produces a comma-separated list of
// "key=value" entries in collection order.
SCENARIO("Test the envp streaming operator", "[config][envp]") {
    GIVEN("An envp object to stream") {
        const EnvironmentCollection env({{"stream-key1", "stream1"},
                                         {"stream-key2", "stream2"},
                                         {"stream-key3", "stream3"}});
        const Envp envp(env);
        WHEN("We stream the argv object") {
            stringstream stream;
            stream << envp;
            THEN("We must find the expected one") {
                // Build the reference string: entries joined with ", "
                stringstream correctStream;
                bool firstIteration = true;
                for(const auto& envPair : env) {
                    if(!firstIteration) {
                        correctStream << ", ";
                    } else {
                        firstIteration = false;
                    }
                    correctStream << envPair.first << "=" << envPair.second;
                }
                REQUIRE(stream.str() == correctStream.str());
            }
        }
    }
}
} // namespace execHelper::config::test
<file_sep>#ifndef CONFIG_BUILDER_INCLUDE
#define CONFIG_BUILDER_INCLUDE
#include "executionContent.h"
#include "testCommand.h"
namespace execHelper {
namespace test {
namespace baseUtils {
/**
 * \brief Builder that collects test commands and serializes them to a YAML
 * configuration for use in integration tests.
 */
class ConfigBuilder {
  private:
    /**
     * \brief RAII helper returned by startIteration(); its destructor acts
     * on the commands collection when the iteration scope ends.
     */
    class ConfigBuilderRAII {
      public:
        explicit ConfigBuilderRAII(Commands& commands) noexcept;

        ConfigBuilderRAII(const ConfigBuilderRAII& other) = default;
        ConfigBuilderRAII(ConfigBuilderRAII&& other) = default;
        ~ConfigBuilderRAII();

        ConfigBuilderRAII& operator=(const ConfigBuilderRAII& other) = delete;
        ConfigBuilderRAII& operator=(ConfigBuilderRAII&& other) =
            delete; // NOLINT(misc-noexcept-move-constructor)

      private:
        Commands& m_commands; // Commands managed during one iteration
    };

  public:
    ConfigBuilder();

    // Iteration over the collected commands
    Commands::const_iterator begin() const noexcept;
    Commands::const_iterator end() const noexcept;

    // Add a command to the configuration (copy and move overloads)
    void add(const TestCommand& command) noexcept;
    void add(TestCommand&& command) noexcept;

    // Serialize all collected commands into the given YAML writer
    void write(gsl::not_null<YamlWriter*> yaml) noexcept;

    ConfigBuilderRAII startIteration() noexcept;

  private:
    // Note: the io service needs to be created before and destroyed after the commands in this config builder
    IoService m_ioService;
    Commands m_commands;
};
} // namespace baseUtils
} // namespace test
} // namespace execHelper
#endif /* CONFIG_BUILDER_INCLUDE */
<file_sep>import tempfile
from pathlib import Path
import pytest
from pytest_bdd import scenarios, given, when, then
from custom_plugin import CustomPlugin
from command import Command
from scenarios_run import *
# Bind the feature-file scenarios; example_converters coerce example-table columns.
scenarios('../feature/custom-plugins', example_converters=dict(command_line = CommandLineArgs, description = str, plugin_id = str, command = str, nb_of_times = int))
def register_command_for_plugin(run_environment, config, command, plugin_id):
    """Register <command> in the config, backed by the plugin <plugin_id>."""
    custom_command = Command(command, plugin_id, run_environment.working_dir)
    config.add_command(custom_command)
@given('a random custom plugin directory')
def custom_plugin_dir(run_environment):
    """Create and return a fresh 'custom-plugins' directory in the run environment."""
    path = Path(run_environment.working_dir).joinpath('custom-plugins')
    path.mkdir(parents = True, exist_ok = False)
    return path
@given('a custom module with id <plugin_id>')
def custom_plugin(plugin_id, custom_plugin_dir):
    """Create a custom plugin module <plugin_id> in the plugin directory."""
    return CustomPlugin(plugin_id, custom_plugin_dir)
@given('the same custom module <plugin_id> on a different location and add it to the command line search path')
def other_custom_plugin(plugin_id, run_environment, custom_plugin_dir):
    """Duplicate the plugin <plugin_id> in a second location and register that
    location on the command line search path."""
    other_location = Path(custom_plugin_dir).joinpath('other')
    other_location.mkdir(parents=True, exist_ok=False)
    # NOTE(review): this calls the @when-decorated step function directly;
    # recent pytest-bdd versions forbid calling step functions directly - confirm.
    add_search_path_commandline(run_environment, str(other_location))
    return CustomPlugin(plugin_id, other_location)
@given('a registered command <command> that uses the module <plugin_id>')
def registered_command_custom_plugin(run_environment, config, command, plugin_id):
    """Pre-register <command> so it is backed by the custom plugin <plugin_id>."""
    register_command_for_plugin(run_environment, config, command, plugin_id)
@given('the custom plugin search path is registered in the configuration')
def search_path_registered(config, custom_plugin_dir):
    """Add the custom plugin directory to the configuration's search paths."""
    config.add_plugin_search_path(custom_plugin_dir)
@when('we register the command <command> to use the module <plugin_id>')
def register_command_plugin(run_environment, config, command, plugin_id):
    """Register <command> to be handled by the custom plugin <plugin_id>."""
    register_command_for_plugin(run_environment, config, command, plugin_id)
@when('add the search path to the configuration')
def add_search_path_configuration(config, custom_plugin_dir):
    """Register the plugin directory as a search path in the configuration file."""
    config.add_plugin_search_path(custom_plugin_dir)
@when('add the search path to the command line')
def add_search_path_commandline(run_environment, custom_plugin_dir):
    """Pass the plugin directory via the --additional-search-path option."""
    run_environment.add_commandline(['--additional-search-path', str(custom_plugin_dir)])
@then('stdout should contain <plugin_id>')
def stdout_plugin_id(run_environment, plugin_id):
    """Assert that stdout of the last run contains the plugin id."""
    stdout_contains(run_environment, plugin_id)
@then('stdout should contain regex <description>')
def stdout_plugin_description(run_environment, description):
    """Assert that stdout of the last run matches the <description> regex.

    Renamed from 'stdout_plugin_id': the module previously defined two
    functions with that name, so this definition shadowed the module
    attribute created for the '<plugin_id>' step above.
    """
    stdout_contains_regex(run_environment, description)
<file_sep>task:add_args({'cppcheck'})
-- Enabled checks: the configured 'enable-checks' list, or 'all' by default
task:add_args({"--enable=" .. table.concat(list(config['enable-checks']) or {'all'}, ',')})
-- Add '--verbose' when verbose mode is active
task:add_args(get_verbose('--verbose'))
-- Parallelism: configured 'jobs' value, falling back to the global jobs count
task:add_args({'-j', one(config['jobs']) or jobs})
-- Forward any extra command-line arguments
task:add_args(get_commandline())
-- Source directories to check: configured 'src-dir' list, or the current directory
task:add_args(list(config['src-dir']) or {'.'})
register_task(task)
<file_sep>#ifndef PLUGINS_INCLUDE
#define PLUGINS_INCLUDE

// Fix: this header uses assert() but did not include <cassert> itself,
// relying on a transitive include.
#include <cassert>
#include <string>

namespace execHelper {
namespace test {
namespace baseUtils {
/**
 * \brief Test settings for the built-in 'command' plugin
 */
struct CommandPluginSettings {
    static inline std::string getKey() { return COMMAND_KEY; }
    static inline std::string getStatementKey() {
        assert(false); // The command plugin does not have a command line key
        return COMMAND_KEY;
    }
};

/**
 * \brief Test settings for the 'command-line-command' plugin
 */
struct CommandLineCommandSettings {
    static inline std::string getKey() { return COMMAND_LINE_COMMAND_KEY; }
    static inline std::string getStatementKey() {
        return COMMAND_LINE_COMMAND_LINE_KEY;
    }
};
} // namespace baseUtils
} // namespace test
} // namespace execHelper

#endif /* PLUGINS_INCLUDE */
<file_sep># This Makefile contains only the targets to easily build and install exec-helper on your native system. Additional targets are available in the exec-helper-configuration. This requires exec-helper to be installed.
# For installing exec-helper, run:
# make
# make install
#
# Note: if you want to change the default installation directory, then add the PREFIX variable to the first make command:
# make PREFIX=<installation root directory>
# Override these on the command line if required
PREFIX?=/usr/local## Sets the installation prefix. Default: /usr/local.
JOBS?= $(shell grep -c ^processor /proc/cpuinfo)## Sets the number of jobs. Default: number of processors in /proc/cpuinfo.
BUILD_DIR?=build/native/release## Sets the build folder. Default: build/native/release.
CHANGELOG_CONFIG?= .gitchangelog.rc## Sets the changelog config to use. Default: .gitchangelog.rc
CHANGELOG_OUTPUT?= &1## Sets the changelog output redirection for print-changelog. Default: stdout
CMAKE_BUILD_TYPE?=Release ## Sets the cmake build type. Default: Release
USE_SYSTEM_GSL?= ON## Sets whether to use the system GSL package. Default: ON
BUILD_DOCUMENTATION?=ON## Switches the building of the documentation on or off. Default: ON
ACTUAL_PLUGINS_INSTALL_PREFIX?=$(PREFIX)/share/exec-helper/plugins## Set the actual installation prefix: useful if the files end up on a different place than the current PLUGINS_INSTALL_PREFIX

# Default target: build the binary, the usage documentation and the changelog
all: binary docs-usage changelog

init: ## Initialize native build
	cmake -H. -B$(BUILD_DIR) -DCMAKE_INSTALL_PREFIX=$(PREFIX) -DCMAKE_BUILD_TYPE=$(CMAKE_BUILD_TYPE) -DENABLE_WERROR=OFF -DENABLE_TESTING=OFF -DUSE_SYSTEM_GSL=$(USE_SYSTEM_GSL) -DCMAKE_EXPORT_COMPILE_COMMANDS=OFF -DBUILD_USAGE_DOCUMENTATION=$(BUILD_DOCUMENTATION) -DVERSION=$(shell git describe --long --dirty)-MANUAL -DCOPYRIGHT="Copyright (c) $(shell date +'%Y') <NAME>" -DACTUAL_PLUGINS_INSTALL_PREFIX=$(ACTUAL_PLUGINS_INSTALL_PREFIX)

binary: init ## Build the exec-helper binary
	make -C $(BUILD_DIR) --jobs $(JOBS) exec-helper

docs-html: init ## Build the HTML documentation
	make -C $(BUILD_DIR) --jobs $(JOBS) docs-html

docs-man: init ## Build the man-page documentation
	make -C $(BUILD_DIR) --jobs $(JOBS) docs-man

docs-usage: docs-html docs-man

changelog: init ## Create the associated changelog file
	make -C $(BUILD_DIR) --jobs $(JOBS) changelog

print-changelog: ## Print the changelog to CHANGELOG_OUTPUT (default: stdout)
	GITCHANGELOG_CONFIG_FILENAME=$(CHANGELOG_CONFIG) gitchangelog >$(CHANGELOG_OUTPUT)

docs: init docs-usage

install-bin: ## Install the exec-helper binary
	cmake -DCOMPONENT=runtime -P $(BUILD_DIR)/cmake_install.cmake

install-docs: ## Install what was build from the HTML and pman-page documentation documentation
	# Omitting installation of xml documentation
	cmake -DCOMPONENT=docs-man -P $(BUILD_DIR)/cmake_install.cmake
	cmake -DCOMPONENT=docs-html -P $(BUILD_DIR)/cmake_install.cmake

install-changelog: ## Install the changelog
	cmake -DCOMPONENT=changelog -P $(BUILD_DIR)/cmake_install.cmake

install: install-bin install-docs install-changelog

clean: ## Clean the build directory
	make -C $(BUILD_DIR) --jobs $(JOBS) clean

distclean: clean ## Clean everything
	rm -rf $(BUILD_DIR)

help: ## Show this help.
	@echo 'Command line overrides:'
	@grep "##" $(MAKEFILE_LIST) | grep -P "^[^\t]" | grep "?=" | sed -e 's/^\([^?]*\)?=.*##\(.*\)$$/    \1:\2/g'
	@echo 'Targets:'
	@grep "##" $(MAKEFILE_LIST) | grep -P "^[^\t]" | grep -v "?=" | grep -v ":=" | sed -e 's/^\([^:]*\):.*##\(.*\)$$/    \1:\2/g'

list: ## List all targets
	@grep "##" $(MAKEFILE_LIST) | grep -P "^[^\t]" | sed -e 's/^\([^:]*\):.*/\1/g'

.PHONY: all init binary docs-html docs-man changelog print-changelog docs install-bin install-docs install-changelog install clean distclean list help
<file_sep>#ifndef __MEMORY_H__
#define __MEMORY_H__
#include <tuple>
#include <vector>
#include "config/pattern.h"
#include "config/variablesMap.h"
#include "core/task.h"
#include "plugin.h"
namespace execHelper {
namespace plugins {
/**
 * \brief Remembers information that was passed to it
 */
struct Memory_t {
    const core::Task task; //!< The task to remember
    const config::VariablesMap variables; //!< The variables to remember
    const config::Patterns patterns; //!< The patterns to remember
    /**
     * Constructor
     *
     * \param[in] aTask The task to remember
     * \param[in] variables The variables to remember
     * \param[in] patterns The patterns to remember
     */
    Memory_t(core::Task aTask, config::VariablesMap variables,
             // cppcheck-suppress passedByValue symbolName=patterns
             config::Patterns patterns)
        : task(std::move(aTask)),
          variables(std::move(variables)),
          // Fix: 'patterns' was copied instead of moved, unlike the other
          // two by-value parameters
          patterns(std::move(patterns)) {
        ;
    }
};
/**
 * \brief Plugin for remembering later on what has been executed. Mainly
 * useful for testing purposes.
 *
 * Note: the remembered executions and the configured return code are kept in
 * static members, so they are shared across all Memory instances.
 */
class Memory : public Plugin {
  public:
    using Memories = std::vector<Memory_t>; //!< brief A collection of memories
    Memory() = default;
    /*! @copydoc config::Argv::Argv(const Argv&)
     */
    Memory(const Memory& other) = delete;
    /*! @copydoc config::Argv::Argv(Argv&&)
     */
    Memory(Memory&& other) noexcept = delete;
    ~Memory() override = default;
    /*! @copydoc config::Argv::operator=(const Argv&)
     */
    Memory& operator=(const Memory& other) = delete;
    /*! @copydoc config::Argv::operator=(Argv&&)
     */
    Memory& operator=(Memory&& other) noexcept = delete;
    config::VariablesMap
    getVariablesMap(const config::FleetingOptionsInterface& fleetingOptions)
        const noexcept override;
    bool apply(core::Task task, const config::VariablesMap& variables,
               const config::Patterns& patterns) const noexcept override;
    std::string summary() const noexcept override;

  protected:
    /**
     * Getter for the executions that were remembered
     *
     * \returns A collection of memories
     */
    static const Memories& getExecutions() noexcept;
    /**
     * Reset the remembered memories
     */
    static void reset() noexcept;
    /**
     * Set the return code for the next invocation(s) of a Memory object
     *
     * \param[in] returnCode The code to return on the next invocation(s)
     */
    static void setReturnCode(bool returnCode) noexcept;

  private:
    static bool m_returnCode;   // Shared return code for apply() calls
    static Memories m_executions; // Shared record of all apply() invocations
};
/**
 * \brief Makes access to a memory more accessible
 *
 * Exposes the protected static accessors of Memory publicly. NOTE(review):
 * the non-default constructor/destructor presumably manage the shared
 * Memory state (e.g. resetting it) - confirm in the implementation file.
 */
class MemoryHandler : public Memory {
  public:
    using Memories = Memory::Memories; //!< brief A collection of memories
    MemoryHandler();
    /*! @copydoc config::Argv::Argv(const Argv&)
     */
    MemoryHandler(const MemoryHandler& other) = delete;
    /*! @copydoc config::Argv::Argv(Argv&&)
     */
    MemoryHandler(MemoryHandler&& other) noexcept = delete;
    ~MemoryHandler() override;
    /*! @copydoc config::Argv::operator=(const Argv&)
     */
    MemoryHandler& operator=(const MemoryHandler& other) = delete;
    /*! @copydoc config::Argv::operator=(Argv&&)
     */
    MemoryHandler& operator=(MemoryHandler&& other) noexcept = delete;
    /*! @copydoc Memory::getExecutions()
     */
    static const Memories& getExecutions() noexcept;
    /*! @copydoc Memory::reset()
     */
    static void reset() noexcept;
    /*! @copydoc Memory::setReturnCode(bool)
     */
    static void setReturnCode(bool returnCode) noexcept;
};
/**
 * \brief Plugin for remembering later on what has been executed. Mainly
 * useful for testing purposes.
 *
 * Unlike Memory, this variant keeps its executions per instance rather than
 * in shared static state.
 */
class SpecialMemory : public Plugin {
  public:
    using Memories = std::vector<Memory_t>; //!< brief A collection of memories
    SpecialMemory() : SpecialMemory(true) {}
    /**
     * \param[in] returnCode The code to return on invocation
     */
    explicit SpecialMemory(bool returnCode) noexcept;
    auto getVariablesMap(
        const config::FleetingOptionsInterface& fleetingOptions) const noexcept
        -> config::VariablesMap override;
    auto apply(core::Task task, const config::VariablesMap& variables,
               const config::Patterns& patterns) const noexcept
        -> bool override;
    auto summary() const noexcept -> std::string override;
    /**
     * Getter for the executions that were remembered
     *
     * \returns A collection of memories
     */
    auto getExecutions() noexcept -> const Memories&;

  private:
    const bool m_returnCode; // Value returned by every apply() call
    mutable Memories
        m_executions; // We are keeping state in this plugin, while this is not allowed in 'regular' plugins
};
} // namespace plugins
} // namespace execHelper
#endif /* __MEMORY_H__ */
<file_sep>#ifndef IS_CONTAINER_INCLUDE
#define IS_CONTAINER_INCLUDE

// Fix: this header uses std::false_type/std::true_type and std::vector but
// did not include the headers that declare them.
#include <type_traits>
#include <vector>

namespace execHelper {
namespace config {
/**
 * \brief Type trait that reports whether T is a supported container type.
 *
 * Defaults to std::false_type; specialized below for std::vector.
 */
template <class T> struct isContainer : public std::false_type {};

/**
 * \brief Specialization: any std::vector instantiation is a container.
 */
template <class T, class Alloc>
struct isContainer<std::vector<T, Alloc>> : public std::true_type {};
} // namespace config
} // namespace execHelper

#endif /* IS_CONTAINER_INCLUDE */
<file_sep>#ifndef __POSIX_SHELL_H__
#define __POSIX_SHELL_H__

#include <csignal>

#include "shell.h"
#include "task.h"

namespace execHelper {
namespace core {
/**
 * \brief Implementation for Shell that represents a posix shell
 */
class PosixShell final : public Shell {
  public:
    /**
     * Execute the given task and return its shell return code.
     */
    ShellReturnCode execute(const Task& task) override;

    /**
     * Whether the given return code denotes a successful execution.
     */
    bool
    isExecutedSuccessfully(ShellReturnCode returnCode) const noexcept override;

  private:
    // NOTE(review): the two helpers below are commented out; confirm whether
    // they were replaced by shellExpand/wordExpand or should be removed.
    // cppcheck-suppress unusedPrivateFunction
    //void childProcessExecute(const Task& task) const noexcept;
    // cppcheck-suppress unusedPrivateFunction
    //ShellReturnCode waitForChild(pid_t pid) const noexcept;

    // Perform shell-style expansion on the task's arguments
    static TaskCollection shellExpand(const Task& task) noexcept;
    // Perform word expansion on the task's arguments
    static TaskCollection wordExpand(const Task& task) noexcept;
};
} // namespace core
} // namespace execHelper

#endif /* __POSIX_SHELL_H__ */
<file_sep>#include "settingsNode.h"
#include <iostream>
#include <memory>
#include <optional>
#include <ostream>
#include <log/assertions.h>
#include "logger.h"
using std::initializer_list;
using std::make_unique;
using std::ostream;
using std::string;
using execHelper::config::SettingsKeys;
using execHelper::config::SettingsValues;
namespace {
// Recursively pretty-prints a settings (sub)tree: each node is rendered as
// "- key" (with a trailing ':' when it has children) and children are
// indented one level deeper than their parent.
auto stream(ostream& os, const execHelper::config::SettingsNode& settings,
            const string& prepend) noexcept -> ostream& {
    os << prepend << "- " << settings.key();
    auto values =
        settings.get<SettingsValues>(SettingsKeys(), SettingsValues());
    if(!values.empty()) {
        os << ":";
    }
    os << std::endl;
    const string newPrepend = string(prepend).append("  ");
    for(const auto& value : values) {
        stream(os, settings[value], newPrepend);
    }
    return os;
}
} // namespace
namespace execHelper::config {
// Construct a leaf node holding only a key
SettingsNode::SettingsNode(SettingsKey key) noexcept : m_key(std::move(key)) {
    ;
}
// Copy constructor: performs a deep copy of the other node's subtree
SettingsNode::SettingsNode(const SettingsNode& other) noexcept
    : m_key(other.m_key) {
    deepCopy(other);
}
// Move constructor: steals the key and the child collection
SettingsNode::SettingsNode(SettingsNode&& other) noexcept
    : m_key(std::move(other.m_key)), m_values(std::move(other.m_values)) {
    ;
}
SettingsNode::~SettingsNode() noexcept { ; }
// Copy assignment: deep-copies the other subtree (self-assignment safe)
auto SettingsNode::operator=(const SettingsNode& other) noexcept
    -> SettingsNode& {
    if(this != &other) {
        m_key = other.m_key;
        deepCopy(other);
    }
    return *this;
}
// Move assignment implemented via swap; the moved-from node receives this
// node's previous content
auto SettingsNode::operator=(SettingsNode&& other) noexcept -> SettingsNode& {
    swap(other);
    return *this;
}
// Structural equality: keys must match and both subtrees must contain the
// same children (order-insensitive: lookup is by key).
auto SettingsNode::operator==(const SettingsNode& other) const noexcept
    -> bool {
    if(m_key != other.m_key) {
        return false;
    }
    // Either both nodes have a child collection or neither does
    if(!m_values || !other.m_values) {
        return (!m_values && !other.m_values);
    }
    if(m_values->size() != other.m_values->size()) {
        return false;
    }
    for(const auto& value : *m_values) {
        if(!other.contains(value.m_key)) {
            return false;
        }
        if(value != other[value.m_key]) {
            return false;
        }
    }
    return true;
}
auto SettingsNode::operator!=(const SettingsNode& other) const noexcept
    -> bool {
    return !(*this == other);
}
// Non-const subscript: returns the child for the given key, creating it
// first when it does not exist yet (std::map-like semantics).
auto SettingsNode::operator[](const SettingsKey& key) noexcept
    -> SettingsNode& {
    if(contains(key)) {
        auto value =
            find_if(m_values->begin(), m_values->end(),
                    [&key](const auto& value) { return value.m_key == key; });
        ensures(value != m_values->end());
        return *value;
    }
    if(!add(key)) {
        LOG(warning) << "Failed to add key '" << key << "'";
    }
    // add() appends, so the freshly created child is the last element
    return m_values->back();
}
// Const subscript: the key is required to exist (checked via expectsMessage)
auto SettingsNode::operator[](const SettingsKey& key) const noexcept
    -> const SettingsNode& {
    expectsMessage(contains(key), "Key must exist");
    auto value =
        find_if(m_values->begin(), m_values->end(),
                [&key](const auto& value) { return value.m_key == key; });
    ensures(value != m_values->end());
    return *value;
}
// Whether a direct child with the given key exists
auto SettingsNode::contains(const SettingsKey& key) const noexcept -> bool {
    if(!m_values) {
        return false;
    }
    return std::any_of(
        m_values->begin(), m_values->end(),
        [&key](const auto& value) { return value.m_key == key; });
}
// Whether the full key path exists, walking down one level per key part
auto SettingsNode::contains(const SettingsKeys& key) const noexcept -> bool {
    const SettingsNode* settings = this;
    for(const auto& keyPart : key) {
        if(settings->contains(keyPart)) {
            settings = settings->at(keyPart);
        } else {
            return false;
        }
    }
    return true;
}
// Append a single value as a direct child, lazily creating the child
// collection on first use
auto SettingsNode::add(const SettingsValue& newValue) noexcept -> bool {
    if(!m_values) {
        m_values = make_unique<SettingsNodeCollection>();
    }
    m_values->emplace_back(SettingsNode(newValue));
    return true;
}
// The overloads below all normalize their arguments and forward to
// add(SettingsKeys, SettingsValues) or add(SettingsValues).
auto SettingsNode::add(const SettingsKeys& key,
                       const SettingsValue& newValue) noexcept -> bool {
    return add(key, SettingsValues({newValue}));
}
auto SettingsNode::add(const initializer_list<SettingsValue>& newValue) noexcept
    -> bool {
    return add(SettingsValues(newValue));
}
auto SettingsNode::add(const SettingsKeys& key,
                       const initializer_list<SettingsValue>& newValue) noexcept
    -> bool {
    return add(key, SettingsValues(newValue));
}
auto SettingsNode::add(
    const std::initializer_list<SettingsKey>& key,
    const std::initializer_list<SettingsValue>& newValue) noexcept -> bool {
    return add(SettingsKeys(key), SettingsValues(newValue));
}
auto SettingsNode::add(const std::initializer_list<SettingsKey>& key,
                       const SettingsValues& newValue) noexcept -> bool {
    return add(SettingsKeys(key), newValue);
}
auto SettingsNode::add(const std::initializer_list<SettingsKey>& key,
                       const SettingsValue& newValue) noexcept -> bool {
    return add(SettingsKeys(key), SettingsValues({newValue}));
}
// Add values under the given key path, creating intermediate nodes as needed
auto SettingsNode::add(const SettingsKeys& key,
                       const SettingsValues& newValue) noexcept -> bool {
    SettingsNode* settings = this;
    for(const auto& parentKey : key) {
        if(!settings->contains(parentKey)) {
            if(!settings->add(parentKey)) {
                LOG(warning) << "Failed to add key '" << parentKey << "'";
            }
        }
        settings = settings->at(parentKey);
    }
    return settings->add(newValue);
}
auto SettingsNode::add(const SettingsKey& key,
                       const SettingsValue& newValue) noexcept -> bool {
    return add(SettingsKeys({key}), {newValue});
}
// Append multiple values as direct children in one pass
auto SettingsNode::add(const SettingsValues& newValue) noexcept -> bool {
    if(!m_values) {
        m_values = make_unique<SettingsNodeCollection>();
    }
    m_values->reserve(m_values->size() + newValue.size());
    std::transform(newValue.begin(), newValue.end(),
                   std::back_inserter(*m_values),
                   [](auto&& value) { return SettingsNode(value); });
    return true;
}
auto SettingsNode::add(const SettingsKey& key,
                       const SettingsValues& newValue) noexcept -> bool {
    return add(SettingsKeys({key}), newValue);
}
auto SettingsNode::clear(const SettingsKey& key) noexcept -> bool {
return clear(SettingsKeys({key}));
}
auto SettingsNode::clear(const SettingsKeys& keys) noexcept -> bool {
if(keys.empty()) {
LOG(debug) << "Cannot clear the settingsnode itself";
return false;
}
if(!contains(keys)) {
return true;
}
SettingsNode* settings = this;
for(auto key = keys.begin(); key != keys.end() - 1; ++key) {
settings = settings->at(*key);
}
for(auto value = settings->m_values->begin();
value != settings->m_values->end(); ++value) {
if(value->m_key == keys.back()) {
settings->m_values->erase(value);
return true;
}
}
return false;
}
// Returns the values of this node, i.e. the keys of its direct children,
// or nullopt when the node has no child collection at all.
auto SettingsNode::values() const noexcept -> std::optional<SettingsValues> {
    if(!m_values) {
        return std::nullopt;
    }
    SettingsValues result;
    result.reserve(m_values->size());
    for(const auto& child : *m_values) {
        result.push_back(child.m_key);
    }
    return result;
}
auto SettingsNode::key() const noexcept -> const SettingsKey& { return m_key; }
// Exchanges the key and the (owned) child collection with the other node.
void SettingsNode::swap(SettingsNode& other) noexcept {
    m_key.swap(other.m_key);
    m_values.swap(other.m_values);
}
void SettingsNode::overwrite(const SettingsNode& newSettings) noexcept {
std::vector<std::string> children =
newSettings.values().value_or(std::vector<std::string>());
for(const auto& key : children) {
const SettingsNode& newValue = newSettings[key];
if(!contains(key)) {
if(!add(key)) {
LOG(warning) << "Failed to add key '" << key << "'";
}
}
at(key)->deepCopy(newValue);
}
}
// Returns a pointer to the direct child with the given key.
// Precondition: the key must exist (see contains()).
auto SettingsNode::at(const SettingsKey& key) noexcept -> SettingsNode* {
    expectsMessage(contains(key), "Key must exist");
    auto match =
        std::find_if(m_values->begin(), m_values->end(),
                     [&key](const auto& node) { return node.m_key == key; });
    ensures(match != m_values->end());
    return &(*match);
}
// Const counterpart of at(const SettingsKey&).
// Precondition: the key must exist (see contains()).
auto SettingsNode::at(const SettingsKey& key) const noexcept
    -> const SettingsNode* {
    expectsMessage(contains(key), "Key must exist");
    auto match =
        std::find_if(m_values->begin(), m_values->end(),
                     [&key](const auto& node) { return node.m_key == key; });
    ensures(match != m_values->end());
    return &(*match);
}
// Returns a pointer to the node addressed by the full key hierarchy.
// Precondition: the whole hierarchy must exist (see contains()).
auto SettingsNode::at(const SettingsKeys& key) noexcept -> SettingsNode* {
    expectsMessage(contains(key), "Key must exist");
    SettingsNode* node = this;
    for(const auto& keyPart : key) {
        node = node->at(keyPart);
    }
    return node;
}
// Const counterpart of at(const SettingsKeys&).
// Precondition: the whole hierarchy must exist (see contains()).
auto SettingsNode::at(const SettingsKeys& key) const noexcept
    -> const SettingsNode* {
    expectsMessage(contains(key), "Key must exist");
    const SettingsNode* node = this;
    for(const auto& keyPart : key) {
        node = node->at(keyPart);
    }
    return node;
}
// Replaces this node's child collection with a copy of the other node's
// children. NOTE(review): m_key is deliberately left untouched, and when
// 'other' has no child collection the current children are kept as-is
// (overwrite() relies on this merge behavior).
void SettingsNode::deepCopy(const SettingsNode& other) noexcept {
    if(!other.m_values) {
        return;
    }
    m_values = make_unique<SettingsNodeCollection>();
    m_values->reserve(other.m_values->size());
    // Relies on SettingsNode's copy constructor to clone each subtree --
    // presumably that constructor performs a deep copy; verify its definition.
    std::copy(other.m_values->begin(), other.m_values->end(),
              std::back_inserter(*m_values));
}
// Streams a human-readable representation of the settings tree by delegating
// to the file-local stream() helper with an empty indentation prefix.
auto operator<<(ostream& os, const SettingsNode& settings) noexcept
    -> ostream& {
    return stream(os, settings, "");
}
} // namespace execHelper::config
# MSVC needs CMake >= 3.15 for policy CMP0091 (MSVC runtime library selection).
if(MSVC)
    cmake_minimum_required(VERSION 3.15)
    cmake_policy(SET CMP0091 NEW)
else()
    cmake_minimum_required(VERSION 3.13)
endif()

project(exec-helper
        LANGUAGES C CXX)

include(GNUInstallDirs)

# Configure pkg-config (needs to be set explicitly to support cross compilation)
set(PKG_CONFIG_USE_CMAKE_PREFIX_PATH ON)
set(ENV{PKG_CONFIG_DIR} "")
set(ENV{PKG_CONFIG_LIBDIR} "${CMAKE_SYSROOT}/usr/${CMAKE_INSTALL_LIBDIR}/pkgconfig:${CMAKE_SYSROOT}/usr/${CMAKE_INSTALL_LIBDIR}/${CMAKE_LIBRARY_ARCHITECTURE}/pkgconfig:${CMAKE_SYSROOT}/usr/share/pkgconfig")
set(ENV{PKG_CONFIG_SYSROOT_DIR} ${CMAKE_FIND_ROOT_PATH})

set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_EXTENSIONS OFF)

option(POSITION_INDEPENDENT_CODE "Create position independent code" ON)
mark_as_advanced(POSITION_INDEPENDENT_CODE)
set(CMAKE_POSITION_INDEPENDENT_CODE ${POSITION_INDEPENDENT_CODE})

list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake/modules)

# Fixed typo in the help string: "threating" -> "treating".
option(ENABLE_WERROR "Enable treating warnings as errors" ON)
if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU" OR "${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang")
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wextra")
    if(${ENABLE_WERROR})
        set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror")
    endif()
    string(TOLOWER "${CMAKE_BUILD_TYPE}" BUILD_TYPE_LOWER)
    # Coverage instrumentation is only added to debug builds.
    if(${BUILD_TYPE_LOWER} MATCHES debug)
        set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COVERAGE_COMPILE_FLAGS}")
        set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${COVERAGE_LINK_FLAGS}")
    endif()
    # Use gold linker
    set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fuse-ld=gold")
endif()

if(MSVC)
    # NOTE(review): CMAKE_MSVC_LIBRARY is not a documented CMake variable and
    # CMAKE_MSVC_RUNTIME_LIBRARY is set separately below -- verify whether the
    # first assignment is still needed.
    set(CMAKE_MSVC_LIBRARY "MultiThreaded$<$<CONFIG:Debug>:Debug>")
    set(Boost_USE_STATIC_LIBS ON)
    set(CMAKE_MSVC_RUNTIME_LIBRARY "MultiThreaded$<$<CONFIG:Debug>:Debug>DLL")
    set(Boost_USE_STATIC_RUNTIME OFF)
endif()

# Enable modules for clang
if("${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang")
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fmodules")
endif()

option(LIMITED_OPTIMIZATION "Use only limited optimization. This is mostly interesting for certain analysis tools." OFF)
if(LIMITED_OPTIMIZATION)
    if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU" OR "${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang")
        add_definitions(-O1)
    endif()
endif()

option(TERMINATE_ON_ASSERT_FAILURE "Terminate on assertion failure" OFF)
message(STATUS "Terminate on assert failure: " ${TERMINATE_ON_ASSERT_FAILURE})
if(${TERMINATE_ON_ASSERT_FAILURE})
    add_definitions(-DTERMINATE_ON_ASSERT_FAILURE)
endif()

set(BIN_DIRECTORY ${CMAKE_INSTALL_BINDIR})
set(LIB_DIRECTORY ${CMAKE_INSTALL_LIBDIR})
set(ACTUAL_PLUGINS_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}/share/exec-helper/plugins" CACHE PATH "The actual installation path for the default plugin files. Useful for packaging systems where the install PREFIX is different from where the plugins will end up eventually")
message(STATUS "Expecting plugins to ultimately being exported to ${ACTUAL_PLUGINS_INSTALL_PREFIX}")

option(ENABLE_TESTING "Enable building tests" ON)

add_subdirectory(3rdparty)
# NOTE(review): 'rpcheck_LIBRARY_DIRS' looks like a typo for
# 'rapidcheck_LIBRARY_DIRS' (used below); verify against the 3rdparty scripts.
link_directories(${yaml-3rd_LIBRARY_DIRS})
link_directories(${rpcheck_LIBRARY_DIRS})
add_subdirectory(src)
if(${ENABLE_TESTING})
    link_directories(${rapidcheck_LIBRARY_DIRS})
    enable_testing()
    add_subdirectory(test)
endif()
add_subdirectory(docs)
add_subdirectory(gitchangelog)
add_subdirectory(completion)
<file_sep>.. _exec-helper-config:
Configuration
*************
.. toctree::
:maxdepth: 0
:hidden:
:glob:
exec-helper-config-*
Description
===========
Exec-helper configuration files are written in the YAML 1.2 specification.
Mandatory keys
==============
A valid configuration file must contain at least the following keys on the root level of the configuration file:
.. program:: exec-helper-config
.. describe:: commands
The commands that are configured in the configuration file. It will either contain a list of commands or a list of the commands as keys with an explanation of the command as a value. These formats cannot be used interchangeably.
.. describe:: <command-keys>
For every command defined under the *commands* key, the configuration must define this command as a key in the root of the configuration file. The value of the key must either be a registered plugin or another command.
.. describe:: <plugin-keys>
For at least every plugin that is used by a *command* key, configure the specifics of the plugin (if applicable).
Optional keys
=============
Optionally the configuration file contains the following keys on the root level of the configuration file:
.. describe:: patterns
Patterns are parts of the configuration that will be replaced by their value when evaluated by exec-helper. The *patterns* keyword describes a list of patterns identified by their key. See :ref:`exec-helper-config-patterns` for more information about how to define a pattern.
.. describe:: additional-search-paths
An ordered list of additional search paths to use when searching for plugins. The search paths can be absolute or relative w.r.t. the parent path of the *settings file* in which these paths are defined.
Defining search paths is useful for extending exec-helper with your own custom plugins or for overwriting or extending the functionality in the provided plugins. See :ref:`exec-helper-custom-plugins` (5) for more information on writing a custom plugin.
The paths defined in this list take precedence over the system search paths for modules with the same name. A higher position in this list implicates higher precedence.
Working directory
=================
Configured commands are executed from the so-called *working directory*. Executing commands in a different working directory will not affect your current working directory (e.g. when executing from a shell). Each separately configured command can be executed in a separate working directory.
The *working directory* is the directory that is associated with the first of the following lines whose requirement is met:
1. The *working-dir* configuration setting is configured for the specific command. The value of the *working-dir* configuration key can be an absolute path to the working directory or a relative one w.r.t. the directory of the considered configuration file. If the command should be executed in the actual working directory, use *<working-dir>* as the value in the configuration file.
2. The directory of the considered configuration file.
Paths
=====
All relative paths in the configuration should be *relative to the directory in which the configuration resides*. While relative paths are convenient for users as they can freely choose the root directory of an application, some applications require an absolute path. In such case, use the *${PWD}* environment variable (both POSIX and non-POSIX systems) to convert a relative path in your configuration into an absolute path for calling these particular applications.
Example configuration
=====================
.. literalinclude:: ../examples/exec-helper-config.example
:language: yaml
See also
========
See :ref:`exec-helper-config-patterns` (5) for more information on defining and using patterns.
See :ref:`exec-helper-config-environment` (5) for more information on configuring execution environments.
See :ref:`exec-helper` (1) for information about the usage of exec-helper.
See :ref:`exec-helper-plugins` (5) for the available plugins and their configuration options.
See :ref:`exec-helper-custom-plugins` (5) for more information on writing your own custom plugins.
<file_sep>Configuration
*************
Usage information on the configuration can be found on the :ref:`exec-helper-config` page.
.. highlight:: gherkin
.. literalinclude:: environment.feature
<file_sep>#ifndef __OPTION_DESCRIPTIONS_H__
#define __OPTION_DESCRIPTIONS_H__
#include <memory>
#include <optional>
#include <vector>
#include <boost/program_options.hpp>
#include "log/assertions.h"
#include "log/log.h"
#include "variablesMap.h"
namespace execHelper::config {
class Argv;
} // namespace execHelper::config
namespace execHelper {
namespace config {
typedef std::string ArgumentOption; // A single command-line option name
typedef std::vector<ArgumentOption> ArgumentOptions; // Collection of option names

/**
 * \brief Interface for determining the option characteristics associated with
 * an argument option
 */
class OptionInterface {
  public:
    virtual ~OptionInterface() {}

    /**
     * Returns the id for identifying the specified option throughout the system
     *
     * \returns The unique id associated with this option
     */
    [[nodiscard]] virtual auto getId() const noexcept -> std::string = 0;

    /**
     * Extract the value(s) associated with this option from the optionsMap
     *
     * \param[out] variablesMap The variables map to write to
     * \param[in] optionsMap The options map to take the value(s) from
     * \returns True if the associated value(s) were successfully extracted
     *          False otherwise
     */
    virtual auto toMap(config::VariablesMap& variablesMap,
                       const boost::program_options::variables_map& optionsMap)
        const noexcept -> bool = 0;

    /**
     * Return the associated type value of this options in order to be able to
     * properly parse it
     *
     * \returns The implementation-specific semantic to be used by the
     * optionparser.
     */
    virtual auto getTypeValue() const noexcept
        -> const boost::program_options::value_semantic* = 0;

  protected:
    // Only constructible through derived classes.
    OptionInterface() {}
};
/**
 * \brief Class for generic OptionInterface parent functions
 */
class OptionBase : public OptionInterface {
  public:
    virtual ~OptionBase() {}

    // Returns the identifying (long) option string.
    [[nodiscard]] auto getId() const noexcept -> std::string override {
        return m_identifyingOption;
    }

    /**
     * Returns the argument options for this option, in addition to the one
     * returned by getId().
     *
     * \returns A collection of additional option keys associated with this
     * option
     */
    virtual auto getArgumentOptions() const noexcept -> const ArgumentOptions& {
        return m_argumentOptions;
    }

    /**
     * Returns the explanation associated with this option
     *
     * \returns The explanation
     */
    virtual auto getExplanation() const noexcept -> const std::string& {
        return m_explanation;
    }

    // Fallback implementation: must be overridden by every concrete child.
    // Triggers an assertion and reports failure when called.
    auto toMap(config::VariablesMap& /*variablesMap*/,
               const boost::program_options::variables_map& /*optionsMap*/)
        const noexcept -> bool override {
        ensuresMessage(false, "We should not get here: all children should "
                              "implement this function");
        return false;
    }

    // Fallback implementation: must be overridden by every concrete child.
    auto getTypeValue() const noexcept
        -> const boost::program_options::value_semantic* override {
        ensuresMessage(false, "We should not get here: all children should "
                              "implement this function");
        return boost::program_options::value<char>();
    }

  protected:
    /**
     * \param[in] identifyingOption The unique string to identify this option
     * with \param[in] argumentOptions A collection of additional,
     * non-identifying options \param[in] explanation The explanation
     * associated with this option
     */
    OptionBase(const std::string& identifyingOption,
               const ArgumentOptions& argumentOptions,
               const std::string& explanation)
        : OptionInterface(),
          m_identifyingOption(identifyingOption),
          m_argumentOptions(argumentOptions),
          m_explanation(explanation) {
        expectsMessage(argumentOptions.size() <= 1U,
                       "Currently we support only one additional, "
                       "non-identifying option: the associated short option");
    }

    std::string m_identifyingOption; //!< The identifying option associated with
                                     //!< this option
    ArgumentOptions m_argumentOptions; //!< Additional non-identifying options
                                       //!< associated with this option
    std::string m_explanation; //!< The associated explanation
};

// NOTE(review): this stores the polymorphic OptionBase by value, so inserting
// a derived Option<T> would slice it -- verify whether this typedef is used.
typedef std::vector<OptionBase> OptionCollection;
/**
 * \brief Class implementing an argument option that fits an option value of
 * type T
 */
template <typename T> class Option : public OptionBase {
  public:
    /*! @copydoc OptionBase::OptionBase(const std::string&, const ArgumentOptions&, const std::string&)
     */
    Option(const std::string& identifyingOption,
           const ArgumentOptions& argumentOptions,
           const std::string& explanation)
        : OptionBase(identifyingOption, argumentOptions, explanation) {
        ;
    }

    virtual ~Option() {}

    /**
     * Copies the parsed value of type T for this option into the variables
     * map, replacing any previous value.
     *
     * \returns False when the stored value cannot be cast to T (reported to
     *          the user), the result of VariablesMap::replace otherwise
     */
    virtual auto toMap(config::VariablesMap& variablesMap,
                       const boost::program_options::variables_map& optionsMap)
        const noexcept -> bool override {
        try {
            return variablesMap.replace(
                m_identifyingOption,
                optionsMap[m_identifyingOption].template as<T>());
        } catch(const boost::bad_any_cast& e) {
            user_feedback_error("Bad_any_cast exception caught: " << e.what());
            return false;
        }
    }

    // Parse this option as a single value of type T.
    virtual auto getTypeValue() const noexcept
        -> const boost::program_options::value_semantic* override {
        return boost::program_options::value<T>();
    }
};
/**
 * \brief Class implementing a flag
 */
template <> class Option<bool> : public OptionBase {
  public:
    /*! @copydoc OptionBase::OptionBase(const std::string&, const ArgumentOptions&, const std::string&)
     */
    Option(const std::string& identifyingOption,
           const ArgumentOptions& argumentOptions,
           const std::string& explanation)
        : OptionBase(identifyingOption, argumentOptions, explanation) {
        ;
    }

    virtual ~Option() {}

    /**
     * Writes "1" to the variables map when the flag was set on the command
     * line and "0" otherwise (including when the flag is absent).
     *
     * \returns False on a failed cast, the result of VariablesMap::replace
     *          otherwise
     */
    virtual auto toMap(config::VariablesMap& variablesMap,
                       const boost::program_options::variables_map& optionsMap)
        const noexcept -> bool override {
        if(optionsMap.count(m_identifyingOption) > 0) {
            try {
                if(optionsMap[m_identifyingOption].as<bool>()) {
                    return variablesMap.replace(m_identifyingOption, "1");
                }
            } catch(const boost::bad_any_cast& e) {
                user_feedback_error(
                    "Bad_any_cast exception caught: " << e.what());
                return false;
            }
        }
        // Flag not set (or set to false): record "0".
        return variablesMap.replace(m_identifyingOption, "0");
    }

    // Flags take no argument: use a boolean switch.
    virtual auto getTypeValue() const noexcept
        -> const boost::program_options::value_semantic* override {
        return boost::program_options::bool_switch();
    }
};
/**
* \brief Partial implementation an argument option that fits an option value
* of type vector<T>
*/
template <typename T> class Option<std::vector<T>> : public OptionBase {
public:
/*! @copydoc OptionBase::OptionBase(const std::string&, const ArgumentOptions&, const std::string&)
*/
Option(const std::string& identifyingOption,
const ArgumentOptions& argumentOptions,
const std::string& explanation)
: OptionBase(identifyingOption, argumentOptions, explanation) {
;
}
virtual ~Option() {}
virtual auto toMap(config::VariablesMap& variablesMap,
const boost::program_options::variables_map& optionsMap)
const noexcept -> bool override {
try {
return variablesMap.replace(
m_identifyingOption,
optionsMap[m_identifyingOption].template as<std::vector<T>>());
} catch(const boost::bad_any_cast& e) {
user_feedback_error("Bad_any_cast exception caught: " << e.what());
return false;
}
}
virtual auto getTypeValue() const noexcept
-> boost::program_options::value_semantic* override {
return boost::program_options::value<std::vector<T>>()->multitoken();
}
};
/**
 * \brief Represents the descriptions of the options. This description is used
 * for interpreting e.g. command line arguments.
 */
class OptionDescriptions {
  public:
    OptionDescriptions() noexcept;

    /**
     * Returns the currently registered option descriptions
     *
     * \returns A collection of the registered option descriptions
     */
    [[nodiscard]] auto getOptionDescriptions() const noexcept
        -> boost::program_options::options_description;

    /**
     * Add an option description
     *
     * \param[in] option The option to add
     */
    template <typename T> void addOption(const Option<T>& option) noexcept {
        auto id = option.getId();
        // Register the long form of the identifying option.
        m_optionKeys.emplace_back("--" + id);
        // Build the boost option spec: "long,short" when a short alias exists.
        std::string option_code(id);
        for(const auto& argumentOption : option.getArgumentOptions()) {
            option_code.append(",");
            option_code.append(argumentOption);
            // Single-character aliases use one dash, longer ones use two.
            if(argumentOption.size() == 1U) {
                m_optionKeys.emplace_back("-" + argumentOption);
            } else {
                m_optionKeys.emplace_back("--" + argumentOption);
            }
        }
        m_optionDescription.add_options()(option_code.c_str(),
                                          option.getTypeValue(),
                                          option.getExplanation().c_str());
        // Keep a typed copy so toMap() can later dispatch per option.
        m_options.emplace(std::pair<std::string, std::unique_ptr<Option<T>>>(
            id, std::make_unique<Option<T>>(option)));
    }

    /**
     * Set the given option as the positional argument (meaning that if no
     * associated option is specified for an argument on the command line, it is
     * assumed to belong to this specific option.
     *
     * \param[in] option The option to use
     * \returns true if the positional argument was successfully set
     *          false otherwise
     */
    auto setPositionalArgument(const OptionInterface& option) noexcept -> bool;

    /**
     * Returns a map containing the parsed option descriptions for the given
     * command line arguments
     *
     * \param[out] variablesMap The variables map to add the values to
     * \param[in] argv A collection of input arguments
     * \param[in] allowUnregistered Whether to allow options in argv that are
     * not described in this option description \returns True if the options
     * map was successfully constructed False otherwise
     */
    auto getOptionsMap(config::VariablesMap& variablesMap, const Argv& argv,
                       bool allowUnregistered = false) const noexcept -> bool;

    /**
     * Returns all the option keys
     *
     * \returns A list of option keys
     */
    auto getOptionKeys() const noexcept -> const std::vector<std::string>& {
        return m_optionKeys;
    }

  private:
    // Applies every registered option's toMap() to the parsed options.
    void toMap(
        config::VariablesMap& variablesMap,
        const boost::program_options::variables_map& optionsMap) const noexcept;

    boost::program_options::options_description m_optionDescription;
    std::vector<std::string> m_optionKeys; // All long/short keys, with dashes
    std::map<std::string, std::unique_ptr<OptionBase>> m_options; // Keyed by id
    std::optional<std::string> m_positional; // Id of the positional option
};
} // namespace config
} // namespace execHelper
#endif /* __OPTION_DESCRIPTIONS_H__ */
<file_sep>.. _exec-helper-plugins:
Plugins
*******
.. toctree::
:maxdepth: 0
:hidden:
:glob:
exec-helper-custom-plugins
exec-helper-plugins-*
Description
===========
This document describes the list of :program:`plugins` that can be used in the associated :program:`exec-helper` binaries.
General plugins
===============
.. program:: plugins
.. describe:: command-line-command
The command-line-command plugin is used for executing arbitrary command line commands.
See :ref:`exec-helper-plugins-command-line-command` (5).
.. describe:: command-plugin
The command plugin is used for executing the given command as it is configured in the :program:`exec-helper` configuration file. See :ref:`exec-helper-plugins-command-plugin` (5).
.. describe:: execute-plugin
The execute plugin is used for executing other commands and/or targets configured in the :program:`exec-helper` configuration file. See :ref:`exec-helper-plugins-execute-plugin` (5).
.. describe:: memory
The memory plugin remembers all calls and state the plugin was called with. This plugin is mainly useful for testing and debugging purposes. See :ref:`exec-helper-plugins-memory` (5).
.. describe:: selector
The selector plugin is used for selecting certain configuration paths based on the value of a pattern. See :ref:`exec-helper-plugins-selector` (5).
.. describe:: docker
The docker plugin is used for running commands inside a Docker container. See :ref:`exec-helper-plugins-docker` (5).
Build plugins
=============
.. program:: plugins
.. describe:: bootstrap
The bootstrap plugin is used for calling bootstrap scripts, typically used as a step in a build chain. See :ref:`exec-helper-plugins-bootstrap` (5).
.. describe:: make
The make plugin is used for running the make build system. See :ref:`exec-helper-plugins-make` (5).
.. describe:: scons
The scons plugin is used for running the scons build system. See :ref:`exec-helper-plugins-scons` (5).
.. describe:: cmake
The cmake plugin is used for running the CMake build system. See :ref:`exec-helper-plugins-cmake` (5).
Analysis plugins
================
.. program:: plugins
.. describe:: clang-static-analyzer
The clang-static-analyzer plugin is used for applying the clang static analyzer tool on source code files. See :ref:`exec-helper-plugins-clang-static-analyzer` (5).
.. describe:: clang-tidy
The clang-tidy plugin is used for applying the clang tidy tool on source code files. See :ref:`exec-helper-plugins-clang-tidy` (5).
.. describe:: cppcheck
The cppcheck plugin is used for applying cppcheck on source code files. See :ref:`exec-helper-plugins-cppcheck` (5).
.. describe:: lcov
The lcov plugin is used for applying the lcov code coverage analysis tool. See :ref:`exec-helper-plugins-lcov` (5).
.. describe:: pmd
The pmd plugin is used for applying pmd analysis on source code files. See :ref:`exec-helper-plugins-pmd` (5).
.. describe:: valgrind
The valgrind plugin is used for applying valgrind analysis. See :ref:`exec-helper-plugins-valgrind` (5).
Custom plugins
==============
You can write your own plugins and integrate them with :program:`exec-helper`. These plugins are first-class citizens: you can write plugins that overwrite the system plugins themselves. See :ref:`exec-helper-custom-plugins` (5) for more information on writing your own plugins.
.. include:: see-also.rst
<file_sep>#ifndef __EXECUTOR_STUB_H__
#define __EXECUTOR_STUB_H__
#include <vector>
#include "core/shell.h"
#include "core/task.h"
namespace execHelper {
namespace core {
namespace test {
const Shell::ShellReturnCode RETURN_CODE_SUCCESS = 0U; // Code reported for success

/**
 * \brief Shell stub that records every executed task and always reports a
 * fixed return code. Intended for tests only.
 */
class ShellStub final : public Shell {
  public:
    using TaskQueue = std::vector<Task>; // Ordered log of executed tasks

    /**
     * \param[in] returnCode The return code to report for every execution
     */
    explicit ShellStub(ShellReturnCode returnCode = RETURN_CODE_SUCCESS)
        : m_returnCode(returnCode) {
        ;
    }

    // Records the task and reports the configured return code.
    ShellReturnCode execute(const Task& task) noexcept override {
        m_executedTasks.push_back(task);
        return m_returnCode;
    }

    // An execution counts as successful iff it returned RETURN_CODE_SUCCESS.
    bool
    isExecutedSuccessfully(ShellReturnCode returnCode) const noexcept override {
        return returnCode == RETURN_CODE_SUCCESS;
    }

    // Access the tasks recorded by execute(), in execution order.
    const TaskQueue& getExecutedTasks() const noexcept {
        return m_executedTasks;
    }

  private:
    TaskQueue m_executedTasks;    // Tasks passed to execute() so far
    ShellReturnCode m_returnCode; // Code reported by every execute() call
};
} // namespace test
} // namespace core
} // namespace execHelper
#endif /* __EXECUTOR_STUB_H__ */
# Only generate Doxygen XML output when explicitly requested.
if(BUILD_XML_DOCUMENTATION)
    set(GENERATE_XML "YES")
endif()

# Instantiate the shared Doxyfile template for this build directory.
set(doxyfile_in ${CMAKE_CURRENT_SOURCE_DIR}/../Doxyfile.in)
set(doxyfile ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile)
set(XML_INDEX_FILE index.xml)
configure_file(${doxyfile_in} ${doxyfile} @ONLY)

# Doxygen produces index.xml as the root of the generated XML tree.
add_custom_command(OUTPUT ${XML_INDEX_FILE}
    COMMAND ${DOXYGEN_EXECUTABLE} ${doxyfile}
    WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
    MAIN_DEPENDENCY ${doxyfile_in}
    VERBATIM)

add_custom_target(docs-xml
    SOURCES ${XML_INDEX_FILE}
    COMMENT "Generating API XML documentation with Doxygen"
    VERBATIM)

# NOTE(review): this installs the whole current binary directory (including
# the generated Doxyfile); verify whether only the XML output subdirectory
# should be installed instead.
install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} DESTINATION share/doc/exec-helper COMPONENT docs-xml)

add_dependencies(docs docs-xml)
<file_sep>#ifndef __IMMEDIATE_EXECUTOR_H__
#define __IMMEDIATE_EXECUTOR_H__
#include "executorInterface.h"
#include <functional>
#include <memory>
#include "shell.h"
namespace execHelper {
namespace core {
class Task;
} // namespace core
} // namespace execHelper
namespace execHelper {
namespace core {
/**
 * \brief Implements a straight forward executor that immediately executes the
 * given task. This executor blocks during executes.
 */
class ImmediateExecutor : public ExecutorInterface {
  public:
    using Callback = std::function<void(
        Shell::ShellReturnCode)>; //!< Brief Callback function signature

    /**
     * Create an executor
     *
     * \param[in] shell The shell to execute the command with
     * \param[in] callback The function to call with the result when a command
     * finishes
     */
    ImmediateExecutor(std::shared_ptr<Shell> shell, Callback callback) noexcept;

    // Executes the task synchronously on the configured shell; presumably the
    // shell return code is forwarded to m_callback -- see the implementation.
    void execute(const Task& task) noexcept override;

  private:
    const std::shared_ptr<Shell> m_shell; // Shell used to run each task
    Callback m_callback; // Invoked with the return code of each run
};
} // namespace core
} // namespace execHelper
#endif /* __IMMEDIATE_EXECUTOR_H__ */
list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake/modules)

# Define a target that downloads all required dependencies
add_custom_target(download)

# Each included script registers one third-party dependency.
include(stl.CMakeLists.txt)
include(boost.CMakeLists.txt)
include(yaml-cpp.CMakeLists.txt)
include(gsl.CMakeLists.txt)
include(luajit.CMakeLists.txt)
include(luawrapper.CMakeLists.txt)

# Test-only dependencies.
if(${ENABLE_TESTING})
    include(catch.CMakeLists.txt)
    include(rapidcheck.CMakeLists.txt)
endif()
<file_sep>#include <filesystem>
#include <string>
#include <vector>
#include "config/environment.h"
#include "config/pattern.h"
#include "config/variablesMap.h"
#include "plugins/luaPlugin.h"
#include "unittest/catch.h"
#include "unittest/config.h"
#include "unittest/rapidcheck.h"
#include "utils/commonGenerators.h"
#include "utils/utils.h"
#include "executorStub.h"
#include "fleetingOptionsStub.h"
#include "handlers.h"
using std::optional;
using std::string;
using std::to_string;
using std::vector;
using execHelper::config::EnvironmentCollection;
using execHelper::config::Jobs_t;
using execHelper::config::Patterns;
using execHelper::config::VariablesMap;
using execHelper::core::Task;
using execHelper::core::test::ExecutorStub;
using execHelper::test::propertyTest;
using execHelper::test::utils::getExpectedTasks;
namespace filesystem = std::filesystem;
namespace execHelper::plugins::test {
SCENARIO("Testing the configuration settings of the cppcheck plugin",
"[cppcheck]") {
propertyTest("", [](const optional<vector<filesystem::path>>& srcDir,
const optional<vector<string>>& checks,
const optional<filesystem::path>& workingDir,
const optional<vector<string>>& commandLine,
const optional<EnvironmentCollection>& environment,
const optional<bool> verbose,
const optional<Jobs_t> jobs) {
const Task task;
Task expectedTask(task);
Patterns patterns;
VariablesMap config("cppcheck-test");
LuaPlugin plugin(std::string(PLUGINS_INSTALL_PATH) + "/cppcheck.lua");
ExecutorStub executor;
ExecuteCallback executeCallback = [&executor](const Task& task) {
executor.execute(task);
};
registerExecuteCallback(executeCallback);
expectedTask.append("cppcheck");
if(workingDir) {
handleWorkingDirectory(*workingDir, config, expectedTask);
}
if(environment) {
handleEnvironment(*environment, config, expectedTask);
}
string enableString("--enable=");
if(checks) {
REQUIRE(config.add("enable-checks", *checks));
for(auto it = checks->begin(); it != checks->end(); ++it) {
if(it != checks->begin()) {
enableString.append(",");
}
enableString.append(*it);
}
expectedTask.append(enableString);
} else {
enableString.append("all");
expectedTask.append(enableString);
}
if(verbose) {
handleVerbosity(*verbose, "--verbose", config, expectedTask);
}
if(jobs) {
REQUIRE(config.add("jobs", std::to_string(*jobs)));
expectedTask.append({"-j", std::to_string(*jobs)});
} else {
const std::string defaultNumberOfJobs{"1"};
expectedTask.append({"-j", defaultNumberOfJobs});
}
if(commandLine) {
handleCommandLine(*commandLine, config, expectedTask);
}
if(srcDir) {
REQUIRE(config.add("src-dir"));
for_each(srcDir->begin(), srcDir->end(),
[&config, &expectedTask](const auto& src) {
REQUIRE(config.add("src-dir", src.string()));
expectedTask.append(src.string());
});
} else {
expectedTask.append(".");
}
ExecutorStub::TaskQueue expectedTasks =
getExpectedTasks(expectedTask, patterns);
THEN_WHEN("We apply the plugin") {
bool returnCode = plugin.apply(task, config, patterns);
THEN_CHECK("It should succeed") { REQUIRE(returnCode); }
THEN_CHECK("It called the right commands") {
REQUIRE(expectedTasks == executor.getExecutedTasks());
}
}
});
}
} // namespace execHelper::plugins::test
<file_sep>#ifndef CONSOLE_LOGGER_INCLUDE
#define CONSOLE_LOGGER_INCLUDE
#include <boost/log/sinks/sync_frontend.hpp>
#include <boost/log/sinks/text_ostream_backend.hpp>
#include <boost/shared_ptr.hpp>
#include "logLevel.h"
#include "logMessage.h"
namespace execHelper {
namespace log {
/**
 * \brief Frontend wrapper for the logger that logs to the console
 */
class ConsoleLogger {
  public:
    /**
     * Create a new console logger
     *
     * \param[in] logStream The console stream to log to
     */
    explicit ConsoleLogger(std::ostream& logStream);

    /*! @copydoc config::Argv::Argv(const Argv&)
     */
    ConsoleLogger(const ConsoleLogger& other) = delete;

    /*! @copydoc config::Argv::Argv(Argv&&)
     */
    ConsoleLogger(ConsoleLogger&& other) = delete;

    ~ConsoleLogger();

    /*! @copydoc config::Argv::operator=(const Argv&)
     */
    ConsoleLogger& operator=(const ConsoleLogger& other) = delete;

    /*! @copydoc config::Argv::operator=(Argv&&)
     */
    ConsoleLogger& operator=(ConsoleLogger&& other) = delete;

    /**
     * Set the maximum severity of the messages to a channel to log
     *
     * \param[in] channel The channel for which to set the severity
     * \param[in] severity The maximum severity to set for the specified channel
     * \returns True If the severity was correctly set
     *          False Otherwise
     */
    bool setSeverity(const Channel& channel, LogLevel severity) noexcept;

  private:
    // Boost.Log channel/severity filter applied to the console sink.
    boost::log::expressions::channel_severity_filter_actor<Channel, LogLevel>
        m_logMessageFilter; //!< Sets the filters for channels and severities
    // Synchronous Boost.Log sink writing to the configured ostream.
    boost::shared_ptr<boost::log::sinks::synchronous_sink<
        boost::log::sinks::basic_text_ostream_backend<char>>>
        m_consoleSink; //!< The backend for this logger
};
} // namespace log
} // namespace execHelper
#endif /* CONSOLE_LOGGER_INCLUDE */
<file_sep>.. _exec-helper-plugins-clang-tidy:
Clang-tidy plugin
*****************
Description
===========
The clang-tidy plugin is used for executing the clang-tidy static code analysis tool.
Mandatory settings
==================
There are no mandatory settings for the clang-tidy plugin.
Optional settings
=================
The configuration of the clang-tidy plugin may contain the following settings:
.. program:: exec-helper-plugins-clang-tidy
.. include:: patterns.rst
.. include:: environment.rst
.. include:: command-line.rst
.. include:: working-dir.rst
.. describe:: sources
A list of sources that must be checked by the clang-tidy plugin. The sources may contain wildcards.
.. describe:: checks
A list of checks that should be enabled or disabled. Enabling or disabling checks is done the same way as they are enabled on the clang-tidy command line. Default: no checks will be enabled or disabled on the command line, meaning the default checks enabled by clang will be checked.
.. describe:: warning-as-errors
Treat warnings as errors.
The value associated with this key is either:
* A list of checks, defining which warnings will be treated as errors. See **checks** for the format.
* The single keyword *all*: all enabled checks will be treated as errors.
**Note**: This option is only supported if the clang-tidy binary supports the :code:`-warnings-as-errors=<string>` option.
Example
=======
Configuration
-------------
.. literalinclude:: ../examples/clang-tidy.example
:language: yaml
Additional files
----------------
In order for the above example to work, the following files need to be created in the *src* directory:
*hello.cpp:*
.. literalinclude:: ../examples/src/hello.cpp
:language: cpp
*world.cpp:*
.. literalinclude:: ../examples/src/world.cpp
:language: cpp
Usage
-----
Save the example to an :program:`exec-helper` configuration file and execute in the same directory:
.. code-block:: bash
eh example
See also
========
See :ref:`exec-helper` (1) for information about the usage of :program:`exec-helper`.
See :ref:`exec-helper-config` (5) for information about the configuration file format.
See :ref:`exec-helper-plugins` (5) for information about the configuration file format.
<file_sep>#ifndef __PATTERNS_HANDLER_H__
#define __PATTERNS_HANDLER_H__
#include <map>
#include <optional>
#include "config/variablesMap.h"
#include "pattern.h"
namespace execHelper::config {
/**
 * \brief Handles the patterns it is given
 *
 * Stores patterns keyed by their PatternKey and offers lookup, registration
 * and conversion helpers between variables maps and patterns.
 */
class PatternsHandler {
  private:
    using PatternCollection = std::map<PatternKey, Pattern>;

  public:
    PatternsHandler() = default;

    /*! @copydoc config::Argv::Argv(const Argv&)
     */
    explicit PatternsHandler(const Patterns& other);

    /*! @copydoc config::Argv::Argv(Argv&&)
     */
    explicit PatternsHandler(Patterns&& other) noexcept;

    /*! @copydoc config::Argv::operator==(const config::Argv& other) const
     */
    auto operator==(const PatternsHandler& other) const noexcept -> bool;

    /*! @copydoc config::Argv::operator!=(const config::Argv& other) const
     */
    auto operator!=(const PatternsHandler& other) const noexcept -> bool;

    /**
     * Returns whether the pattern associated with the given key is registered
     *
     * \param[in] key The key of the pattern
     * \returns True    If the handler handles the pattern
     *          False   Otherwise
     */
    [[nodiscard]] auto contains(const PatternKey& key) const noexcept -> bool;

    /**
     * Registers a pattern with the handler
     *
     * \param[in] pattern The pattern to register
     */
    void addPattern(const Pattern& pattern) noexcept;

    /**
     * Returns the pattern associated with the given key
     *
     * \pre \ref contains(const PatternKey&) const for the given key returns true
     *      (the function is noexcept, so looking up an unregistered key is
     *      not recoverable)
     *
     * \param[in] key The key of the pattern
     * \returns The found pattern
     */
    auto getPattern(const PatternKey& key) const noexcept -> const Pattern&;

    /**
     * Returns the default pattern map for a pattern
     *
     * \param[in] key The root key to use for the newly created variables map
     * \returns The default variables map
     */
    static auto getDefaultPatternMap(const PatternKey& key) noexcept
        -> config::VariablesMap;

    /**
     * Converts the given patternMap to a pattern
     *
     * \param[in] key The key associated with the pattern
     * \param[in] patternMap The variables map containing the values for the
     * pattern to create
     * \returns The created pattern if the variables map is valid,
     *          std::nullopt otherwise
     */
    static auto toPattern(const PatternKey& key,
                          const config::VariablesMap& patternMap) noexcept
        -> std::optional<Pattern>;

  private:
    PatternCollection m_patterns;
};
} // namespace execHelper::config
#endif /* __PATTERNS_HANDLER_H__ */
<file_sep>CXX=g++
CXXFLAGS+=-O0 -g --coverage
LDFLAGS+=
SRC_DIR=src
BUILD_DIR=build
hello:
mkdir -p $(BUILD_DIR)
$(CXX) $(CXXFLAGS) $(LDFLAGS) -o $(BUILD_DIR)/hello $(SRC_DIR)/hello.cpp
world:
mkdir -p $(BUILD_DIR)
$(CXX) $(CXXFLAGS) $(LDFLAGS) -o $(BUILD_DIR)/world $(SRC_DIR)/world.cpp
clean:
rm -rf $(BUILD_DIR)
.PHONY: clean
<file_sep>#ifndef __COMPILER_STUB_H__
#define __COMPILER_STUB_H__

#include "core/compiler.h"

namespace execHelper {
namespace core {
namespace test {

/**
 * \brief Minimal Compiler implementation for use in tests.
 */
class CompilerStub : public Compiler {
  public:
    // Identifies itself to the Compiler base class as "compilerStub"
    CompilerStub() : Compiler("compilerStub") {}
};

} // namespace test
} // namespace core
} // namespace execHelper

#endif /* __COMPILER_STUB_H__ */
<file_sep>Working directory
*****************
.. highlight:: gherkin
.. literalinclude:: settings-file-location.feature
<file_sep>.. _INSTALL:
Installation instructions
*************************
Installing from package
=======================
Arch Linux based distributions
------------------------------
Arch linux users can use the `exec-helper (AUR) package <https://aur.archlinux.org/packages/exec-helper>`_ or checkout the `exec-helper-package <https://github.com/bverhagen/exec-helper-package>`_ project for building the package from source.
Debian based distributions
--------------------------
Add the `PPA on Launchpad <https://launchpad.net/~bverhagen/+archive/ubuntu/exec-helper>`_ to your sources or checkout the `exec-helper-package <https://github.com/bverhagen/exec-helper-package>`_ project for building the package from source.
Other distributions
-------------------
Checkout the `Building from source`_ section.
Building from source
====================
Requirements
------------
Build tools
~~~~~~~~~~~
* A C++ 17 compatible compiler. Tested with: *g++*, *clang++* and MSVC 2017 (14.1)
* cmake (3.15 or newer)
* make or ninja
* make for the quick install
* Sphinx for generating man-pages and general documentation
* Doxygen (1.8.15 or newer) for building API documentation (optional)
* gitchangelog for building the changelog (optional)
Build dependencies
~~~~~~~~~~~~~~~~~~
* POSIX compliant operating system
* `boost-program-options <https://github.com/boostorg/program_options>`_ (1.64 or newer) development files
* `boost-log <https://github.com/boostorg/log>`_ (1.64 or newer) development files
* `yaml-cpp <https://github.com/jbeder/yaml-cpp>`_ (0.5.3 or newer) development files (optional, will be downloaded and compiled in statically if missing)
* `Microsoft gsl <https://github.com/Microsoft/GSL>`_ development files (optional, will be downloaded and compiled in statically if missing)
* `luaJIT <https://luajit.org/>`_ development files (optional, will be downloaded and compiled in statically if missing)
Quick installation
------------------
.. code-block:: sh
$ make
$ sudo make install
Use
.. code-block:: sh
$ make help
for an overview of the available quick installation targets and for an overview of the (very limited) available configuration options.
Advanced installation
---------------------
CMake is the build system. The *Makefile* mentioned in the quick installation is a simple wrapper around a more complex - and more configurable - CMake invocation.
It has the following project-specific configuration options:
.. describe:: USE_SYSTEM_YAML_CPP
Use the `yaml-cpp <https://github.com/jbeder/yaml-cpp>`_ library installed on the system. If switched off, the library will be automatically installed locally during the build. Default: *auto*
.. describe:: USE_SYSTEM_LUAJIT
Use the `luaJIT <https://luajit.org/>`_ library installed on the system. If switched off, the library will be automatically installed locally during the build. Default: *auto*
.. describe:: USE_SYSTEM_GSL
    Use the `Microsoft gsl <https://github.com/Microsoft/GSL>`_ library installed on the system. If switched off, the library will be automatically installed locally during the build. Default: *auto*
.. describe:: POSITION_INDEPENDENT_CODE
Build using `position independent code <https://cmake.org/cmake/help/latest/variable/CMAKE_POSITION_INDEPENDENT_CODE.html>`_. Default: *ON*
.. describe:: ENABLE_TESTING
Enable building the testing infrastructure. Default: *ON*
.. describe:: BUILD_MAN_DOCUMENTATION
Generate the man-pages for this project
.. describe:: BUILD_HTML_DOCUMENTATION
Generate the HTML documentation for this project
.. describe:: BUILD_XML_DOCUMENTATION
Generate the XML documentation for this project
Build tests
-----------
Testing is enabled by setting the CMake configuration option *ENABLE_TESTING* to *ON*.
The tests require, in addition to all dependencies above, the following dependencies:
* `Catch2 <https://github.com/catchorg/Catch2>`_ unittest framework development files (optional, for building the tests)
* `Rapidcheck <https://github.com/emil-e/rapidcheck>`_ property based unittest framework development files (optional, for building the tests)
Testing related configuration options:
.. describe:: ENABLE_WERROR
Enable warning as error during compilation (only supported for *GCC* and *clang*)
.. describe:: LIMITED_OPTIMIZATION
Build with limited optimization (typically -O1, only supported for *GCC* and *clang*). This is typically used for running tools like valgrind.
.. describe:: TERMINATE_ON_ASSERT_FAILURE
Explicitly terminate when an assert fires.
.. describe:: USE_SYSTEM_CATCH
Use the `Catch2 <https://github.com/catchorg/Catch2>`_ library installed on the system. If switched off, the library will be automatically installed locally during the build. Default: *auto*
.. describe:: USE_SYSTEM_RAPIDCHECK
Use the `Rapidcheck <https://github.com/emil-e/rapidcheck>`_ library installed on the system. If switched off, the library will be automatically installed locally during the build. Default: *auto*
Cross compilation
=================
Exec-helper supports both native and cross compilation (including building with a custom sysroot) builds. Cross compilation requires invoking cmake directly and appending **-DCMAKE_TOOLCHAIN_FILE=<toolchain-file>** to the cmake initialization command. Check the *toolchain.cmake.in* file for a template on setting up the toolchain file for cross compilation and the *Makefile* for a template of the cmake initialization command.
<file_sep>#include "executionHandler.h"
#include <cassert>
using std::move;
using std::string;
using gsl::not_null;
namespace execHelper {
namespace test {
namespace baseUtils {
/**
 * Registers the given execution content under the given key.
 *
 * \param[in] key The key to register the content under
 * \param[in] content The content to register; moved into the handler
 */
void ExecutionHandler::add(const string& key,
                           ExecutionContent&& content) noexcept {
    // Note: emplace does not overwrite an existing entry — if the key is
    // already registered, the new content is silently discarded
    m_outputs.emplace(key, move(content));
}
/**
 * Returns the execution content registered under the given key.
 *
 * \pre An entry for \a key was previously registered via add(). This is
 *      asserted in debug builds; in release builds an unknown key would make
 *      std::map::at throw inside a noexcept function, terminating the program.
 *
 * \param[in] key The key to look up
 * \returns The registered execution content
 */
const ExecutionContent& ExecutionHandler::at(const string& key) const noexcept {
    assert(m_outputs.count(key) > 0U);
    return m_outputs.at(key);
}
ExecutionHandler::ExecutionHandlerIterationRAII
ExecutionHandler::startIteration() noexcept {
return ExecutionHandlerIterationRAII(
gsl::not_null<ExecutionContentCollection*>(&m_outputs));
}
ExecutionHandler::ExecutionHandlerIterationRAII::ExecutionHandlerIterationRAII(
not_null<ExecutionContentCollection*> outputs)
: m_outputs(outputs) {
;
}
ExecutionHandler::ExecutionHandlerIterationRAII::
~ExecutionHandlerIterationRAII() {
for(auto& output : *m_outputs) {
output.second.clear();
}
}
} // namespace baseUtils
} // namespace test
} // namespace execHelper
<file_sep>#ifndef ADD_TO_CONFIG_INCLUDE
#define ADD_TO_CONFIG_INCLUDE
#include <optional>
#include <stdexcept>
#include <string>
#include <utility>
#include <vector>
#include <gsl/pointers>
#include "config/variablesMap.h"
#include "base-utils/nonEmptyString.h"
namespace execHelper::test {
/**
 * Adds the given string value under the given key to the given configuration.
 *
 * \param[in] key The settings key to add the value under
 * \param[in] value The value to associate with the key
 * \param[in] config The configuration to add the key/value pair to
 * \throws std::runtime_error If the key/value pair could not be added
 */
inline void
addToConfig(const execHelper::config::SettingsKeys& key,
            const std::string& value,
            gsl::not_null<execHelper::config::VariablesMap*> config) {
    if(!config->add(key, value)) {
        throw std::runtime_error("Failed to add key " + key.back() +
                                 " with value '" + value + "' to config");
    }
}
inline void
addToConfig(const execHelper::config::SettingsKeys& key,
const NonEmptyString& value,
gsl::not_null<execHelper::config::VariablesMap*> config) {
if(!config->add(key, *value)) {
throw std::runtime_error("Failed to add key " + key.back() +
" with value '" + *value + "' to config");
}
}
template <typename T>
inline void
addToConfig(const execHelper::config::SettingsKeys& key,
const std::vector<T>& value,
gsl::not_null<execHelper::config::VariablesMap*> config) {
if(!config->add(key, value)) {
throw std::runtime_error("Failed to add key " + key.back() +
" with first value '" + value.front() +
"' to config");
}
}
/**
 * Adds the given boolean value under the given key to the given configuration.
 * Booleans are stored as the strings "yes" (true) and "no" (false).
 *
 * \param[in] key The settings key to add the value under
 * \param[in] value The boolean value to add
 * \param[in] config The configuration to add the key/value pair to
 * \throws std::runtime_error If the key/value pair could not be added
 */
inline void
addToConfig(const execHelper::config::SettingsKeys& key, bool value,
            gsl::not_null<execHelper::config::VariablesMap*> config) {
    if(value) {
        if(!config->add(key, "yes")) {
            throw std::runtime_error("Failed to add key " + key.back() +
                                     " with value 'yes' to config");
        }
    } else {
        // Bug fix: "no" used to be added unconditionally, so a true value was
        // written as "yes" AND "no". The error messages now also report the
        // actual stored values instead of 'true'/'false'.
        if(!config->add(key, "no")) {
            throw std::runtime_error("Failed to add key " + key.back() +
                                     " with value 'no' to config");
        }
    }
}
/**
 * Adds the given key/value pair as a subkey of the given key to the given
 * configuration: value.first becomes the last element of the key and
 * value.second the associated value.
 *
 * \param[in] key The settings key to extend with the pair's first element
 * \param[in] value The (subkey, value) pair to add
 * \param[in] config The configuration to add the key/value pair to
 * \throws std::runtime_error If the key/value pair could not be added
 */
inline void
addToConfig(execHelper::config::SettingsKeys key,
            const std::pair<std::string, std::string>& value,
            gsl::not_null<execHelper::config::VariablesMap*> config) {
    key.push_back(value.first);
    if(!config->add(key, value.second)) {
        // Message fix: this overload adds a single value, so drop the
        // copy-pasted "first value" wording from the vector overload
        throw std::runtime_error("Failed to add key " + value.first +
                                 " with value '" + value.second +
                                 "' to config");
    }
}
template <typename T>
inline void
addToConfig(const execHelper::config::SettingsKeys& key,
const std::optional<T>& value,
gsl::not_null<execHelper::config::VariablesMap*> config) {
if(value) {
addToConfig(key, *value, config);
}
}
template <typename T>
inline void
addToConfig(const execHelper::config::SettingsKey& key, const T& value,
gsl::not_null<execHelper::config::VariablesMap*> config) {
addToConfig(execHelper::config::SettingsKeys({key}), value, config);
}
} // namespace execHelper::test
#endif /* ADD_TO_CONFIG_INCLUDE */
<file_sep>#ifndef LOG_LEVEL_INCLUDE
#define LOG_LEVEL_INCLUDE
#include <string_view>
#include <vector>
namespace execHelper {
namespace log {
/**
 * \brief List of the potential log levels
 *
 * The enumerators are declared from least to most severe, with 'all' and
 * 'none' acting as sentinel values for enabling respectively disabling all
 * logging.
 */
enum LogLevel {
    all, // Use this to enable all logging
    test, // Use this for printing (debug) output in tests
    trace,
    debug,
    info,
    warning,
    error,
    fatal,
    none // Use this to disable logging
};

/**
 * Convert the given log level string to a log level
 *
 * \param[in] level The log level string to convert
 * \returns The associated log level
 * \throws InvalidLogLevel If the given level string is not associated with any
 * log level
 */
LogLevel toLogLevel(std::string_view level);

/**
 * Get all available log levels. This is convenient for looping over all values in the enumeration.
 *
 * \returns A collection of all log levels, sorted from most restrictive to least restrictive
 */
const std::vector<LogLevel>& getLogLevels();

/**
 * Convert the log level to a string
 *
 * \param[in] level The level to convert
 * \returns The string associated with the log level
 */
std::string_view toString(LogLevel level) noexcept;

/**
 * Stream the string representation of the given log level to the given stream
 *
 * \param[in] os The stream to write to
 * \param[in] level The log level to stream
 * \returns The given stream
 */
std::ostream& operator<<(std::ostream& os, LogLevel level) noexcept;

/**
 * \brief Thrown if an unknown log level is used
 */
struct InvalidLogLevel : public std::exception {};
} // namespace log
} // namespace execHelper
#endif /* LOG_LEVEL_INCLUDE */
<file_sep>_eh_complete() {
COMPREPLY=()
local word="${COMP_WORDS[COMP_CWORD]}"
local completions="$(exec-helper --auto-complete "$word")"
COMPREPLY=( $(compgen -W "$completions" -- "$word") )
}
complete -f -F _eh_complete exec-helper
complete -f -F _eh_complete eh
<file_sep>#ifndef __ARGV_H__
#define __ARGV_H__
#include <cstddef>
#include <string>
#include <vector>
namespace execHelper::config {
/**
* \brief Wraps the default arguments when calling other programs
*/
class Argv {
public:
/**
* Create an Argv object using the default argc and argv input argument
* combo
*
* \param[in] argc The number of entries present in the argv argument
* \param[in] argv An array of input arguments. Each array element is
* considered a separate argument.
*/
Argv(int argc, const char* const* argv) noexcept;
/**
* Convert task to the argv/argc structure
*
* \param[in] task The task to take the arguments from
*/
explicit Argv(const std::vector<std::string>& task) noexcept;
/**
* Copy constructor
*
* \param[in] other The other object to copy from
*/
Argv(const Argv& other) noexcept;
/**
* Move constructor
*
* \param[in] other The other object to copy from
*/
Argv(Argv&& other) noexcept;
~Argv() noexcept;
/**
* Copy assignment operator
*
* \param[in] other The other object to assign from
* \returns A reference to this object
*/
auto operator=(const Argv& other) noexcept -> Argv&;
/**
* Move assignment operator
*
* \param[in] other The other object to assign from
* \returns A reference to this object
*/
auto operator=(Argv&& other) noexcept -> Argv&;
/**
* Equality operator
*
* \param[in] other The other object to compare with
* \returns True If the objects are considered equal
* False Otherwise
*/
auto operator==(const Argv& other) const noexcept -> bool;
/**
* Inequality operator
*
* \param[in] other The other object to compare with
* \returns !operator==(other)
*/
auto operator!=(const Argv& other) const noexcept -> bool;
/**
* Access operator
*
* \param[in] index The index of the element to access
* \returns A C-style string
*/
auto operator[](size_t index) const noexcept -> char*;
/**
* Swap contents
*
* \param[in] other The other object to swap with
*/
void swap(Argv& other) noexcept;
/**
* Clear all content
*/
void clear() noexcept;
/**
* Getter for the argc argument
*
* \returns The total number of arguments
*/
[[nodiscard]] auto getArgc() const noexcept -> size_t;
/**
* Getter for the argv argument
*
* \returns An array of pointers to C-style strings of length \ref getArgc()
*/
[[nodiscard]] auto getArgv() noexcept -> char**;
/*! @copydoc getArgv()
*/
[[nodiscard]] auto getArgv() const noexcept -> const char* const*;
private:
using Argv_t = std::vector<char*>;
/**
* Create a deep copy
*
* \param[in] other The other object to copy
*/
void deepCopy(const Argv& other) noexcept;
Argv_t m_argv;
};
/**
* Streaming operator
*
* \param[in] os The stream to stream to
* \param[in] argv The object to stream
* \returns The stream that was streamed to
*/
auto operator<<(std::ostream& os, const Argv& argv) noexcept -> std::ostream&;
} // namespace execHelper::config
#endif /* __ARGV_H__ */
<file_sep>#include "logger.h"
BOOST_LOG_GLOBAL_LOGGER_INIT( // NOLINT(modernize-use-trailing-return-type)
exec_helper_config_logger, execHelper::log::LoggerType) {
return execHelper::log::LoggerType(boost::log::keywords::channel =
LOG_CHANNEL);
}
<file_sep>#ifndef YAML_INCLUDE
#define YAML_INCLUDE
#include <yaml-cpp/yaml.h>
namespace execHelper {
namespace test {
namespace baseUtils {
using YamlReader = YAML::Node;
using YamlWriter = YAML::Node;
} // namespace baseUtils
} // namespace test
} // namespace execHelper
#endif /* YAML_INCLUDE */
<file_sep>#ifndef LOGGER_INCLUDE
#define LOGGER_INCLUDE
#include <gsl/string_span>
#include "log/log.h"
BOOST_LOG_GLOBAL_LOGGER(
exec_helper_yaml_logger,
execHelper::log::LoggerType); // NOLINT(modernize-use-using)
static gsl::czstring<> LOG_CHANNEL = "yaml";
#define LOG(x) \
BOOST_LOG_STREAM_CHANNEL_SEV(exec_helper_yaml_logger::get(), LOG_CHANNEL, \
execHelper::log::x) \
<< boost::log::add_value(fileLog, __FILE__) \
<< boost::log::add_value(lineLog, __LINE__)
#endif /* LOGGER_INCLUDE */
<file_sep>#ifndef DIRECT_STATEMENT_INCLUDE
#define DIRECT_STATEMENT_INCLUDE
#include <iostream>
#include "plugins.h"
#include "yaml.h"
namespace execHelper {
namespace test {
namespace baseUtils {
template <typename Plugin> class DirectStatement : public Statement {
public:
DirectStatement(ReturnCode returnCode) : Statement(returnCode) { ; }
virtual ~DirectStatement() = default;
StatementKey getKey() const noexcept override { return Plugin::getKey(); }
void write(gsl::not_null<YamlWriter*> yaml,
const std::string& command) const noexcept override {
try {
(*yaml)[getKey()][command][getStatementKey()] = getStatement();
} catch(const YAML::InvalidNode&) {
std::cerr << "Somehow the used YAML node is invalid" << std::endl;
assert(false);
}
};
private:
StatementCollection getStatement() const noexcept {
return m_execution.getConfigCommand();
};
std::string getStatementKey() const noexcept {
return Plugin::getStatementKey();
}
};
using SimpleStatement = DirectStatement<CommandLineCommandSettings>;
} // namespace baseUtils
} // namespace test
} // namespace execHelper
#endif /* DIRECT_STATEMENT_INCLUDE */
<file_sep>#ifndef UNITTEST_RAPIDCHECK_INCLUDE
#define UNITTEST_RAPIDCHECK_INCLUDE
#include <string>
#include <utility>
#include <rapidcheck.h>
#include "catch.h"
namespace execHelper {
namespace test {
template <typename Testable>
void propertyTest(const std::string& description, Testable&& testable) {
GIVEN(description) {
const auto result =
rc::detail::checkTestable(std::forward<Testable>(testable));
if(result.template is<rc::detail::SuccessResult>()) {
const auto success =
result.template get<rc::detail::SuccessResult>();
if(!success.distribution.empty()) {
std::cout << "- " << description << std::endl;
rc::detail::printResultMessage(result, std::cout);
std::cout << std::endl;
}
} else {
std::ostringstream ss;
rc::detail::printResultMessage(result, ss);
INFO(ss.str() << "\n");
FAIL();
}
}
}
} // namespace test
} // namespace execHelper
#endif /* UNITTEST_RAPIDCHECK_INCLUDE */
<file_sep>.. _exec-helper-plugins-valgrind:
Valgrind plugin
***************
Description
===========
The valgrind plugin is used for executing dynamic analysis (such as memory error detection) using valgrind.
Mandatory settings
==================
The configuration of the valgrind plugin must contain the following settings:
.. program:: exec-helper-plugins-valgrind
.. describe:: run-command
The exec-helper command or plugin to use for running the binaries which need to be analyzed.
Optional settings
=================
The configuration of the valgrind plugin may contain the following settings:
.. program:: exec-helper-plugins-valgrind
.. include:: patterns.rst
.. include:: command-line.rst
.. describe:: tool
The valgrind tool to use. Default: the :code:`tool` is omitted.
Example
=======
Configuration
-------------
.. literalinclude:: ../examples/valgrind.example
:language: yaml
Additional files
----------------
In order for the above example to work, the following file hierarchy needs to be created in the directory:
*Makefile*:
.. literalinclude:: ../examples/Makefile
:language: none
*hello.cpp:*
.. literalinclude:: ../examples/src/hello.cpp
:language: cpp
*world.cpp:*
.. literalinclude:: ../examples/src/world.cpp
:language: cpp
Usage
-----
Save the example to an :program:`exec-helper` configuration file and execute in the same directory:
.. code-block:: bash
eh example
.. include:: see-also.rst
<file_sep>#include "tmpFile.h"
#include <algorithm>
#include <cassert>
#include <filesystem>
#include <fstream>
#include <iostream>
#include <string>
#include <system_error>
#include "base-utils/generateRandom.h"
using std::count;
using std::error_code;
using std::fstream;
using std::string;
using execHelper::test::baseUtils::generateRandomChar;
using execHelper::test::baseUtils::Path;
namespace filesystem = std::filesystem;
namespace {
/**
 * Since valgrind reports boost::filesystem::unique_path to leak, we implement
 * our own version
 *
 * Replaces every '%' character in the filename component of the given model
 * with a randomly generated character.
 *
 * \param[in] model The path model; note that only its filename component is
 *                  used — any directory part of the model is discarded
 * \returns The generated (relative) path
 */
inline Path unique_path(const Path& model) {
    string resultPathName = model.filename().string();
    // Generate one random replacement character per '%' placeholder
    auto nbOfReplacements =
        count(resultPathName.begin(), resultPathName.end(), '%');
    auto replacements = generateRandomChar(nbOfReplacements);

    // Substitute each '%' in order with the next random character
    size_t pos = 0U;
    auto index = 0U;
    while((pos = resultPathName.find('%', pos)) != std::string::npos) {
        assert(index < replacements.size());
        resultPathName.replace(pos, 1U, string(1U, replacements[index]));
        ++pos;
        ++index;
    }
    return resultPathName;
}
} // namespace
namespace execHelper {
namespace test {
namespace baseUtils {
TmpFile::TmpFile(const string& model)
: m_path(filesystem::temp_directory_path() / ::unique_path(model)) {
;
}
TmpFile::~TmpFile() noexcept {
if(exists()) {
remove(m_path);
}
}
bool TmpFile::exists() const noexcept { return is_regular_file(m_path); }
/**
 * Creates the file with the given content, creating any missing parent
 * directories first and truncating an already-existing file.
 *
 * \param[in] content The content to write to the file
 * \returns Always true. NOTE(review): stream failures are not checked, so a
 *          failed open/write is still reported as success — confirm whether
 *          callers rely on this best-effort behaviour.
 */
bool TmpFile::create(const std::string& content) const noexcept {
    createDirectories();
    fstream file;
    file.open(m_path, fstream::out | fstream::trunc);
    file << content;
    file.close();
    return true;
}
/**
 * Creates the parent directories of the file path.
 *
 * \returns True    If no error occurred (the parent directories exist after
 *                  this call)
 *          False   Otherwise
 */
bool TmpFile::createDirectories() const noexcept {
    error_code error;
    create_directories(m_path.parent_path(), error);
    // Bug fix: std::error_code converts to true when an error OCCURRED, so
    // 'return error.operator bool()' reported failure as success and vice
    // versa. Return success instead, consistent with create().
    return !error;
}
Path TmpFile::getPath() const noexcept { return m_path; }
std::string TmpFile::toString() const noexcept { return m_path.string(); }
std::string TmpFile::getFilename() const noexcept {
return m_path.filename().string();
}
std::string TmpFile::getParentDirectory() const noexcept {
return m_path.parent_path().string();
}
void TmpFile::clear() noexcept { m_path.clear(); }
} // namespace baseUtils
} // namespace test
} // namespace execHelper
<file_sep>set(PKG_NAME Luawrapper)
find_package(PkgConfig QUIET)
if(${PKG_CONFIG_FOUND})
pkg_check_modules(${PKG_NAME} luawrapper)
endif()
# Attempt to find it if not configured in pkgconfig
if(NOT ${PKG_NAME}_FOUND)
MESSAGE(STATUS "Looking manually")
find_path(${PKG_NAME}_INCLUDE_DIRS NAMES LuaContext.hpp PATH_SUFFIXES luawrapper)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(${PKG_NAME}
FOUND_VAR ${PKG_NAME}_FOUND
REQUIRED_VARS ${PKG_NAME}_INCLUDE_DIRS
)
mark_as_advanced(${PKG_NAME}_INCLUDE_DIRS)
endif()
<file_sep>#include "commandPlugin.h"
#include <gsl/string_span>
#include "config/variablesMap.h"
#include "log/assertions.h"
#include "commandLine.h"
#include "executePlugin.h"
#include "logger.h"
#include "pluginUtils.h"
using std::string;
using gsl::czstring;
using execHelper::config::CommandCollection;
using execHelper::config::FleetingOptionsInterface;
using execHelper::config::Patterns;
using execHelper::config::VariablesMap;
using execHelper::core::Task;
namespace {
const czstring<> PLUGIN_NAME = "commands";
} // namespace
namespace execHelper::plugins {
auto CommandPlugin::getVariablesMap(
const FleetingOptionsInterface& fleetingOptions) const noexcept
-> VariablesMap {
VariablesMap defaults(PLUGIN_NAME);
if(!defaults.add(PLUGIN_NAME, fleetingOptions.getCommands())) {
LOG(error) << "Failed to add key '" << PLUGIN_NAME << "'";
}
return defaults;
}
/**
 * Executes every command listed under the 'commands' key of the given
 * variables map by delegating to ExecutePlugin.
 *
 * \pre The 'commands' key is present in \a variables (asserted via ensures;
 *      getVariablesMap() is presumably used to seed it — confirm at callers)
 * \param[in] task The task to pass on to the delegated commands
 * \param[in] variables The variables map containing the 'commands' key
 * \param[in] patterns The patterns to apply
 * \returns The result of delegating to ExecutePlugin
 */
auto CommandPlugin::apply(Task task, const VariablesMap& variables,
                          const Patterns& patterns) const noexcept -> bool {
    ensures(variables.get<CommandCollection>(PLUGIN_NAME) != std::nullopt);
    auto commands = *(variables.get<CommandCollection>(PLUGIN_NAME));

    // Delegate the actual execution of each configured command
    ExecutePlugin executePlugin(commands);
    return executePlugin.apply(task, variables, patterns);
}
auto CommandPlugin::summary() const noexcept -> std::string {
return "Command (internal)";
}
} // namespace execHelper::plugins
<file_sep>#include "yaml.h"
#include <iostream>
#include "logger.h"
using std::initializer_list;
using std::string;
using std::vector;
using execHelper::config::Path;
using execHelper::config::SettingsNode;
namespace execHelper::yaml {
Yaml::Yaml(const Path& file) : m_yaml(file) { ; }
Yaml::Yaml(const string& yamlConfig) : m_yaml(yamlConfig) { ; }
/**
 * Returns the value found under the given sequence of hierarchical keys.
 *
 * \param[in] keys The keys to look up, from outermost to innermost
 * \returns The value as a string, or an empty string if the YAML parser
 *          throws (the error is logged, not propagated)
 */
auto Yaml::getValue(const initializer_list<string>& keys) -> string {
    try {
        return m_yaml.get<string>(keys);
    } catch(YAML::Exception& e) {
        LOG(error) << "Yaml parser threw an exception: " << e.what();
        return "";
    }
}
auto Yaml::getValueCollection(const initializer_list<string>& keys)
-> vector<string> {
try {
return m_yaml.get<vector<string>>(keys);
} catch(YAML::Exception& e) {
LOG(error) << "Yaml parser threw an exception: " << e.what();
return vector<string>();
}
}
auto Yaml::getTree(const initializer_list<string>& keys,
SettingsNode* settings) const noexcept -> bool {
return m_yaml.getTree(keys, settings);
}
} // namespace execHelper::yaml
<file_sep>#include "indirectStatement.h"
#include <iostream>
using std::cerr;
using std::endl;
using std::shared_ptr;
using gsl::not_null;
namespace execHelper {
namespace test {
namespace baseUtils {
IndirectStatement::IndirectStatement(StatementKey key,
Statements initialStatements)
: Statement(SUCCESS),
m_key(std::move(key)),
m_statements(std::move(initialStatements)) {
;
}
void IndirectStatement::add(shared_ptr<Statement> statement) noexcept {
m_statements.emplace_back(statement);
}
unsigned int IndirectStatement::getNumberOfExecutions() const noexcept {
return accumulate(m_statements.begin(), m_statements.end(), 0U,
[](const unsigned int& subTotal,
const std::shared_ptr<Statement>& statement) {
return subTotal + statement->getNumberOfExecutions();
});
};
void IndirectStatement::resetExecutions() noexcept {
for(auto& statement : m_statements) {
statement->resetExecutions();
}
}
StatementKey IndirectStatement::getKey() const noexcept { return m_key; }
void IndirectStatement::write(not_null<YamlWriter*> yaml,
const std::string& /*command*/) const noexcept {
try {
if(!(*yaml)[m_key]) {
for(const auto& statement : m_statements) {
(*yaml)[m_key].push_back(statement->getKey());
statement->write(yaml, m_key);
}
}
} catch(const YAML::InvalidNode&) {
cerr << "The given YAML node is invalid" << endl;
assert(false);
}
};
} // namespace baseUtils
} // namespace test
} // namespace execHelper
<file_sep>#include <algorithm>
#include <filesystem>
#include "base-utils/generateRandom.h"
#include "base-utils/tmpFile.h"
#include "config/pathManipulation.h"
#include "unittest/catch.h"
using std::reverse;
using std::string;
using execHelper::test::baseUtils::generateRandomString;
using execHelper::test::baseUtils::TmpFile;
namespace filesystem = std::filesystem;
namespace execHelper::config::test {
SCENARIO("Test listing the parent paths", "[config][path-manipulation]") {
GIVEN("A list of parent paths ordered from the root directory to the "
"longest path") {
const uint8_t NB_OF_SUBDIRECTORIES = 8U;
Paths pathsUnderTest;
pathsUnderTest.reserve(NB_OF_SUBDIRECTORIES);
pathsUnderTest.emplace_back(
Path("/")); // Make sure to make an absolute path
for(uint8_t i = 1U; i < NB_OF_SUBDIRECTORIES; ++i) {
Path additionalPath =
pathsUnderTest.back() / generateRandomString(i);
pathsUnderTest.emplace_back(additionalPath);
}
WHEN("We request the parent paths for the longest generated path") {
auto result = getAllParentDirectories(pathsUnderTest.back());
THEN("We should find the paths under test in reverse order") {
Paths actualPaths = pathsUnderTest;
reverse(actualPaths.begin(), actualPaths.end());
REQUIRE(result == actualPaths);
}
}
}
GIVEN("The root directory") {
Path rootPath("/");
WHEN("We request the parent paths") {
auto result = getAllParentDirectories(rootPath);
THEN("We should find the rootPath once") {
const Paths actualPaths({rootPath});
REQUIRE(result == actualPaths);
}
}
}
GIVEN("A relative path") {
constexpr uint8_t stringLength = 6U;
Path relativePath(generateRandomString(stringLength));
WHEN("We request the parent paths") {
auto result = getAllParentDirectories(relativePath);
THEN("We should find all the parent directories of the current "
"path and the relative path on top of the current path") {
Paths actualPaths =
getAllParentDirectories(filesystem::current_path());
actualPaths.insert(actualPaths.begin(),
filesystem::absolute(relativePath));
REQUIRE(result == actualPaths);
}
}
}
GIVEN("A path to a file") {
TmpFile file;
file.create();
WHEN("We request the parent paths") {
auto result = getAllParentDirectories(file.getPath());
THEN("We should find only the parent directories") {
const Paths actualPaths =
getAllParentDirectories(file.getParentDirectory());
REQUIRE(result == actualPaths);
}
}
}
}
SCENARIO("Test getting the home directory", "[config][path-manipulation]") {
GIVEN("An environment where the home directory is set") {
const std::string HOME_KEY("HOME");
EnvironmentCollection env;
constexpr uint8_t stringLength = 6U;
env.emplace(HOME_KEY, generateRandomString(stringLength));
WHEN("We request the home directory") {
auto result = getHomeDirectory(env);
THEN("The result should not be none") {
REQUIRE(result != std::nullopt);
}
THEN("The result should be equal to the chosen home directory") {
REQUIRE(result.value() == env.at(HOME_KEY));
}
}
}
GIVEN("An environment where the home directory is not set") {
EnvironmentCollection env;
WHEN("We request the home directory") {
auto result = getHomeDirectory(env);
THEN("The result should be none") {
REQUIRE(result == std::nullopt);
}
}
}
}
} // namespace execHelper::config::test
<file_sep>#include <algorithm>
#include <cstdlib>
#include <filesystem>
#include <iomanip>
#include <memory>
#include <optional>
#include <stdexcept>
#include <string>
#include <utility>
#include <vector>
#include "commander/commander.h"
#include "config/argv.h"
#include "config/commandLineOptions.h"
#include "config/config.h"
#include "config/configFileSearcher.h"
#include "config/envp.h"
#include "config/fleetingOptions.h"
#include "config/fleetingOptionsInterface.h"
#include "config/optionDescriptions.h"
#include "config/pathManipulation.h"
#include "config/pattern.h"
#include "config/settingsNode.h"
#include "config/variablesMap.h"
#include "core/executorInterface.h"
#include "core/immediateExecutor.h"
#include "core/posixShell.h"
#include "core/reportingExecutor.h"
#include "core/task.h"
#include "log/assertions.h"
#include "log/log.h"
#include "plugins/commandLineCommand.h"
#include "plugins/commandPlugin.h"
#include "plugins/executePlugin.h"
#include "plugins/lcov.h"
#include "plugins/luaPlugin.h"
#include "plugins/memory.h"
#include "plugins/plugin.h"
#include "plugins/pluginUtils.h"
#include "plugins/pmd.h"
#include "plugins/valgrind.h"
#include "logger.h"
#include "version.h"
using std::make_pair;
using std::make_shared;
using std::make_unique;
using std::move;
using std::optional;
using std::runtime_error;
using std::setw;
using std::shared_ptr;
using std::string;
using std::stringstream;
using std::transform;
using std::vector;
using execHelper::commander::Commander;
using execHelper::config::APPEND_SEARCH_PATH_KEY;
using execHelper::config::AppendSearchPathOption_t;
using execHelper::config::ArgumentOption;
using execHelper::config::ArgumentOptions;
using execHelper::config::Argv;
using execHelper::config::AUTO_COMPLETE_KEY;
using execHelper::config::AutoCompleteOption_t;
using execHelper::config::COMMAND_KEY;
using execHelper::config::CommandCollection;
using execHelper::config::ConfigFileSearcher;
using execHelper::config::DRY_RUN_KEY;
using execHelper::config::DryRunOption_t;
using execHelper::config::EnvironmentCollection;
using execHelper::config::Envp;
using execHelper::config::FleetingOptions;
using execHelper::config::FleetingOptionsInterface;
using execHelper::config::getAllParentDirectories;
using execHelper::config::getHomeDirectory;
using execHelper::config::HELP_OPTION_KEY;
using execHelper::config::HelpOption_t;
using execHelper::config::JOBS_KEY;
using execHelper::config::JobsOption_t;
using execHelper::config::KEEP_GOING_KEY;
using execHelper::config::KeepGoingOption_t;
using execHelper::config::LIST_PLUGINS_KEY;
using execHelper::config::ListPluginsOption_t;
using execHelper::config::LOG_LEVEL_KEY;
using execHelper::config::LogLevelOption_t;
using execHelper::config::Option;
using execHelper::config::OptionDescriptions;
using execHelper::config::parseSettingsFile;
using execHelper::config::Path;
using execHelper::config::Paths;
using execHelper::config::Patterns;
using execHelper::config::PatternSettingsPair;
using execHelper::config::PatternValues;
using execHelper::config::SETTINGS_FILE_KEY;
using execHelper::config::SettingsFileOption_t;
using execHelper::config::SettingsNode;
using execHelper::config::SettingsValues;
using execHelper::config::VariablesMap;
using execHelper::config::VERBOSE_KEY;
using execHelper::config::VerboseOption_t;
using execHelper::config::VERSION_KEY;
using execHelper::config::VersionOption_t;
using execHelper::core::ExecutorInterface;
using execHelper::core::ImmediateExecutor;
using execHelper::core::PosixShell;
using execHelper::core::ReportingExecutor;
using execHelper::core::Shell;
using execHelper::core::Task;
using execHelper::log::LogLevel;
using execHelper::plugins::Plugin;
using execHelper::plugins::Plugins;
namespace filesystem = std::filesystem;
namespace {
vector<string> logModules({"log", "yaml", "config", "core", "plugins",
"commander", "application"});
const auto settingsFileOption = Option<SettingsFileOption_t>(
SETTINGS_FILE_KEY, {"s"}, "Set the settings file");
const auto commandOption =
Option<CommandCollection>(COMMAND_KEY, {"z"}, "Commands to execute");
/**
 * Aggregates the fully-resolved configuration state of an invocation: the
 * settings file that was found, the parsed command-line options, the parsed
 * settings tree and the declared patterns.
 *
 * Fix: the constructor takes its arguments by rvalue reference but previously
 * copied them into the members; they are now actually moved.
 */
struct ConfigOptions {
    ConfigOptions(Path&& settingsFile, FleetingOptions&& fleetingOptions,
                  SettingsNode&& settings, Patterns&& patterns) noexcept
        : settingsFile(std::move(settingsFile)),
          fleetingOptions(std::move(fleetingOptions)),
          settings(std::move(settings)),
          patterns(std::move(patterns)) {
        ;
    }
    Path settingsFile;              //!< The settings file that was located
    FleetingOptions fleetingOptions; //!< Parsed command-line options
    SettingsNode settings;          //!< Parsed settings tree
    Patterns patterns;              //!< Patterns declared in the settings file
};
/**
 * Converts a NULL-terminated envp array of "KEY=VALUE" entries into an
 * EnvironmentCollection. Each entry is split on its first '='; everything
 * after it (including later '=' characters) belongs to the value.
 */
inline EnvironmentCollection toEnvCollection(char** envp) {
    static const string DELIMITER("=");
    EnvironmentCollection collection;
    for(size_t idx = 0U; envp[idx] != nullptr; ++idx) {
        const string entry(envp[idx]);
        const size_t separatorPos = entry.find_first_of(DELIMITER);
        // Every well-formed environment entry must contain a delimiter
        ensures(separatorPos != entry.npos);
        collection.emplace(
            make_pair(entry.substr(0, separatorPos),
                      entry.substr(separatorPos + DELIMITER.size(),
                                   entry.npos)));
    }
    return collection;
}
/**
 * Builds the list of directories that are searched for a settings file: every
 * parent of the current working directory, followed by the user's home
 * directory (when the environment defines one).
 */
inline Paths getSearchPaths(const EnvironmentCollection& env) noexcept {
    auto searchPaths = getAllParentDirectories(filesystem::current_path());
    if(const auto homeDir = getHomeDirectory(env)) {
        searchPaths.emplace_back(*homeDir);
    }
    return searchPaths;
}
/**
 * Appends the given paths to the base search paths in reverse order. Relative
 * paths are resolved against the given base path before being appended.
 *
 * \param[in,out] base  The search paths to extend (also returned)
 * \param[in] toAdd     The paths to append, consumed back-to-front
 * \param[in] basePath  The directory relative paths are resolved against
 * \returns The extended search paths
 */
inline execHelper::config::Paths
addAdditionalSearchPaths(execHelper::config::Paths& base,
                         const execHelper::config::Paths& toAdd,
                         const Path& basePath) noexcept {
    for(auto it = toAdd.rbegin(); it != toAdd.rend(); ++it) {
        auto resolved = *it;
        if(resolved.is_relative()) {
            resolved = basePath / resolved;
        }
        LOG(trace) << "Adding search path " << resolved;
        base.push_back(resolved);
    }
    return base;
}
/**
 * Convenience overload: converts the given strings to paths and forwards to
 * the Paths-based overload above.
 */
inline execHelper::config::Paths
addAdditionalSearchPaths(execHelper::config::Paths& base,
                         const vector<string>& toAdd,
                         const Path& basePath) noexcept {
    Paths converted;
    converted.reserve(toAdd.size());
    for(const auto& rawPath : toAdd) {
        converted.emplace_back(rawPath);
    }
    return addAdditionalSearchPaths(base, converted, basePath);
}
// Composes the plugin search path: the compiled-in install location
// (PLUGINS_INSTALL_PATH, presumably provided by the build system -- confirm),
// then the "additional-search-paths" entries from the settings file, then any
// paths appended on the command line. Later entries take priority in
// discoverPlugins().
inline execHelper::config::Paths
getAdditionalSearchPaths(const FleetingOptionsInterface& fleetingOptions,
                         const SettingsNode& settings,
                         const Path& basePath) noexcept {
    constexpr std::string_view configKey{"additional-search-paths"};
    Paths pluginSearchPath{PLUGINS_INSTALL_PATH};
    if(settings.contains(string(configKey))) {
        pluginSearchPath = addAdditionalSearchPaths(
            pluginSearchPath,
            settings.get<vector<string>>(string(configKey), {}), basePath);
    }
    return addAdditionalSearchPaths(
        pluginSearchPath, fleetingOptions.appendedSearchPaths(), basePath);
}
inline auto printAutoComplete(const std::string& /*word*/,
const OptionDescriptions& options,
const vector<string>& commands) noexcept -> void {
for(const auto& key : options.getOptionKeys()) {
user_feedback(key);
}
for(const auto& command : commands) {
user_feedback(command);
}
}
/**
 * Discover all compatible plugins in the given search paths. This function does *not* recursively seek in these paths.
 *
 * \param[in] searchPaths The search paths from the lowest priority to the highest (collisions of plugins in later paths overwrite the ones from earlier ones)
 * \returns A mapping of the discovered plugins
 */
inline Plugins discoverPlugins(const Paths& searchPaths) noexcept {
    // Built-in plugins; a discovered .lua plugin with the same name overrides
    // its built-in counterpart
    Plugins plugins{
        {"commands",
         shared_ptr<Plugin>(new execHelper::plugins::CommandPlugin())},
        {"command-line-command",
         shared_ptr<Plugin>(new execHelper::plugins::CommandLineCommand())},
        {"memory", shared_ptr<Plugin>(new execHelper::plugins::Memory())},
        {"valgrind", shared_ptr<Plugin>(new execHelper::plugins::Valgrind())},
        {"pmd", shared_ptr<Plugin>(new execHelper::plugins::Pmd())},
        {"lcov", shared_ptr<Plugin>(new execHelper::plugins::Lcov())},
    };
    /**
     * Paths are visited front to back; plugins found in later paths overwrite
     * same-named plugins from earlier ones, giving later paths priority.
     * (The previous comment claimed the paths were searched in reverse, which
     * did not match the loop.)
     */
    LOG(debug) << "Discovering plugins...";
    for(const auto& path : searchPaths) {
        LOG(trace) << "Discovering plugins for path " << path;
        try {
            for(const auto& entry : filesystem::directory_iterator(path)) {
                // Only regular *.lua files are considered plugin modules
                if(entry.is_regular_file() &&
                   entry.path().extension() == ".lua") {
                    LOG(trace) << "Module " << entry.path().stem()
                               << " found at " << path;
                    // insert_or_assign replaces the previous
                    // count()/emplace()/operator[] branches: it inserts a new
                    // plugin or overwrites an existing one in a single call.
                    // The stem is converted explicitly to a string key.
                    plugins.insert_or_assign(
                        entry.path().stem().string(),
                        shared_ptr<const Plugin>(
                            new execHelper::plugins::LuaPlugin(entry)));
                }
            }
        } catch(const filesystem::filesystem_error& e) {
            // A missing or unreadable search path is not fatal: report it and
            // continue with the remaining paths
            user_feedback_error("Failed to discover plugins for path "
                                << path << ". Skipping it");
            LOG(warning) << "Failed to discover plugins for path " << path
                         << ": " << e.what();
        }
    }
    return plugins;
}
inline void printHelp(const std::string& binaryName,
const OptionDescriptions& options,
const SettingsNode& settings) noexcept {
user_feedback("Usage: " + binaryName + " [Optional arguments] COMMANDS...");
user_feedback("");
user_feedback("Optional arguments:");
user_feedback(options.getOptionDescriptions());
static const string COMMANDS_KEY("commands");
if(settings.contains(COMMANDS_KEY)) {
user_feedback("Configured commands:");
for(const auto& command :
settings.get<SettingsValues>(COMMANDS_KEY, SettingsValues())) {
stringstream commandStream;
commandStream << " " << std::left << setw(20) << command;
CommandCollection commmandDescription =
settings[COMMANDS_KEY].get<CommandCollection>({command}, {});
if(!commmandDescription.empty()) {
// Add an extra whitespace in case the key is longer than the minimum width that was set
commandStream << " " << commmandDescription.back();
}
user_feedback(commandStream.str());
}
}
}
// Prints the binary name, version and copyright notice. BINARY_NAME, VERSION
// and COPYRIGHT are macros, presumably generated into version.h -- confirm.
inline void printVersion() noexcept {
    user_feedback(BINARY_NAME << " " << VERSION);
    user_feedback(COPYRIGHT);
}
/**
 * Prints one line per registered plugin: the name padded with dots to a fixed
 * width, followed by the plugin's own summary.
 */
inline void printPlugins(const Plugins& plugins) noexcept {
    user_feedback("Registered plugins:");
    for(const auto& [name, plugin] : plugins) {
        user_feedback(std::left << std::setfill('.') << std::setw(25) << name
                                << " " << *plugin);
    }
}
/**
 * Validates the parsed options. Currently the only constraint is that the
 * number of jobs must be strictly positive.
 *
 * \returns True when the options are valid, false otherwise (with a message
 *          printed to the user)
 */
inline bool verifyOptions(const FleetingOptions& options) noexcept {
    if(options.getJobs() > 0U) {
        return true;
    }
    user_feedback_error("Invalid value passed for the number of jobs (0): "
                        "it must be strictly positive.");
    return false;
}
/**
 * Locates the settings file with the given name by walking the configured
 * search paths.
 *
 * \throws std::runtime_error When no settings file is found in any search path
 */
inline auto getSettingsFile(const std::string& settingsFilename,
                            const EnvironmentCollection& env) -> Path {
    ConfigFileSearcher searcher(getSearchPaths(env));
    const auto located = searcher.find(settingsFilename);
    if(!located) {
        throw std::runtime_error("Could not find a settings file");
    }
    return *located;
}
// Parses the settings file and registers a command-line option for every
// pattern it declares, so pattern values can be overridden on the command
// line.
//
// NOTE(review): a pattern with a short option but no long option gets no
// registered command-line option at all (the short option is collected into
// additionalArguments but only used inside the longOption branch) -- confirm
// this is intended.
//
// Throws std::invalid_argument when the settings file cannot be parsed.
PatternSettingsPair addPatternsFromSettingsFile(const Path& settingsFile,
                                                OptionDescriptions& options) {
    auto patternSettingsPair = parseSettingsFile(settingsFile);
    if(!patternSettingsPair) {
        throw std::invalid_argument("Could not parse settings file '" +
                                    settingsFile.string() + "'");
    }
    auto patterns = patternSettingsPair.value().first;
    for(const auto& pattern : patterns) {
        ArgumentOptions additionalArguments;
        const auto shortOption = pattern.getShortOption();
        if(shortOption) {
            additionalArguments.emplace_back(
                ArgumentOption(1, shortOption.value()));
        }
        const auto longOption = pattern.getLongOption();
        if(longOption) {
            options.addOption(Option<PatternValues>(
                longOption.value(), additionalArguments,
                "Values for pattern '" + longOption.value() + "'"));
        }
    }
    return *patternSettingsPair;
}
// Registers all built-in command-line options and returns the resulting
// description set. Called twice per run: once for the first pass (settings
// file discovery) and once after the settings file added its pattern options.
inline OptionDescriptions getDefaultOptions() noexcept {
    OptionDescriptions options;
    options.addOption(
        Option<HelpOption_t>(HELP_OPTION_KEY, {"h"}, "Produce help message"));
    options.addOption(Option<HelpOption_t>(VERSION_KEY, {},
                                           "Print the version of this binary"));
    options.addOption(
        Option<VerboseOption_t>(VERBOSE_KEY, {"v"}, "Set verbosity"));
    options.addOption(Option<JobsOption_t>(
        JOBS_KEY, {"j"}, "Set number of jobs to use. Default: auto"));
    options.addOption(
        Option<DryRunOption_t>(DRY_RUN_KEY, {"n"}, "Dry run exec-helper"));
    options.addOption(
        Option<KeepGoingOption_t>(KEEP_GOING_KEY, {"k"}, "Keep going, even when commands fail"));
    options.addOption(
        Option<ListPluginsOption_t>(LIST_PLUGINS_KEY, {}, "List all plugins"));
    options.addOption(Option<AppendSearchPathOption_t>(
        APPEND_SEARCH_PATH_KEY, {},
        "Append to plugin search path. Plugins discovered earlier in the list "
        "overwrite plugins with the same name in later ones."));
    // settingsFileOption and commandOption are the shared file-scope
    // descriptions; commandOption also serves as the positional argument
    options.addOption(settingsFileOption);
    options.addOption(
        Option<LogLevelOption_t>(LOG_LEVEL_KEY, {"d"}, "Set the log level"));
    options.addOption(Option<AutoCompleteOption_t>(
        string(AUTO_COMPLETE_KEY), {},
        "List autocomplete options for the given word"));
    options.addOption(commandOption);
    return options;
}
/**
 * Parses the command line strictly (unregistered options are an error) on top
 * of the default option values. Remaining positional arguments are treated as
 * commands to execute.
 *
 * \throws std::invalid_argument When the command line cannot be parsed
 */
inline VariablesMap handleConfiguration(const Argv& argv,
                                        const EnvironmentCollection& /*env*/,
                                        OptionDescriptions& options) {
    options.setPositionalArgument(commandOption);
    VariablesMap parsedOptions = FleetingOptions::getDefault();
    if(options.getOptionsMap(parsedOptions, argv, false)) {
        return parsedOptions;
    }
    throw std::invalid_argument(
        "Could not properly parse the command line options");
}
} // namespace
// Main application flow:
//  1. First pass over the command line (lenient) to locate the settings file.
//  2. Parse the settings file, registering its patterns as extra options.
//  3. Second, strict, pass over the command line.
//  4. Handle informational options (help/version/list-plugins/autocomplete).
//  5. Configure patterns, pick an executor and hand off to the Commander.
int execHelperMain(int argc, char** argv, char** envp) {
    const Argv args(argc, argv);
    const EnvironmentCollection env = toEnvCollection(envp);
    // First pass: unregistered options are allowed since pattern options are
    // not known until the settings file has been parsed
    auto firstPassOptions = getDefaultOptions();
    VariablesMap firstPassOptionsMap = FleetingOptions::getDefault();
    if(!firstPassOptions.getOptionsMap(firstPassOptionsMap, args, true)) {
        user_feedback_error(
            "Could not properly parse the command line options");
        printHelp(args[0], firstPassOptions, SettingsNode("Options"));
        return EXIT_FAILURE;
    }
    FleetingOptions firstPassFleetingOptions(firstPassOptionsMap);
    Path settingsFile;
    try {
        // Default settings file name differs per platform
#ifdef _WIN32
        SettingsFileOption_t settingsFileValue =
            firstPassOptionsMap.get<SettingsFileOption_t>(
                SETTINGS_FILE_KEY, ".windows.exec-helper");
#else
        SettingsFileOption_t settingsFileValue =
            firstPassOptionsMap.get<SettingsFileOption_t>(SETTINGS_FILE_KEY,
                                                          ".exec-helper");
#endif
        settingsFile = getSettingsFile(settingsFileValue, env);
    } catch(const runtime_error& e) {
        // No settings file found: informational options can still be honoured
        if(firstPassFleetingOptions.getHelp()) {
            printHelp(args[0], firstPassOptions, SettingsNode("Options"));
            return EXIT_SUCCESS;
        }
        if(firstPassFleetingOptions.getVersion()) {
            printVersion();
            return EXIT_SUCCESS;
        }
        if(firstPassFleetingOptions.listPlugins()) {
            auto pluginSearchPath = getAdditionalSearchPaths(
                firstPassFleetingOptions, SettingsNode("error"),
                filesystem::current_path());
            auto plugins = discoverPlugins(pluginSearchPath);
            printPlugins(plugins);
            return EXIT_SUCCESS;
        }
        if(firstPassFleetingOptions.getAutoComplete()) {
            printAutoComplete(*(firstPassFleetingOptions.getAutoComplete()),
                              firstPassOptions, {});
            return EXIT_SUCCESS;
        }
        // Anything else genuinely needs a settings file
        user_feedback_error("Could not find an exec-helper settings file");
        printHelp(args[0], firstPassOptions, SettingsNode("Options"));
        return EXIT_FAILURE;
    }
    // Second pass: re-register the defaults plus one option per pattern
    auto optionDescriptions = getDefaultOptions();
    auto patternSettingsPair =
        addPatternsFromSettingsFile(settingsFile, optionDescriptions);
    auto patterns = patternSettingsPair.first;
    auto settings = patternSettingsPair.second;
    VariablesMap optionsMap("options");
    try {
        optionsMap = handleConfiguration(args, env, optionDescriptions);
    } catch(const std::invalid_argument&) {
        user_feedback_error(
            "Could not properly parse the command line options");
        printHelp(args[0], optionDescriptions, settings);
        return EXIT_FAILURE;
    }
    const FleetingOptions fleetingOptions(optionsMap);
    // Apply the requested log level to all known log channels
    execHelper::log::LogInit logInit;
    auto level = fleetingOptions.getLogLevel();
    for(const auto& logModule : logModules) {
        logInit.setSeverity(logModule, level);
    }
    // All files are taken relative to this basePath
    const auto basePath = settingsFile.parent_path();
    LOG(debug) << "Base path is " << basePath;
    if(fleetingOptions.getHelp()) {
        printHelp(args[0], optionDescriptions, settings);
        return EXIT_SUCCESS;
    }
    if(fleetingOptions.getVersion()) {
        printVersion();
        return EXIT_SUCCESS;
    }
    auto pluginSearchPath =
        getAdditionalSearchPaths(fleetingOptions, settings, basePath);
    auto plugins = discoverPlugins(pluginSearchPath);
    if(fleetingOptions.listPlugins()) {
        printPlugins(plugins);
        return EXIT_SUCCESS;
    }
    if(firstPassFleetingOptions.getAutoComplete()) {
        vector<string> commands = settings.get<vector<string>>("commands", {});
        printAutoComplete(*(fleetingOptions.getAutoComplete()),
                          optionDescriptions, move(commands));
        return EXIT_SUCCESS;
    }
    if(!verifyOptions(fleetingOptions)) {
        return EXIT_FAILURE;
    }
    // Override pattern values with values supplied on the command line
    for(auto& pattern : patterns) {
        const auto longOption = pattern.getLongOption();
        if(longOption && optionsMap.contains(longOption.value())) {
            if(!pattern.setValues(optionsMap.get<PatternValues>(
                   longOption.value(), pattern.getValues()))) {
                LOG(error) << "Failed to add long options for '"
                           << longOption.value() << "'";
            }
        }
    }
    // Executor selection: dry-run only reports, keep-going records failures
    // and continues, the default executor exits on the first failure
    auto shell = make_shared<PosixShell>();
    std::unique_ptr<ExecutorInterface> executor;
    auto lastReturnCode = EXIT_SUCCESS;
    if(fleetingOptions.getDryRun()) {
        executor.reset(new ReportingExecutor());
    } else if(fleetingOptions.getKeepGoing()) {
        executor = make_unique<ImmediateExecutor>(shell, [&lastReturnCode](Shell::ShellReturnCode returnCode) {
            lastReturnCode = returnCode;
            user_feedback_error("Error executing command!");
        });
    } else {
        executor = make_unique<ImmediateExecutor>(shell, [](Shell::ShellReturnCode returnCode) {
            user_feedback_error("Error executing command!");
            { execHelper::log::LogInit logInit; } // Make sure the loggers are destroyed before exiting
            exit(returnCode);
        });
    }
    // Plugins execute tasks through this callback rather than holding the
    // executor directly
    execHelper::plugins::ExecuteCallback executeCallback =
        [executor = executor.get()](const Task& task) {
            try {
                executor->execute(task);
            } catch(const std::runtime_error& e) {
                user_feedback_error("Runtime error: " << e.what());
            }
        };
    execHelper::plugins::registerExecuteCallback(executeCallback);
    Commander commander;
    // NOTE(review): env is declared const, so move(env) degrades to a copy
    // here -- confirm whether a move was intended
    if(commander.run(fleetingOptions, settings, patterns,
                     settingsFile.parent_path(), move(env), move(plugins))) {
        return lastReturnCode;
    } else {
        user_feedback_error("Error executing commands");
        return EXIT_FAILURE;
    }
}
// Thin entry point: all logic lives in execHelperMain so it can be exercised
// separately from the process entry point.
int main(int argc, char** argv, char** envp) {
    return execHelperMain(argc, argv, envp);
}
<file_sep>#include "task.h"
#include <algorithm>
#include <filesystem>
#include <iostream>
#include <numeric>
#include <ostream>
#include <utility>
#include "logger.h"
using std::accumulate;
using std::back_inserter;
using std::endl;
using std::make_pair;
using std::move;
using std::ostream;
using std::string;
using std::vector;
using execHelper::config::EnvironmentCollection;
using execHelper::config::EnvironmentValue;
using execHelper::config::Path;
namespace filesystem = std::filesystem;
namespace {
/**
 * Joins the given strings into a single string separated by the given
 * delimiter.
 *
 * Note: while the accumulated result is still empty, elements are taken as-is
 * without a leading delimiter (this also means empty leading elements do not
 * contribute a delimiter), preserving the original behaviour.
 *
 * Fix: removed an unused local variable ('string result;') that was declared
 * but never read.
 *
 * \param[in] toImplode The strings to join
 * \param[in] delimiter The separator inserted between consecutive elements
 * \returns The joined string
 */
inline auto implodeVector(const std::vector<std::string>& toImplode,
                          const std::string& delimiter) -> std::string {
    return std::accumulate(
        toImplode.begin(), toImplode.end(), std::string(),
        [&delimiter](const std::string& joined, const std::string& part) {
            if(joined.empty()) {
                return part;
            }
            return joined + delimiter + part;
        });
}
/**
 * Joins the given strings into a single string separated by single spaces.
 *
 * \param[in] toImplode The strings to join
 * \returns The joined string
 */
inline auto implodeVector(const std::vector<std::string>& toImplode)
    -> std::string {
    return implodeVector(toImplode, " ");
}
} // namespace
namespace execHelper::core {
// Constructs a task from its command parts, environment and working
// directory. Parameters are taken by value and moved (hence the cppcheck
// suppressions below). setWorkingDirectory() is invoked so the PWD
// environment variable is kept in sync with the working directory.
// cppcheck-suppress passedByValue symbolName=subtasks
Task::Task(std::vector<std::string> subtasks,
           // cppcheck-suppress passedByValue symbolName=environment
           config::EnvironmentCollection environment,
           config::Path workingDirectory) noexcept
    : m_task(std::move(subtasks)),
      m_env(std::move(environment)),
      m_workingDirectory(std::move(workingDirectory)) {
    setWorkingDirectory(m_workingDirectory);
}
// Returns the ordered command parts that make up this task
auto Task::getTask() const noexcept -> const execHelper::core::TaskCollection& {
    return m_task;
}
// Renders the task as a single space-separated command line string
auto Task::toString() const -> string { return implodeVector(m_task); }
// Returns the environment variables associated with this task
auto Task::getEnvironment() const noexcept -> const EnvironmentCollection& {
    return m_env;
}
// Changes the working directory of the task and mirrors it into the PWD
// environment variable (as an absolute path) so spawned commands observe a
// consistent location.
void Task::setWorkingDirectory(const Path& workingDirectory) noexcept {
    LOG(trace) << "Changing working directory of task to " << workingDirectory;
    m_workingDirectory = workingDirectory;
    // Set the PWD environment variable to the working directory
    appendToEnvironment(
        make_pair("PWD", filesystem::absolute(m_workingDirectory).string()));
}
// Returns the working directory the task will be executed in
auto Task::getWorkingDirectory() const noexcept -> const Path& {
    return m_workingDirectory;
}
// Appends a single command part to the task (copy). Always returns true.
auto Task::append(const string& taskPart) noexcept -> bool {
    m_task.push_back(taskPart);
    return true;
}
// Appends a single command part to the task (move). Always returns true.
auto Task::append(string&& taskPart) noexcept -> bool {
    m_task.push_back(move(taskPart));
    return true;
}
// Appends a collection of command parts (copy). Always returns true.
auto Task::append(const TaskCollection& taskPart) noexcept -> bool {
    m_task.reserve(m_task.size() + taskPart.size());
    m_task.insert(std::end(m_task), std::begin(taskPart), std::end(taskPart));
    return true;
}
// Appends a collection of command parts (moving each element). Always returns
// true.
auto Task::append(TaskCollection&& taskPart) noexcept -> bool {
    m_task.reserve(m_task.size() + taskPart.size());
    move(taskPart.begin(), taskPart.end(), back_inserter(m_task));
    return true;
}
// Replaces the entire environment (copy). setWorkingDirectory() is re-applied
// so the PWD variable survives the replacement. Always returns true.
auto Task::setEnvironment(const EnvironmentCollection& env) noexcept -> bool {
    m_env = env;
    setWorkingDirectory(m_workingDirectory);
    return true;
}
// Replaces the entire environment (move). setWorkingDirectory() is re-applied
// so the PWD variable survives the replacement. Always returns true.
auto Task::setEnvironment(EnvironmentCollection&& env) noexcept -> bool {
    m_env = move(env);
    setWorkingDirectory(m_workingDirectory);
    return true;
}
/**
 * Adds the given key/value pair to the environment of this task, overwriting
 * any previously set value for the same key. Always returns true.
 *
 * Fix: the previous erase-then-emplace sequence (two lookups) is replaced by a
 * single insert_or_assign (C++17) with the same semantics.
 */
auto Task::appendToEnvironment(EnvironmentValue&& newValue) noexcept -> bool {
    m_env.insert_or_assign(std::move(newValue.first),
                           std::move(newValue.second));
    return true;
}
/**
 * Adds all given key/value pairs to the environment of this task, overwriting
 * previously set values for the same keys. Always returns true.
 */
auto Task::appendToEnvironment(EnvironmentCollection&& newValue) noexcept
    -> bool {
    for(auto&& value : newValue) {
        appendToEnvironment(value);
    }
    return true;
}
// Two tasks are equal when their command parts, working directory and
// environment all compare equal
auto Task::operator==(const Task& other) const noexcept -> bool {
    return (m_task == other.m_task &&
            m_workingDirectory == other.m_workingDirectory &&
            m_env == other.m_env);
}
// Defined as the negation of operator==
auto Task::operator!=(const Task& other) const noexcept -> bool {
    return !(*this == other);
}
/**
 * Streams a human-readable representation of the task: its environment, its
 * command parts and its working directory.
 */
auto operator<<(ostream& os, const Task& task) noexcept -> ostream& {
    os << "Task {";
    const EnvironmentCollection& environment = task.getEnvironment();
    os << "Environment(" << environment.size() << "): {";
    for(const auto& [name, value] : environment) {
        os << " " << name << ": " << value << ";";
    }
    os << "} ";
    const TaskCollection& subtasks = task.getTask();
    os << "Command(" << subtasks.size() << "): {";
    for(const auto& commandPart : subtasks) {
        os << " " << commandPart;
    }
    os << "} ";
    os << "Working-dir: {" << task.getWorkingDirectory().string() << "}";
    os << "}";
    os << endl;
    return os;
}
} // namespace execHelper::core
<file_sep>#include <map>
#include <string>
#include <gsl/string_span>
#include "config/commandLineOptions.h"
#include "config/pattern.h"
#include "config/settingsNode.h"
#include "config/variablesMap.h"
#include "core/task.h"
#include "plugins/commandLine.h"
#include "plugins/executePlugin.h"
#include "plugins/memory.h"
#include "plugins/pluginUtils.h"
#include "plugins/valgrind.h"
#include "plugins/verbosity.h"
#include "unittest/catch.h"
#include "utils/utils.h"
#include "executorStub.h"
#include "fleetingOptionsStub.h"
using std::move;
using std::shared_ptr;
using std::string;
using gsl::czstring;
using execHelper::config::CommandCollection;
using execHelper::config::Pattern;
using execHelper::config::Patterns;
using execHelper::config::SettingsNode;
using execHelper::config::VariablesMap;
using execHelper::core::Task;
using execHelper::plugins::ExecutePlugin;
using execHelper::plugins::Memory;
using execHelper::plugins::MemoryHandler;
using execHelper::plugins::Valgrind;
using execHelper::core::test::ExecutorStub;
using execHelper::test::FleetingOptionsStub;
using execHelper::test::utils::getExpectedTasks;
namespace {
// Configuration keys used throughout the valgrind plugin tests
const czstring<> PLUGIN_NAME = "valgrind";
const czstring<> MEMORY_KEY = "memory";
const czstring<> TOOL_KEY = "tool";
const czstring<> RUN_COMMAND_KEY = "run-command";
} // namespace
namespace execHelper::plugins::test {
SCENARIO("Obtaining the default variables map of the valgrind plugin",
"[valgrind]") {
GIVEN("The default fleeting options") {
FleetingOptionsStub fleetingOptions;
Valgrind plugin;
VariablesMap actualVariables(PLUGIN_NAME);
REQUIRE(actualVariables.add(COMMAND_LINE_KEY, CommandLineArgs()));
REQUIRE(actualVariables.add(VERBOSITY_KEY, "no"));
WHEN("We request the variables map") {
VariablesMap variables = plugin.getVariablesMap(fleetingOptions);
THEN("We should find the same ones") {
REQUIRE(variables == actualVariables);
}
}
}
}
SCENARIO("Test the variables map of the valgrind plugin", "[valgrind]") {
MAKE_COMBINATIONS("Of several configurations") {
const Pattern pattern1("PATTERN1", {"value1a", "value1b"});
const Pattern pattern2("PATTERN2", {"value2a", "value2b"});
const Patterns patterns({pattern1, pattern2});
Valgrind plugin;
VariablesMap variables = plugin.getVariablesMap(FleetingOptionsStub());
CommandCollection runCommands({MEMORY_KEY});
REQUIRE(variables.add(RUN_COMMAND_KEY, MEMORY_KEY));
CommandLineArgs commandLine;
string tool;
MemoryHandler memory;
SettingsNode settings(PLUGIN_NAME);
ExecutorStub executor;
ExecuteCallback executeCallback = [&executor](const Task& task) {
executor.execute(task);
};
registerExecuteCallback(executeCallback);
COMBINATIONS("Add an additional run command") {
runCommands.emplace_back(MEMORY_KEY);
REQUIRE(variables.add(RUN_COMMAND_KEY, MEMORY_KEY));
}
COMBINATIONS("Set the tool") {
tool = "tool1";
REQUIRE(variables.replace(TOOL_KEY, tool));
}
COMBINATIONS("Set the command line") {
commandLine = {"{" + pattern1.getKey() + "}",
"{" + pattern2.getKey() + "}"};
REQUIRE(variables.add(COMMAND_LINE_KEY, commandLine));
}
FleetingOptionsStub fleetingOptions;
ExecutePlugin::push(
gsl::not_null<config::FleetingOptionsInterface*>(&fleetingOptions));
ExecutePlugin::push(move(settings));
ExecutePlugin::push(Patterns(patterns));
ExecutePlugin::push(
Plugins({{"Memory",
shared_ptr<Plugin>(new execHelper::plugins::Memory())}}));
ExecutorStub::TaskQueue expectedTasks;
for(const auto& command : runCommands) {
(void)
command; // Command is unused, as we only want to consider the part of the command associated with this command
Task expectedTask({PLUGIN_NAME});
if(!tool.empty()) {
expectedTask.append("--tool=" + tool);
}
expectedTask.append(commandLine);
expectedTasks.emplace_back(expectedTask);
}
ExecutorStub::TaskQueue replacedTasks =
getExpectedTasks(expectedTasks, patterns);
THEN_WHEN("We apply the plugin") {
Task task;
bool returnCode = plugin.apply(task, variables, patterns);
THEN_CHECK("It should succeed") { REQUIRE(returnCode); }
THEN_CHECK("It called the right commands") {
const Memory::Memories& memories =
MemoryHandler::getExecutions();
REQUIRE(memories.size() == replacedTasks.size());
auto replacedTask = replacedTasks.begin();
for(auto memory = memories.begin(); memory != memories.end();
++memory, ++replacedTask) {
REQUIRE(memory->task == *replacedTask);
REQUIRE(memory->patterns.empty());
}
}
}
ExecutePlugin::popPlugins();
ExecutePlugin::popFleetingOptions();
ExecutePlugin::popSettingsNode();
ExecutePlugin::popPatterns();
}
}
SCENARIO("Test erroneous scenarios", "[valgrind]") {
GIVEN("A configuration without a configured run command") {
Valgrind plugin;
VariablesMap variables = plugin.getVariablesMap(FleetingOptionsStub());
WHEN("We call the plugin") {
bool returnCode = plugin.apply(Task(), variables, Patterns());
THEN("It should fail") { REQUIRE_FALSE(returnCode); }
}
}
}
} // namespace execHelper::plugins::test
<file_sep>#ifndef __EXECUTOR_STUB_H__
#define __EXECUTOR_STUB_H__
#include <vector>
#include "core/executorInterface.h"
#include "core/task.h"
namespace execHelper {
namespace core {
namespace test {
/**
 * Test double for ExecutorInterface: instead of executing tasks it records
 * them in order, so tests can inspect which tasks would have been run.
 */
class ExecutorStub : public ExecutorInterface {
  public:
    using TaskQueue = std::vector<Task>;
    // Records the task instead of executing it
    void execute(const Task& task) noexcept override {
        m_executedTasks.push_back(task);
    }
    // Returns all recorded tasks in the order they were received
    const TaskQueue& getExecutedTasks() const noexcept {
        return m_executedTasks;
    }
  private:
    TaskQueue m_executedTasks; // Tasks received so far, oldest first
};
} // namespace test
} // namespace core
} // namespace execHelper
#endif /* __EXECUTOR_STUB_H__ */
<file_sep>#include "configFileSearcher.h"
#include <filesystem>
using std::optional;
using std::string;
namespace filesystem = std::filesystem;
namespace {
/**
 * Checks whether the given path exists on the current system
 *
 * \param[in] pathToCheck The path to check
 * \returns True when the path exists, false otherwise
 */
auto fileExist(const execHelper::config::Path& pathToCheck) noexcept -> bool {
    return filesystem::exists(pathToCheck);
}
} // namespace
namespace execHelper::config {
// Takes ownership of the ordered list of directories that find() will search
ConfigFileSearcher::ConfigFileSearcher(Paths searchPaths) noexcept
    : m_searchPaths(std::move(searchPaths)) {
    ;
}
/**
 * Searches the configured paths in order for the given file name and returns
 * the first existing candidate, or std::nullopt when none exists.
 */
auto ConfigFileSearcher::find(const Path& filename) noexcept -> optional<Path> {
    for(const auto& searchPath : m_searchPaths) {
        auto candidate = searchPath / filename;
        if(fileExist(candidate)) {
            return candidate;
        }
    }
    return std::nullopt;
}
} // namespace execHelper::config
<file_sep>set(LIB_NAME config-generators)
add_library(${LIB_NAME} INTERFACE)
target_include_directories(${LIB_NAME} INTERFACE include)
set(EXE_NAME ${PROJECT_NAME}-config-unittest)
set(SRCS src/settingsNodeTest.cpp
src/configFileSearcherTest.cpp
src/patternTest.cpp
src/patternHandlerTest.cpp
src/configTest.cpp
src/fleetingOptionsTest.cpp
src/commandLineOptionsTest.cpp
src/argvTest.cpp
src/envpTest.cpp
src/pathManipulationTest.cpp
)
set(DEPENDENCIES
filesystem
boost-program-options
config
unittest
rpcheck
test-utils
log-generators
config-generators
)
add_executable(${EXE_NAME} ${SRCS})
target_include_directories(${EXE_NAME} PRIVATE ${DEPENDENCIES} include)
target_link_libraries(${EXE_NAME} PRIVATE ${DEPENDENCIES})
add_test(${EXE_NAME} ${EXE_NAME})
install(TARGETS ${EXE_NAME} DESTINATION ${UNITTEST_BIN_DIR})
<file_sep>#include "optionDescriptions.h"
#include "argv.h"
#include "logger.h"
using std::string;
using boost::program_options::command_line_parser;
using boost::program_options::notify;
using boost::program_options::options_description;
using boost::program_options::positional_options_description;
using boost::program_options::store;
using boost::program_options::variables_map;
using execHelper::config::VariablesMap;
namespace execHelper::config {
// Default construction: members are default-initialized; nothing else to do
OptionDescriptions::OptionDescriptions() noexcept { ; }
// Returns the boost::program_options description of all registered options
auto OptionDescriptions::getOptionDescriptions() const noexcept
    -> options_description {
    return m_optionDescription;
}
// Marks the given option as the one that collects positional command-line
// arguments. Only the option's id is stored. Always returns true.
auto OptionDescriptions::setPositionalArgument(
    const OptionInterface& option) noexcept -> bool {
    m_positional = option.getId();
    return true;
}
// Parses the given argv against the registered option descriptions and fills
// the given variables map with the results.
//
// \param[out] variablesMap     Receives the parsed values
// \param[in] argv              The command line to parse
// \param[in] allowUnregistered When true, unknown options are ignored rather
//                              than treated as an error
// \returns True on success, false when parsing failed (a message is reported)
auto OptionDescriptions::getOptionsMap(VariablesMap& variablesMap,
                                       const Argv& argv,
                                       bool allowUnregistered) const noexcept
    -> bool {
    variables_map optionsMap;
    auto commandLineParser =
        command_line_parser(static_cast<int>(argv.getArgc()), argv.getArgv());
    commandLineParser.options(m_optionDescription);
    // Assign positional arguments
    positional_options_description positionalOptionsDesc;
    if(m_positional) {
        // -1 means: collect an unlimited number of positional arguments
        positionalOptionsDesc.add(m_positional.value().c_str(), -1);
        commandLineParser.positional(positionalOptionsDesc);
    }
    if(allowUnregistered) {
        commandLineParser.allow_unregistered();
    }
    try {
        store(commandLineParser.run(), optionsMap);
    } catch(const boost::program_options::unknown_option& e) {
        // Unknown options get a user-facing message; other parse errors are
        // only logged
        user_feedback_error(
            "Could not parse command line arguments: " << e.what());
        return false;
    } catch(const std::exception& e) {
        LOG(error) << e.what();
        return false;
    }
    notify(optionsMap);
    // Convert the boost result into the project's own variables map
    toMap(variablesMap, optionsMap);
    return true;
}
// Translates every parsed boost option that was registered in m_options
// into an entry of the given variables map. Options that were parsed but
// never registered are silently skipped.
void OptionDescriptions::toMap(VariablesMap& variablesMap,
                               const variables_map& optionsMap) const noexcept {
    for(const auto& parsedOption : optionsMap) {
        const auto registered = m_options.find(parsedOption.first);
        if(registered != m_options.end()) {
            registered->second->toMap(variablesMap, optionsMap);
        }
    }
}
} // namespace execHelper::config
<file_sep>#ifndef __PLUGIN_UTILS_H__
#define __PLUGIN_UTILS_H__
#include <ostream>
#include <string>
#include "config/commandLineOptions.h"
#include "config/path.h"
#include "config/pattern.h"
#include "config/variablesMap.h"
#include "core/mapPermutator.h"
#include "core/task.h"
// Forward declarations to keep this header light
namespace execHelper {
namespace core {
class TargetDescriptionElement;
class CompilerDescriptionElement;
} // namespace core
namespace plugins {
class Plugin;
} // namespace plugins
} // namespace execHelper
namespace execHelper {
namespace plugins {
/**
 * \brief Iterates over every combination of the given pattern values
 */
using PatternPermutator =
    core::MapPermutator<config::PatternKey, config::PatternValue>;
/**
 * Returns the configuration key under which patterns are registered
 */
const config::PatternKey& getPatternsKey() noexcept;
/**
 * Returns the configuration key for the working directory setting
 */
const std::string& getWorkingDirKey() noexcept;
/**
 * Extracts the environment settings from the given variables map
 */
config::EnvironmentCollection
getEnvironment(const config::VariablesMap& variables) noexcept;
/**
 * Creates a permutator over all value combinations of the given patterns
 */
PatternPermutator
makePatternPermutator(const config::Patterns& patterns) noexcept;
/**
 * Returns a copy of the given environment with every pattern occurrence
 * replaced by the value from the given combination
 */
config::EnvironmentCollection replacePatternsInEnvironment(
    const config::EnvironmentCollection& env,
    const config::PatternCombinations& patternCombinations) noexcept;
/**
 * Returns a copy of the given task with every pattern occurrence replaced
 * by the value from the given combination
 */
core::Task replacePatternCombinations(
    const core::Task& task,
    const config::PatternCombinations& patternCombinations) noexcept;
/*! @copydoc replacePatternCombinations(const core::Task&, const config::PatternCombinations&)
 */
auto replacePatternCombinations(
    std::string element,
    const config::PatternCombinations& patternCombinations) noexcept
    -> std::string;
/**
 * Renders the given pattern keys as a single human-readable string
 */
auto toString(const config::PatternKeys& values) noexcept -> std::string;
} // namespace plugins
} // namespace execHelper
#endif /* __PLUGIN_UTILS_H__ */
<file_sep>#include "commandLine.h"
#include "logger.h"
namespace execHelper::plugins {
// Registers the default (empty) command-line arguments under the
// command-line key. A failure to add the key is only logged: the caller
// can still proceed without it.
void CommandLine::getVariables(
    config::VariablesMap& variables,
    const config::FleetingOptionsInterface& /*options*/) noexcept {
    if(!variables.add(COMMAND_LINE_KEY, CommandLineArgs())) {
        LOG(warning) << "Failed to add key '" << COMMAND_LINE_KEY << "'";
    }
}
} // namespace execHelper::plugins
<file_sep>FROM debian:testing
# Install the build/test toolchain in a single layer; clean the apt cache
# afterwards to keep the image small.
RUN apt-get update && apt-get install --yes cmake make libboost-dev libboost-program-options-dev libboost-log-dev libyaml-cpp-dev libmsgsl-dev pkg-config g++ git python catch sudo curl && apt-get clean --yes
# Install gitchangelog using the standalone installer
RUN curl -sSL https://raw.githubusercontent.com/vaab/gitchangelog/master/src/gitchangelog/gitchangelog.py > /usr/local/bin/gitchangelog && chmod +x /usr/local/bin/gitchangelog
<file_sep>.. _exec-helper-plugins-command-plugin:
Command plugin
**************
Description
===========
The command-plugin is used for executing a command that is listed under the *commands* key in the configuration. It is used internally as a starting point for traversing the configuration.
Settings
========
There are no settings associated with this plugin.
See also
========
See :ref:`exec-helper` (1) for information about the usage of :program:`exec-helper`.
See :ref:`exec-helper-config` (5) for information about the configuration file format.
See :ref:`exec-helper-plugins` (5) for an overview of the available plugins and their configuration options.
<file_sep>/**
*@file Tests properties that each plugin should have
*/
#include <catch.hpp>
#include <filesystem>
#include "unittest/catch.h"
#include "unittest/config.h"
#include "unittest/rapidcheck.h"
#include "executorStub.h"
#include "fleetingOptionsStub.h"
#include "pluginsGenerators.h"
#include "config/pattern.h"
#include "config/settingsNode.h"
#include "config/variablesMap.h"
#include "core/task.h"
#include "plugins/commandLineCommand.h"
#include "plugins/commandPlugin.h"
#include "plugins/executePlugin.h"
#include "plugins/lcov.h"
#include "plugins/logger.h"
#include "plugins/luaPlugin.h"
#include "plugins/memory.h"
#include "plugins/plugin.h"
#include "plugins/pluginUtils.h"
#include "plugins/pmd.h"
#include "plugins/valgrind.h"
#include "core/coreGenerators.h"
using std::shared_ptr;
using execHelper::config::FleetingOptionsInterface;
using execHelper::config::Pattern;
using execHelper::config::Patterns;
using execHelper::config::PatternValue;
using execHelper::config::PatternValues;
using execHelper::config::SettingsNode;
using execHelper::config::VariablesMap;
using execHelper::core::Task;
using execHelper::core::Tasks;
using execHelper::plugins::MemoryHandler;
using execHelper::plugins::Plugin;
using execHelper::plugins::Plugins;
using execHelper::core::test::ExecutorStub;
using execHelper::test::FleetingOptionsStub;
using execHelper::test::propertyTest;
namespace filesystem = std::filesystem;
namespace {
constexpr std::string_view patternKey{"BLAAT"};
// Builds the full plugin registry: the statically known C++ plugins plus
// every *.lua script found in the plugin installation directory.
auto getPlugins() noexcept -> Plugins {
    Plugins result;
    result.emplace("command-line-command",
                   std::make_shared<execHelper::plugins::CommandLineCommand>());
    result.emplace("memory", std::make_shared<execHelper::plugins::Memory>());
    result.emplace("valgrind",
                   std::make_shared<execHelper::plugins::Valgrind>());
    result.emplace("pmd", std::make_shared<execHelper::plugins::Pmd>());
    result.emplace("lcov", std::make_shared<execHelper::plugins::Lcov>());
    // Discover Lua-based plugins: every regular *.lua file is registered
    // under its basename without the extension.
    for(const auto& searchPath : {PLUGINS_INSTALL_PATH}) {
        for(const auto& candidate :
            filesystem::directory_iterator(searchPath)) {
            if(candidate.is_regular_file() &&
               candidate.path().extension() == ".lua") {
                result.emplace(
                    candidate.path().stem(),
                    std::make_shared<execHelper::plugins::LuaPlugin>(
                        candidate));
            }
        }
    }
    return result;
}
} // namespace
namespace execHelper::plugins::test {
// The patterns key is part of the public plugin contract: pin it down.
SCENARIO("Test the pattern keyword for each plugin") {
    REQUIRE(Plugin::getPatternsKey() == "patterns");
}
// Guards against silently losing a plugin: the discovered registry must
// contain exactly the expected number of entries.
SCENARIO("Check that all plugins are found") {
    GIVEN("The expected number of plugins") {
        constexpr auto expectedNbOfPlugins = 15U;
        WHEN("We request all plugins") {
            const auto plugins = getPlugins();
            THEN("The number of plugins must equal the expected number of "
                 "plugins") {
                REQUIRE(plugins.size() == expectedNbOfPlugins);
            }
        }
    }
}
// Property: applying any plugin registers at least one task, either on the
// execute callback or on the memory plugin's execution record.
SCENARIO("Every call to a plugin must lead to at least one registered task") {
    FleetingOptionsStub options;
    options.m_commands.push_back("memory");
    execHelper::plugins::ExecutePlugin::push(
        gsl::not_null<FleetingOptionsInterface*>(&options));
    Patterns patterns = {Pattern(std::string(patternKey), {"memory"})};
    execHelper::plugins::ExecutePlugin::push(Patterns(patterns));
    execHelper::plugins::ExecutePlugin::push(SettingsNode("test"));
    execHelper::plugins::ExecutePlugin::push(getPlugins());
    propertyTest(
        "Every call to a plugin must lead to at least one registered task",
        [&patterns](shared_ptr<const Plugin>&& plugin) {
            uint32_t nbOfRegisteredTasks = 0U;
            REQUIRE(plugin);
            // Count every task the plugin registers for execution
            registerExecuteCallback(
                [&nbOfRegisteredTasks](const core::Task& /*task*/) {
                    ++nbOfRegisteredTasks;
                });
            MemoryHandler memory;
            // Provide every configuration key any of the plugins may need,
            // so that each of them can be applied successfully.
            auto variablesMap = plugin->getVariablesMap(FleetingOptionsStub());
            REQUIRE(variablesMap.add("command-line", "blaat"));
            REQUIRE(variablesMap.add("build-command", "memory"));
            REQUIRE(variablesMap.add("run-command", "memory"));
            REQUIRE(variablesMap.add("container", "blaat"));
            REQUIRE(variablesMap.add("targets", "memory"));
            THEN_WHEN("We apply the plugin") {
                bool result =
                    plugin->apply(core::Task(), variablesMap, patterns);
                THEN_CHECK("The call must succeed") { REQUIRE(result); }
                THEN_CHECK(
                    "The executor should have been called at least once") {
                    REQUIRE(nbOfRegisteredTasks +
                                memory.getExecutions().size() >
                            0U);
                }
            }
        });
    // Unwind the pushed state in reverse order of the pushes above
    execHelper::plugins::ExecutePlugin::popPlugins();
    execHelper::plugins::ExecutePlugin::popSettingsNode();
    execHelper::plugins::ExecutePlugin::popPatterns();
    execHelper::plugins::ExecutePlugin::popFleetingOptions();
}
// Property: a plugin may append to a task, but must never modify the
// arguments that were already present in the task it was handed.
SCENARIO("A plugin must not alter the arguments before a given task") {
    FleetingOptionsStub options;
    options.m_commands.push_back("memory");
    execHelper::plugins::ExecutePlugin::push(
        gsl::not_null<FleetingOptionsInterface*>(&options));
    Patterns patterns = {Pattern(std::string(patternKey), {"memory"})};
    execHelper::plugins::ExecutePlugin::push(Patterns(patterns));
    execHelper::plugins::ExecutePlugin::push(SettingsNode("test"));
    execHelper::plugins::ExecutePlugin::push(getPlugins());
    propertyTest(
        "A plugin must not alter the arguments already in a given task",
        [&patterns](std::shared_ptr<const Plugin>&& plugin, const Task& task) {
            RC_PRE(!task.getTask().empty());
            ExecutorStub executor;
            ExecuteCallback executeCallback = [&executor](const Task& task) {
                executor.execute(task);
            };
            registerExecuteCallback(executeCallback);
            // Provide every configuration key any of the plugins may need,
            // so that each of them can be applied successfully.
            auto variablesMap = plugin->getVariablesMap(FleetingOptionsStub());
            REQUIRE(variablesMap.add("command-line", "blaat"));
            REQUIRE(variablesMap.add("build-command", "memory"));
            REQUIRE(variablesMap.add("run-command", "memory"));
            REQUIRE(variablesMap.add("container", "blaat"));
            REQUIRE(variablesMap.add("targets", "memory"));
            MemoryHandler memory;
            THEN_WHEN("We apply the plugin") {
                bool result = plugin->apply(task, variablesMap, patterns);
                THEN_CHECK("The call must succeed") { REQUIRE(result); }
                // Collect the tasks seen by the executor and the ones
                // recorded by the memory plugin into a single list.
                Tasks executedTasks = executor.getExecutedTasks();
                auto memories = MemoryHandler::getExecutions();
                std::transform(memories.begin(), memories.end(),
                               std::back_inserter(executedTasks),
                               [](const auto& mem) { return mem.task; });
                THEN_CHECK("The arguments before the task must remain") {
                    auto task_remains = std::any_of(
                        executedTasks.begin(), executedTasks.end(),
                        [&task](const auto& executedTask) {
                            // An executed task qualifies when it starts
                            // with the exact argument sequence of the
                            // input task. Checking the sizes first avoids
                            // dereferencing a past-the-end iterator for
                            // executed tasks that are shorter than the
                            // input task: the previous element-by-element
                            // walk was undefined behavior in that case.
                            const auto& expected = task.getTask();
                            const auto& actual = executedTask.getTask();
                            return actual.size() >= expected.size() &&
                                   std::equal(expected.begin(),
                                              expected.end(),
                                              actual.begin());
                        });
                    REQUIRE(task_remains);
                }
            }
        });
    // Unwind the pushed state in reverse order of the pushes above
    execHelper::plugins::ExecutePlugin::popPlugins();
    execHelper::plugins::ExecutePlugin::popSettingsNode();
    execHelper::plugins::ExecutePlugin::popPatterns();
    execHelper::plugins::ExecutePlugin::popFleetingOptions();
}
// Property: every plugin must provide a non-empty, human-readable summary.
SCENARIO("Print the plugin summary", "[generic-plugin][success]") {
    FleetingOptionsStub options;
    options.m_commands.push_back("memory");
    execHelper::plugins::ExecutePlugin::push(
        gsl::not_null<FleetingOptionsInterface*>(&options));
    Patterns patterns = {Pattern(std::string(patternKey), {"memory"})};
    execHelper::plugins::ExecutePlugin::push(Patterns(patterns));
    execHelper::plugins::ExecutePlugin::push(SettingsNode("test"));
    execHelper::plugins::ExecutePlugin::push(getPlugins());
    propertyTest("A plugin", [](std::shared_ptr<const Plugin>&& plugin) {
        WHEN("We request the summary of the plugin") {
            auto summary = plugin->summary();
            THEN("The summary must not be empty") { REQUIRE(!summary.empty()); }
        }
    });
    // Unwind the pushed state in reverse order of the pushes above
    execHelper::plugins::ExecutePlugin::popPlugins();
    execHelper::plugins::ExecutePlugin::popSettingsNode();
    execHelper::plugins::ExecutePlugin::popPatterns();
    execHelper::plugins::ExecutePlugin::popFleetingOptions();
}
} // namespace execHelper::plugins::test
<file_sep>#ifndef GENERATE_RANDOM_INCLUDE
#define GENERATE_RANDOM_INCLUDE
#include <random>
#include <string>
namespace execHelper {
namespace test {
namespace baseUtils {
/**
 * Generate a pseudo-random integral value in the closed range [min, max]
 * using the given random engine.
 *
 * Note: the distribution is parameterized over its default type (int), so
 * min and max are narrowed to int before drawing.
 *
 * \returns The drawn value converted to T. The previous implementation
 *          truncated the result through uint8_t regardless of T, which
 *          corrupted every value outside [0, 255].
 */
template <typename T, typename Engine>
T generateRandomInt(size_t min, size_t max, Engine& engine) noexcept {
    std::uniform_int_distribution<> dis(min, max);
    return static_cast<T>(dis(engine));
}
/**
 * Convenience overload drawing from a process-wide mersenne-twister engine.
 * The engine is deterministically seeded (seed 0), so sequences are
 * reproducible across runs.
 */
template <typename T> T generateRandomInt(size_t min, size_t max) noexcept {
    static std::mt19937 gen(0);
    return generateRandomInt<T, std::mt19937>(min, max, gen);
}
char generateRandomChar() noexcept;
std::vector<char> generateRandomChar(size_t length) noexcept;
std::string generateRandomString(size_t length) noexcept;
} // namespace baseUtils
} // namespace test
} // namespace execHelper
#endif /* GENERATE_RANDOM_INCLUDE */
<file_sep>import pytest
from pytest_bdd import scenarios
# Reuse the shared step implementations for the run scenarios.
from scenarios_run import *
# Bind every scenario in the execution-order feature file to this module,
# converting the example-table columns to their expected Python types.
scenarios('../feature/execution-order/execution-order.feature', example_converters=dict(command = str, nb_of_times = int))
<file_sep>#ifndef INDIRECT_STATEMENT_INCLUDE
#define INDIRECT_STATEMENT_INCLUDE
#include <memory>
#include <vector>
#include "statement.h"
namespace execHelper {
namespace test {
namespace baseUtils {
using Statements = std::vector<std::shared_ptr<Statement>>;
/**
 * \brief A statement that groups a collection of child statements under a
 * single key
 */
class IndirectStatement : public Statement {
  public:
    /**
     * Create an indirect statement
     *
     * \param[in] key The key under which this statement is registered
     * \param[in] initialStatements The initial set of child statements
     */
    IndirectStatement(StatementKey key, Statements initialStatements = {});
    virtual ~IndirectStatement() = default;
    /**
     * Append the given statement to the child statements
     */
    void add(std::shared_ptr<Statement> statement) noexcept;
    // Statement interface — implementations are in the source file;
    // presumably these delegate to the child statements (verify there).
    unsigned int getNumberOfExecutions() const noexcept override;
    void resetExecutions() noexcept override;
    StatementKey getKey() const noexcept override;
    void write(gsl::not_null<YamlWriter*> yaml,
               const std::string& command) const noexcept override;
  private:
    StatementKey m_key;        // Key under which this statement is registered
    Statements m_statements;   // The contained child statements
};
} // namespace baseUtils
} // namespace test
} // namespace execHelper
#endif /* INDIRECT_STATEMENT_INCLUDE */
<file_sep>#include "log.h"
#include <boost/log/utility/setup/common_attributes.hpp>
#include <boost/log/utility/setup/formatter_parser.hpp>
#include "assertions.h"
using std::make_unique;
namespace execHelper {
namespace log {
// Initialize logging to std::clog by default.
LogInit::LogInit() noexcept { init(std::clog); }
// Initialize logging to the given stream. The stream must outlive this
// object.
LogInit::LogInit(std::ostream& logStream) noexcept { init(logStream); }
LogInit::~LogInit() {
    m_consoleLogger.reset();
} // NOLINT(fuchsia-default-arguments-calls)
// Registers the common boost.log attributes and the severity formatter,
// then installs the console logger sink.
void LogInit::init(std::ostream& logStream) noexcept {
    boost::log::add_common_attributes();
    boost::log::register_simple_formatter_factory<LogLevel, char>("Severity");
    m_consoleLogger = make_unique<ConsoleLogger>(logStream);
}
// Forwards the minimum severity for the given channel to the console
// logger sink.
void LogInit::setSeverity(const Channel& channel, LogLevel severity) {
    m_consoleLogger->setSeverity(channel, severity);
}
} // namespace log
namespace color {
// Writes the ANSI escape sequence for the given display modifier.
auto operator<<(std::ostream& os, const Modifier& mod) -> std::ostream& {
    return os << "\033[" << mod.code << "m";
}
/*! @copydoc operator<<(std::ostream&, const Modifier&)
 */
auto operator<<(std::wostream& os, const Modifier& mod) -> std::wostream& {
    return os << "\033[" << mod.code << "m";
}
} // namespace color
} // namespace execHelper
<file_sep>set(MODULE_NAME log)
set(LIBRARY_NAME ${PROJECT_NAME}-${MODULE_NAME})
set(SRCS
    src/log.cpp
    src/logLevel.cpp
    src/logger.cpp
    src/consoleLogger.cpp
   )
set(DEPENDENCIES
        gsl
        boost-log
    )
add_library(${LIBRARY_NAME} ${SRCS})
# Private headers for building the module itself; public ones for consumers.
target_include_directories(${LIBRARY_NAME} PRIVATE include/log)
target_include_directories(${LIBRARY_NAME} PUBLIC include ${DEPENDENCIES})
target_link_libraries(${LIBRARY_NAME} PUBLIC ${DEPENDENCIES})
# Alias so other modules can link against the short module name.
add_library(${MODULE_NAME} ALIAS ${LIBRARY_NAME})
install(TARGETS ${LIBRARY_NAME} DESTINATION ${LIB_DIRECTORY})
<file_sep>#include "yamlWrapper.h"
#include <iostream>
#include <vector>
#include <yaml-cpp/yaml.h>
#include "log/assertions.h"
#include "logger.h"
using std::initializer_list;
using std::string;
using execHelper::config::Path;
using execHelper::config::SettingsKeys;
using execHelper::config::SettingsNode;
using execHelper::config::SettingsValue;
namespace execHelper::yaml {
// Load and parse the YAML file at the given path. Parse errors propagate
// as yaml-cpp exceptions.
YamlWrapper::YamlWrapper(const Path& file)
    : m_node(YAML::LoadFile(file.string())) {
    ;
}
// Parse the given string as YAML content.
YamlWrapper::YamlWrapper(const string& yamlConfig)
    : m_node(YAML::Load(yamlConfig)) {
    ;
}
// YAML::Node has reference semantics, so Clone is needed to obtain an
// independent copy of the tree.
YamlWrapper::YamlWrapper(const YamlWrapper& other)
    : m_node(Clone(other.m_node)) {
    ;
}
// NOTE(review): the move constructor clones instead of moving the node,
// making a move as expensive as a copy.
YamlWrapper::YamlWrapper(YamlWrapper&& other) noexcept
    : m_node(Clone(other.m_node)) {
    ;
}
auto YamlWrapper::operator=(const YamlWrapper& other) -> YamlWrapper& {
    if(this != &other) {
        m_node = Clone(other.m_node);
    }
    return *this;
}
auto YamlWrapper::operator=(YamlWrapper&& other) noexcept -> YamlWrapper& {
    swap(other);
    return *this;
}
// NOTE(review): despite its name, this "swap" only copies other into
// *this; the other instance is left untouched (it is taken by const
// reference).
void YamlWrapper::swap(const YamlWrapper& other) noexcept {
    try {
        m_node = Clone(other.m_node);
    } catch(const YAML::InvalidNode&) {
        LOG(fatal) << "Swap failed";
        expectsMessage(false, "Should not get here");
    }
}
// Descends into a clone of the root node, following the given chain of
// keys, and returns the node found at the end of the chain.
auto YamlWrapper::getSubNode(
    const std::initializer_list<std::string>& keys) const -> YAML::Node {
    return std::accumulate(
        keys.begin(), keys.end(), Clone(m_node),
        [](const auto& node, const auto& key) { return node[key]; });
}
// Imports the subtree found under the given key chain into *settings.
// Returns false when the subtree is absent or empty, or when the YAML
// parser throws.
auto YamlWrapper::getTree(const initializer_list<string>& keys,
                          SettingsNode* settings) const noexcept -> bool {
    try {
        const YAML::Node& node = getSubNode(keys);
        if(node.size() == 0 || node.IsNull()) {
            return false;
        }
        return getTree(node, settings);
    } catch(YAML::Exception& e) {
        LOG(error) << "YAML parser threw error: " << e.what();
        return false;
    }
}
// Imports the whole tree rooted at rootNode into *settings. The node is
// cloned first so that traversal cannot alias the caller's tree
// (YAML::Node has reference semantics).
auto YamlWrapper::getTree(const YAML::Node& rootNode,
                          SettingsNode* settings) noexcept -> bool {
    const auto independentCopy = Clone(rootNode);
    return getSubTree(independentCopy, settings, SettingsKeys());
}
// Recursively copies the given YAML node into *yamlNode under the given
// key chain. Returns false on any conversion failure or invalid node.
auto YamlWrapper::getSubTree(const YAML::Node& node, SettingsNode* yamlNode,
                             const SettingsKeys& keys) noexcept -> bool {
    YAML::NodeType::value type = YAML::NodeType::Null;
    try {
        type = node.Type();
    } catch(const YAML::InvalidNode&) {
        expectsMessage(false, "Should not get here");
        LOG(error) << "Using an invalid YAML node";
        return false;
    }
    switch(type) {
    case YAML::NodeType::Null:
    case YAML::NodeType::Undefined:
        // Nothing to import; not considered an error
        break;
    case YAML::NodeType::Scalar:
        // Leaf value: store it under the accumulated key chain
        try {
            if(!yamlNode->add(keys, node.as<string>())) {
                LOG(warning) << "Failed to add key '" << keys.back() << "'";
            }
        } catch(const YAML::TypedBadConversion<string>&) {
            return false;
        } catch(const YAML::InvalidNode&) {
            return false;
        }
        break;
    case YAML::NodeType::Map:
        // Each map key becomes a settings node; recurse into its value
        // with the key appended to the chain.
        for(const auto& element : node) {
            SettingsValue key;
            try {
                key = element.first.as<string>();
            } catch(const YAML::TypedBadConversion<string>&) {
                return false;
            } catch(const YAML::InvalidNode&) {
                return false;
            }
            if(!yamlNode->add(keys, key)) {
                LOG(warning) << "Failed to add key '" << key << "'";
            }
            SettingsKeys newKeys = keys;
            newKeys.push_back(key);
            if(!YamlWrapper::getSubTree(element.second, yamlNode, newKeys)) {
                return false;
            }
        }
        break;
    case YAML::NodeType::Sequence:
        // Sequence elements are imported under the same key chain; stop at
        // the first failing element.
        if(!std::all_of(node.begin(), node.end(),
                        [&yamlNode, &keys](const auto& element) {
                            return YamlWrapper::getSubTree(element, yamlNode,
                                                           keys);
                        })) {
            return false;
        }
        break;
    }
    return true;
}
} // namespace execHelper::yaml
<file_sep>#ifndef __PMD_H__
#define __PMD_H__
#include "plugin.h"
namespace execHelper {
namespace plugins {
/**
 * \brief Implements the PMD plugin
 */
class Pmd : public Plugin {
  public:
    /**
     * Returns the default configuration for this plugin
     */
    config::VariablesMap
    getVariablesMap(const config::FleetingOptionsInterface& fleetingOptions)
        const noexcept override;
    /**
     * Configures and registers the pmd task(s) for execution
     */
    bool apply(core::Task task, const config::VariablesMap& variables,
               const config::Patterns& patterns) const noexcept override;
    /**
     * Returns a one-line, human-readable description of this plugin
     */
    std::string summary() const noexcept override;
};
} // namespace plugins
} // namespace execHelper
#endif /* __PMD_H__ */
<file_sep>cmake_minimum_required(VERSION 3.0)
# Minimal example project used to exercise the cmake plugin.
project(cmake-example CXX)
# Require strict C++11 (no compiler extensions).
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_EXTENSIONS OFF)
# Each example executable is built from a single source file. The former
# SOURCES variable was never referenced and has been removed.
add_executable(hello src/hello.cpp)
add_executable(world src/world.cpp)
install(TARGETS hello world DESTINATION bin COMPONENT runtime)
<file_sep>set(MODULE_NAME test-utils)
set(LIBRARY_NAME ${PROJECT_NAME}-${MODULE_NAME})
set(SRCS
        src/utils.cpp
    )
set(DEPENDENCIES
        filesystem
        boost-program-options
        log
        core
        plugins
        test-base-utils
    )
add_library(${LIBRARY_NAME} ${SRCS})
# Private headers for building the module itself; public ones (including
# the shared test stubs) for consumers.
target_include_directories(${LIBRARY_NAME} PRIVATE include/utils)
target_include_directories(${LIBRARY_NAME} PUBLIC ../stubs include ${DEPENDENCIES})
target_link_libraries(${LIBRARY_NAME} PUBLIC ${DEPENDENCIES})
# Alias so other modules can link against the short module name.
add_library(${MODULE_NAME} ALIAS ${LIBRARY_NAME})
install(TARGETS ${LIBRARY_NAME} DESTINATION ${LIB_DIRECTORY})
<file_sep>#ifndef COMMON_GENERATORS_INCLUDE
#define COMMON_GENERATORS_INCLUDE
#include <filesystem>
#include <optional>
#include "rapidcheck.h"
namespace rc {
/**
 * \brief Rapidcheck generator for filesystem paths. Currently always
 * yields the temporary directory path.
 */
template <> struct Arbitrary<std::filesystem::path> {
    static Gen<std::filesystem::path> arbitrary() {
        // TODO: Make this more challenging
        return gen::just(std::filesystem::temp_directory_path());
    }
};
/**
 * \brief Rapidcheck generator for std::optional<T>: yields either an
 * empty optional or one wrapping an arbitrary T.
 */
template <typename T> struct Arbitrary<std::optional<T>> {
    static Gen<std::optional<T>> arbitrary() {
        return gen::oneOf(
            gen::construct<std::optional<
                T>>(), // Create optional without content (std::nullopt)
            gen::construct<std::optional<T>>(
                gen::arbitrary<T>()) // Create optional with content
        );
    }
};
} // namespace rc
#endif /* COMMON_GENERATORS_INCLUDE */
<file_sep>task:add_args({'cmake'})
-- Determine the requested cmake mode; default to configuring the project.
local mode = one(config['mode']) or 'generate'
-- Validate the mode early and abort with a clear message on a typo.
if mode ~= 'generate' and mode ~= 'build' and mode ~= 'install' then
    user_feedback_error('You must define a valid mode! Options are: generate, build or install.')
    input_error('You must define a valid mode!')
end
-- Generate mode: configure the source tree into the build directory.
if mode == 'generate' then
    task:add_args({'-S', one(config['source-dir']) or '.'})
    task:add_args({'-B', one(config['build-dir']) or '.'})
    local generator = one(config['generator'])
    if generator then
        task:add_args({'-G', generator})
    end
    -- Forward every configured define as a -D"key=value" cache entry.
    local defines = config['defines']
    if defines then
        for k, v in pairs(defines) do
            task:add_args({'-D', '"' .. k .. '=' .. v .. '"'})
        end
    end
    task:add_args(get_verbose('--log-level=VERBOSE'))
end
-- Build mode: drive the underlying build tool through cmake --build.
if mode == 'build' then
    task:add_args({'--build', one(config['build-dir']) or '.'})
    local target = one(config['target'])
    if target then
        task:add_args({'--target', target})
    end
    local configuration = one(config['configuration'])
    if configuration then
        task:add_args({'--config', configuration})
    end
    -- Use the configured job count, falling back to the global setting.
    task:add_args({'--parallel', one(config['jobs']) or jobs})
    task:add_args(get_verbose('--verbose'))
end
-- Install mode: install the previously built tree.
if mode == 'install' then
    task:add_args({'--install', one(config['build-dir']) or '.'})
    local configuration = one(config['configuration'])
    if configuration then
        task:add_args({'--config', configuration})
    end
    local prefix = one(config['prefix'])
    if prefix then
        task:add_args({'--prefix', prefix})
    end
    local component = one(config['component'])
    if component then
        task:add_args({'--component', component})
    end
    task:add_args(get_verbose('--verbose'))
end
-- Append any user-supplied command line and register the finished task.
task:add_args(get_commandline())
register_task(task)
<file_sep>#ifndef ENVP_INCLUDES
#define ENVP_INCLUDES
#include "environment.h"
namespace execHelper::config {
/**
 * \brief Wrapper for the envp argument: owns the C-style environment array
 * handed to exec-like interfaces
 */
class Envp {
  public:
    /**
     * Create an object from the contents of an environment collection
     *
     * \param[in] env The environment collection to take the content from
     */
    explicit Envp(const EnvironmentCollection& env) noexcept;
    /*! @copydoc Argv::Argv(const Argv&)
     */
    Envp(const Envp& other) noexcept;
    /*! @copydoc Argv::Argv(Argv&&)
     */
    Envp(Envp&& other) noexcept;
    ~Envp() noexcept;
    /*! @copydoc Argv::operator=(const Argv&)
     */
    auto operator=(const Envp& other) noexcept -> Envp&;
    /*! @copydoc Argv::operator=(Argv&&)
     */
    auto operator=(Envp&& other) noexcept -> Envp&;
    /*! @copydoc Argv::swap(Argv&)
     */
    void swap(Envp& other) noexcept;
    /**
     * Returns the size of the collection
     *
     * \returns The size of the collection
     */
    [[nodiscard]] auto size() const noexcept -> size_t;
    /**
     * Clears the current content of the collection
     */
    void clear() noexcept;
    /**
     * Get the envp pointer as an array of C-style strings. The array is
     * delimited by a nullptr.
     *
     * \returns A pointer to an array of pointers to environment variables
     */
    [[nodiscard]] auto getEnvp() noexcept -> char**;
    /*! @copydoc getEnvp()
     */
    [[nodiscard]] auto getEnvp() const noexcept -> const char* const*;
  private:
    using Envp_t = std::vector<char*>;
    /**
     * Create a deep copy
     *
     * \param[in] other The other object to copy
     */
    void deepCopy(const Envp& other) noexcept;
    // Raw, owned environment entries handed out through getEnvp()
    Envp_t m_envp;
};
// Streams a human-readable representation of the environment
auto operator<<(std::ostream& os, const Envp& envp) noexcept -> std::ostream&;
} // namespace execHelper::config
#endif /* ENVP_INCLUDES */
<file_sep>#ifndef ASSERT_INCLUDE
#define ASSERT_INCLUDE
// When TERMINATE_ON_ASSERT_FAILURE is defined, contract violations print a
// source-located message and terminate the process. Otherwise all contract
// macros compile to nothing.
#ifdef TERMINATE_ON_ASSERT_FAILURE
#include <iostream>
#include <string>
namespace execHelper {
namespace log {
// Prints the message and terminates when the condition does not hold.
static inline void assertHelper(bool cond,
                                const std::string& message) noexcept {
    if(!cond) {
        std::cerr << message << std::endl;
        std::terminate();
    }
}
} // namespace log
} // namespace execHelper
// Builds a "<file>:<line>:0 <prefix>: <message>" string and checks cond.
#define assertMessage(cond, prefix, message)                                   \
    do {                                                                       \
        ::execHelper::log::assertHelper(cond,                                  \
                                        std::string(__FILE__)                  \
                                            .append(":")                       \
                                            .append(std::to_string(__LINE__)) \
                                            .append(":0 ")                     \
                                            .append(prefix)                    \
                                            .append(": ")                      \
                                            .append(message));                 \
    } while(false);
/**
 * Checks that an argument does not violate certain conditions (nominal
 * programming style)
 */
#define expectsMessage(cond, message)                                          \
    assertMessage(cond, "Precondition violated", message);
#define expects(cond) expectsMessage(cond, #cond);
/**
 * Checks that an invariant still holds
 */
#define ensuresMessage(cond, message)                                          \
    assertMessage(cond, "Invariant violated", message);
#define ensures(cond) ensuresMessage(cond, #cond);
#else
// Contract checking disabled: all macros expand to nothing.
#define expectsMessage(cond, message)
#define expects(cond)
#define ensuresMessage(cond, message)
#define ensures(cond)
#endif
#endif /* ASSERT_INCLUDE */
<file_sep>#include "valgrind.h"
#include <string>
#include <gsl/string_span>
#include "config/commandLineOptions.h"
#include "config/fleetingOptionsInterface.h"
#include "config/variablesMap.h"
#include "core/patterns.h"
#include "core/task.h"
#include "log/assertions.h"
#include "commandLine.h"
#include "executePlugin.h"
#include "logger.h"
#include "pluginUtils.h"
#include "verbosity.h"
using std::string;
using gsl::czstring;
using execHelper::config::CommandCollection;
using execHelper::config::FleetingOptionsInterface;
using execHelper::config::Patterns;
using execHelper::config::VariablesMap;
using execHelper::core::Task;
namespace {
const czstring<> PLUGIN_NAME = "valgrind";
using RunCommand = CommandCollection;
const czstring<> RUN_COMMAND_KEY = "run-command";
using Tool = string;
const czstring<> TOOL_KEY = "tool";
} // namespace
namespace execHelper::plugins {
// Returns the default configuration for the valgrind plugin: empty extra
// command-line arguments and the verbosity inherited from the fleeting
// (command-line) options.
auto Valgrind::getVariablesMap(const FleetingOptionsInterface& fleetingOptions)
    const noexcept -> VariablesMap {
    VariablesMap defaults(PLUGIN_NAME);
    if(!defaults.add(COMMAND_LINE_KEY, CommandLineArgs())) {
        LOG(error) << "Failed to add key '" << COMMAND_LINE_KEY << "'";
    }
    const auto* const verbosity = fleetingOptions.getVerbosity() ? "yes" : "no";
    if(!defaults.add(VERBOSITY_KEY, verbosity)) {
        LOG(error) << "Failed to add key '" << VERBOSITY_KEY << "'";
    }
    return defaults;
}
// Builds the valgrind command line, then delegates execution of the
// mandatory wrapped run command once for every pattern combination.
auto Valgrind::apply(Task task, const VariablesMap& variables,
                     const Patterns& patterns) const noexcept -> bool {
    task.append(PLUGIN_NAME);
    // The run command is mandatory: valgrind needs something to wrap
    auto runCommand = variables.get<RunCommand>(RUN_COMMAND_KEY);
    if(runCommand == std::nullopt) {
        user_feedback_error("Could not find the '"
                            << RUN_COMMAND_KEY << "' setting in the '"
                            << PLUGIN_NAME << "' settings");
        return false;
    }
    auto tool = variables.get<Tool>(TOOL_KEY);
    if(tool) {
        task.append(string("--tool=").append(*tool));
    }
    // NOTE(review): the optionals below are dereferenced unchecked. That
    // is only safe when 'variables' originates from getVariablesMap(),
    // which registers defaults for both keys — confirm for other callers.
    if(*(variables.get<Verbosity>(VERBOSITY_KEY))) {
        task.append("--verbose");
    }
    ensures(variables.get<CommandLineArgs>(COMMAND_LINE_KEY) != std::nullopt);
    task.append(*(variables.get<CommandLineArgs>(COMMAND_LINE_KEY)));
    // Execute the wrapped command once per pattern combination, with all
    // pattern occurrences in the task replaced by concrete values.
    for(const auto& combination : makePatternPermutator(patterns)) {
        Task newTask = replacePatternCombinations(task, combination);
        ExecutePlugin buildExecutePlugin(*runCommand);
        if(!buildExecutePlugin.apply(newTask, variables, patterns)) {
            return false;
        }
    }
    return true;
}
// One-line, human-readable description of this plugin.
auto Valgrind::summary() const noexcept -> std::string {
    return "Valgrind (internal)";
}
} // namespace execHelper::plugins
<file_sep>#ifndef PATH_MANIPULATION_INCLUDE
#define PATH_MANIPULATION_INCLUDE
#include <optional>
#include "environment.h"
#include "path.h"
namespace execHelper::config {
/**
 * Returns a list of the given path and all of its parents in reverse order.
 *
 * \param[in] path The path to start listing from
 * \returns A list of the given path and its parent paths in reverse order.
 */
[[nodiscard]] auto getAllParentDirectories(Path path) noexcept -> Paths;
/**
 * Returns the path to the home directory if it exists
 *
 * \param[in] env The environment to look the home directory up in
 * \returns The path to the home directory if it exists,
 * std::nullopt otherwise
 */
[[nodiscard]] auto getHomeDirectory(const EnvironmentCollection& env) noexcept
    -> std::optional<Path>;
} // namespace execHelper::config
#endif /* PATH_MANIPULATION_INCLUDE */
<file_sep>#include "testCommand.h"
#include <iostream>
#include "plugins.h"
using std::cerr;
using std::endl;
using std::shared_ptr;
using gsl::not_null;
namespace execHelper {
namespace test {
namespace baseUtils {
// Create a test command with the given key and optional initial statements.
TestCommand::TestCommand(std::string commandKey,
                         Statements initialStatements) noexcept
    : m_command(std::move(commandKey)),
      m_statements(std::move(initialStatements)) {
    ;
}
// Unchecked access to the index-th contained statement.
shared_ptr<Statement> TestCommand::operator[](size_t index) const noexcept {
    return m_statements[index];
}
Statements::const_iterator TestCommand::begin() const noexcept {
    return m_statements.begin();
}
Statements::const_iterator TestCommand::end() const noexcept {
    return m_statements.end();
}
size_t TestCommand::size() const noexcept { return m_statements.size(); }
// Returns the command key.
std::string TestCommand::get() const noexcept { return m_command; }
unsigned int TestCommand::getNbOfStatements() const noexcept {
    return m_statements.size();
}
// Sums the execution counts of all contained statements.
unsigned int TestCommand::getNumberOfStatementExecutions() const noexcept {
    return accumulate(m_statements.begin(), m_statements.end(), 0U,
                      [](const unsigned int& subTotal,
                         const shared_ptr<Statement>& statement) {
                          return subTotal + statement->getNumberOfExecutions();
                      });
}
// Appends a statement to this command.
void TestCommand::add(std::shared_ptr<Statement> statement) noexcept {
    m_statements.emplace_back(statement);
}
// Resets the execution counter of every contained statement.
void TestCommand::resetExecutions() noexcept {
    for(auto& statement : m_statements) {
        statement->resetExecutions();
    }
}
// Serializes this command and its statements into the given YAML document.
void TestCommand::write(not_null<YamlWriter*> yaml) const noexcept {
    try {
        (*yaml)["commands"][m_command] = "Execute the command";
        for(const auto& statement : m_statements) {
            (*yaml)[m_command].push_back(statement->getKey());
            statement->write(yaml, m_command);
        }
    } catch(const YAML::InvalidNode&) {
        cerr << "The given YAML node is invalid" << endl;
        assert(false);
    }
}
} // namespace baseUtils
} // namespace test
} // namespace execHelper
<file_sep>set(MODULE_NAME core)
set(LIBRARY_NAME ${PROJECT_NAME}-${MODULE_NAME})
set(SRCS
        src/task.cpp
        src/immediateExecutor.cpp
        src/reportingExecutor.cpp
        src/patterns.cpp
        src/posixShell.cpp
        src/logger.cpp
    )
set(DEPENDENCIES
        filesystem
        boost-filesystem
        boost-program-options
        log
        config
        yaml
    )
add_definitions(
        -DBOOST_MOVE_USE_STANDARD_LIBRARY_MOVE
    )
add_library(${LIBRARY_NAME} ${SRCS})
# Private headers for building the module itself; public ones for consumers.
target_include_directories(${LIBRARY_NAME} PRIVATE include/core)
target_include_directories(${LIBRARY_NAME} PUBLIC include ${DEPENDENCIES})
target_link_libraries(${LIBRARY_NAME} PUBLIC ${DEPENDENCIES})
# Alias so other modules can link against the short module name.
add_library(${MODULE_NAME} ALIAS ${LIBRARY_NAME})
install(TARGETS ${LIBRARY_NAME} DESTINATION ${LIB_DIRECTORY})
<file_sep>See also
========
See :ref:`exec-helper` (1) for information about the usage of :program:`exec-helper`.
See :ref:`exec-helper-custom-plugins` (5) for the available plugins and their configuration options.
See :ref:`exec-helper-config` (5) for information about the configuration file format.
<file_sep>#include <filesystem>
#include <memory>
#include <string>
#include <vector>
#include <gsl/string_span>
#include "commander/commander.h"
#include "config/pattern.h"
#include "config/settingsNode.h"
#include "config/variablesMap.h"
#include "log/log.h"
#include "plugins/commandPlugin.h"
#include "plugins/memory.h"
#include "plugins/pluginUtils.h"
#include "unittest/catch.h"
#include "utils/utils.h"
#include "executorStub.h"
#include "fleetingOptionsStub.h"
using std::shared_ptr;
using std::string;
using std::vector;
using gsl::czstring;
using execHelper::config::Command;
using execHelper::config::CommandCollection;
using execHelper::config::EnvironmentCollection;
using execHelper::config::Path;
using execHelper::config::Pattern;
using execHelper::config::Patterns;
using execHelper::config::SettingsNode;
using execHelper::config::VariablesMap;
using execHelper::core::Task;
using execHelper::plugins::CommandPlugin;
using execHelper::plugins::getPatternsKey;
using execHelper::plugins::Memory;
using execHelper::plugins::MemoryHandler;
using execHelper::plugins::Plugin;
using execHelper::plugins::Plugins;
using execHelper::core::test::ExecutorStub;
using execHelper::test::FleetingOptionsStub;
using execHelper::test::utils::getPredefinedPatterns;
namespace filesystem = std::filesystem;
namespace {
const czstring<> COMMANDS_KEY = "commands";
const czstring<> MEMORY_KEY = "memory";
} // namespace
namespace execHelper::commander::test {
// Happy-path test: configured commands are resolved to their plugins and
// executed with the expected tasks and patterns. The memory plugin records
// every execution so the results can be inspected afterwards.
SCENARIO("Basic test the commander", "[commander]") {
    MAKE_COMBINATIONS("Of different inputs for the commander") {
        FleetingOptionsStub fleetingOptions;
        ExecutorStub::TaskQueue expectedTasks;
        SettingsNode settings("test");
        Patterns patterns;
        Patterns expectedPatterns = getPredefinedPatterns();
        EnvironmentCollection env;
        Path workingDirectory = filesystem::current_path();
        MemoryHandler memory;
        Commander commander;
        // Register one command backed by the memory plugin
        const Command command1("command1");
        fleetingOptions.m_commands.push_back(command1);
        REQUIRE(settings.add(COMMANDS_KEY, command1));
        REQUIRE(settings.add(command1, MEMORY_KEY));
        Task expectedTask;
        expectedTask.setWorkingDirectory(workingDirectory);
        expectedTasks.emplace_back(expectedTask);
        Plugins plugins = {
            {COMMANDS_KEY, shared_ptr<Plugin>(new CommandPlugin())},
            {MEMORY_KEY, shared_ptr<Plugin>(new Memory())}};
        COMBINATIONS("Add multiple commands") {
            const CommandCollection commands(
                {"multiple-command1", "multiple-command2"});
            for(const auto& command : commands) {
                fleetingOptions.m_commands.push_back(command);
                REQUIRE(settings.add(COMMANDS_KEY, command));
                REQUIRE(settings.add(command, MEMORY_KEY));
                Task expectedTask;
                expectedTask.setWorkingDirectory(workingDirectory);
                expectedTasks.emplace_back(expectedTask);
            }
        }
        COMBINATIONS("Add patterns") {
            patterns.emplace_back(Pattern("pattern1", {"value1a", "value1b"}));
            patterns.emplace_back(Pattern("pattern2", {"value2a", "value2b"}));
            patterns.emplace_back(Pattern("pattern3", {"value3a", "value3b"}));
        }
        COMBINATIONS("Change working directory") {
            workingDirectory = "/tmp";
            for(auto& expectedTask : expectedTasks) {
                expectedTask.setWorkingDirectory(workingDirectory);
            }
        }
        COMBINATIONS("Set environment") {
            env.emplace("ENV1", "VALUE1");
            env.emplace("ENV2", "VALUE2");
            for(auto& expectedTask : expectedTasks) {
                expectedTask.setEnvironment(env);
            }
        }
        // Every registered memory command is configured to expect all patterns
        expectedPatterns.insert(expectedPatterns.end(), patterns.begin(),
                                patterns.end());
        for(const auto& pattern : expectedPatterns) {
            REQUIRE(
                settings.add({MEMORY_KEY, getPatternsKey()}, pattern.getKey()));
        }
        THEN_WHEN("We apply the configuration and run the commander") {
            bool returnCode =
                commander.run(fleetingOptions, settings, patterns,
                              workingDirectory, env, Plugins{plugins});
            THEN_CHECK("It must succeed") { REQUIRE(returnCode); }
            THEN_CHECK("We should get the tasks executed") {
                // The memory plugin recorded one execution per expected task,
                // in order
                const Memory::Memories& memories =
                    plugins::MemoryHandler::getExecutions();
                REQUIRE(memories.size() == expectedTasks.size());
                auto expectedTask = expectedTasks.begin();
                for(auto memory = memories.begin(); memory != memories.end();
                    ++expectedTask, ++memory) {
                    REQUIRE(memory->task == *expectedTask);
                    REQUIRE(memory->patterns == expectedPatterns);
                }
            }
        }
    }
}
// Verifies that requesting a command that is not present in the configuration
// makes the commander fail.
SCENARIO(
    "Test what happens when an unknown command is passed on the command line",
    "[commander]") {
    GIVEN("A fully configured commander and a configuration file and a command "
          "line with an invalid command") {
        string command1("command1");
        string command2("command2");
        vector<string> commands({command1, command2});
        FleetingOptionsStub fleetingOptions;
        // 'command3' is deliberately absent from the configured commands
        fleetingOptions.m_commands = {"command3"};
        SettingsNode settings("test");
        REQUIRE(settings.add(COMMANDS_KEY, commands));
        REQUIRE(settings.add(command1, MEMORY_KEY));
        REQUIRE(settings.add(command2, MEMORY_KEY));
        Commander commander;
        WHEN("We apply the configuration and run the commander") {
            THEN("It should fail") {
                REQUIRE_FALSE(
                    commander.run(fleetingOptions, settings, Patterns(),
                                  filesystem::current_path(),
                                  EnvironmentCollection(), Plugins()));
            }
        }
    }
}
// Verifies that the commander fails when no command is requested at all.
SCENARIO("Test when no commands are passed", "[commander]") {
    GIVEN("A fully configured commander and no command set") {
        string command1("command1");
        string command2("command2");
        vector<string> commands({command1, command2});
        // Note: fleetingOptions.m_commands is intentionally left empty
        FleetingOptionsStub fleetingOptions;
        SettingsNode settings("test");
        REQUIRE(settings.add(COMMANDS_KEY, commands));
        REQUIRE(settings.add(command1, MEMORY_KEY));
        REQUIRE(settings.add(command2, MEMORY_KEY));
        Commander commander;
        Plugins plugins = {
            {COMMANDS_KEY, shared_ptr<Plugin>(new CommandPlugin())},
        };
        WHEN("We apply the configuration and run the commander") {
            THEN("It should fail") {
                REQUIRE_FALSE(
                    commander.run(fleetingOptions, settings, Patterns(),
                                  filesystem::current_path(),
                                  EnvironmentCollection(), Plugins{plugins}));
            }
        }
    }
}
} // namespace execHelper::commander::test
<file_sep>#include "configFileWriter.h"
#include <fstream>
using std::ofstream;
using std::string;
namespace execHelper {
namespace test {
namespace baseUtils {
// Creates a uniquely named configuration file; the '%%%%' placeholder is
// expanded to random characters by the underlying temporary-file abstraction
ConfigFileWriter::ConfigFileWriter() noexcept
    : m_file("exec-helper.config.%%%%") {
    ;
}
// Returns the full path to the generated configuration file
Path ConfigFileWriter::getPath() const noexcept { return m_file.getPath(); }
// Returns the file name component of the configuration file
string ConfigFileWriter::getFilename() const noexcept {
    return m_file.getFilename();
}
// Returns the directory containing the configuration file
string ConfigFileWriter::getDirectory() const noexcept {
    return m_file.getParentDirectory();
}
bool ConfigFileWriter::write(const YamlWriter& yaml) noexcept {
ofstream configStream;
configStream.open(m_file.getPath(), ofstream::out | ofstream::trunc);
configStream << yaml;
configStream.close();
return configStream.good();
}
} // namespace baseUtils
} // namespace test
} // namespace execHelper
<file_sep>set(MODULE_NAME Gitchangelog)
set(EXE_NAME gitchangelog)
# Only search when the result is not already cached from a previous configure
if(NOT ${MODULE_NAME}_FOUND)
    find_program(${MODULE_NAME} NAMES ${EXE_NAME} PATHS /bin /usr/bin /usr/local/bin)
    mark_as_advanced(${MODULE_NAME})
    # find_program() sets the variable to '<name>-NOTFOUND' on failure
    if(${MODULE_NAME} MATCHES ".*-NOTFOUND")
        MESSAGE(STATUS "Could NOT find " ${MODULE_NAME})
        set(${MODULE_NAME}_FOUND FALSE)
        unset(${MODULE_NAME})
    else()
        MESSAGE(STATUS "Found " ${MODULE_NAME} ": " ${${MODULE_NAME}})
        set(${MODULE_NAME}_FOUND TRUE)
    endif()
endif()
<file_sep>#ifndef CAST_IMPL_INCLUDE
#define CAST_IMPL_INCLUDE
#include <optional>
#include <vector>
#include <boost/lexical_cast.hpp>
#include "log/log.h"
#include "cast.h"
#include "path.h"
namespace execHelper {
namespace config {
namespace detail {
/**
 * \brief Partial specialization for casting the given type U to an optional of
 * type bool
 *
 * Only the last element of the given collection is considered: the strings
 * "yes", "1" and "true" map to true, anything else to false.
 */
template <typename U> class Cast<bool, U> {
  public:
    /*! @copydoc Cast<T,U>::cast(const U& values)
     */
    static std::optional<bool> cast(const U& values) noexcept;
};
/**
 * \brief Partial specialization for casting the given type U to an optional of
 * type vector<T>
 *
 * Every element of the given collection is converted to type T.
 */
template <typename T, typename U> class Cast<std::vector<T>, U> {
  public:
    /*! \copydoc Cast<T,U>::cast(const U& values)
     */
    static std::optional<std::vector<T>> cast(const U& values) noexcept;
};
/**
 * \brief Partial specialization for casting the given type U to an optional of
 * type Path
 *
 * The last element of the given collection is interpreted as a path.
 */
template <typename U> class Cast<Path, U> {
  public:
    /*! \copydoc Cast<T,U>::cast(const U& values)
     */
    static std::optional<Path> cast(const U& values) noexcept;
};
// Casts the last element of the given collection to type T.
// Returns std::nullopt when the collection is empty or the conversion fails.
template <typename T, typename U>
inline std::optional<T> Cast<T, U>::cast(const U& values) noexcept {
    if(values.size() == 0U) {
        return std::nullopt;
    }
    try {
        return std::make_optional(boost::lexical_cast<T>(values.back()));
    } catch(boost::bad_lexical_cast& e) {
        // Swallow the exception: this function is noexcept and reports
        // failure through the empty optional
        user_feedback_error("Internal error");
        return std::nullopt;
    }
}
// Interprets the last element of the given collection as a boolean.
// Returns std::nullopt when the collection is empty.
template <typename U>
inline std::optional<bool> Cast<bool, U>::cast(const U& values) noexcept {
    if(values.size() == 0U) {
        return std::nullopt;
    }
    // Only the last value is considered; any of these spellings means 'true'
    const auto& lastValue = values.back();
    const bool isTrue =
        (lastValue == "yes") || (lastValue == "1") || (lastValue == "true");
    return isTrue;
}
/**
 * Casts every element of the given collection to type T.
 *
 * \returns The converted collection, or std::nullopt when any element fails
 *          to convert. The conversion failure must be caught here: letting
 *          boost::bad_lexical_cast escape this noexcept function would call
 *          std::terminate (the scalar Cast<T,U>::cast already guards against
 *          this; this specialization previously did not).
 */
template <typename T, typename U>
inline std::optional<std::vector<T>>
Cast<std::vector<T>, U>::cast(const U& values) noexcept {
    std::vector<T> result;
    result.reserve(values.size());
    try {
        for(const auto& value : values) {
            result.push_back(boost::lexical_cast<T>(value));
        }
    } catch(const boost::bad_lexical_cast&) {
        user_feedback_error("Internal error");
        return std::nullopt;
    }
    return result;
}
// Converts the last element of the given collection to a Path by first
// casting it to a string; returns std::nullopt when that cast fails
template <typename U>
inline std::optional<Path> Cast<Path, U>::cast(const U& values) noexcept {
    auto stringValue = Cast<std::string, U>::cast(values);
    if(!stringValue) {
        return std::nullopt;
    }
    return Path(stringValue.value());
}
} // namespace detail
} // namespace config
} // namespace execHelper
#endif /* CAST_IMPL_INCLUDE */
<file_sep>#ifndef LUA_PLUGIN_INCLUDE
#define LUA_PLUGIN_INCLUDE
#include "config/commandLineOptions.h"
#include "config/path.h"
#include "plugin.h"
namespace execHelper::plugins {
/**
 * \brief Plugin for running a lua plugin as an exec-helper plugin
 *
 * Wraps an arbitrary lua script so that it can be used as a first-class
 * exec-helper plugin.
 */
class LuaPlugin : public Plugin {
  public:
    /**
     * Create a lua plugin for the given script
     *
     * @param[in] script Absolute path to the lua script to run
     */
    explicit LuaPlugin(config::Path script) noexcept;

    // Returns the default configuration variables for this plugin
    // (see the Plugin interface)
    config::VariablesMap
    getVariablesMap(const config::FleetingOptionsInterface& fleetingOptions)
        const noexcept override;

    // Runs the wrapped lua script for the given task, configuration and
    // patterns (see the Plugin interface)
    bool apply(core::Task task, const config::VariablesMap& config,
               const config::Patterns& patterns) const noexcept override;

    // Returns a short human-readable description of this plugin
    std::string summary() const noexcept override;

  private:
    config::Path m_script; //!< Path to the lua script
};
} // namespace execHelper::plugins
#endif /* LUA_PLUGIN_INCLUDE */
<file_sep>from typing import List
import pytest
from pytest_bdd import scenarios, given, when, then
from scenarios_run import *
scenarios('../feature/cmd-args', example_converters=dict(command_line = CommandLineArgs, command = str, pattern = PatternType, nb_of_times = int, return_code = int))
@then('stdout should contain <command>')
def stdout_plugin_id(run_environment, command):
    # Delegate to the shared step: the command name must appear in stdout
    stdout_contains(run_environment, command)
@then('stdout should contain the full line <command>')
def stdout_plugin_regex_command(run_environment, command):
    # The command must occupy a complete line of stdout
    stdout_contains_regex(run_environment, f'^{command}$')
@then('stdout should contain the full line <pattern>')
def stdout_plugin_regex_pattern(run_environment, pattern):
    # Every long option of the pattern must occupy a complete line of stdout
    for option in pattern.long_options:
        stdout_contains_regex(run_environment, f'^--{option}$')
<file_sep>task:add_args({'scons'})
-- Build in the configured build directory, defaulting to the current one
task:add_args({'--directory', one(config['build-dir']) or '.'})
-- Forward verbosity to scons as explain-style debug output
task:add_args(get_verbose('--debug=explain'))
-- Use the configured number of jobs, falling back to the global default
task:add_args({'--jobs', one(config['jobs']) or jobs})
-- Append any user-supplied command line arguments
task:add_args(get_commandline())
register_task(task)
<file_sep>ARG DOCKER_TAG=latest
# Base image: Arch Linux with an AUR helper preinstalled
FROM bverhagen/awesome-aur-wrapper:${DOCKER_TAG}
LABEL maintainer="<EMAIL>"
# Refresh the keyring first to avoid signature errors on package installs
RUN sudo pacman -Sy --needed --noconfirm archlinux-keyring && sudo pacman -Scc --noconfirm # Fixes some pacman keyring issues
# Build and documentation dependencies from the official repositories
RUN sudo pacman -Sy --needed --noconfirm boost boost-libs yaml-cpp gcc clang cmake make libffi ninja doxygen python3 pkg-config catch2 curl git fakeroot patch file python-sphinx python-sphinx_rtd_theme && sudo pacman -Scc --noconfirm
# AUR-only dependencies; clean all caches to keep the image small
RUN yay -S --needed --noconfirm --noprovides rapidcheck microsoft-gsl-git python-gitchangelog && sudo pacman -Scc --noconfirm && sudo rm -rf /.cache && sudo rm -rf /tmp/*
<file_sep>task:add_args({"docker"})
-- Select the docker sub-command: 'exec' (default) attaches to a running
-- container, 'run' starts a new one from an image.
local mode = one(config['mode']) or 'exec'
-- Translate the yes/no configuration values into docker's boolean flags
local interactive = '--interactive=' .. (one(config['interactive']) == 'yes' and 'true' or 'false')
local tty = '--tty=' .. (one(config['tty']) == 'yes' and 'true' or 'false')
task:add_args({mode, interactive, tty})
local privileged = one(config['privileged'])
if privileged == 'yes' then
    task:add_args({'--privileged'})
end
-- Optionally run as a specific user inside the container
local user = one(config['user'])
if user then
    task:add_args({'--user="' .. user .. '"'})
end
-- Forward configured environment variables into the container
local env = config['env']
if env then
    for key,value in pairs(config['env']) do
        task:add_args({'"--env=' .. key .. '=' .. value .. '"' })
    end
end
task:add_args(get_commandline())
if mode == 'run' then
    -- 'run' mode: mount the configured volumes and require an image
    local volumes = list(config['volumes'])
    if volumes then
        for _,volume in ipairs(volumes) do
            task:add_args({'"--volume=' .. volume .. '"' })
        end
    end
    local image = one(config['image'])
    if image == nil then
        user_feedback_error('You must define an image when you define the docker plugin in "run" mode!')
        input_error('You must define an image when you define the docker plugin in "run" mode!')
    end
    task:add_args({image})
end
if mode == 'exec' then
    -- 'exec' mode: require the target container
    local container = one(config['container'])
    if container == nil then
        -- Fixed message grammar: 'an container' -> 'a container'
        user_feedback_error('You must define a container when you define the docker plugin in "exec" mode!')
        input_error('You must define a container when you define the docker plugin in "exec" mode!')
    end
    task:add_args({container})
end
-- Either forward the task to the configured targets or register it directly
local targets = list(config['targets'])
if targets then
    run_target(task, targets)
else
    register_task(task)
end
<file_sep>#include "execution.h"
#include <cassert>
#include <fcntl.h>
#include <filesystem>
#include <sys/stat.h>
#include <sys/wait.h>
#include <unistd.h>
#include <boost/filesystem.hpp>
#include <boost/process.hpp>
#include "base-utils/executionContent.h"
#include "base-utils/path.h"
#include "unittest/logger.h"
namespace bp = boost::process;
namespace filesystem = boost::filesystem;
using boost::filesystem::exists;
using boost::process::search_path;
using boost::process::system;
using boost::system::error_code;
using execHelper::test::baseUtils::Path;
using execHelper::test::baseUtils::ReturnCode;
using execHelper::test::baseUtils::execution::CommandLine;
namespace execHelper {
namespace test {
namespace baseUtils {
namespace execution {
int execute(const CommandLine& commandLine, const Path& workingDir) noexcept {
filesystem::path binary = commandLine.front();
if(!filesystem::exists(binary)) {
binary = search_path(binary);
if(binary.empty()) {
LOG(error) << "Could not find binary '" << binary.native()
<< "' on this system";
assert(false); // Fix the test
}
}
CommandLine args;
args.reserve(commandLine.size() - 1);
args.insert(args.end(), commandLine.begin() + 1, commandLine.end());
return system(binary, bp::args = args,
bp::start_dir = filesystem::path(workingDir));
}
} // namespace execution
} // namespace baseUtils
} // namespace test
} // namespace execHelper
<file_sep>import json
from pathlib import Path
import re
import sys
import tempfile
import uuid
import pytest
from pytest_bdd import scenarios, given, when, then, parsers
from run_environment import RunEnvironment
from config import Config
from pattern import Pattern
from command import Command
# Conversion types
def Environment(string):
    """Convert a 'KEY:VALUE;KEY:VALUE' string into an environment dict.

    Entries are separated by ';'; within an entry, the key and value are
    separated by ':' (only the first two ':'-separated fields are used).
    """
    result = {}
    for entry in string.split(";"):
        fields = entry.split(":")
        result[fields[0]] = fields[1]
    return result
def CommandLineArgs(string):
    """Convert a space-separated string into a list of command line arguments.

    Note: splits on single spaces only, so consecutive spaces yield empty
    entries.
    """
    arguments = string.split(" ")
    return arguments
def PatternType(string):
    """Parse a JSON pattern description into a Pattern object.

    The JSON object must provide 'key' and 'default_values'; 'long_options'
    is optional and defaults to an empty list.
    """
    spec = json.loads(string)
    options = spec['long_options'] if 'long_options' in spec else []
    return Pattern(spec['key'], spec['default_values'], options)
def add_command(config, command, return_code):
    """Register <command> as a command-line-command in the configuration.

    The created command is placed in the configuration's directory and exits
    with the given return code when run.
    """
    new_command = Command(command, 'command-line-command', config.directory,
                          return_code)
    config.add_command(new_command)
@given('a controlled environment')
def run_environment():
    # Create an isolated working directory under the system temp dir so test
    # runs cannot interfere with each other or with the host environment
    temp_directory = tempfile.gettempdir()
    temp_folder = 'eh-' + str(uuid.uuid4())
    root_dir = Path(f"{temp_directory}/{temp_folder}")
    return RunEnvironment(root_dir)
@given('a valid configuration')
def config(run_environment):
    # Attach a fresh configuration to the run environment and return it
    run_environment.config = Config(run_environment.root_dir)
    return run_environment.config
@given('the <command> command')
def a_command(config, command):
    # Declare the command in the configuration and return its handle
    config.create_command(command)
    return config.commands[command]
@given('the <pattern> pattern')
def a_pattern(config, pattern):
    config.add_pattern(pattern)
    return pattern
@when('we add the <command> that returns <return_code>')
def add_command_return_code(config, command, return_code):
    # Register a command that exits with the given return code
    add_command(config, command, return_code)
@when('we add the <command> command')
def add_simple_command(config, command):
    # Register a command that always succeeds (return code 0)
    add_command(config, command, 0)
@when('we run the <command> command')
def run_one_command(run_environment, command):
    run_environment.run_application([command])
@when('we add the <command_line> as command line arguments')
def add_command_line(run_environment, command_line):
    run_environment.add_commandline(command_line)
@when('we add the <command> <nb_of_times> to the command line options')
def add_command_nb_of_times(run_environment, command, nb_of_times):
    # Repeat the same command the requested number of times
    run_environment.add_commandline([command] * nb_of_times)
@when('we add the <command> to the command line options')
def add_command_cli(run_environment, command):
    run_environment.add_commandline([command])
@when('we call the application')
def call_no_option(run_environment):
    # Run the application with whatever command line was accumulated so far
    run_environment.run_application()
@then('the call should succeed')
def call_succeeds(run_environment):
    # On failure, dump the captured output to ease debugging
    if run_environment.last_run.returncode != 0:
        print(run_environment.last_run.stdout)
        print(run_environment.last_run.stderr, file = sys.stderr)
        raise AssertionError(f"Call was expected to succeed, but it failed with return code '{run_environment.last_run.returncode}'")
@then('the call should fail with return code <return_code>')
@then(parsers.cfparse('the call should fail with return code {return_code:Number}', extra_types=dict(Number=int)))
def call_fails(run_environment, return_code):
    if run_environment.last_run.returncode != return_code:
        print(run_environment.last_run.stdout)
        print(run_environment.last_run.stderr, file = sys.stderr)
        raise AssertionError(f"Call was expected to return code '{return_code}', but it returned '{run_environment.last_run.returncode}'")
@then(parsers.parse('stdout should contain {expected}'))
def stdout_contains(run_environment, expected):
    # Feature files quote the expected string; strip the quotes first
    expected = expected.strip("'")
    if not expected.encode('utf-8') in run_environment.last_run.stdout:
        print(run_environment.last_run.stdout, file = sys.stdout)
        raise AssertionError(f"Stdout was expected to contain '{expected}', but it did not")
@then(parsers.parse('stdout should contain regex {expected}'))
def stdout_contains_regex(run_environment, expected):
    expected = expected.strip("'")
    # Multiline mode so '^' and '$' match per line of the captured output
    regex = re.compile(expected.encode('utf-8'), re.MULTILINE)
    if not regex.search(run_environment.last_run.stdout):
        print(run_environment.last_run.stdout, file = sys.stdout)
        raise AssertionError(f"Regex {expected} did not match stdout")
@then(parsers.parse('stdout should not contain {expected}'))
def stdout_not_contains(run_environment, expected):
    expected = expected.strip("'")
    if expected.encode('utf-8') in run_environment.last_run.stdout:
        print(run_environment.last_run.stdout, file = sys.stdout)
        raise AssertionError(f"Stdout was expected to _not_ contain '{expected}', but it did")
@then(parsers.parse('stderr should contain {expected}'))
def stderr_contains(run_environment, expected):
    expected = expected.strip("'")
    if not expected.encode('utf-8') in run_environment.last_run.stderr:
        print(run_environment.last_run.stderr, file = sys.stderr)
        raise AssertionError(f"Stderr was expected to contain '{expected}', but it did not")
@then(parsers.parse('stderr should not contain {expected}'))
def stderr_not_contains(run_environment, expected):
    expected = expected.strip("'")
    if expected.encode('utf-8') in run_environment.last_run.stderr:
        print(run_environment.last_run.stderr, file = sys.stderr)
        raise AssertionError(f"Stderr was expected to _not_ contain '{expected}', but it did")
@then('stderr should be empty')
def stderr_is_empty(run_environment):
    stderr = run_environment.last_run.stderr
    if stderr:
        raise AssertionError(f"Stderr is expected to be empty, but contains: '{stderr}'")
@when('run the <command> command <nb_of_times> in the same statement')
def run_command_n_times(run_environment, command, nb_of_times):
    # List multiplication instead of a comprehension with an unused loop
    # variable; identical result, clearer intent
    args = [command] * nb_of_times
    run_environment.run_application(args)
@then('the <command> command should be called <nb_of_times> times')
@then(parsers.cfparse('the <command> command should be called {nb_of_times:Number} times', extra_types=dict(Number=int)))
def command_called_times(run_environment, command, nb_of_times):
    # Each invocation of a command is recorded in its 'runs' list
    assert(len(run_environment.config.commands[command].runs) == nb_of_times)
<file_sep>.. _mainpage:
Exec-helper
***********
Or How To Get Your Coffee In Peace.
|documentation| |nbsp| |build_badge| |nbsp| |coverage_badge| |nbsp| |code_quality_badge|
What
====
Exec-helper is a meta-wrapper for executing tasks on the command line.
Why
===
Exec-helper improves the main bottleneck in your development workflow: you.
It does this by:
* Reducing the number of keystrokes required to execute the same command over and over again
* Chaining multiple commands
All without sacrificing (much) flexibility or repeating useless work.
If this, together with *getting coffee in peace* is not a sufficient rationale for you, the main advantages of exec-helper over (simple) scripts or plain command line commands are:
* Easy permutation of multiple execution parameters (so-called *patterns* in exec-helper).
* Easy selection of a subset of execution parameters.
* Improved DRY: execution parameters are only changed in one place, instead of everywhere in your command line.
* Technology-agnostic approach: e.g. running the `exec-helper build` can build a C++ project in one directory structure and a JAVA project in another.
* Enables a self-documented workflow.
* Out of the box support for multi-valued options and default values.
* Searches for a suitable configuration in its parent folders.
* Fast to type using the `eh` alias
* Easy to find and/or list available commands using the `--help` option.
* Easy extensible with your own, first-class citizen, plugins.
* Automatic autocompletion of commands and patterns
Simple example
==============
This is a simple illustration of the concept behind exec-helper. More extensive information and examples can be found in the *.exec-helper* configuration file for this repository and in the `documentation <http://exec-helper.readthedocs.io>`_.
Use case
--------
Build a C++ project using g++ and clang++ using cmake in a *Debug* and *RelWithDebInfo* configuration
Configuration file
------------------
Copy the following to a file named '.exec-helper'::
commands:
init: Initialize build
build: Build-only + install
build-only: Build
install: Install
patterns:
COMPILER:
default-values:
- g++
- clang++
short-option: c
long-option: compiler
MODE:
default-values:
- debug
- release
short-option: m
long-option: mode
build:
- build-only
- install
init:
- command-line-command
build-only:
- make
install:
- make
command-line-command:
init:
patterns:
- COMPILER
- MODE
command-line: [ cmake, -H., "-Bbuild/{COMPILER}/{MODE}", "-DCMAKE_CXX_COMPILER={COMPILER}", "-DCMAKE_INSTALL_PREFIX=install/{COMPILER}/{MODE}", "-DCMAKE_BUILD_TYPE={MODE}"]
make:
patterns:
- COMPILER
- MODE
build-dir: "build/{COMPILER}/{MODE}"
install:
command-line: install
Example output
--------------
.. code-block:: bash
$ exec-helper --help
-h [ --help ] Produce help message
--version Print the version of this binary
-v [ --verbose ] Set verbosity
-j [ --jobs ] arg Set number of jobs to use. Default: auto
-n [ --dry-run ] Dry run exec-helper
-s [ --settings-file ] arg Set the settings file
-d [ --debug ] arg Set the log level
-z [ --command ] arg Commands to execute
-c [ --compiler ] arg Values for pattern 'compiler'
-m [ --mode ] arg Values for pattern 'mode'
Configured commands:
init Initialize build
build Build-only + install
build-only Build
install Install
$ exec-helper init build # Permutate all combinations of the default values
Executing "cmake -H. -Bbuild/g++/debug -DCMAKE_CXX_COMPILER=g++ -DCMAKE_INSTALL_PREFIX=install/g++/debug -DCMAKE_BUILD_TYPE=debug"
Executing "cmake -H. -Bbuild/g++/release -DCMAKE_CXX_COMPILER=g++ -DCMAKE_INSTALL_PREFIX=install/g++/release -DCMAKE_BUILD_TYPE=release"
Executing "cmake -H. -Bbuild/clang++/debug -DCMAKE_CXX_COMPILER=clang++ -DCMAKE_INSTALL_PREFIX=install/clang++/debug -DCMAKE_BUILD_TYPE=debug"
Executing "cmake -H. -Bbuild/clang++/release -DCMAKE_CXX_COMPILER=clang++ -DCMAKE_INSTALL_PREFIX=install/clang++/release -DCMAKE_BUILD_TYPE=release"
Executing "make --directory build/g++/debug --jobs 8"
Executing "make --directory build/g++/release --jobs 8"
Executing "make --directory build/clang++/debug --jobs 8"
Executing "make --directory build/clang++/release --jobs 8"
Executing "make --directory build/g++/debug --jobs 8 install"
Executing "make --directory build/g++/release --jobs 8 install"
Executing "make --directory build/clang++/debug --jobs 8 install"
Executing "make --directory build/clang++/release --jobs 8 install"
$ exec-helper build-only --compiler g++ --mode release # Only build the g++ build in release mode
Executing make --directory build/g++/release --jobs 8
$ exec-helper install --compiler g++ --mode debug RelWithDebInfo # Install a subset - even using ones not listed in the default values
Executing make --directory build/g++/debug --jobs 8 install
Executing make --directory build/g++/RelWithDebInfo --jobs 8 install
Installation
============
See :ref:`INSTALL` for more information on:
* Using one of the available packages or installers
* (Cross-)build from source
Documentation
=============
See `documentation <http://exec-helper.readthedocs.io>`_ for the latest documentation.
Usage
-----
see :ref:`exec-helper` for usage information.
Configuration
-------------
See :ref:`exec-helper-config` for information on the configuration file format.
Available plugins
-----------------
See :ref:`exec-helper-plugins` for a list of all available plugins.
Writing custom plugins
----------------------
See :ref:`exec-helper-custom-plugins` for a guide on writing your own plugins.
Code quality
============
The source code of this project is continuously analyzed by multiple tools in an attempt to catch and fix issues and bugs as quickly as possible. Released versions should have passed the analysis from the following tools:
* `AddressSanitizer <https://clang.llvm.org/docs/AddressSanitizer.html>`_ (ASan)
* `clang-format <https://clang.llvm.org/docs/ClangFormat.html>`_
* `clang-static-analyzer <https://clang-analyzer.llvm.org>`_
* `clang-tidy <http://clang.llvm.org/extra/clang-tidy>`_
* `cppcheck <http://cppcheck.sourceforge.net>`_
* License Scanning (by Gitlab)
* `pmd <https://pmd.github.io>`_ (cpd)
* `Static Application Security Testing <https://docs.gitlab.com/ee/user/application_security/sast>`_ (SAST by Gitlab)
* `Valgrind <http://valgrind.org>`_ (memcheck)
* `UndefinedBehaviorSanitizer <https://clang.llvm.org/docs/UndefinedBehaviorSanitizer.html>`_ (UBSan)
Check the *.exec-helper* file for detailed information about how these analysis methods are configured and used. The analysis tools can be executed locally using exec-helper with this project.
.. |build_badge| image:: https://gitlab.com/bverhagen/exec-helper/badges/master/pipeline.svg
:alt: Commits
:target: https://gitlab.com/bverhagen/exec-helper/commits/master
.. |coverage_badge| image:: https://gitlab.com/bverhagen/exec-helper/badges/master/coverage.svg
:alt: Test report
:target: http://bverhagen.gitlab.io/exec-helper/coverage/index.html
.. |code_quality_badge| image:: https://api.codacy.com/project/badge/Grade/98d9b8174f0d4a8ba79adebda064093d
:alt: Code quality report
:target: https://www.codacy.com/app/bverhagen/exec-helper?utm_source=github.com&utm_medium=referral&utm_content=bverhagen/exec-helper&utm_campaign=Badge_Grade
.. |documentation| image:: https://readthedocs.org/projects/exec-helper/badge/?version=master
:target: https://exec-helper.readthedocs.io
:alt: Documentation Status
.. |nbsp| unicode:: 0xA0
:trim:
<file_sep>.. _exec-helper-plugins-execute-plugin:
Execute plugin
**************
Description
===========
The execute plugin is used for executing specific plugins or, if no associated plugin is found, following commands defined in the configuration. This plugin is mainly used by other plugins that want to execute other commands.
Mandatory settings
==================
There are no mandatory settings for this plugin.
Optional settings
=================
There are no optional settings for this plugin.
See also
========
See :ref:`exec-helper` (1) for information about the usage of :program:`exec-helper`.
See :ref:`exec-helper-config` (5) for information about the configuration file format.
See :ref:`exec-helper-plugins` (5) for an overview of the available plugins and their configuration options.
<file_sep>#include "lcov.h"
#include <string>
#include <gsl/string_span>
#include "config/pattern.h"
#include "config/variablesMap.h"
#include "core/task.h"
#include "log/assertions.h"
#include "executePlugin.h"
#include "logger.h"
#include "pluginUtils.h"
using std::string;
using gsl::czstring;
using execHelper::config::CommandCollection;
using execHelper::config::FleetingOptionsInterface;
using execHelper::config::Path;
using execHelper::config::PatternCombinations;
using execHelper::config::Patterns;
using execHelper::config::VariablesMap;
using execHelper::core::Task;
using execHelper::plugins::registerTask;
using execHelper::plugins::replacePatternCombinations;
namespace {
const czstring<> PLUGIN_NAME = "lcov";
using RunCommand = CommandCollection;
const czstring<> RUN_COMMAND = "run-command";
const czstring<> INFO_FILE_KEY = "info-file";
const czstring<> BASE_DIR_KEY = "base-directory";
const czstring<> DIR_KEY = "directory";
using ZeroCounters = bool;
const czstring<> ZERO_COUNTERS_KEY = "zero-counters";
using GenHtml = bool;
const czstring<> GEN_HTML_KEY = "gen-html";
using GenHtmlOutput = Path;
const czstring<> GEN_HTML_OUTPUT_KEY = "gen-html-output";
using GenHtmlTitle = string;
const czstring<> GEN_HTML_TITLE_KEY = "gen-html-title";
using GenHtmlCommandLine = execHelper::plugins::CommandLineArgs;
const czstring<> GEN_HTML_COMMAND_LINE_KEY = "gen-html-command-line";
const czstring<> EXCLUDES_KEY = "excludes";
[[nodiscard]] auto runTask(const Task& task,
const PatternCombinations& combination) -> bool {
Task replacedTask = replacePatternCombinations(task, combination);
return registerTask(replacedTask);
}
} // namespace
namespace execHelper::plugins {
/**
 * Returns the default configuration values for the lcov plugin.
 *
 * Every supported key is registered with a sensible default; failures to
 * register a default are logged but do not abort.
 */
auto Lcov::getVariablesMap(const FleetingOptionsInterface& /*fleetingOptions*/)
    const noexcept -> VariablesMap {
    VariablesMap defaults(PLUGIN_NAME);
    // Factor out the identical add-or-log error handling that was previously
    // repeated for every single default value
    const auto addDefault = [&defaults](const auto& key, const auto& value) {
        if(!defaults.add(key, value)) {
            LOG(error) << "Failed to add key '" << key << "'";
        }
    };
    addDefault(COMMAND_LINE_KEY, CommandLineArgs());
    addDefault(INFO_FILE_KEY, "lcov-plugin.info");
    addDefault(BASE_DIR_KEY, ".");
    addDefault(DIR_KEY, ".");
    addDefault(ZERO_COUNTERS_KEY, "no");
    addDefault(GEN_HTML_KEY, "no");
    addDefault(GEN_HTML_OUTPUT_KEY, ".");
    addDefault(GEN_HTML_TITLE_KEY, "Hello");
    addDefault(GEN_HTML_COMMAND_LINE_KEY, GenHtmlCommandLine());
    addDefault(EXCLUDES_KEY, Excludes());
    return defaults;
}
/**
 * Runs the lcov coverage workflow: optionally zero the counters, execute the
 * configured run-commands, capture the counters, apply exclusions and
 * optionally render an HTML report. Returns false as soon as any step fails.
 */
auto Lcov::apply(Task task, const VariablesMap& variables,
                 const Patterns& patterns) const noexcept -> bool {
    // The run-command is the only mandatory setting: it produces the
    // coverage counters that lcov captures afterwards
    auto runCommandOpt = variables.get<RunCommand>(RUN_COMMAND);
    if(runCommandOpt == std::nullopt) {
        user_feedback_error("Could not find the '"
                            << RUN_COMMAND << "' setting in the '"
                            << PLUGIN_NAME << "' settings");
        return false;
    }
    auto runCommands = *runCommandOpt;
    if(runCommands.empty()) {
        user_feedback_error("The '" << RUN_COMMAND << "' list is empty");
        return false;
    }
    // The remaining settings all have defaults (see getVariablesMap), so
    // their presence is asserted rather than checked
    ensures(variables.get<InfoFile>(INFO_FILE_KEY) != std::nullopt);
    auto infoFile = *(variables.get<InfoFile>(INFO_FILE_KEY));
    ensures(variables.get<BaseDir>(BASE_DIR_KEY) != std::nullopt);
    auto baseDirectory = *(variables.get<BaseDir>(BASE_DIR_KEY));
    ensures(variables.get<Dir>(DIR_KEY) != std::nullopt);
    auto directory = *(variables.get<Dir>(DIR_KEY));
    auto commandLine = *(variables.get<CommandLineArgs>(COMMAND_LINE_KEY));
    bool zeroCounters = *(variables.get<ZeroCounters>(ZERO_COUNTERS_KEY));
    // Build each helper task once up front; they are replayed for every
    // pattern combination in the loop below
    Task zeroCountersTask;
    if(zeroCounters) {
        zeroCountersTask = generateZeroCountersTask(baseDirectory, directory,
                                                    commandLine, task);
    }
    bool genHtml = *(variables.get<GenHtml>(GEN_HTML_KEY));
    Task genHtmlTask;
    if(genHtml) {
        genHtmlTask = generateGenHtmlTask(infoFile, variables, task);
    }
    Task captureTask = generateCaptureTask(baseDirectory, directory, infoFile,
                                           commandLine, task);
    auto exclude = *(variables.get<Excludes>(EXCLUDES_KEY));
    Task excludeTask;
    if(!exclude.empty()) {
        excludeTask =
            generateExcludeTask(variables, infoFile, commandLine, task);
    }
    // Order per combination: zero counters -> run instrumented commands ->
    // capture -> apply exclusions -> generate HTML
    for(const auto& combination : makePatternPermutator(patterns)) {
        if(zeroCounters) {
            if(!runTask(zeroCountersTask, combination)) {
                return false;
            }
        }
        ExecutePlugin execute(runCommands);
        if(!execute.apply(task, variables, patterns)) {
            return false;
        }
        if(!runTask(captureTask, combination)) {
            return false;
        }
        if(!exclude.empty()) {
            if(!runTask(excludeTask, combination)) {
                return false;
            }
        }
        if(genHtml) {
            if(!runTask(genHtmlTask, combination)) {
                return false;
            }
        }
    }
    return true;
}
auto Lcov::summary() const noexcept -> std::string { return "Lcov (internal)"; }
/**
 * Builds the 'genhtml' invocation that renders the captured tracefile as an
 * HTML report. All settings read here have registered defaults, hence the
 * ensures() checks before dereferencing.
 *
 * \param[in] infoFile  The tracefile to render (final positional argument)
 * \param[in] variables The (defaulted) plugin configuration
 * \param[in] task      The base task to extend
 * \returns The fully assembled genhtml task
 */
inline auto Lcov::generateGenHtmlTask(const InfoFile& infoFile,
                                      const VariablesMap& variables,
                                      const Task& task) noexcept -> Task {
    Task result = task;
    result.append("genhtml");
    ensures(variables.get<GenHtmlOutput>(GEN_HTML_OUTPUT_KEY) != std::nullopt);
    result.append(
        {"--output-directory",
         variables.get<GenHtmlOutput>(GEN_HTML_OUTPUT_KEY)->string()});
    ensures(variables.get<GenHtmlTitle>(GEN_HTML_TITLE_KEY) != std::nullopt);
    result.append(
        {"--title", *(variables.get<GenHtmlTitle>(GEN_HTML_TITLE_KEY))});
    ensures(variables.get<GenHtmlCommandLine>(GEN_HTML_COMMAND_LINE_KEY) !=
            std::nullopt);
    // User-supplied genhtml arguments come after the generated options.
    result.append(
        *(variables.get<GenHtmlCommandLine>(GEN_HTML_COMMAND_LINE_KEY)));
    result.append(infoFile.string());
    return result;
}
/**
 * Builds the 'lcov --zerocounters' invocation that resets all existing
 * coverage counters before a measured run.
 *
 * \param[in] baseDirectory The base directory to pass to lcov
 * \param[in] directory     The data directory to pass to lcov
 * \param[in] commandLine   Additional user-supplied arguments (appended last)
 * \param[in] task          The base task to extend
 * \returns The fully assembled zero-counters task
 */
inline auto Lcov::generateZeroCountersTask(const BaseDir& baseDirectory,
                                           const Dir& directory,
                                           const CommandLineArgs& commandLine,
                                           const Task& task) noexcept -> Task {
    Task zeroTask = task;
    zeroTask.append(PLUGIN_NAME);
    zeroTask.append({string("--") + BASE_DIR_KEY, baseDirectory.string()});
    zeroTask.append({string("--") + DIR_KEY, directory.string()});
    zeroTask.append("--zerocounters");
    zeroTask.append(commandLine);
    return zeroTask;
}
/**
 * Retrieves the configured exclude patterns, wrapping each one in double
 * quotes as expected by lcov on the command line.
 *
 * \param[in] variables The plugin configuration
 * \returns The quoted exclude patterns; an empty collection when none are
 *          configured
 */
inline auto Lcov::getExcludes(const VariablesMap& variables) noexcept
    -> Lcov::Excludes {
    auto configured = variables.get<Excludes>(EXCLUDES_KEY);
    if(!configured) {
        // No excludes configured: nothing to quote.
        return Excludes();
    }
    for(auto& pattern : *configured) {
        pattern = R"(")" + pattern + R"(")";
    }
    return *configured;
}
/**
 * Builds the 'lcov --remove' invocation that strips the configured exclude
 * patterns from the captured tracefile, rewriting the tracefile in place.
 *
 * \param[in] variables   The plugin configuration
 * \param[in] infoFile    The tracefile to filter (input and output)
 * \param[in] commandLine Additional user-supplied arguments (appended last)
 * \param[in] task        The base task to extend
 * \returns The fully assembled exclude task, or the unmodified base task
 *          when no excludes are configured
 */
inline auto Lcov::generateExcludeTask(const VariablesMap& variables,
                                      const InfoFile& infoFile,
                                      const CommandLineArgs& commandLine,
                                      const Task& task) noexcept -> Task {
    Task excludeTask = task;
    auto excludes = getExcludes(variables);
    if(excludes.empty()) {
        // Nothing to exclude: hand back the base task untouched.
        return excludeTask;
    }
    excludeTask.append(PLUGIN_NAME);
    excludeTask.append({"--remove", infoFile.string()});
    excludeTask.append(excludes);
    excludeTask.append({"--output-file", infoFile.string()});
    excludeTask.append(commandLine);
    return excludeTask;
}
/**
 * Builds the 'lcov --capture' invocation that collects the coverage data
 * produced by the measured run into the given tracefile.
 *
 * \param[in] baseDirectory The base directory to pass to lcov
 * \param[in] directory     The data directory to pass to lcov
 * \param[in] infoFile      The tracefile to write the captured data to
 * \param[in] commandLine   Additional user-supplied arguments (appended last)
 * \param[in] task          The base task to extend
 * \returns The fully assembled capture task
 */
inline auto Lcov::generateCaptureTask(const BaseDir& baseDirectory,
                                      const Dir& directory,
                                      const InfoFile& infoFile,
                                      const CommandLineArgs& commandLine,
                                      const Task& task) noexcept -> Task {
    Task captureTask = task;
    captureTask.append(PLUGIN_NAME);
    captureTask.append({string("--") + BASE_DIR_KEY, baseDirectory.string()});
    captureTask.append({string("--") + DIR_KEY, directory.string()});
    captureTask.append("--capture");
    captureTask.append({"--output", infoFile.string()});
    captureTask.append(commandLine);
    return captureTask;
}
} // namespace execHelper::plugins
<file_sep>#include "pmd.h"
#include <string>
#include <vector>
#include <gsl/string_span>
#include "config/fleetingOptionsInterface.h"
#include "config/settingsNode.h"
#include "config/variablesMap.h"
#include "core/patterns.h"
#include "core/task.h"
#include "commandLine.h"
#include "logger.h"
#include "pluginUtils.h"
#include "verbosity.h"
using std::string;
using std::vector;
using gsl::czstring;
using execHelper::config::Command;
using execHelper::config::FleetingOptionsInterface;
using execHelper::config::PatternKeys;
using execHelper::config::Patterns;
using execHelper::config::VariablesMap;
using execHelper::core::Task;
namespace {
// Name of this plugin; also the default binary prefix to execute.
const czstring<> PLUGIN_NAME = "pmd";
// Configuration keys understood by the pmd plugin.
const czstring<> EXEC_KEY = "exec"; // Binary prefix to execute
const czstring<> TOOL_KEY = "tool"; // Pmd tool suffix, e.g. 'cpd'
using MinimumTokens = string;
const czstring<> MINIMUM_TOKENS_KEY = "minimum-tokens"; // cpd-only setting
using Files = vector<string>;
const czstring<> FILES_KEY = "files"; // cpd-only: the files to analyze
const czstring<> LANGUAGE_KEY = "language";
} // namespace
namespace execHelper::plugins {
/**
 * Returns the default configuration for the pmd plugin. A failure to add a
 * default is logged but not fatal; apply() dereferences these keys without
 * checking, so the adds are expected to always succeed.
 *
 * \param[in] fleetingOptions The command-line options, used to seed the
 *                            verbosity default
 * \returns The defaulted variables map for this plugin
 */
auto Pmd::getVariablesMap(const FleetingOptionsInterface& fleetingOptions)
    const noexcept -> VariablesMap {
    VariablesMap defaults(PLUGIN_NAME);
    if(!defaults.add(EXEC_KEY, PLUGIN_NAME)) {
        LOG(error) << "Failed to add key '" << EXEC_KEY << "'";
    }
    if(!defaults.add(TOOL_KEY, "cpd")) {
        LOG(error) << "Failed to add key '" << TOOL_KEY << "'";
    }
    if(!defaults.add(COMMAND_LINE_KEY, CommandLineArgs())) {
        LOG(error) << "Failed to add key '" << COMMAND_LINE_KEY << "'";
    }
    // Verbosity defaults to whatever was requested on the command line.
    const string verbosity = fleetingOptions.getVerbosity() ? "yes" : "no";
    if(!defaults.add(VERBOSITY_KEY, verbosity)) {
        LOG(error) << "Failed to add key '" << VERBOSITY_KEY << "'";
    }
    return defaults;
}
/**
 * Applies the pmd plugin: assembles a '<exec>-<tool>' invocation (e.g.
 * 'pmd-cpd') and registers one task per pattern combination.
 *
 * \param[in] task      The base task to extend
 * \param[in] variables The (defaulted) plugin configuration
 * \param[in] patterns  The patterns to permutate the execution over
 * \returns True when every generated task was registered successfully
 */
auto Pmd::apply(Task task, const VariablesMap& variables,
                const Patterns& patterns) const noexcept -> bool {
    // NOTE(review): these optionals are dereferenced unchecked; this relies
    // on getVariablesMap() having registered a default for every key read
    // here -- confirm callers always merge those defaults in.
    auto binaryName = *(variables.get<string>(EXEC_KEY));
    auto tool = *(variables.get<string>(TOOL_KEY));
    binaryName.append("-").append(tool);
    task.append(binaryName);
    // The language option has no default: only forwarded when configured.
    auto language = variables.get<string>(LANGUAGE_KEY);
    if(language) {
        task.append({"--language", *language});
    }
    if(*(variables.get<string>(VERBOSITY_KEY)) == "yes") {
        task.append("-verbose");
    }
    // Options that are only meaningful for the 'cpd' tool.
    if(tool == "cpd") {
        auto minimumTokens = variables.get<MinimumTokens>(MINIMUM_TOKENS_KEY);
        if(minimumTokens) {
            task.append({"--minimum-tokens", *minimumTokens});
        }
        auto files = variables.get<Files>(FILES_KEY);
        if(files) {
            for(const auto& file : *files) {
                task.append({"--files", file});
            }
        }
    }
    // Free-form user-supplied arguments go last.
    task.append(*(variables.get<CommandLineArgs>(COMMAND_LINE_KEY)));
    // Register one concrete task per combination of pattern values.
    for(const auto& combination : makePatternPermutator(patterns)) {
        Task newTask = replacePatternCombinations(task, combination);
        if(!registerTask(newTask)) {
            return false;
        }
    }
    return true;
}
auto Pmd::summary() const noexcept -> std::string { return "Pmd (internal)"; }
} // namespace execHelper::plugins
<file_sep>#ifndef __SHELL_INTERFACE_H__
#define __SHELL_INTERFACE_H__
#include <cstdint>
#include "pathNotFoundError.h"
namespace execHelper {
namespace core {
class Task;
} // namespace core
} // namespace execHelper
namespace execHelper {
namespace core {
/**
 * \brief Represents a shell for executing tasks
 */
class Shell {
  public:
    using ShellReturnCode = uint8_t; //!< Shell return code abstraction

    /**
     * Virtual destructor
     *
     * Shell is a polymorphic interface: without a virtual destructor,
     * deleting a concrete shell through a Shell pointer is undefined
     * behavior. This also makes the class consistent with the other
     * interfaces in this project (e.g. ExecutorInterface).
     */
    virtual ~Shell() = default;

    /**
     * Executes the given task
     *
     * \param[in] task The task to execute
     * \returns A shell return code
     * \throws PathNotFoundError The binary to execute was not found
     */
    virtual ShellReturnCode execute(const Task& task) = 0;

    /**
     * Checks whether the given shell return code can be associated with a
     * successful execution
     *
     * \param[in] returnCode   The return code to check
     * \returns True    If the return code implies a successful execution
     *          False   Otherwise
     */
    virtual bool
    isExecutedSuccessfully(ShellReturnCode returnCode) const noexcept = 0;

  protected:
    // Only constructible through derived classes.
    Shell() = default;
};
} // namespace core
} // namespace execHelper
#endif /* __SHELL_INTERFACE_H__ */
<file_sep>#ifndef CONFIG_FILE_WRITER_INCLUDE
#define CONFIG_FILE_WRITER_INCLUDE
#include <string>
#include "path.h"
#include "tmpFile.h"
#include "yaml.h"
namespace execHelper {
namespace test {
namespace baseUtils {
/**
 * \brief Test helper that writes YAML content to a temporary
 *        configuration file and exposes the file's location.
 */
class ConfigFileWriter {
  public:
    ConfigFileWriter() noexcept;
    ~ConfigFileWriter() = default;

    // Not copyable or movable: the writer owns a unique temporary file.
    ConfigFileWriter(const ConfigFileWriter& other) = delete;
    ConfigFileWriter(ConfigFileWriter&& other) noexcept = delete;
    ConfigFileWriter& operator=(const ConfigFileWriter& other) = delete;
    ConfigFileWriter& operator=(ConfigFileWriter&& other) noexcept = delete;

    // Location of the backing temporary file.
    Path getPath() const noexcept;
    std::string getFilename() const noexcept;
    std::string getDirectory() const noexcept;

    // Serializes the given YAML tree to the temporary file.
    bool write(const YamlWriter& yaml) noexcept;

  private:
    TmpFile m_file; // Owns the temporary file backing this writer
};
} // namespace baseUtils
} // namespace test
} // namespace execHelper
#endif /* CONFIG_FILE_WRITER_INCLUDE */
<file_sep>#include <iostream>
#include <sstream>
#include <string>
#include <utility>
#include <vector>
#include "config/settingsNode.h"
#include "yaml/yaml.h"
#include "base-utils/configFileWriter.h"
#include "unittest/catch.h"
#include "utils/utils.h"
using std::endl;
using std::pair;
using std::string;
using std::stringstream;
using std::vector;
using execHelper::config::Path;
using execHelper::config::SettingsKeys;
using execHelper::config::SettingsNode;
using execHelper::config::SettingsValues;
using execHelper::test::baseUtils::ConfigFileWriter;
using execHelper::test::utils::convertToConfig;
using execHelper::test::utils::writeSettingsFile;
namespace {
const execHelper::config:: // NOLINT(fuchsia-statically-constructed-objects)
SettingsValue DEFAULT_VALUE("blaat");
const execHelper::config:: // NOLINT(fuchsia-statically-constructed-objects)
SettingsValues DEFAULT_VALUES({DEFAULT_VALUE});
} // namespace
namespace execHelper::yaml::test {
// Smoke test: one key mapping to a list and one key mapping to a scalar,
// parsed from an in-memory YAML string.
SCENARIO("Yaml wrapper test", "[yaml][yamlwrapper]") {
    GIVEN("A yaml config string to parse") {
        const string key("commands");
        const vector<string> values = {"command1", "command2", "command3"};
        const string key2("command2");
        const string values2("command2-option");
        string yamlConfig;
        yamlConfig += convertToConfig(key, values);
        yamlConfig += convertToConfig(key2, values2);
        WHEN("We pass the config to the yaml wrapper") {
            Yaml yaml(yamlConfig);
            THEN("We should find all combinations in order") {
                REQUIRE(yaml.getValueCollection({key}) == values);
                REQUIRE(yaml.getValue({key2}) == values2);
            }
        }
    }
}
// Round-trip test: build a reference settings tree, serialize it to a
// temporary configuration file, parse it back, and compare both individual
// values and the imported subtree against the reference.
SCENARIO("Extensive Yaml file wrapper test", "[yaml][yamlwrapper]") {
    GIVEN("A yaml config file to parse and the right result") {
        // Expected values for every key in the generated configuration.
        vector<string> correctCommands = {"init", "build", "run", "analyze"};
        vector<string> correctInit = {"git-submodules", "configure"};
        vector<string> correctBuild = {"scons", "make"};
        vector<string> correctRun = {"shellRunner", "command-line"};
        vector<string> correctAnalyze = {"cppcheck", "clang-static-analyzer",
                                         "pmd", "simian"};
        vector<string> correctSubmodules = {"3rdparty/Catch",
                                            "3rdparty/benchmark"};
        vector<string> correctSconsPatterns = {"COMPILER", "MODE"};
        string correctSconsBuildDir("build/{COMPILER}/{MODE}");
        string correctSconsSingleThreaded("yes");
        string correctSconsCommandLine("compiler={COMPILER} mode={MODE}");
        string correctPmdAutoInstall("yes");
        string correctRunCommandLine("command-line");
        // Reference tree the parsed configuration must reproduce exactly.
        SettingsNode correctSettings("YamlTest");
        REQUIRE(correctSettings.add({"commands"}, correctCommands));
        REQUIRE(correctSettings.add({"init"}, correctInit));
        REQUIRE(correctSettings.add({"build"}, correctBuild));
        REQUIRE(correctSettings.add({"run"}, correctRun));
        REQUIRE(correctSettings.add({"analyze"}, correctAnalyze));
        REQUIRE(correctSettings.add({"git-submodules", "submodules"},
                                    correctSubmodules));
        REQUIRE(
            correctSettings.add({"scons", "patterns"}, correctSconsPatterns));
        REQUIRE(
            correctSettings.add({"scons", "build-dir"}, correctSconsBuildDir));
        REQUIRE(correctSettings.add({"scons", "single-threaded"},
                                    correctSconsSingleThreaded));
        REQUIRE(correctSettings.add({"scons", "command-line"},
                                    correctSconsCommandLine));
        REQUIRE(correctSettings.add({"pmd", "auto-install"},
                                    correctPmdAutoInstall));
        REQUIRE(correctSettings.add({"command-line", "run"},
                                    correctRunCommandLine));
        // Serialize the reference tree to a temporary configuration file.
        ConfigFileWriter file;
        writeSettingsFile(gsl::not_null<ConfigFileWriter*>(&file),
                          correctSettings, {});
        WHEN("We pass the config to the yaml wrapper") {
            Yaml yaml(file.getPath());
            THEN("We should find all values") {
                REQUIRE(yaml.getValueCollection({"commands"}) ==
                        correctCommands);
                REQUIRE(yaml.getValueCollection({"init"}) == correctInit);
                REQUIRE(yaml.getValueCollection({"build"}) == correctBuild);
                REQUIRE(yaml.getValueCollection({"run"}) == correctRun);
                REQUIRE(yaml.getValueCollection({"analyze"}) == correctAnalyze);
                REQUIRE(
                    yaml.getValueCollection({"git-submodules", "submodules"}) ==
                    correctSubmodules);
                REQUIRE(yaml.getValueCollection({"scons", "patterns"}) ==
                        correctSconsPatterns);
                REQUIRE(yaml.getValue({"scons", "build-dir"}) ==
                        correctSconsBuildDir);
                REQUIRE(yaml.getValue({"scons", "single-threaded"}) ==
                        correctSconsSingleThreaded);
                REQUIRE(yaml.getValue({"scons", "command-line"}) ==
                        correctSconsCommandLine);
                REQUIRE(yaml.getValue({"pmd", "auto-install"}) ==
                        correctPmdAutoInstall);
                REQUIRE(yaml.getValue({"command-line", "run"}) ==
                        correctRunCommandLine);
            }
            THEN("We should find them all in the subtree") {
                // Import the whole document into a SettingsNode and compare
                // it value by value, then as a whole tree.
                SettingsNode settings(correctSettings.key());
                yaml.getTree({}, &settings);
                REQUIRE(settings.key() == correctSettings.key());
                REQUIRE(settings.get<vector<string>>(
                            "commands", DEFAULT_VALUES) == correctCommands);
                REQUIRE(settings.get<vector<string>>("init", DEFAULT_VALUES) ==
                        correctInit);
                REQUIRE(settings.get<vector<string>>("build", DEFAULT_VALUES) ==
                        correctBuild);
                REQUIRE(settings.get<vector<string>>("run", DEFAULT_VALUES) ==
                        correctRun);
                REQUIRE(settings.get<vector<string>>(
                            "analyze", DEFAULT_VALUES) == correctAnalyze);
                REQUIRE(settings.get<vector<string>>(
                            SettingsKeys({"git-submodules", "submodules"}),
                            DEFAULT_VALUES) == correctSubmodules);
                REQUIRE(settings.get<vector<string>>(
                            SettingsKeys({"scons", "patterns"}),
                            DEFAULT_VALUES) == correctSconsPatterns);
                REQUIRE(settings.get<string>(
                            SettingsKeys({"scons", "build-dir"}),
                            DEFAULT_VALUE) == correctSconsBuildDir);
                REQUIRE(settings.get<string>(
                            SettingsKeys({"scons", "single-threaded"}),
                            DEFAULT_VALUE) == correctSconsSingleThreaded);
                REQUIRE(settings.get<string>(
                            SettingsKeys({"scons", "command-line"}),
                            DEFAULT_VALUE) == correctSconsCommandLine);
                REQUIRE(settings.get<string>(
                            SettingsKeys({"pmd", "auto-install"}),
                            DEFAULT_VALUE) == correctPmdAutoInstall);
                REQUIRE(settings.get<string>(
                            SettingsKeys({"command-line", "run"}),
                            DEFAULT_VALUE) == correctRunCommandLine);
                REQUIRE(settings == correctSettings);
            }
        }
    }
}
// Verifies that a two-dimensional structure parses identically whether it is
// written in YAML flow style ('key: [a, b]') or block style (nested lists).
SCENARIO("Test the parsing of a two dimensional array using lists",
         "[yaml][yamlwrapper]") {
    GIVEN("A two dimensional array") {
        const string rootKey("root-key");
        const vector<pair<string, vector<string>>> actualArray = {
            {"key1", {"value1a", "value1b"}},
            {"key2", {"value2a", "value2b"}},
            {"key3", {"value3a", "value3b", "value3c"}},
            {"key4", {"value4"}}};
        string yamlConfig;
        WHEN("Write it as a list of arrays and parse the tree") {
            // Flow style: each entry becomes '- key: [v1, v2]'.
            yamlConfig += rootKey + ":\n";
            for(const auto& firstDimension : actualArray) {
                yamlConfig += "  - " + firstDimension.first + ": [";
                for(const auto& secondDimension : firstDimension.second) {
                    yamlConfig += " " + secondDimension;
                    if(secondDimension != firstDimension.second.back()) {
                        yamlConfig += ",";
                    }
                }
                yamlConfig += "]\n";
            }
            Yaml yaml(yamlConfig);
            SettingsNode settings(rootKey);
            bool returnCode = yaml.getTree({rootKey}, &settings);
            THEN("It should succeed") { REQUIRE(returnCode); }
            THEN("We should get the same values") {
                for(const auto& firstDimension : actualArray) {
                    REQUIRE(settings.get<vector<string>>({firstDimension.first},
                                                         {"blaat"}) ==
                            firstDimension.second);
                }
            }
        }
        WHEN("Write it as a list of a list and parse the tree") {
            // Block style: each entry becomes '- key:' with a nested list.
            yamlConfig += rootKey + ":\n";
            for(const auto& firstDimension : actualArray) {
                yamlConfig += "  - " + firstDimension.first + ":\n";
                for(const auto& secondDimension : firstDimension.second) {
                    yamlConfig += "    - " + secondDimension + "\n";
                }
            }
            Yaml yaml(yamlConfig);
            SettingsNode settings(rootKey);
            bool returnCode = yaml.getTree({rootKey}, &settings);
            THEN("It should succeed") { REQUIRE(returnCode); }
            THEN("We should get the same values") {
                for(const auto& firstDimension : actualArray) {
                    REQUIRE(settings.get<vector<string>>({firstDimension.first},
                                                         {"blaat"}) ==
                            firstDimension.second);
                }
            }
        }
    }
}
// Verifies that opening a non-existent configuration file raises
// YAML::BadFile.
SCENARIO("Yaml does not find the supplied configuration file",
         "[yaml][yamlwrapper]") {
    GIVEN("A configuration file that does not exist") {
        // NOTE(review): assumes ConfigFileWriter only reserves a path and
        // nothing is written to it, so opening the file must fail -- confirm
        // against the ConfigFileWriter/TmpFile implementation.
        ConfigFileWriter file;
        WHEN("We pass it to yaml") {
            THEN("We should not get here") {
                REQUIRE_THROWS_AS(Yaml(file.getPath()), YAML::BadFile);
            }
        }
    }
}
// Verifies graceful degradation on malformed YAML: lookups return empty
// results and getTree() fails while leaving the settings node untouched.
SCENARIO("Yaml got passed an invalid configuration", "[yaml][yamlwrapper]") {
    GIVEN("Nothing in particular") {
        WHEN("We forget a delimiter") {
            // The 'commands' key is missing its trailing ':', which makes
            // the document invalid YAML.
            stringstream configStream;
            configStream << "commands" << endl
                         << "  - commands1" << endl
                         << "  - commands2" << endl;
            Yaml yaml(configStream.str());
            THEN("We should not be able to get any value") {
                REQUIRE(yaml.getValue({"commands"}).empty());
                REQUIRE(yaml.getValue({"commands", "command1"}).empty());
            }
            THEN("We should not be able to get any value collection") {
                REQUIRE(yaml.getValueCollection({"commands"}).empty());
                REQUIRE(
                    yaml.getValueCollection({"commands", "command1"}).empty());
            }
            THEN("We should not be able to get a tree and should give the "
                 "strong exception guarantee") {
                string settingsKey("blaat");
                SettingsNode settings(settingsKey);
                REQUIRE_FALSE(yaml.getTree({"commands"}, &settings));
                REQUIRE_FALSE(
                    yaml.getTree({"commands", "command1"}, &settings));
                // Check the strong exception guarantee
                REQUIRE(settings.key() == settingsKey);
                REQUIRE(settings.get<SettingsValues>(SettingsKeys()) ==
                        std::nullopt);
            }
        }
    }
}
// Verifies that looking up keys that are absent from a valid configuration
// yields empty results and leaves a target settings node unaltered.
SCENARIO("Requesting invalid values", "[yaml][yamlwrapper]") {
    GIVEN("A configuration file") {
        const string key("commands");
        const vector<string> values = {"command1", "command2", "command3"};
        const string key2("command2");
        const string values2("command2-option");
        string yamlConfig = convertToConfig(key, values);
        yamlConfig += convertToConfig(key2, values2);
        WHEN("We create the yaml file") {
            Yaml yaml(yamlConfig);
            THEN("We should not be able to get invalid values") {
                REQUIRE(yaml.getValue({"invalid-key"}).empty());
                REQUIRE(
                    yaml.getValue({"invalid-key", "invalid-subkey"}).empty());
            }
            THEN("We should not be able to get invalid values collections") {
                REQUIRE(yaml.getValueCollection({"invalid-key"}).empty());
                REQUIRE(
                    yaml.getValueCollection({"invalid-key", "invalid-subkey"})
                        .empty());
            }
            THEN("We should not be able to get invalid trees and the settings "
                 "node is unaltered") {
                string settingsKey("some-key");
                SettingsNode settings(settingsKey);
                REQUIRE_FALSE(yaml.getTree({"invalid-key"}, &settings));
                REQUIRE_FALSE(
                    yaml.getTree({"invalid-key", "invalid-subkey"}, &settings));
                REQUIRE(settings.key() == settingsKey);
                REQUIRE(settings.get<SettingsValues>(SettingsKeys()) ==
                        std::nullopt);
            }
        }
    }
}
} // namespace execHelper::yaml::test
<file_sep>.. describe:: command-line
Additional command line parameters to pass as a list of separate arguments. By default no additional arguments are added.
<file_sep>.. _exec-helper-plugins-cmake:
CMake plugin
************
Description
===========
The cmake plugin is used for generating, building and installing software using the CMake build generator system.
Mandatory settings
==================
There are no mandatory settings for this plugin, though it is recommended to configure the *mode* setting explicitly.
Optional settings
=================
The configuration of the make plugin may contain the following settings:
Settings for all modes
----------------------
.. program:: exec-helper-plugins-cmake
.. include:: patterns.rst
.. include:: environment.rst
.. include:: command-line.rst
.. include:: working-dir.rst
.. describe:: mode
Set the mode of the CMake call for the specific command. Default: *generate*.
Supported modes are:
* **Generate**: For generating a build directory based on the CMake configuration in the source. This is often called the *configure* or *build init* step.
* **Build**: Build the generated project
* **Install**: Install the generated project
.. describe:: build-dir
The path to the build directory. This is either an absolute path or a path relative to the location of this file. Default: *.* (the directory of the :program:`exec-helper` configuration).
Settings for the *generate* mode
--------------------------------
.. program:: exec-helper-plugins-cmake
.. describe:: source-dir
The directory containing the root CMakeLists.txt file of the sources. Default: *.* (the directory of the :program:`exec-helper` configuration).
.. describe:: generator
The generator to use for generating the build directory. See the CMake documentation on which generators are supported for your platform and the value(s) to explicitly set them. Default: the default one for your system and environment. See the CMake documentation on the details.
.. describe:: defines
A map of the *build generator settings* for configuring the generator.
Settings for the *build* mode
-----------------------------
.. program:: exec-helper-plugins-cmake
.. describe:: target
The specific CMake target to build. Default: the default target. See the CMake documentation for more details.
.. describe:: config
The configuration for multi-configuration tools. Default: the default configuration. See the CMake documentation for more details.
Settings for the *install* mode
-------------------------------
.. program:: exec-helper-plugins-cmake
.. describe:: config
The configuration for multi-configuration tools. Default: the default configuration. See the CMake documentation for more details.
.. describe:: prefix
Override the configured prefix set during the *generate* mode. Default: the default installation prefix. See the CMake documentation for more details.
.. describe:: component
Limit installation to the given component. Default: all installation targets.
Example
=======
Configuration
-------------
.. literalinclude:: ../examples/cmake.example
:language: yaml
Additional files
----------------
In order for the above example to work, the following file hierarchy needs to be created in the directory:
*CMakeLists.txt*:
.. literalinclude:: ../examples/CMakeLists.txt
:language: none
*hello.cpp:*
.. literalinclude:: ../examples/src/hello.cpp
:language: cpp
*world.cpp:*
.. literalinclude:: ../examples/src/world.cpp
:language: cpp
Usage
-----
Save the example to an :program:`exec-helper` configuration file and execute in the same directory:
.. code-block:: bash
eh example
See also
========
See :ref:`exec-helper` (1) for information about the usage of :program:`exec-helper`.
See :ref:`exec-helper-config` (5) for information about the configuration file format.
See :ref:`exec-helper-plugins` (5) for information about the configuration file format.
<file_sep>add_subdirectory(log)
add_subdirectory(config)
add_subdirectory(yaml)
add_subdirectory(core)
add_subdirectory(plugins)
add_subdirectory(commander)
add_subdirectory(applications)
<file_sep>#ifndef __EXECUTOR_INTERFACE_H__
#define __EXECUTOR_INTERFACE_H__
namespace execHelper {
namespace core {
class Task;
} // namespace core
} // namespace execHelper
namespace execHelper {
namespace core {
/**
 * \brief Interface for executing tasks
 *
 * Implementations decide how and when the task actually runs (e.g.
 * immediately or queued); see the note on execute().
 */
class ExecutorInterface {
  public:
    virtual ~ExecutorInterface() = default;

    /**
     * Execute the given task
     *
     * \param[in] task The task to execute
     *
     * \note: this does not necessarily mean that the task got executed or will
     * successfully execute.
     */
    virtual void execute(const Task& task) noexcept = 0;

  protected:
    // Only constructible through derived classes.
    ExecutorInterface() = default;
};
} // namespace core
} // namespace execHelper
#endif /* __EXECUTOR_INTERFACE_H__ */
<file_sep>#include <iostream>
#include <vector>
#include <gsl/string_span>
#include "config/patternsHandler.h"
#include "unittest/catch.h"
using std::string;
using std::vector;
using gsl::czstring;
using execHelper::config::Pattern;
using execHelper::config::Patterns;
using execHelper::config::PatternsHandler;
namespace {
// Configuration keys used when converting a VariablesMap to a Pattern.
const czstring<> DEFAULT_VALUES_KEY = "default-values";
const czstring<> SHORT_OPTION_KEY = "short-option";
const czstring<> LONG_OPTION_KEY = "long-option";

// Asserts that the handler reports containing every expected pattern key.
inline void checkContainsPatterns(const PatternsHandler& handler,
                                  const Patterns& expectedPatterns) {
    for(const auto& expectedPattern : expectedPatterns) {
        REQUIRE(handler.contains(expectedPattern.getKey()));
    }
}

// Asserts that the handler returns each expected pattern by its key.
inline void checkReturnsPatterns(const PatternsHandler& handler,
                                 const Patterns& expectedPatterns) {
    for(const auto& expectedPattern : expectedPatterns) {
        REQUIRE(handler.getPattern(expectedPattern.getKey()) ==
                expectedPattern);
    }
}

// Asserts that the handler does not claim to contain a pattern that was
// never added to it.
inline void checkNotContainsPatterns(const PatternsHandler& handler,
                                     const Patterns& /*expectedPatterns*/) {
    const Pattern notAddedPattern("not-added-pattern");
    REQUIRE_FALSE(handler.contains(notAddedPattern.getKey()));
}
} // namespace
namespace execHelper::config::test {
// Verifies the three construction paths of PatternsHandler (incremental add,
// copy construction, move construction) all hold and return the same
// patterns.
SCENARIO("Test the patterns the handler is holding",
         "[patterns][PatternsHandler]") {
    GIVEN("A patternshandler and some patterns to hold") {
        const Pattern pattern1("pattern1", {"pattern1A", "pattern1B"}, 'a',
                               string("patternA"));
        const Pattern pattern2("pattern2", {"pattern2A", "pattern2B"}, 'b',
                               string("patternB"));
        const Pattern pattern3("pattern3", {"pattern3A", "pattern3B"}, 'c',
                               string("patternC"));
        const Pattern pattern4("pattern4", {"pattern4A", "pattern4B"}, 'd',
                               string("patternD"));
        const Patterns patterns = {pattern1, pattern2, pattern3, pattern4};
        WHEN("We add the patterns separately") {
            PatternsHandler handler;
            for(const auto& pattern : patterns) {
                handler.addPattern(pattern);
            }
            THEN("It should contain the pattern") {
                checkContainsPatterns(handler, patterns);
            }
            THEN("It should return these patterns") {
                checkReturnsPatterns(handler, patterns);
            }
            THEN("It should not contain other patterns") {
                checkNotContainsPatterns(handler, patterns);
            }
        }
        WHEN("We add the patterns at construction time") {
            PatternsHandler handler(patterns);
            THEN("It should contain the pattern") {
                checkContainsPatterns(handler, patterns);
            }
            THEN("It should return these patterns") {
                checkReturnsPatterns(handler, patterns);
            }
            THEN("It should not contain other patterns") {
                checkNotContainsPatterns(handler, patterns);
            }
        }
        WHEN("We move construct the patternshandler using the patterns") {
            Patterns patternsToMove(patterns);
            // NOTE(review): unqualified move() -- std::move would be more
            // explicit here.
            PatternsHandler handler(move(patternsToMove));
            THEN("It should contain the pattern") {
                checkContainsPatterns(handler, patterns);
            }
            THEN("It should return these patterns") {
                checkReturnsPatterns(handler, patterns);
            }
            THEN("It should not contain other patterns") {
                checkNotContainsPatterns(handler, patterns);
            }
        }
    }
}
// Verifies equality and inequality semantics of PatternsHandler.
SCENARIO("Comparing two pattern handlers", "[patterns][PatternHandler]") {
    GIVEN("A pattern handler") {
        const Pattern pattern1("pattern1", {"pattern1A", "pattern1B"}, 'a',
                               string("patternA"));
        const Pattern pattern2("pattern2", {"pattern2A", "pattern2B"}, 'b',
                               string("patternB"));
        const Pattern pattern3("pattern3", {"pattern3A", "pattern3B"}, 'c',
                               string("patternC"));
        const Pattern pattern4("pattern4", {"pattern4A", "pattern4B"}, 'd',
                               string("patternD"));
        const vector<Pattern> patterns = {pattern1, pattern2, pattern3,
                                          pattern4};
        PatternsHandler handler;
        for(const auto& pattern : patterns) {
            handler.addPattern(pattern);
        }
        WHEN("We compare it with a similarly constructed handler") {
            PatternsHandler other;
            for(const auto& pattern : patterns) {
                other.addPattern(pattern);
            }
            THEN("They should be equal") {
                REQUIRE(handler == other);
                REQUIRE_FALSE(handler != other);
            }
        }
        WHEN("We compare it to a different handler") {
            // 'other' holds a strict subset of the patterns.
            PatternsHandler other;
            for(size_t i = 0U; i < patterns.size() - 2; ++i) {
                other.addPattern(patterns[i]);
            }
            THEN("They should not be equal") {
                REQUIRE_FALSE(handler == other);
                REQUIRE(handler != other);
            }
        }
        WHEN("We compare the handler with the default constructed handler") {
            PatternsHandler other;
            THEN("They should not be equal") {
                REQUIRE_FALSE(handler == other);
                REQUIRE(handler != other);
            }
        }
    }
}
// Verifies PatternsHandler::toPattern for every valid combination of
// optional settings (short option, long option) on top of the mandatory
// default values.
SCENARIO("Test valid conversions to a pattern", "[patterns][PatternHandler]") {
    MAKE_COMBINATIONS("Of several valid combinations") {
        const PatternKey key("pattern1");
        PatternValues defaultValues({"default-value"});
        ShortOption shortOption;
        LongOption longOption;
        VariablesMap variables("test");
        // Add mandatory variable settings
        REQUIRE(variables.add({DEFAULT_VALUES_KEY}, defaultValues));
        COMBINATIONS("Add a short option") {
            shortOption = 's';
            REQUIRE(variables.add({SHORT_OPTION_KEY},
                                  string(1, shortOption.value())));
        }
        COMBINATIONS("Add a long option") {
            longOption = "some-long-option";
            REQUIRE(variables.add({LONG_OPTION_KEY}, longOption.value()));
        }
        Pattern expectedPattern(key, defaultValues, shortOption, longOption);
        THEN_WHEN("We try to create a pattern") {
            auto actualPattern = PatternsHandler::toPattern(key, variables);
            THEN_CHECK("It should succeed") {
                REQUIRE(actualPattern != std::nullopt);
            }
            THEN_CHECK("We should find the expected pattern") {
                REQUIRE(expectedPattern == actualPattern);
            }
        }
    }
}
// Verifies PatternsHandler::toPattern rejects invalid configurations: the
// mandatory default values are either missing entirely or added empty.
SCENARIO("Test invalid conversions to a pattern",
         "[patterns][PatternHandler]") {
    MAKE_COMBINATIONS("Of invalid combinations") {
        const PatternKey key("pattern1");
        VariablesMap variables("test");
        COMBINATIONS("Add empty default values") {
            REQUIRE(variables.add(DEFAULT_VALUES_KEY));
        }
        COMBINATIONS("Add a short option") {
            REQUIRE(variables.add({SHORT_OPTION_KEY}, "some-short-option"));
        }
        COMBINATIONS("Add a long option") {
            REQUIRE(variables.add({LONG_OPTION_KEY}, "some-long-option"));
        }
        THEN_WHEN("We try to create a pattern") {
            auto returnCode = PatternsHandler::toPattern(key, variables);
            THEN_CHECK("It should fail") {
                REQUIRE(returnCode == std::nullopt);
            }
        }
    }
}
} // namespace execHelper::config::test
<file_sep>env = Environment()
Export('env')
SConscript('src/SConscript', variant_dir='build', duplicate=0)
Default(None)
<file_sep>#ifndef STATEMENT_INCLUDE
#define STATEMENT_INCLUDE
#include <string>
#include <gsl/gsl>
#include "executionContent.h"
#include "yaml.h"
namespace execHelper {
namespace test {
namespace baseUtils {
using StatementKey = std::string;
using StatementCollection = ExecutionContent::ConfigCommand;
/**
 * \brief Abstract base for a configuration statement used in tests.
 *
 * A statement knows how to write itself into a YAML configuration for a
 * given command and tracks how often it was executed via an
 * ExecutionContent instance.
 */
class Statement {
  public:
    virtual ~Statement() = default;

    // Identifying key of this statement in the configuration.
    virtual StatementKey getKey() const noexcept = 0;

    // Writes this statement into the given YAML tree under 'command'.
    virtual void write(gsl::not_null<YamlWriter*> yaml,
                       const std::string& command) const noexcept = 0;

    // Number of times this statement was executed so far.
    virtual inline unsigned int getNumberOfExecutions() const noexcept {
        return m_execution.getNumberOfExecutions();
    }

    // Resets the execution counter.
    virtual inline void resetExecutions() noexcept { m_execution.clear(); }

  protected:
    // 'returnCode' is the code each recorded execution will report.
    Statement(ReturnCode returnCode) noexcept : m_execution(returnCode) {}
    ExecutionContent m_execution;
};
} // namespace baseUtils
} // namespace test
} // namespace execHelper
#endif /* STATEMENT_INCLUDE */
<file_sep>set(MODULE_NAME config)
set(LIBRARY_NAME ${PROJECT_NAME}-${MODULE_NAME})
set(SRCS src/settingsNode.cpp
src/configFileSearcher.cpp
src/logger.cpp
src/fleetingOptions.cpp
src/config.cpp
src/pattern.cpp
src/patternsHandler.cpp
src/optionDescriptions.cpp
src/argv.cpp
src/envp.cpp
src/cast.cpp
src/pathManipulation.cpp
)
set(DEPENDENCIES
filesystem
log
yaml
)
add_library(${LIBRARY_NAME} ${SRCS})
target_include_directories(${LIBRARY_NAME} PRIVATE include/config)
target_include_directories(${LIBRARY_NAME} PUBLIC include ${DEPENDENCIES})
target_link_libraries(${LIBRARY_NAME} PUBLIC ${DEPENDENCIES})
add_library(${MODULE_NAME} ALIAS ${LIBRARY_NAME})
install(TARGETS ${LIBRARY_NAME} DESTINATION ${LIB_DIRECTORY})
<file_sep>#ifndef __MAP_PERMUTATOR_H__
#define __MAP_PERMUTATOR_H__
#include <algorithm>
#include <map>
#include <vector>
namespace execHelper {
namespace core {
/**
* \brief Represents all permutations for the values of a given map
*/
template <typename T, typename U> class MapPermutator {
public:
    /**
     * Constructor
     *
     * \param[in] map The map to permutate over. Taken by value and moved,
     *                so callers can pass temporaries without an extra copy.
     */
    // cppcheck-suppress passedByValue
    explicit MapPermutator(std::map<T, std::vector<U>> map) noexcept
        : m_map(std::move(map)) {
        ;
    }
    /**
     * \brief Iterator object for iterating over all permutations
     *
     * Conceptually an odometer: one digit per map key, where each digit
     * counts through the value vector associated with that key.
     */
    template <typename IteratorType> class Iterator {
      private:
        using value_type = IteratorType;
        using pointer = IteratorType*;
        using reference = IteratorType&;
        using iterator = Iterator;

      public:
        /**
         * Constructor for the iterator
         *
         * \param[in] map The map to permutate and iterate over
         * \param[in] end Whether the constructed iterator is an end iterator
         */
        explicit Iterator(const std::map<T, std::vector<U>>& map,
                          bool end = false) noexcept
            : m_map(map), m_end(end) {
            for(const auto& mapElement : map) {
                uint32_t index = 0U;
                // A key with no values makes permutation impossible: the
                // whole range collapses (begin == end).
                if(mapElement.second.empty()) {
                    m_end = true;
                }
                // End iterators park every index one past the last value.
                if(m_end) {
                    index = mapElement.second.size();
                }
                m_indexes.emplace(mapElement.first, index);
            }
        }
        /**
         * Pre-increment operator: advances to the next permutation.
         * (The original comment said "Postcrement"; this is the prefix form.)
         *
         * Increments the index of the last key; on overflow the index wraps
         * to zero and the carry propagates to the previous key, odometer
         * style.
         *
         * \returns A reference to this iterator
         */
        iterator& operator++() noexcept {
            if(m_end) {
                return *this;
            }
            auto it = m_indexes.rbegin();
            for(; it != m_indexes.rend(); ++it) {
                ++it->second;
                if(it->second == m_map.at(it->first).size()) {
                    it->second = 0U;
                } else {
                    break;
                }
            }
            // Check if we went through the entire map
            if(it == m_indexes.rend()) {
                // Means we are at the end
                m_end = true;
            }
            return *this;
        }
        /*! @copydoc config::Argv::operator==(const Argv&) const
         */
        bool operator==(const iterator& other) const {
            if(m_map != other.m_map) {
                return false;
            }
            if(m_end && m_end == other.m_end) {
                return true;
            }
            // m_indexes is declared mutable so operator[] can be used on
            // 'other' here; the keys exist because the maps compare equal.
            return std::all_of(m_indexes.begin(), m_indexes.end(),
                               [&other](const auto& element) {
                                   return element.second ==
                                          other.m_indexes[element.first];
                               });
        }
        /*! @copydoc config::Argv::operator!=(const Argv&) const
         */
        bool operator!=(const iterator& other) const {
            return !(*this == other);
        }
        /**
         * Dereference operator
         *
         * Must only be called on a non-end iterator: end iterators hold
         * one-past-the-end indexes.
         *
         * \returns The dereferenced object
         */
        value_type operator*() const {
            value_type result;
            for(const auto& index : m_indexes) {
                result.insert(std::make_pair(
                    index.first, m_map.at(index.first)[index.second]));
            }
            return result;
        }

      private:
        const std::map<T, std::vector<U>>& m_map;
        // Current position per key; mutable for use in operator== above.
        mutable std::map<T, uint32_t> m_indexes;
        bool m_end;
    };
using iterator = Iterator<std::map<T, U>>; //!< brief iterator type
using const_iterator =
Iterator<const std::map<T, U>>; //!< brief const iterator type
/**
* Return iterator to beginning
*
* \returns A begin iterator
*/
iterator begin() noexcept { return iterator(m_map); }
/*! @copydoc begin()
*
*/
const_iterator begin() const noexcept { return const_iterator(m_map); }
/**
* Return iterator to end
*
* \returns An end iterator
*/
iterator end() noexcept { return iterator(m_map, true); }
/*! @copydoc end()
*
*/
const_iterator end() const noexcept { return const_iterator(m_map, true); }
private:
const std::map<T, std::vector<U>> m_map;
};
} // namespace core
} // namespace execHelper
#endif /* __MAP_PERMUTATOR_H__ */
<file_sep>#ifndef LOG_MESSAGE_INCLUDE
#define LOG_MESSAGE_INCLUDE
#include <boost/date_time/posix_time/posix_time.hpp>
#include <boost/log/expressions.hpp>
#include <boost/log/sources/channel_feature.hpp>
#include <boost/log/sources/global_logger_storage.hpp>
#include <boost/log/sources/record_ostream.hpp>
#include <boost/log/sources/severity_channel_logger.hpp>
#include <boost/log/utility/manipulators/add_value.hpp>
#include "logLevel.h"
namespace execHelper {
namespace log {
// Name of a log channel (one channel per module, e.g. "config")
using Channel = std::string;
// Thread-safe Boost.Log logger keyed on severity level and channel name
using LoggerType =
    boost::log::sources::severity_channel_logger_mt<LogLevel, std::string>;
} // namespace log
} // namespace execHelper
// Attribute keywords used by the log sinks/formatters to extract structured
// fields from each log record. The string is the attribute name as stored in
// the record; the type is how it is read back.
BOOST_LOG_ATTRIBUTE_KEYWORD(fileLog, "File",
                            std::string) // NOLINT(modernize-use-using)
BOOST_LOG_ATTRIBUTE_KEYWORD(lineLog, "Line",
                            unsigned int) // NOLINT(modernize-use-using)
BOOST_LOG_ATTRIBUTE_KEYWORD( // NOLINT(modernize-use-using)
    timestamp, "TimeStamp", boost::posix_time::ptime)
BOOST_LOG_ATTRIBUTE_KEYWORD(severity, "Severity", // NOLINT(modernize-use-using)
                            execHelper::log::LogLevel)
BOOST_LOG_ATTRIBUTE_KEYWORD(channel, "Channel", // NOLINT(modernize-use-using)
                            execHelper::log::Channel)
#endif /* LOG_MESSAGE_INCLUDE */
<file_sep>.. Exec-helper documentation master file, created by
sphinx-quickstart on Tue May 26 17:19:46 2020.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
Welcome to exec-helper's documentation!
=======================================
.. toctree::
:maxdepth: 0
:titlesonly:
:caption: Contents:
:glob:
Welcome <README>
INSTALL
Command-line usage <src/applications/exec-helper>
src/config/docs/exec-helper-config
src/plugins/docs/exec-helper-plugins
test/integration/feature/index
API documentation <https://bverhagen.gitlab.io/exec-helper/docs/html/index.html>
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
<file_sep>#ifndef __REPORTING_EXECUTOR_H__
#define __REPORTING_EXECUTOR_H__
#include "executorInterface.h"
#include <functional>
#include "shell.h"
namespace execHelper {
namespace core {
class Task;
}
} // namespace execHelper
namespace execHelper {
namespace core {
/**
 * \brief Implements an executor that only reports what it is executing
 */
class ReportingExecutor : public ExecutorInterface {
  public:
    ReportingExecutor() noexcept;
    /**
     * Report the given task instead of actually executing it
     *
     * \param[in] task The task to report
     */
    void execute(const Task& task) noexcept override;
};
} // namespace core
} // namespace execHelper
#endif /* __REPORTING_EXECUTOR_H__ */
<file_sep>#ifndef __PLUGIN_H__
#define __PLUGIN_H__
#include <functional>
#include <utility>
#include "config/commandLineOptions.h"
#include "config/pattern.h"
#include "config/settingsNode.h"
#include "config/variablesMap.h"
namespace execHelper {
namespace config {
class FleetingOptionsInterface;
}
namespace core {
class Task;
} // namespace core
} // namespace execHelper
namespace execHelper {
namespace plugins {
/**
 * \brief Interface declaration to which plugins should comply
 */
class Plugin {
  public:
    /*! @copydoc config::Argv::Argv(const Argv&)
     */
    Plugin(const Plugin& other) = delete;
    /*! @copydoc config::Argv::Argv(Argv&&)
     */
    Plugin(Plugin&& other) noexcept = delete;
    virtual ~Plugin() = default;
    /*! @copydoc config::Argv::operator=(const Argv&)
     */
    Plugin& operator=(const Plugin& other) = delete;
    /*! @copydoc config::Argv::operator=(Argv&&)
     */
    Plugin& operator=(Plugin&& other) noexcept = delete;
    /**
     * Returns the default variables map based on the given fleeting options
     *
     * \param[in] fleetingOptions The fleeting options to base the defaults on
     * \returns The default constructed variables map
     */
    [[nodiscard]] virtual config::VariablesMap
    getVariablesMap(const config::FleetingOptionsInterface& fleetingOptions)
        const noexcept = 0;
    /**
     * Returns the root settings key for the patterns of a plugin
     *
     * \returns The root settings key
     */
    [[nodiscard]] static config::SettingsKey getPatternsKey() noexcept {
        return "patterns";
    }
    /**
     * Apply the plugin
     *
     * \param[in] task The task to extend
     * \param[in] variables The variables map containing the values to use for
     * the executed command for this specific plugin \param[in] patterns The
     * patterns that were configured to use for the executed command for this
     * specific plugin
     *
     * \returns True If the application was successful False
     * Otherwise
     */
    [[nodiscard]] virtual bool
    apply(core::Task task, const config::VariablesMap& variables,
          const config::Patterns& patterns) const noexcept = 0;
    /**
     * Returns a summary of the specific plugin
     *
     * \returns A short description of the plugin
     */
    [[nodiscard]] virtual std::string summary() const noexcept = 0;

  protected:
    Plugin() = default;
};
// Callback invoked whenever a plugin finishes composing a task
using ExecuteCallback = std::function<void(const core::Task&)>;
/**
 * Register the callback that receives each finished task
 *
 * \param[in] callback The callback to register
 */
void registerExecuteCallback(const ExecuteCallback& callback) noexcept;
/**
 * Register a finished task to an executor
 *
 * \param[in] task The finished task
 * \returns True If the task was successfully registered
 * False Otherwise
 */
[[nodiscard]] auto registerTask(const core::Task& task) noexcept -> bool;
/**
 * Output specifics of the plugin
 *
 * \param[in] os The stream to write to
 * \param[in] plugin The plugin whose summary to write
 * \returns os
 */
inline auto operator<<(std::ostream& os, // NOLINT(fuchsia-overloaded-operator)
                       const Plugin& plugin) -> std::ostream& {
    os << plugin.summary();
    return os;
}
} // namespace plugins
} // namespace execHelper
#endif /* __PLUGIN_H__ */
<file_sep>#ifndef LOGGER_INCLUDE
#define LOGGER_INCLUDE
#include "log/log.h"
// Declares the process-wide Boost.Log logger used by the config module
BOOST_LOG_GLOBAL_LOGGER(exec_helper_config_logger, execHelper::log::LoggerType)
// Channel name under which all log records of this module are emitted.
// NOTE(review): 'static' in a header gives every translation unit its own
// copy of this string — presumably intentional; an 'inline' variable would
// avoid the duplication (C++17). Verify before changing.
static const std::string LOG_CHANNEL = "config";
// LOG(severity) opens a log record on the module logger at the given
// severity, tagging it with the source file and line of the call site.
// Usage: LOG(error) << "message";
#define LOG(x) \
    BOOST_LOG_STREAM_CHANNEL_SEV(exec_helper_config_logger::get(), \
                                 LOG_CHANNEL, execHelper::log::x) \
        << boost::log::add_value(fileLog, __FILE__) \
        << boost::log::add_value(lineLog, __LINE__)
#endif /* LOGGER_INCLUDE */
<file_sep># completion.zsh
# Completion handler: asks the exec-helper binary itself for the candidate
# completions of the word currently being typed.
_eh_complete() {
    local current_word suggestions
    current_word="$1"
    suggestions="$(exec-helper --auto-complete "${current_word}")"
    # Split the newline-separated suggestions into the reply array expected
    # by compctl
    reply=( "${(ps:\n:)suggestions}" )
}
# Hook the handler up for both the full binary name and its short alias
compctl -f -K _eh_complete exec-helper
compctl -f -K _eh_complete eh
<file_sep>#include <iostream>
#include <limits>
#include <map>
#include <memory>
#include <random>
#include <string>
#include "base-utils/configBuilder.h"
#include "base-utils/configFileWriter.h"
#include "base-utils/directStatement.h"
#include "base-utils/execution.h"
#include "base-utils/executionContent.h"
#include "base-utils/executionHandler.h"
#include "base-utils/indirectStatement.h"
#include "base-utils/plugins.h"
#include "base-utils/testCommand.h"
#include "log/assertions.h"
#include "unittest/catch.h"
using std::make_shared;
using std::move;
using std::mt19937;
using std::ofstream;
using std::ostream;
using std::string;
using std::stringstream;
using std::to_string;
using std::uniform_int_distribution;
using std::vector;
using execHelper::test::baseUtils::ConfigBuilder;
using execHelper::test::baseUtils::ConfigFileWriter;
using execHelper::test::baseUtils::createStatement;
using execHelper::test::baseUtils::EXEC_HELPER_BINARY;
using execHelper::test::baseUtils::ExecutionContent;
using execHelper::test::baseUtils::IndirectStatement;
using execHelper::test::baseUtils::ReturnCode;
using execHelper::test::baseUtils::SimpleStatement;
using execHelper::test::baseUtils::Statements;
using execHelper::test::baseUtils::SUCCESS;
using execHelper::test::baseUtils::TestCommand;
using execHelper::test::baseUtils::YamlWriter;
using execHelper::test::baseUtils::execution::execute;
namespace execHelper::test::integration {
/**
 * Implements @ref execute-multiple-statements-predefined-order
 */
SCENARIO("Execution order of a command consisting of one predefined statement",
         "[execute-multiple-statements][execute-multiple-statements-predefined-"
         "order][scenario-execute-multiple-statements-predefined-order-"
         "single]") {
    GIVEN("A valid configuration with multiple commands consisting of one "
          "predefined, valid, successful statement") {
        const vector<string> commandStrings(
            {"command1", "command2", "command3", "command4"});
        // Builders that generate a temporary exec-helper configuration file
        ConfigBuilder execution;
        ConfigFileWriter config;
        YamlWriter yaml;
        // One command per name, each wrapping a single successful statement
        for(const auto& commandString : commandStrings) {
            execution.add(TestCommand(
                commandString, {createStatement<SimpleStatement>(SUCCESS)}));
        }
        execution.write(gsl::not_null<YamlWriter*>(&yaml));
        config.write(yaml);
        WHEN("we call each of these commands separately") {
            for(const auto& command : execution) {
                // NOTE(review): presumably scopes the expected-execution
                // bookkeeping to this run — confirm in ConfigBuilder
                auto executionIterator = execution.startIteration();
                const ReturnCode returnCode =
                    execute({EXEC_HELPER_BINARY, command.get(),
                             "--settings-file", config.getFilename()},
                            config.getDirectory());
                THEN_CHECK("the call should succeed") {
                    REQUIRE(returnCode == SUCCESS);
                }
                THEN_CHECK("the associated predefined statement should be "
                           "called exactly once") {
                    REQUIRE(command.getNumberOfStatementExecutions() == 1U);
                    for(const auto& statement : command) {
                        REQUIRE(statement->getNumberOfExecutions() == 1U);
                    }
                }
                THEN_CHECK(
                    "no other predefined statements should have been called") {
                    for(const auto& otherCommand : execution) {
                        if(command.get() != otherCommand.get()) {
                            for(const auto& statement : otherCommand) {
                                REQUIRE(statement->getNumberOfExecutions() ==
                                        0U);
                            }
                        }
                    }
                }
            }
        }
    }
}
/**
 * Implements @ref scenario-execute-multiple-statements-predefined-order-multiple
 */
SCENARIO("Scenario: Execution order of commands consisting of multiple "
         "predefined statements",
         "[execute-multiple-statements][execute-multiple-statements-predefined-"
         "order][scenario-execute-multiple-statements-predefined-order-"
         "multiple]") {
    GIVEN("A valid configuration with multiple commands consisting of "
          "multiple, predefined, valid, successful statements") {
        const vector<string> commandStrings(
            {"command1", "command2", "command3", "command4"});
        ConfigBuilder execution;
        ConfigFileWriter config;
        YamlWriter yaml;
        const auto NB_OF_STATEMENTS = 3U;
        // Each command gets NB_OF_STATEMENTS uniquely-keyed indirect
        // statements, all succeeding
        for(const auto& commandString : commandStrings) {
            TestCommand command(commandString);
            for(auto i = 0U; i < NB_OF_STATEMENTS; ++i) {
                string statementKey = string(commandString)
                                          .append("-statement")
                                          .append(to_string(i));
                auto indirect = make_shared<IndirectStatement>(
                    statementKey,
                    Statements({createStatement<SimpleStatement>(SUCCESS)}));
                command.add(indirect);
            }
            execution.add(move(command));
        }
        execution.write(gsl::not_null<YamlWriter*>(&yaml));
        config.write(yaml);
        WHEN("we call each of these commands separately") {
            for(const auto& command : execution) {
                auto executionIterator = execution.startIteration();
                const ReturnCode returnCode =
                    execute({EXEC_HELPER_BINARY, command.get(),
                             "--settings-file", config.getFilename()},
                            config.getDirectory());
                THEN_CHECK("the call should succeed") {
                    REQUIRE(returnCode == SUCCESS);
                }
                THEN_CHECK("the associated predefined statements should be "
                           "called the expected number of times in the right "
                           "order") {
                    REQUIRE(command.getNbOfStatements() == NB_OF_STATEMENTS);
                    REQUIRE(command.getNumberOfStatementExecutions() ==
                            NB_OF_STATEMENTS);
                    for(const auto& statement : command) {
                        REQUIRE(statement->getNumberOfExecutions() == 1U);
                    }
                }
                THEN_CHECK("no other predefined statements except should have "
                           "been called") {
                    for(const auto& otherCommand : execution) {
                        if(command.get() != otherCommand.get()) {
                            for(const auto& statement : otherCommand) {
                                REQUIRE(statement->getNumberOfExecutions() ==
                                        0U);
                            }
                        }
                    }
                }
            }
        }
    }
}
/**
 * Implements @ref scenario-execute-multiple-statements-predefined-order-duplicates
 */
SCENARIO("Duplicate configured statements",
         "[execute-multiple-statements][scenario-execute-multiple-statements-"
         "predefined-order-duplicates]") {
    GIVEN("A valid configuration with a command consisting of multiple, "
          "duplicate statements and some non-duplicate ones") {
        const vector<string> commandStrings(
            {"command1", "command2", "command3", "command4"});
        const string duplicateCommandString = "command-with-duplicates";
        const string duplicateStatementString("duplicate-statement");
        ConfigBuilder execution;
        ConfigFileWriter config;
        YamlWriter yaml;
        // Write other commands
        for(const auto& commandString : commandStrings) {
            execution.add(TestCommand(
                commandString, {createStatement<SimpleStatement>(SUCCESS)}));
        }
        // Add duplicate ones
        const auto NB_OF_DUPLICATE_STATEMENTS = 5U;
        // The same statement object is referenced NB_OF_DUPLICATE_STATEMENTS
        // times, so its execution counter accumulates across the duplicates
        auto duplicateStatement = make_shared<IndirectStatement>(
            duplicateStatementString,
            Statements({createStatement<SimpleStatement>(SUCCESS)}));
        TestCommand commandWithDuplicates(
            duplicateCommandString,
            Statements(NB_OF_DUPLICATE_STATEMENTS, duplicateStatement));
        // Add non-duplicate ones
        const auto NB_OF_UNIQUE_STATEMENTS = 3U;
        for(auto i = 0U; i < NB_OF_UNIQUE_STATEMENTS; ++i) {
            string statementKey = string(duplicateCommandString)
                                      .append("-statement")
                                      .append(to_string(i));
            auto indirect = make_shared<IndirectStatement>(
                statementKey,
                Statements({createStatement<SimpleStatement>(SUCCESS)}));
            commandWithDuplicates.add(indirect);
        }
        execution.add(commandWithDuplicates);
        execution.write(gsl::not_null<YamlWriter*>(&yaml));
        config.write(yaml);
        WHEN("we call this command") {
            auto iterator = execution.startIteration();
            const ReturnCode returnCode =
                execute({EXEC_HELPER_BINARY, commandWithDuplicates.get(),
                         "--settings-file", config.getFilename()},
                        config.getDirectory());
            THEN("the call should succeed") { REQUIRE(returnCode == SUCCESS); }
            THEN("the associated predefined statements should all be called "
                 "the expected number of times") {
                REQUIRE(commandWithDuplicates.getNbOfStatements() ==
                        NB_OF_UNIQUE_STATEMENTS + NB_OF_DUPLICATE_STATEMENTS);
                for(const auto& statement : commandWithDuplicates) {
                    auto expectedNumberOfExecutions = 1U;
                    if(statement->getKey() == duplicateStatementString) {
                        expectedNumberOfExecutions = NB_OF_DUPLICATE_STATEMENTS;
                    }
                    REQUIRE(statement->getNumberOfExecutions() ==
                            expectedNumberOfExecutions);
                }
            }
            THEN("no other predefined commands should have been called") {
                for(const auto& otherCommand : execution) {
                    if(commandWithDuplicates.get() != otherCommand.get()) {
                        for(const auto& statement : otherCommand) {
                            REQUIRE(statement->getNumberOfExecutions() == 0U);
                        }
                    }
                }
            }
        }
    }
}
/**
 * Implements @ref scenario-execute-multiple-statements-predefined-order-failing-statements
 */
SCENARIO("Execute multiple statements: predefined order: failing statements",
         "[execute-multiple-statements][scenario-execute-multiple-statements-"
         "predefined-order-failing-statements]") {
    GIVEN("A valid configuration with a command consisting of multiple, "
          "predefined, valid, successful statements and a failing statement "
          "that is not the first nor the last statement") {
        const vector<string> otherCommands(
            {"other-command1", "other-command2", "other-command3"});
        ConfigBuilder execution;
        ConfigFileWriter config;
        YamlWriter yaml;
        // Unrelated commands that must remain untouched by the failing run
        for(const auto& commandString : otherCommands) {
            execution.add(TestCommand(
                commandString, {createStatement<SimpleStatement>(SUCCESS)}));
        }
        const auto NB_OF_STATEMENTS = 5U;
        const auto expectedReturnCode = EXIT_FAILURE;
        ensures(
            expectedReturnCode !=
            SUCCESS); // Test invariant check: the expected return code should not be equal to the success return code
        // Generate the index of the failing statement in a random yet predictable way
        ensures(
            NB_OF_STATEMENTS >=
            3); // In order to have a failing command that is not the first nor the last statement, a statement size of at least 3 statements is required
        // Fixed seed: reproducible across runs, hence the NOLINT
        mt19937 gen(0); // NOLINT(cert-msc32-c,cert-msc51-cpp)
        uniform_int_distribution<> dis(
            1,
            NB_OF_STATEMENTS -
                2U); // Find a random index that will fail, that is not the first nor the last one
        auto failIndex = uint8_t(dis(gen));
        // Define the failing command
        const string commandString("failing-command");
        TestCommand failingCommand(commandString);
        for(uint8_t i = 0; i < NB_OF_STATEMENTS; ++i) {
            if(i == failIndex) {
                string statementKey = string(commandString)
                                          .append("-statement")
                                          .append(to_string(i));
                auto indirect = make_shared<IndirectStatement>(
                    statementKey, Statements({createStatement<SimpleStatement>(
                                      expectedReturnCode)}));
                failingCommand.add(indirect);
            } else {
                string statementKey = string(commandString)
                                          .append("-statement")
                                          .append(to_string(i));
                auto indirect = make_shared<IndirectStatement>(
                    statementKey,
                    Statements({createStatement<SimpleStatement>(SUCCESS)}));
                failingCommand.add(indirect);
            }
        }
        execution.add(failingCommand);
        execution.write(gsl::not_null<YamlWriter*>(&yaml));
        config.write(yaml);
        WHEN("we call this command") {
            auto iterator = execution.startIteration();
            const ReturnCode returnCode =
                execute({EXEC_HELPER_BINARY, failingCommand.get(),
                         "--settings-file", config.getFilename()},
                        config.getDirectory());
            THEN("the call should fail") { REQUIRE(returnCode != SUCCESS); }
            THEN("the call should exit with the same return code as the failed "
                 "statement") {
                REQUIRE(returnCode == expectedReturnCode);
            }
            THEN("the statements configured before the failed statement and "
                 "the failed statements should have been executed") {
                // Execution stops at the failing statement: everything up to
                // and including failIndex ran once, everything after ran never
                REQUIRE(failingCommand.getNumberOfStatementExecutions() ==
                        failIndex + 1U);
                for(auto i = 0U; i <= failIndex; ++i) {
                    REQUIRE(failingCommand[i]->getNumberOfExecutions() == 1U);
                }
                for(size_t i = failIndex + 1UL; i < failingCommand.size();
                    ++i) {
                    REQUIRE(failingCommand[i]->getNumberOfExecutions() == 0U);
                }
            }
            THEN("no other predefined statements should have been called") {
                for(const auto& otherCommand : execution) {
                    if(failingCommand.get() != otherCommand.get()) {
                        for(const auto& statement : otherCommand) {
                            REQUIRE(statement->getNumberOfExecutions() == 0U);
                        }
                    }
                }
            }
        }
    }
}
} // namespace execHelper::test::integration
<file_sep>#include <catch2/catch.hpp>
// BDD-style no-op markers: they only carry a description for the reader and
// expand to nothing, so the annotated block always executes.
#define THEN_WHEN(x)
#define THEN_CHECK(x)
// Runs its body once per subset of the COMBINATION(S) blocks it contains.
// The first pass (index 0) only counts the blocks via __COUNTER__; each
// subsequent pass enables the blocks whose bit is set in the pass index,
// covering all 2^N combinations.
// NOTE(review): relies on __COUNTER__ being incremented exactly once per
// COMBINATION(S) use between MAKE_COMBINATIONS and the block — confirm no
// other __COUNTER__ uses occur in between.
#define MAKE_COMBINATIONS(desc) \
    uint32_t NUMBER_OF_COMBINATIONS = 0; \
    const uint32_t CURRENT_COUNT = __COUNTER__; \
    for(uint32_t make_combinations_index = 0U; \
        (make_combinations_index < (1U << NUMBER_OF_COMBINATIONS) || \
         make_combinations_index == 0U); \
        ++make_combinations_index)
// Guards a block that is enabled only in the passes where its bit is set
#define COMBINATION \
    if(make_combinations_index == 0) { \
        ++NUMBER_OF_COMBINATIONS; \
    } \
    if(make_combinations_index & \
       (1U << (uint32_t(__COUNTER__) - CURRENT_COUNT - 1U)))
// Same as COMBINATION, but takes a description for readability
#define COMBINATIONS(desc) \
    if(make_combinations_index == 0) { \
        ++NUMBER_OF_COMBINATIONS; \
    } \
    if(make_combinations_index & \
       (1U << (uint32_t(__COUNTER__) - CURRENT_COUNT - 1U)))
// Enabled only on the passes where no combination branch has been taken yet
#define COMBINATIONS_NO_BRANCH_TAKEN if(make_combinations_index <= 1U)
// Enabled only on the passes where at least one combination branch is active
#define COMBINATIONS_ONLY_IF_BRANCH_TAKEN if(make_combinations_index > 1U)
<file_sep>#include "plugin.h"
#include "core/executorInterface.h"
#include "logger.h"
using execHelper::core::Task;
namespace {
void noExecuteCallback(const Task& /*task*/) noexcept {
LOG(warning) << "Execute callback is called while no execute callback was "
"registered";
}
auto getExecuteCallback() noexcept -> execHelper::plugins::ExecuteCallback& {
static execHelper::plugins::ExecuteCallback executeCallback(
noExecuteCallback);
return executeCallback;
}
} // namespace
namespace execHelper::plugins {
void registerExecuteCallback(const ExecuteCallback& callback) noexcept {
getExecuteCallback() = callback;
}
auto registerTask(const Task& task) noexcept -> bool {
getExecuteCallback()(task);
return true;
}
} // namespace execHelper::plugins
<file_sep>#ifndef PLUGINS_GENERATORS_INCLUDE
#define PLUGINS_GENERATORS_INCLUDE
#include <memory>
#include <string>
#include <vector>
#include <rapidcheck.h>
#include "plugins/executePlugin.h"
namespace rc {
// rapidcheck generator specialization: produces an arbitrary registered
// plugin by picking a random plugin name and instantiating it through the
// ExecutePlugin factory.
template <>
struct Arbitrary<std::shared_ptr<const execHelper::plugins::Plugin>> {
    static Gen<std::shared_ptr<const execHelper::plugins::Plugin>> arbitrary() {
        const auto pluginNames =
            execHelper::plugins::ExecutePlugin::getPluginNames();
        // Map a uniformly chosen name onto its plugin instance
        return gen::map(
            gen::elementOf(pluginNames), [](const std::string& name) {
                return std::shared_ptr<const execHelper::plugins::Plugin>(
                    execHelper::plugins::ExecutePlugin::getPlugin(name));
            });
    };
};
} // namespace rc
#endif /* PLUGINS_GENERATORS_INCLUDE */
<file_sep>#include "config.h"
#include <string>
#include <string_view>
#include "yaml/yaml.h"
#include "commandLineOptions.h"
#include "logger.h"
#include "optionDescriptions.h"
#include "pattern.h"
#include "patternsHandler.h"
#include "variablesMap.h"
using std::optional;
using std::string;
using std::string_view;
using execHelper::config::Patterns;
using execHelper::config::PatternsHandler;
using execHelper::config::SettingsKeys;
using execHelper::config::SettingsNode;
using execHelper::config::SettingsValues;
using execHelper::config::VariablesMap;
using execHelper::yaml::Yaml;
namespace {
using namespace std::literals;

// Configuration key under which patterns are declared
const string_view patternsKey = "patterns"sv;

/**
 * Collect the patterns declared under the 'patterns' key of the given
 * settings tree. Each declared pattern is merged on top of its defaults;
 * entries that fail to convert to a valid pattern are skipped.
 *
 * \param[in] settings The root settings tree to read from
 * \returns The successfully parsed patterns
 */
auto processPatterns(const SettingsNode& settings) noexcept -> Patterns {
    Patterns patterns;
    if(!settings.contains(string(patternsKey))) {
        return patterns;
    }
    const SettingsNode& patternSettings = settings[string(patternsKey)];
    const auto patternKeys =
        settings.get<SettingsValues>(string(patternsKey), SettingsValues());
    for(const auto& patternKey : patternKeys) {
        // Start from the defaults and overlay the user's configuration
        auto patternMap = PatternsHandler::getDefaultPatternMap(patternKey);
        patternMap.overwrite(patternSettings[patternKey]);
        const auto pattern = PatternsHandler::toPattern(patternKey, patternMap);
        if(pattern) {
            patterns.emplace_back(pattern.value());
        }
    }
    return patterns;
}
} // namespace
namespace execHelper::config {
/**
 * Parse the given settings file into the configured patterns and the
 * remaining settings tree. The 'patterns' subtree is removed from the
 * returned settings after the patterns have been extracted.
 *
 * \param[in] file The settings file to parse
 * \returns The pair of parsed patterns and the settings tree, or
 *          std::nullopt when the file could not be parsed
 */
auto parseSettingsFile(const Path& file) noexcept
    -> optional<PatternSettingsPair> {
    SettingsNode settings("exec-helper");
    Yaml yaml(file);
    if(!yaml.getTree({}, &settings)) {
        LOG(error) << "Could not get settings tree";
        return std::nullopt;
    }
    auto patterns = processPatterns(settings);
    settings.clear(string(patternsKey));
    return make_pair(patterns, settings);
}
} // namespace execHelper::config
<file_sep>#ifndef __SETTINGS_NODE_H__
#define __SETTINGS_NODE_H__
#include <memory>
#include <optional>
#include <string>
#include <vector>
#include "cast.h"
namespace execHelper {
namespace config {
using SettingsKey = std::string; //!< The settings key type
using SettingsKeys = std::vector<SettingsKey>; //!< A SettingsKey collection
using SettingsValue = SettingsKey; //!< The settings value type
using SettingsValues =
    std::vector<SettingsValue>; //!< A SettingsValue collection
/**
 * \brief A class containing a configuration hierarchy
 */
class SettingsNode {
  public:
    /**
     * Create a new node with the given key
     *
     * \param[in] key The key to associate with the root of the node
     */
    explicit SettingsNode(SettingsKey key) noexcept;
    /**
     * Create a new node by making a deep copy of the given other node
     *
     * \param[in] other The other node to copy
     */
    SettingsNode(const SettingsNode& other) noexcept;
    /**
     * Create a new node by moving the content of the given other node
     *
     * \param[in] other The other node to move from
     */
    SettingsNode(SettingsNode&& other) noexcept;
    ~SettingsNode() noexcept;
    /**
     * Assign a deep copy of the content of the given node to this node
     *
     * \param[in] other The other node to copy from
     * \returns A reference to this
     */
    auto operator=(const SettingsNode& other) noexcept -> SettingsNode&;
    /*! @copydoc operator=(const SettingsNode&)
     */
    auto operator=(SettingsNode&& other) noexcept -> SettingsNode&;
    /**
     * Returns whether the given node equals this node
     *
     * \param[in] other The other node to compare with
     * \returns True if the nodes are equal
     * False otherwise
     */
    auto operator==(const SettingsNode& other) const noexcept -> bool;
    /**
     *
     * Returns whether the given node is not equal to this node
     * \param[in] other The other node to compare with
     * \returns The negation of operator==()
     */
    auto operator!=(const SettingsNode& other) const noexcept -> bool;
    /**
     * Get the node associated with the given key. If the key does not exist, it
     * is created
     *
     * \param[in] key The key
     * \return The node associated with the given key
     */
    auto operator[](const SettingsKey& key) noexcept -> SettingsNode&;
    /**
     * Get the node associated with the given key
     *
     * \pre #contains() == true
     * \warning This function is undefined if the precondition is not fulfilled
     * \param[in] key The key
     * \return The node associated with the given key
     */
    auto operator[](const SettingsKey& key) const noexcept
        -> const SettingsNode&;
    /**
     * Gets the key associated with the root of this node
     *
     * \returns The associated key
     */
    [[nodiscard]] auto key() const noexcept -> const SettingsKey&;
    /**
     * Returns whether the given key exists as a direct child of this node
     *
     * \param[in] key The key to search for
     * \returns True if the key is a value of this node
     * False otherwise
     */
    [[nodiscard]] auto contains(const SettingsKey& key) const noexcept -> bool;
    /*! @copydoc contains(const SettingsKey&) const
     */
    [[nodiscard]] auto contains(const SettingsKeys& key) const noexcept -> bool;
    /**
     * Get the direct values associated with the given key path
     *
     * \param[in] key A hierarchy key path
     * \returns The associated values if the given hierarchy key path exists
     * std::nullopt otherwise
     */
    template <typename T>
    [[nodiscard]] auto get(const SettingsKeys& key) const noexcept
        -> std::optional<T> {
        if(!contains(key)) {
            return std::nullopt;
        }
        auto valuesOpt = at(key)->values();
        if(!valuesOpt) {
            return std::nullopt;
        }
        // Convert the raw string values to the requested type T
        return detail::Cast<T, SettingsValues>::cast(valuesOpt.value());
    }
    /*! @copydoc get(const SettingsKeys&) const
     */
    template <typename T>
    [[nodiscard]] inline auto get(const SettingsKey& key) const noexcept
        -> std::optional<T> {
        return get<T>(SettingsKeys({key}));
    }
    /**
     * Get the direct values associated with the given key path or the default
     * value it does not exist
     *
     * \param[in] key A hierarchy key path
     * \param[in] defaultValue The value to return if the key does not exist
     * \returns The associated values if the given hierarchy key path exists
     * the given default value otherwise
     */
    template <typename T>
    [[nodiscard]] auto get(const SettingsKeys& key,
                           const T& defaultValue) const noexcept -> T {
        return get<T>(key).value_or(defaultValue);
    }
    /*! @copydoc get(const SettingsKeys&, const T& defaultValue) const
     */
    template <typename T>
    [[nodiscard]] inline auto get(const SettingsKey& key,
                                  const T& defaultValue) const noexcept -> T {
        return get<T>(SettingsKeys({key}), defaultValue);
    }
    /**
     * Replace the current values of the given key with the given values
     *
     * \param[in] key A hierarchy key path
     * \param[in] values The new values
     * \returns Whether the values were successfully replaced. If false,
     * there is no guarantee that the previous values are still present.
     */
    template <typename T>
    [[nodiscard]] inline auto replace(const SettingsKeys& key,
                                      const T& values) noexcept -> bool {
        // Not atomic: clear followed by add, see the note in \returns
        if(contains(key)) {
            clear(key);
        }
        return add(key, values);
    }
    /*! @copydoc replace(const SettingsKeys&, const T&)
     */
    template <typename T>
    [[nodiscard]] inline auto replace(const SettingsKey& key,
                                      const T& values) noexcept -> bool {
        return replace(SettingsKeys({key}), values);
    }
    /**
     * Add the given value as a direct child of this root node
     *
     * \warning Passing duplicate values results in undefined behaviour
     * \param[in] newValue The new value to add
     * \returns True if the value was added successfully
     * False otherwise
     */
    [[nodiscard]] auto add(const SettingsValue& newValue) noexcept -> bool;
    /*! @copydoc add(const SettingsValue&)
     */
    [[nodiscard]] auto add(const SettingsValues& newValue) noexcept -> bool;
    /*! @copydoc add(const SettingsValue&)
     */
    [[nodiscard]] auto
    add(const std::initializer_list<SettingsValue>& newValue) noexcept -> bool;
    /**
     * Add the given value in the given hierarchy key path. The hierarchy key
     * path is created if it does not exist.
     *
     * \warning Passing duplicate values results in undefined behaviour
     * \param[in] key A hierarchy key path
     * \param[in] newValue The new value to add
     * \returns True if the value was added successfully
     * False otherwise
     */
    [[nodiscard]] auto add(const SettingsKeys& key,
                           const SettingsValue& newValue) noexcept -> bool;
    /*! @copydoc add(const SettingsKeys&, const SettingsValue&)
     */
    [[nodiscard]] auto add(const SettingsKey& key,
                           const SettingsValue& newValue) noexcept -> bool;
    /*! @copydoc add(const SettingsKeys&, const SettingsValue&)
     */
    [[nodiscard]] auto
    add(const SettingsKeys& key,
        const std::initializer_list<SettingsValue>& newValue) noexcept -> bool;
    /*! @copydoc add(const SettingsKeys&, const SettingsValue&)
     */
    [[nodiscard]] auto
    add(const std::initializer_list<SettingsKey>& key,
        const std::initializer_list<SettingsValue>& newValue) noexcept -> bool;
    /*! @copydoc add(const SettingsKeys&, const SettingsValue&)
     */
    [[nodiscard]] auto add(const SettingsKeys& key,
                           const SettingsValues& newValue) noexcept -> bool;
    /*! @copydoc add(const SettingsKey&, const SettingsValue&)
     */
    [[nodiscard]] auto add(const SettingsKey& key,
                           const SettingsValues& newValue) noexcept -> bool;
    /*! @copydoc add(const SettingsKey&, const SettingsValue&)
     */
    [[nodiscard]] auto add(const std::initializer_list<SettingsKey>& key,
                           const SettingsValues& newValue) noexcept -> bool;
    /*! @copydoc add(const SettingsKey&, const SettingsValue&)
     */
    [[nodiscard]] auto add(const std::initializer_list<SettingsKey>& key,
                           const SettingsValue& newValue) noexcept -> bool;
    /**
     * Remove the direct child associated with the given key
     *
     * \param[in] key The key
     * \returns True if the key was successfully removed. Note: if the key does
     * not exist, it is considered to be removed successfully False otherwise
     */
    auto clear(const SettingsKey& key) noexcept -> bool;
    /**
     * Remove the child at the end of the given hierarchy key path
     *
     * \param[in] keys The hierarchy key path
     * \returns True if the key was successfully remove. Note: if the key does
     * not exist, it is considered to be removed successfully False otherwise
     */
    auto clear(const SettingsKeys& keys) noexcept -> bool;
    /**
     * Swap the other element with this one
     *
     * @param[in] other The element to swap with
     */
    void swap(SettingsNode& other) noexcept;
    /**
     * Overwrite the elements in this settings node with the key/value pairs described in the given one.
     * New keys will be added, existing keys will be overwritten with the new content.
     *
     * \param[in] newSettings The new settings to add
     */
    void overwrite(const SettingsNode& newSettings) noexcept;
    /**
     * Get the values associated with the root of this node
     *
     * \returns The associated values if there are values associated with the root of this node
     * std::nullopt otherwise
     */
    [[nodiscard]] auto values() const noexcept -> std::optional<SettingsValues>;

  private:
    using SettingsNodeCollection =
        std::vector<SettingsNode>; //!< A collection of nodes
    /**
     * Make a deep copy of the content of other to this node
     *
     * \param[in] other The other node to copy
     */
    void deepCopy(const SettingsNode& other) noexcept;
    /*! @copydoc operator[](const SettingsKey&) const
     */
    auto at(const SettingsKey& key) noexcept -> SettingsNode*;
    /*! @copydoc operator[](const SettingsKey&) const
     */
    auto at(const SettingsKey& key) const noexcept -> const SettingsNode*;
    /*! @copydoc operator[](const SettingsKey&) const
     */
    auto at(const SettingsKeys& key) noexcept -> SettingsNode*;
    /*! @copydoc operator[](const SettingsKey&) const
     */
    auto at(const SettingsKeys& key) const noexcept -> const SettingsNode*;
    SettingsKey m_key; //!< The root key associated with this node
    std::unique_ptr<SettingsNodeCollection>
        m_values; //!< The value hierarchy associated with this node
};
/**
 * Streaming operator for settings nodes
 *
 * \param[in] os The stream to stream to
 * \param[in] settings The settings to add to the stream
 * \returns os
 */
auto operator<<(std::ostream& os, const SettingsNode& settings) noexcept
    -> std::ostream&;
} // namespace config
} // namespace execHelper
#endif /* __SETTINGS_NODE_H__ */
<file_sep>.. describe:: environment
A list of environment variables that should be set before the commands are executed. See :ref:`exec-helper-config-environment` (5).
<file_sep>#ifndef __PERMUTATION_ITERATOR_H__
#define __PERMUTATION_ITERATOR_H__
#include <tuple>
#include <log/assertions.h>
#include "createObject.h"
namespace execHelper {
namespace core {
/**
 * \brief Iterates over permutations of the given collection
 *
 * Iterates over the cartesian product of the given collections: the first
 * (outer) collection varies slowest, the trailing collections fastest.
 * Currently the PermutationIterator implements std::forward_iterator_tag
 * interface
 */
template <typename IteratorType, typename CollectionType1,
          typename... CollectionTypes>
class PermutationIterator {
  private:
    using value_type = IteratorType;
    using pointer = IteratorType*;
    using reference = IteratorType&;
    using iterator =
        PermutationIterator<IteratorType, CollectionType1, CollectionTypes...>;
    // One element from each collection, in declaration order
    using Tuple = std::tuple<typename CollectionType1::value_type,
                             typename CollectionTypes::value_type...>;
    const typename CollectionType1::const_iterator m_outerBeginIterator;
    typename CollectionType1::const_iterator m_outerIterator;
    const typename CollectionType1::const_iterator m_outerEndIterator;
    // Recursively instantiated iterator over the remaining collections
    PermutationIterator<IteratorType, CollectionTypes...> m_innerIterator;

  public:
    /**
     * Constructor
     *
     * \param[in] c1BeginIterator The begin iterator for the first collection
     * \param[in] otherBeginIterators The begin iterators for the additional
     * collections
     * \param[in] c1EndIterator The end iterator for the first collection
     * \param[in] otherEndIterators The end iterators for the additional
     * collections
     */
    PermutationIterator(
        const typename CollectionType1::const_iterator& c1BeginIterator,
        const typename CollectionTypes::const_iterator&... otherBeginIterators,
        const typename CollectionType1::const_iterator& c1EndIterator,
        const typename CollectionTypes::
            const_iterator&... otherEndIterators) noexcept
        : m_outerBeginIterator(c1BeginIterator),
          m_outerIterator(c1BeginIterator),
          m_outerEndIterator(c1EndIterator),
          m_innerIterator(otherBeginIterators..., otherEndIterators...) {
        // If any collection is empty there are no permutations at all, so
        // this iterator immediately becomes an end iterator
        if(hasEmptyCollection()) {
            m_outerIterator = m_outerEndIterator;
        }
    }
    /**
     * Increment operator
     *
     * Advances odometer-style: the inner iterator is incremented first and,
     * once it reaches its end, it is restarted and the outer iterator
     * advances by one.
     *
     * \returns An iterator pointing to the next object
     */
    iterator& operator++() noexcept {
        // The iterator is at its end if the m_outerIterator is at its own end
        if(m_outerIterator != m_outerEndIterator) {
            if(!m_innerIterator.atEnd()) {
                ++m_innerIterator;
            }
            if(m_innerIterator.atEnd()) {
                m_innerIterator.restart();
                ++m_outerIterator;
            }
        }
        return *this;
    }
    /**
     * Returns whether any of the given collections is empty
     *
     * \returns True If any of the given collections is empty
     *          False Otherwise
     */
    bool hasEmptyCollection() const noexcept {
        return m_outerBeginIterator == m_outerEndIterator ||
               m_innerIterator.hasEmptyCollection();
    }
    /*! @copydoc config::Argv::operator==(const Argv&) const
     */
    bool operator==(const iterator& other) const {
        if(atEnd()) {
            // If the outer iterator is at the end, we do not care about the
            // state of the inner iterators
            return m_outerIterator == other.m_outerIterator;
        }
        return (m_innerIterator == other.m_innerIterator &&
                m_outerIterator == other.m_outerIterator);
    }
    /*! @copydoc config::Argv::operator!=(const Argv&) const
     */
    bool operator!=(const iterator& other) const { return !(*this == other); }
    /**
     * Convert the current iterator position to a tuple of the actual objects
     * they are pointing to
     *
     * \returns The actual objects the current iterator position is pointing to
     */
    Tuple getElementValue() const {
        expectsMessage(m_outerIterator != m_outerEndIterator,
                       "Do not dereference end iterator");
        // Prepend the outer element to the tuple produced by the inner iterator
        return std::tuple_cat(std::make_tuple(*m_outerIterator),
                              m_innerIterator.getElementValue());
    }
    /**
     * Dereference operator
     *
     * \returns The object the current iterator is pointing to
     */
    value_type operator*() const {
        expectsMessage(m_outerIterator != m_outerEndIterator,
                       "Do not dereference outer end iterator");
        expectsMessage(!m_innerIterator.atEnd(),
                       "Do not dereference inner end iterator");
        using ttype = typename std::decay<Tuple>::type;
        // Construct an IteratorType from the current element tuple
        return detail::createObject<
            IteratorType, Tuple, 0 == std::tuple_size<ttype>::value,
            std::tuple_size<ttype>::value>::getObject(getElementValue());
    }
    /**
     * Returns whether the current iterator is at the end
     *
     * \returns True If the current iterator is at the end
     *          False Otherwise
     */
    bool atEnd() const { return m_outerIterator == m_outerEndIterator; }
    /**
     * Restart the iterator
     */
    void restart() {
        m_outerIterator = m_outerBeginIterator;
        m_innerIterator.restart();
    }
};
/*!@copydoc PermutationIterator
 *
 * Base case of the recursion: iterates over a single collection.
 */
template <typename IteratorType, typename CollectionType>
class PermutationIterator<IteratorType, CollectionType> {
  private:
    using value_type = IteratorType;
    using pointer = IteratorType*;
    using reference = IteratorType&;
    using iterator = PermutationIterator<IteratorType, CollectionType>;

    typename CollectionType::const_iterator m_current;
    const typename CollectionType::const_iterator m_first;
    const typename CollectionType::const_iterator m_last;

  public:
    /**
     * Constructor
     *
     * \param[in] beginIterator The begin iterator for the collection
     * \param[in] endIterator The end iterator for the collection
     */
    PermutationIterator(
        const typename CollectionType::const_iterator& beginIterator,
        const typename CollectionType::const_iterator& endIterator) noexcept
        : m_current(beginIterator),
          m_first(beginIterator),
          m_last(endIterator) {}

    /*! @copydoc PermutationIterator::operator++()
     */
    iterator& operator++() noexcept {
        // Never advance past the end of the collection
        if(m_current != m_last) {
            ++m_current;
        }
        return *this;
    }

    /*! @copydoc PermutationIterator::atEnd()
     */
    bool atEnd() const { return m_current == m_last; }

    /*! @copydoc PermutationIterator::hasEmptyCollection()
     */
    bool hasEmptyCollection() const noexcept { return m_first == m_last; }

    /*! @copydoc PermutationIterator::operator==()
     */
    bool operator==(const iterator& other) const {
        return m_current == other.m_current;
    }

    /*! @copydoc PermutationIterator::operator!=()
     */
    bool operator!=(const iterator& other) const { return !(other == *this); }

    /*! @copydoc PermutationIterator::operator*()
     */
    value_type operator*() const {
        return IteratorType(std::get<CollectionType::value>(getElementValue()));
    }

    /*! @copydoc PermutationIterator::getElementValue()
     */
    std::tuple<typename CollectionType::value_type> getElementValue() const {
        expectsMessage(m_current != m_last, "Do not dereference end iterator");
        return std::make_tuple(*m_current);
    }

    /*! @copydoc PermutationIterator::restart()
     */
    void restart() { m_current = m_first; }
};
} // namespace core
} // namespace execHelper
#endif /* __PERMUTATION_ITERATOR_H__ */
<file_sep>#include <map>
#include <memory>
#include <vector>
#include <gsl/string_span>
#include "config/pattern.h"
#include "config/settingsNode.h"
#include "config/variablesMap.h"
#include "core/task.h"
#include "log/assertions.h"
#include "plugins/commandLineCommand.h"
#include "plugins/executePlugin.h"
#include "plugins/memory.h"
#include "plugins/valgrind.h"
#include "unittest/catch.h"
#include "utils/utils.h"
#include "executorStub.h"
#include "fleetingOptionsStub.h"
using std::map;
using std::move;
using std::shared_ptr;
using std::string;
using std::vector;
using gsl::czstring;
using execHelper::config::Command;
using execHelper::config::COMMAND_KEY;
using execHelper::config::CommandCollection;
using execHelper::config::Pattern;
using execHelper::config::Patterns;
using execHelper::config::SettingsKeys;
using execHelper::config::SettingsNode;
using execHelper::config::SettingsValues;
using execHelper::config::VariablesMap;
using execHelper::core::Task;
using execHelper::plugins::CommandLineCommand;
using execHelper::plugins::ExecutePlugin;
using execHelper::plugins::Memory;
using execHelper::plugins::MemoryHandler;
using execHelper::plugins::Valgrind;
using execHelper::test::FleetingOptionsStub;
namespace {
const czstring<> PLUGIN_NAME = "execute-plugin";
const czstring<> MEMORY_KEY = "memory";
const czstring<> PATTERN_KEY = "patterns";
template <typename T> auto checkGetPlugin(const string& pluginName) -> bool {
auto plugin = ExecutePlugin::getPlugin(pluginName);
auto* derived = dynamic_cast<T*>(
plugin.get()); // derived will be a nullptr if the cast fails
return (derived != nullptr);
}
class Expected {
public:
explicit Expected(Command directCommand, Task task = Task(),
VariablesMap variablesMap = VariablesMap(MEMORY_KEY),
Patterns patterns = Patterns())
: m_directCommand(move(directCommand)),
m_task(move(task)),
m_variablesMap(move(variablesMap)),
m_patterns(move(patterns)) {
;
}
[[nodiscard]] inline auto getCommand() const noexcept -> const Command& {
return m_directCommand;
}
[[nodiscard]] inline auto getTask() const noexcept -> const Task& {
return m_task;
}
[[nodiscard]] inline auto getVariables() const noexcept
-> const VariablesMap& {
return m_variablesMap;
}
inline void setVariables(const VariablesMap& variables) noexcept {
m_variablesMap = variables;
}
[[nodiscard]] inline auto getPatterns() const noexcept -> const Patterns& {
return m_patterns;
}
inline void addPattern(const Pattern& pattern) noexcept {
m_patterns.push_back(pattern);
}
inline void setPatterns(const Patterns& patterns) noexcept {
m_patterns = patterns;
}
private:
Command m_directCommand;
Task m_task;
VariablesMap m_variablesMap;
Patterns m_patterns;
};
} // namespace
namespace execHelper::plugins::test {
// Verifies that the execute-plugin exposes an empty default variables map.
// Fix: the tag used to be "[clang-tidy]" — a copy-paste remnant from the
// clang-tidy plugin tests — which excluded this scenario from the
// [execute-plugin] test selection.
SCENARIO("Obtaining the default variables map of the execute-plugin",
         "[execute-plugin]") {
    GIVEN("The default fleeting options") {
        FleetingOptionsStub fleetingOptions;
        ExecutePlugin plugin({});
        VariablesMap actualVariables(PLUGIN_NAME);
        WHEN("We request the variables map") {
            VariablesMap variables = plugin.getVariablesMap(fleetingOptions);
            THEN("We should find the same ones") {
                REQUIRE(variables == actualVariables);
            }
        }
    }
}
// Applying the execute-plugin with entirely empty configuration must succeed.
// Note: ExecutePlugin keeps global stacks of fleeting options, settings and
// patterns; every push below must be matched by the corresponding pop at the
// end of the GIVEN block.
SCENARIO("Testing the default execute settings", "[execute-plugin]") {
    GIVEN("A selector plugin object and the default options") {
        ExecutePlugin plugin({});
        Task task;
        FleetingOptionsStub fleetingOptions;
        ExecutePlugin::push(
            gsl::not_null<config::FleetingOptionsInterface*>(&fleetingOptions));
        ExecutePlugin::push(SettingsNode("test"));
        ExecutePlugin::push(Patterns());
        WHEN("We apply the selector plugin") {
            bool success =
                plugin.apply(task, VariablesMap(PLUGIN_NAME), Patterns());
            THEN("It should succeed") { REQUIRE(success); }
        }
        // Unwind the global state pushed above
        ExecutePlugin::popFleetingOptions();
        ExecutePlugin::popSettingsNode();
        ExecutePlugin::popPatterns();
    }
}
// Exhaustively combines configuration aspects (direct/indirect commands,
// root and command-specific settings, generic and command-specific patterns)
// and verifies that the execute-plugin forwards the right variables map and
// patterns to each executed (memory) plugin instance.
SCENARIO("Test the settings node to variables map mapping",
         "[execute-plugin]") {
    MAKE_COMBINATIONS("Of settings node configurations") {
        SettingsNode settings("test-execute-plugin");
        FleetingOptionsStub fleetingOptions;
        // Records every execution of the memory plugin for later verification
        MemoryHandler memory;
        CommandCollection commands;
        // Expected executions, per configured command
        map<Command, vector<Expected>> expected;
        Patterns configuredPatterns;
        COMBINATIONS("Add a plugin command directly") {
            const Command command = MEMORY_KEY;
            commands.push_back(command);
            REQUIRE(settings.add(COMMAND_KEY, command));
            expected.emplace(command, vector<Expected>({Expected(command)}));
        }
        COMBINATIONS("Add a command") {
            const Command command("a-command");
            commands.push_back(command);
            REQUIRE(settings.add(COMMAND_KEY, command));
            REQUIRE(settings.add(command, MEMORY_KEY));
            expected.emplace(command, vector<Expected>({Expected(command)}));
        }
        COMBINATIONS("Add multiple commands") {
            commands = {"multiple-command1", "multiple-command2"};
            REQUIRE(settings.replace(COMMAND_KEY, commands));
            for(const auto& command : commands) {
                expected.emplace(command,
                                 vector<Expected>({Expected(command)}));
                REQUIRE(settings.add(command, MEMORY_KEY));
            }
        }
        COMBINATIONS("Add an indirect command") {
            const string command("indirect-command");
            commands.push_back(command);
            const vector<string> directCommands(
                {"direct-command1", "direct-command2"});
            REQUIRE(settings.add(COMMAND_KEY, command));
            REQUIRE(settings.add(command, directCommands));
            // NOTE(review): map::emplace is a no-op for an existing key, so
            // only the first direct command ends up in 'expected' here —
            // TODO confirm this is intended
            for(const auto& directCommand : directCommands) {
                REQUIRE(settings.add(directCommand, MEMORY_KEY));
                expected.emplace(command,
                                 vector<Expected>({Expected(directCommand)}));
            }
        }
        COMBINATIONS("Add some root settings") {
            // Settings added under the plugin root apply to every command
            for(const auto& command : commands) {
                ensures(expected.count(command) > 0U);
                for(auto& expectedTask : expected.at(command)) {
                    const auto& directCommand = expectedTask.getCommand();
                    VariablesMap expectedVariableMap(MEMORY_KEY);
                    REQUIRE(expectedVariableMap.add(
                        string(directCommand).append("-root-setting1"),
                        "root-setting-value1"));
                    REQUIRE(expectedVariableMap.add(
                        string(directCommand).append("-root-setting2"),
                        "root-setting-value2"));
                    REQUIRE(expectedVariableMap.add(
                        string(directCommand).append("-root-setting3"),
                        "root-setting-value3"));
                    for(const auto& key :
                        expectedVariableMap.get<SettingsValues>(
                            SettingsKeys(), SettingsValues())) {
                        settings[MEMORY_KEY][key] = expectedVariableMap[key];
                    }
                    expectedTask.setVariables(expectedVariableMap);
                }
            }
        }
        COMBINATIONS("Add some command specific settings") {
            // Settings nested under a command key apply to that command only
            for(const auto& command : commands) {
                ensures(expected.count(command) > 0U);
                for(auto& expectedTask : expected.at(command)) {
                    const auto& directCommand = expectedTask.getCommand();
                    VariablesMap expectedVariableMap(MEMORY_KEY);
                    REQUIRE(expectedVariableMap.add(
                        string(directCommand).append("-specific-setting1"),
                        "specific-setting-value1"));
                    REQUIRE(expectedVariableMap.add(
                        string(directCommand).append("-specific-setting2"),
                        "specific-setting-value2"));
                    REQUIRE(expectedVariableMap.add(
                        string(directCommand).append("-specific-setting3"),
                        "specific-setting-value3"));
                    for(const auto& key :
                        expectedVariableMap.get<SettingsValues>(
                            SettingsKeys(), SettingsValues())) {
                        settings[MEMORY_KEY][directCommand][key] =
                            expectedVariableMap[key];
                    }
                    expectedTask.setVariables(expectedVariableMap);
                }
            }
        }
        COMBINATIONS("Add generic patterns") {
            // Patterns configured under the plugin root reach every command
            if(!commands.empty()) {
                const Pattern pattern1("PATTERN1", {"value1a", "value1b"});
                const Pattern pattern2("PATTERN2", {"value2a", "value2b"});
                const Patterns patterns({pattern1, pattern2});
                for(const auto& pattern : patterns) {
                    configuredPatterns.push_back(pattern);
                    REQUIRE(settings.add({MEMORY_KEY, PATTERN_KEY},
                                         pattern.getKey()));
                    for(const auto& command : commands) {
                        ensures(expected.count(command) > 0U);
                        for(auto& expectedCommand : expected.at(command)) {
                            expectedCommand.addPattern(pattern);
                        }
                    }
                }
            }
        }
        COMBINATIONS("Add a specific pattern") {
            // Patterns configured under a command key override the generic
            // ones for that command (hence setPatterns({}) first)
            if(!commands.empty()) {
                const Pattern patternA("PATTERNa", {"valueaa", "valueab"});
                const Pattern patternB("PATTERNb", {"valueba", "valuebb"});
                const Patterns patterns({patternA, patternB});
                const Command& command = commands.front();
                ensures(expected.count(command) > 0U);
                for(auto& expectedCommand : expected.at(command)) {
                    expectedCommand.setPatterns({});
                    for(const auto& pattern : patterns) {
                        configuredPatterns.push_back(pattern);
                        REQUIRE(settings.add({MEMORY_KEY,
                                              expectedCommand.getCommand(),
                                              PATTERN_KEY},
                                             pattern.getKey()));
                        expectedCommand.addPattern(pattern);
                    }
                }
            }
        }
        // Push the global state the plugin reads; popped again below
        ExecutePlugin::push(
            Plugins({{"Memory",
                      shared_ptr<Plugin>(new execHelper::plugins::Memory())}}));
        ExecutePlugin::push(
            gsl::not_null<config::FleetingOptionsInterface*>(&fleetingOptions));
        ExecutePlugin::push(move(settings));
        ExecutePlugin::push(move(configuredPatterns));
        ExecutePlugin plugin(commands);
        THEN_WHEN("We apply the execute plugin") {
            Task task;
            bool returnCode =
                plugin.apply(task, VariablesMap("random-thing"), Patterns());
            THEN_CHECK("It should succeed") { REQUIRE(returnCode); }
            THEN_CHECK("It called the right commands") {
                const Memory::Memories& memories =
                    MemoryHandler::getExecutions();
                // NOTE: this iterator shadows the MemoryHandler above
                auto memory = memories.begin();
                for(const auto& command : commands) {
                    REQUIRE(expected.count(command) > 0U);
                    for(const auto& expectedCommand : expected.at(command)) {
                        REQUIRE(memory != memories.end());
                        REQUIRE(memory->task == expectedCommand.getTask());
                        REQUIRE(memory->patterns ==
                                expectedCommand.getPatterns());
                        for(const auto& key :
                            expectedCommand.getVariables().get<SettingsValues>(
                                SettingsKeys(), SettingsValues())) {
                            REQUIRE(memory->variables.contains(key));
                            REQUIRE(memory->variables[key] ==
                                    expectedCommand.getVariables()[key]);
                        }
                        ++memory;
                    }
                }
            }
        }
        ExecutePlugin::popFleetingOptions();
        ExecutePlugin::popSettingsNode();
        ExecutePlugin::popPatterns();
        ExecutePlugin::popPlugins();
    }
}
// Error-path coverage: unknown plugin names must fail the apply, and a
// failing plugin must stop the execution chain immediately.
SCENARIO("Test problematic cases", "[execute-plugin]") {
    GIVEN("A plugin with a non-existing plugin to execute") {
        FleetingOptionsStub fleetingOptions;
        ExecutePlugin::push(
            Plugins({{"Memory",
                      shared_ptr<Plugin>(new execHelper::plugins::Memory())}}));
        ExecutePlugin::push(
            gsl::not_null<config::FleetingOptionsInterface*>(&fleetingOptions));
        ExecutePlugin::push(SettingsNode("test"));
        ExecutePlugin::push(Patterns());
        ExecutePlugin plugin({"non-existing-plugin"});
        WHEN("We execute the plugin") {
            Task task;
            bool returnCode =
                plugin.apply(task, VariablesMap("test"), Patterns());
            THEN("It should not succeed") { REQUIRE_FALSE(returnCode); }
        }
        // Unwind the global state pushed above
        ExecutePlugin::popFleetingOptions();
        ExecutePlugin::popSettingsNode();
        ExecutePlugin::popPatterns();
        ExecutePlugin::popPlugins();
    }
    GIVEN("A plugin that fails to execute") {
        const Command command("command");
        FleetingOptionsStub fleetingOptions;
        fleetingOptions.m_commands = {command};
        SettingsNode settings("test");
        REQUIRE(settings.add(COMMAND_KEY, command));
        REQUIRE(settings.add(command, MEMORY_KEY));
        ExecutePlugin::push(
            Plugins({{"Memory",
                      shared_ptr<Plugin>(new execHelper::plugins::Memory())}}));
        ExecutePlugin::push(
            gsl::not_null<config::FleetingOptionsInterface*>(&fleetingOptions));
        ExecutePlugin::push(move(settings));
        ExecutePlugin::push(Patterns());
        // Two memory invocations are configured, but the first one fails
        ExecutePlugin plugin({"memory", "memory"});
        MemoryHandler memory;
        MemoryHandler::setReturnCode(false);
        WHEN("We execute the plugin") {
            Task task;
            bool returnCode =
                plugin.apply(task, VariablesMap("test"), Patterns());
            THEN("It should not succeed") { REQUIRE_FALSE(returnCode); }
            THEN("It should have stopped executing after the failure") {
                REQUIRE(memory.getExecutions().size() == 1U);
            }
        }
        ExecutePlugin::popFleetingOptions();
        ExecutePlugin::popSettingsNode();
        ExecutePlugin::popPatterns();
        ExecutePlugin::popPlugins();
    }
}
// Note: this test requires RTTI support (checkGetPlugin relies on
// dynamic_cast to verify the concrete plugin type)
SCENARIO("Testing the plugin getter", "[execute-plugin]") {
    GIVEN("Nothing in particular") {
        WHEN("We request the respective plugin object") {
            THEN("We should get the appropriate ones") {
                REQUIRE(checkGetPlugin<const CommandLineCommand>(
                    "command-line-command"));
                REQUIRE(checkGetPlugin<const Memory>("memory"));
                REQUIRE(checkGetPlugin<const Valgrind>("valgrind"));
            }
        }
        WHEN("We try to get a non-existing plugin") {
            THEN("We should not get anything") {
                // Unknown names must raise InvalidPlugin rather than
                // returning a null pointer
                REQUIRE_THROWS_AS(
                    ExecutePlugin::getPlugin("non-existing-plugin"),
                    plugins::InvalidPlugin);
            }
        }
    }
}
} // namespace execHelper::plugins::test
<file_sep>set(MODULE_NAME yaml)
# Library target name: <project>-yaml
set(LIBRARY_NAME ${PROJECT_NAME}-${MODULE_NAME})
# Sources of the yaml wrapper module
set(SRCS
        src/yamlWrapper.cpp
        src/yaml.cpp
        src/logger.cpp
   )
# Modules this library links against and whose include paths it re-exports
set(DEPENDENCIES
        yaml-3rd
        log
        config
    )
add_library(${LIBRARY_NAME} ${SRCS})
# Private headers live under include/yaml; the public interface under include/
target_include_directories(${LIBRARY_NAME} PRIVATE include/yaml)
target_include_directories(${LIBRARY_NAME} PUBLIC include ${DEPENDENCIES})
target_link_libraries(${LIBRARY_NAME} PUBLIC ${DEPENDENCIES})
# Short alias so that siblings can link against plain 'yaml'
add_library(${MODULE_NAME} ALIAS ${LIBRARY_NAME})
install(TARGETS ${LIBRARY_NAME} DESTINATION ${LIB_DIRECTORY})
<file_sep>#ifndef CONFIG_GENERATORS_INCLUDE
#define CONFIG_GENERATORS_INCLUDE
#include <array>
#include <map>
#include <thread>
#include "config/commandLineOptions.h"
#include "config/path.h"
#include "config/pattern.h"
#include "config/settingsNode.h"
#include "unittest/rapidcheck.h"
#include "utils/commonGenerators.h"
#include "utils/testValue.h"
// NOTE: Must be in rc namespace!
namespace rc {
/**
 * \brief Test value wrapper for boolean options, mapping them onto their
 * "yes"/"no" configuration representation.
 */
struct BoolValue : public execHelper::test::utils::TestValue<bool> {
    explicit BoolValue(bool option) noexcept
        : execHelper::test::utils::TestValue<bool>(option) {}

    [[nodiscard]] auto config() const noexcept -> std::string final {
        if(m_value) {
            return "yes";
        }
        return "no";
    }
};
/**
 * \brief Test value wrapper for the jobs option. A value of 0 maps onto the
 * "auto" configuration value, which resolves to the hardware concurrency.
 */
struct JobsValue
    : public execHelper::test::utils::TestValue<execHelper::config::Jobs_t> {
    explicit JobsValue(execHelper::config::Jobs_t option) noexcept
        : execHelper::test::utils::TestValue<execHelper::config::Jobs_t>(
              option) {}

    [[nodiscard]] auto config() const noexcept -> std::string override {
        return m_value == 0 ? std::string("auto") : std::to_string(m_value);
    }

    [[nodiscard]] auto
    operator==(const execHelper::config::Jobs_t& other) const noexcept
        -> bool final {
        // 0 means 'auto': it matches the number of hardware threads
        if(m_value == 0) {
            return std::thread::hardware_concurrency() == other;
        }
        return other == m_value;
    }
};
// Thin, type-distinct wrappers around BoolValue: each represents one boolean
// command-line option so that rapidcheck generators can be specialized per
// option type.
struct HelpValue : public BoolValue {
    explicit HelpValue(execHelper::config::HelpOption_t option) noexcept
        : BoolValue(option) {}
};
struct VersionValue : public BoolValue {
    explicit VersionValue(bool option) noexcept : BoolValue(option) {}
};
struct VerbosityValue : public BoolValue {
    explicit VerbosityValue(bool option) noexcept : BoolValue(option) {}
};
struct DryRunValue : public BoolValue {
    explicit DryRunValue(bool option) noexcept : BoolValue(option) {}
};
struct KeepGoingValue : public BoolValue {
    explicit KeepGoingValue(bool option) noexcept : BoolValue(option) {}
};
struct ListPluginsValue : public BoolValue {
    explicit ListPluginsValue(bool option) noexcept : BoolValue(option) {}
};
// The append-search-path option is configured as a plain list of paths
using AppendSearchPathValue = execHelper::config::Paths;
// rapidcheck generator specializations: each one constructs the wrapper from
// an arbitrary instance of its underlying value type.
template <> struct Arbitrary<JobsValue> {
    static auto arbitrary() {
        return gen::construct<JobsValue>(gen::arbitrary<JobsValue::Value>());
    };
};
template <> struct Arbitrary<HelpValue> {
    static auto arbitrary() {
        return gen::construct<HelpValue>(gen::arbitrary<HelpValue::Value>());
    };
};
template <> struct Arbitrary<VersionValue> {
    static auto arbitrary() {
        return gen::construct<VersionValue>(
            gen::arbitrary<VersionValue::Value>());
    };
};
template <> struct Arbitrary<VerbosityValue> {
    static auto arbitrary() {
        return gen::construct<VerbosityValue>(
            gen::arbitrary<VerbosityValue::Value>());
    };
};
template <> struct Arbitrary<DryRunValue> {
    static auto arbitrary() {
        return gen::construct<DryRunValue>(
            gen::arbitrary<DryRunValue::Value>());
    };
};
template <> struct Arbitrary<KeepGoingValue> {
    static auto arbitrary() {
        return gen::construct<KeepGoingValue>(
            gen::arbitrary<KeepGoingValue::Value>());
    };
};
template <> struct Arbitrary<ListPluginsValue> {
    static auto arbitrary() {
        return gen::construct<ListPluginsValue>(
            gen::arbitrary<ListPluginsValue::Value>());
    };
};
// TODO: further extend this. Currently generates settings nodes of exactly 2
// levels deep + root level
template <> struct Arbitrary<execHelper::config::SettingsNode> {
    static auto arbitrary() {
        const auto levelSize = *gen::inRange(0U, 1000U);
        return gen::apply(
            [](const std::string& root,
               const std::map<std::string, std::vector<std::string>>& content) {
                execHelper::config::SettingsNode settings(root);
                // Add every generated key with its list of values under the
                // root. Fix: the previous version called std::move on the
                // const map entries, which silently degrades to a copy and is
                // misleading — pass the references directly instead.
                for(const auto& entry : content) {
                    if(!settings.add(entry.first, entry.second)) {
                        std::cerr << "Failed to add key" << std::endl;
                    }
                }
                return settings;
            },
            gen::arbitrary<std::string>(),
            gen::container<std::map<std::string, std::vector<std::string>>>(
                levelSize, gen::arbitrary<std::string>(),
                gen::arbitrary<std::vector<std::string>>()));
    };
};
// Generates patterns with a non-empty key and non-empty values
template <> struct Arbitrary<execHelper::config::Pattern> {
    static auto arbitrary() {
        return gen::construct<execHelper::config::Pattern>(
            gen::nonEmpty(gen::string<std::string>()),
            gen::container<std::vector<std::string>>(
                gen::nonEmpty(gen::string<std::string>())));
    };
};
} // namespace rc
#endif /* CONFIG_GENERATORS_INCLUDE */
<file_sep>.. _feature-documentation:
Feature documentation
*********************
.. toctree::
:maxdepth: 1
:glob:
:caption: The following features are currently documented:
**/*
Test reports
============
The `Feature test report <https://bverhagen.gitlab.io/exec-helper/integration/report.html>`_ shows the detailed results of the feature scenarios.
The `Unit test coverage report <https://bverhagen.gitlab.io/exec-helper/coverage/index.html>`_ shows the detailed coverage of the unit tests.
<file_sep>#ifndef LOGGER_INCLUDE
#define LOGGER_INCLUDE
#include "log/log.h"
// Global Boost.Log logger instance used by the commander module
BOOST_LOG_GLOBAL_LOGGER(exec_helper_commander_logger,
                        execHelper::log::LoggerType);
// Channel name under which this module's log messages are emitted
static const std::string LOG_CHANNEL = "commander";
// Log a message at the given severity, e.g. LOG(debug) << "message".
// The emitting source file and line are attached to each record.
#define LOG(x)                                                                 \
    BOOST_LOG_STREAM_CHANNEL_SEV(exec_helper_commander_logger::get(),          \
                                 LOG_CHANNEL, execHelper::log::x)              \
        << boost::log::add_value(fileLog, __FILE__)                            \
        << boost::log::add_value(lineLog, __LINE__)
#endif /* LOGGER_INCLUDE */
<file_sep>#ifndef VERBOSITY_INCLUDE
#define VERBOSITY_INCLUDE
#include <iostream>
#include <gsl/string_span>
#include "config/fleetingOptionsInterface.h"
#include "config/variablesMap.h"
#include "core/task.h"
namespace execHelper {
namespace plugins {
using Verbosity = bool;
const gsl::czstring<> VERBOSITY_KEY = "verbose";
/**
 * \brief Extends the functionality to include the _verbose_ config parameter and processes this parameter, using the --verbose flag
 */
struct VerbosityLong {
    /*! @copydoc JobsLong::getVariables(config::VariablesMap&, const config::FleetingOptionsInterface&)
     */
    static void
    getVariables(config::VariablesMap& variables,
                 const config::FleetingOptionsInterface& options) noexcept;
    /*! @copydoc JobsLong::apply(core::Task&, const config::VariablesMap&)
     */
    inline static void apply(core::Task& task,
                             const config::VariablesMap& variables) noexcept {
        // Only append the flag when verbosity was configured
        if(*(variables.get<Verbosity>(VERBOSITY_KEY))) {
            task.append("--verbose");
        }
    }
};
/**
* \brief Extends the functionality to include the _verbose_ config parameter and processes this parameter, using the --debug flag
*/
struct VerbosityDebug {
/*! @copydoc JobsLong::getVariables(config::VariablesMap&, const config::FleetingOptionsInterface&)
*/
inline static void
getVariables(config::VariablesMap& variables,
const config::FleetingOptionsInterface& options) noexcept {
VerbosityLong::getVariables(variables, options);
}
/*! @copydoc JobsLong::apply(core::Task&, const config::VariablesMap&)
*/
inline static void apply(core::Task& task,
const config::VariablesMap& variables) noexcept {
if(*(variables.get<Verbosity>(VERBOSITY_KEY))) {
task.append("--debug");
}
}
};
/**
* \brief Extends the functionality to include the _verbose_ config parameter and processes this parameter, using the -v flag
*/
struct VerbosityShort {
/*! @copydoc JobsLong::getVariables(config::VariablesMap&, const config::FleetingOptionsInterface&)
*/
inline static void
getVariables(config::VariablesMap& variables,
const config::FleetingOptionsInterface& options) noexcept {
VerbosityLong::getVariables(variables, options);
}
/*! @copydoc JobsLong::apply(core::Task&, const config::VariablesMap&)
*/
inline static void apply(core::Task& task,
const config::VariablesMap& variables) noexcept {
if(*(variables.get<Verbosity>(VERBOSITY_KEY))) {
task.append("-v");
}
}
};
} // namespace plugins
} // namespace execHelper
#endif /* VERBOSITY_INCLUDE */
<file_sep>#ifndef __PATTERNS_H__
#define __PATTERNS_H__
#include <string>
#include "config/pattern.h"
#include "mapPermutator.h"
namespace execHelper {
namespace core {
/**
 * Replaces all occurrences of the given pattern in the subject with the
 * replacement
 *
 * \param[in] subject The string to search in
 * \param[in] pattern The pattern to search for
 * \param[in] replacement The replacement for each occurrence of the pattern
 * \returns The subject with all occurrences of the pattern replaced
 */
std::string replacePatterns(const std::string& subject,
                            const std::string& pattern,
                            const std::string& replacement) noexcept;
} // namespace core
} // namespace execHelper
#endif /* __PATTERNS_H__ */
<file_sep>#include "configBuilder.h"
using std::move;
using gsl::not_null;
namespace execHelper {
namespace test {
namespace baseUtils {
// Register our io service so that execution content objects report into it
ConfigBuilder::ConfigBuilder() {
    ExecutionContent::registerIoService(
        gsl::not_null<IoService*>(&m_ioService));
}

Commands::const_iterator ConfigBuilder::begin() const noexcept {
    return m_commands.cbegin();
}

Commands::const_iterator ConfigBuilder::end() const noexcept {
    return m_commands.cend();
}

void ConfigBuilder::add(const TestCommand& command) noexcept {
    m_commands.push_back(command);
}

void ConfigBuilder::add(TestCommand&& command) noexcept {
    m_commands.push_back(move(command));
}

// Let every registered command serialize itself into the given writer
void ConfigBuilder::write(not_null<YamlWriter*> yaml) noexcept {
    for(auto& command : m_commands) {
        command.write(yaml);
    }
}

// Run the pending io service work, then hand out an RAII object that resets
// the per-command execution counters when the iteration ends
ConfigBuilder::ConfigBuilderRAII ConfigBuilder::startIteration() noexcept {
    m_ioService.run();
    return ConfigBuilderRAII(m_commands);
}

ConfigBuilder::ConfigBuilderRAII::ConfigBuilderRAII(Commands& commands) noexcept
    : m_commands(commands) {}

ConfigBuilder::ConfigBuilderRAII::~ConfigBuilderRAII() {
    for(auto& command : m_commands) {
        command.resetExecutions();
    }
}
} // namespace baseUtils
} // namespace test
} // namespace execHelper
<file_sep>FROM bverhagen/awesome-aur-wrapper:latest
LABEL maintainer="<EMAIL>"
# Refresh the Arch keyring first, otherwise package signature checks may fail
RUN sudo pacman -Sy --needed --noconfirm archlinux-keyring && sudo pacman -Scc --noconfirm # Fixes some pacman keyring issues
# Build and analysis toolchain from the official repositories; clear the
# package cache and sync databases to keep the layer small
RUN sudo pacman -Sy --needed --noconfirm boost boost-libs yaml-cpp gcc clang clang-tools-extra cmake scons make libffi ninja doxygen python-pip pkg-config valgrind cppcheck java-environment unzip curl catch2 llvm fakeroot patch file python-sphinx python-sphinx_rtd_theme && sudo pacman -Scc --noconfirm && sudo rm -rf /var/lib/pacman/sync/*
# AUR-only dependencies, installed through the yay wrapper
RUN yay -Sy --needed --noconfirm --noprovides rapidcheck microsoft-gsl-git lcov-git pmd && sudo pacman -Scc --noconfirm && sudo rm -rf /var/lib/pacman/sync/*
# Python packages for the integration test suite
RUN sudo pip install pytest-bdd pyyaml pytest-html
<file_sep>#include "posixShell.h"
#include <vector>
#ifdef _WIN32
#include <boost/algorithm/string.hpp>
#else
#include <glob.h>
#include <wordexp.h>
#endif
#include <boost/filesystem.hpp>
#include <boost/process.hpp>
#include <boost/process/start_dir.hpp>
#include <gsl/span>
#include "config/envp.h"
#include "log/assertions.h"
#include "logger.h"
#include "task.h"
#ifdef _WIN32
using boost::replace_all;
#endif
using std::string;
using gsl::span;
using gsl::zstring;
using execHelper::config::Envp;
namespace process = boost::process;
namespace this_process = boost::this_process;
namespace filesystem = boost::filesystem;
namespace {
const execHelper::core::PosixShell::ShellReturnCode POSIX_SUCCESS = 0U;
/**
 * This construction constructs the PATH from its parents' path and the various inputs.
 * \note It is guaranteed that the given working directory will be the first entry in the returned path
 *
 * \param[in] env The environment to consider for constructing the path
 * \param[in] workingDir The working directory from where the path will operate
 * \returns The constructed PATH for the child. The first entry will be the given working directory
 */
inline auto getPath(const process::environment& env,
                    const filesystem::path& workingDir) noexcept
    -> std::vector<filesystem::path> {
    // The working directory is always the first entry
    std::vector<filesystem::path> path({filesystem::absolute(workingDir)});
    if(env.count("PATH") == 0) {
        // No PATH in the child environment: fall back to the parent's path
        auto parent_path = this_process::path();
        path.insert(path.end(), parent_path.begin(), parent_path.end());
        return path;
    }
    std::vector<std::string> stringPaths = env.at("PATH").to_vector();
    // Reserve room for the entries already present plus all PATH entries.
    // Fix: this previously reserved stringPaths.size() + stringPaths.size(),
    // which mis-sized the reservation.
    path.reserve(path.size() + stringPaths.size());
    std::copy(stringPaths.begin(), stringPaths.end(), std::back_inserter(path));
    return path;
}
} // namespace
namespace execHelper::core {
/**
 * Execute the given task in a POSIX-like manner: expand its arguments, resolve
 * the binary on the (augmented) PATH and run it in the task's working
 * directory with the task's environment.
 *
 * \param[in] task  The task to execute
 * \returns The return code of the executed process
 * \throws PathNotFoundError  If the binary cannot be found on this system
 */
auto PosixShell::execute(const Task& task) -> PosixShell::ShellReturnCode {
    // An empty task is trivially successful: there is nothing to run
    if(task.getTask().empty()) {
        return POSIX_SUCCESS;
    }
    process::environment env = this_process::
        environment(); // Explicitly copy the environment so that only the child environment is modified
    for(const auto& envPair : task.getEnvironment()) {
        env[envPair.first] = envPair.second;
    }
    // Expand shell constructs (variable substitutions, globs, ...) first
    TaskCollection args = shellExpand(task);
    filesystem::path binary = args.front();
    if(binary.is_relative()) {
        binary = process::search_path(
            args.front(),
            getPath(
                env,
                filesystem::path(
                    task.getWorkingDirectory()))); // getPath guarantees that the given working directory is part of the PATH it returns, so there is no need to explicitly look for the binary in the considered working directory first.
        binary = filesystem::absolute(binary);
    }
    if(!filesystem::exists(binary)) {
        throw PathNotFoundError(std::string("Could not find binary '")
                                    .append(args.front())
                                    .append("' on this system"));
    }
    // The binary itself must not be part of the argument list passed to it
    args.erase(args.begin());
    return system(
        binary, process::args = args,
        process::start_dir = filesystem::path(task.getWorkingDirectory()), env);
}
/**
 * Check whether the given return code denotes a successful execution
 *
 * \param[in] returnCode  The return code to check
 * \returns True if and only if the return code equals the POSIX success code
 */
auto PosixShell::isExecutedSuccessfully(
    ShellReturnCode returnCode) const noexcept -> bool {
    return POSIX_SUCCESS == returnCode;
}
/**
 * Apply shell expansion to every argument of the given task.
 * Currently implemented entirely in terms of word expansion.
 *
 * \param[in] task  The task whose arguments to expand
 * \returns The expanded argument collection
 */
inline auto PosixShell::shellExpand(const Task& task) noexcept
    -> TaskCollection {
    return wordExpand(task);
}
/**
 * Perform word expansion (variable substitution etc.) on every argument of the
 * given task.
 *
 * On Windows, %VAR% occurrences are substituted from the task's environment.
 * On POSIX systems, wordexp(3) is used; items that fail to expand are kept
 * unexpanded in the result.
 *
 * \param[in] task  The task whose arguments to expand
 * \returns The expanded argument collection
 */
inline auto PosixShell::wordExpand(const Task& task) noexcept
    -> TaskCollection {
#ifdef _WIN32
    TaskCollection result;
    auto environment = task.getEnvironment();
    // Windows has some 'special' environment variables
    environment["cd"] = task.getWorkingDirectory().string();
    environment["CD"] = task.getWorkingDirectory().string();
    for(auto arg : task.getTask()) {
        // Substitute every %VAR% occurrence with its environment value
        for(const auto& env : environment) {
            auto pattern = std::string("%").append(env.first).append("%");
            replace_all(arg, pattern, env.second);
        }
        result.emplace_back(arg);
    }
    return result;
#else
    // Cache envp pointer
    char** cached_environ = environ;
    // Set global environment, since this is where wordexp will look for substitutions
    Envp envp = Envp(task.getEnvironment());
    environ = envp.getEnvp();
    TaskCollection result;
    for(const auto& taskItem : task.getTask()) {
        wordexp_t p{};
        size_t returnCode =
            wordexp(taskItem.c_str(), &p, WRDE_SHOWERR | WRDE_UNDEF);
        if(returnCode == 0) {
            // Expansion succeeded: append every expanded word to the result
            span<zstring<>> w(p.we_wordv, p.we_wordc);
            std::copy(w.begin(), w.end(), std::back_inserter(result));
        } else {
            // Expansion failed: report the reason and fall back to keeping
            // the unexpanded item
            switch(returnCode) {
            case WRDE_BADVAL:
                user_feedback_error("Command contains undefined variable");
                break;
            case WRDE_SYNTAX:
                user_feedback_error("Syntax error in shell argument");
                break;
            case WRDE_BADCHAR:
                user_feedback_error("Bad character: Illegal occurrence of "
                                    "newline or one of |, &, ;, <, >, (, ), {, "
                                    "}.");
                break;
            default:
                user_feedback_error(
                    "An error occured during executing: " << returnCode);
                break;
            }
            result.push_back(taskItem);
        }
        wordfree(&p);
    }
    // Restore the original global environment before returning
    environ = cached_environ;
    return result;
#endif
}
} // namespace execHelper::core
<file_sep>#ifndef ADD_TO_TASK_INCLUDE
#define ADD_TO_TASK_INCLUDE
#include <functional>
#include <initializer_list>
#include <optional>
#include <string>
#include <utility>
#include <vector>
#include <gsl/pointers>
#include "base-utils/nonEmptyString.h"
#include "core/task.h"
namespace execHelper::test {
using AddToTaskFunction = std::function<core::TaskCollection(std::string)>;
inline void addToTask(const std::string& value, gsl::not_null<core::Task*> task,
AddToTaskFunction func) {
task->append(func(value));
}
inline void addToTask(const NonEmptyString& value,
gsl::not_null<core::Task*> task, AddToTaskFunction func) {
task->append(func(*value));
}
inline void addToTask(bool value, gsl::not_null<core::Task*> task,
AddToTaskFunction func) {
if(value) {
task->append(func("true"));
} else {
task->append(func("false"));
}
}
inline void addToTask(const std::vector<std::string>& value,
gsl::not_null<core::Task*> task, AddToTaskFunction func) {
std::for_each(
value.begin(), value.end(),
[&task, func](const auto& element) { task->append(func(element)); });
}
inline void addToTask(const std::pair<std::string, std::string>& value,
gsl::not_null<core::Task*> task, AddToTaskFunction func) {
task->append(func(value.first + "=" + value.second));
}
template <typename T>
inline void addToTask(const std::optional<T>& value,
gsl::not_null<core::Task*> task, AddToTaskFunction func) {
if(value) {
addToTask(*value, task, func);
}
}
template <typename T>
inline void addToTask(const std::optional<T>& value,
gsl::not_null<core::Task*> task, AddToTaskFunction func,
const T& defaultValue) {
if(value) {
addToTask(*value, task, func);
} else {
addToTask(defaultValue, task, func);
}
}
} // namespace execHelper::test
#endif /* ADD_TO_TASK_INCLUDE */
<file_sep>#ifndef __MODE_STUB_H__
#define __MODE_STUB_H__
#include "core/mode.h"
namespace execHelper {
namespace core {
namespace test {
class ModeStub : public Mode {
public:
ModeStub() : Mode("modeStub") { ; }
};
} // namespace test
} // namespace core
} // namespace execHelper
#endif /* __MODE_STUB_H__ */
<file_sep>#include "pattern.h"
#include <ostream>
#include <utility>
using std::move;
using std::ostream;
using std::string;
namespace execHelper::config {
/**
 * Construct a pattern from its key, its values and the command-line options
 * associated with it.
 */
Pattern::Pattern(PatternKey patternKey, PatternValues values,
                 ShortOption shortOption, LongOption longOption) noexcept
    : m_key(std::move(patternKey)),
      m_values(std::move(values)),
      m_shortOption(std::move(shortOption)),
      m_longOption(std::move(longOption)) {}

// Two patterns are equal if and only if all of their properties are equal
auto Pattern::operator==(const Pattern& other) const noexcept -> bool {
    if(m_key != other.m_key) {
        return false;
    }
    if(m_values != other.m_values) {
        return false;
    }
    return m_shortOption == other.m_shortOption &&
           m_longOption == other.m_longOption;
}

auto Pattern::operator!=(const Pattern& other) const noexcept -> bool {
    return !(*this == other);
}

/// \returns The key identifying this pattern
auto Pattern::getKey() const noexcept -> const PatternKey& { return m_key; }

/// \returns The values currently associated with this pattern
auto Pattern::getValues() const noexcept -> const PatternValues& {
    return m_values;
}

/// Replace the values of this pattern. \returns Always true
auto Pattern::setValues(PatternValues values) noexcept -> bool {
    m_values = std::move(values);
    return true;
}

/// \returns The short command-line option bound to this pattern
auto Pattern::getShortOption() const noexcept -> const ShortOption& {
    return m_shortOption;
}

/// \returns The long command-line option bound to this pattern
auto Pattern::getLongOption() const noexcept -> const LongOption& {
    return m_longOption;
}

// Stream a human-readable representation of the pattern
auto operator<<(ostream& os, const Pattern& pattern) noexcept -> ostream& {
    os << "{" << pattern.getKey() << ": ";
    const auto& shortOption = pattern.getShortOption();
    if(shortOption) {
        os << "short option: " << shortOption.value() << ", ";
    }
    const auto& longOption = pattern.getLongOption();
    if(longOption) {
        os << "long option: " << longOption.value() << ", ";
    }
    os << "values: {";
    for(const auto& value : pattern.getValues()) {
        os << value << ";";
    }
    os << "}}";
    return os;
}
} // namespace execHelper::config
<file_sep>#ifndef TEST_VALUE_INCLUDE
#define TEST_VALUE_INCLUDE
#include <string>
namespace execHelper::test::utils {
template <typename T> class TestValue {
public:
using Value = T;
TestValue(T value) noexcept : m_value(value) { ; }
TestValue(T&& value) noexcept : m_value(std::move(value)) { ; }
inline T& operator*() noexcept { return m_value; }
inline const T& operator*() const noexcept { return m_value; }
virtual bool operator==(const T& other) const noexcept {
return m_value == other;
}
inline bool operator!=(const T& other) const noexcept {
return !(*this == other);
}
virtual std::string config() const noexcept = 0;
protected:
T m_value;
};
template <typename T>
std::ostream& operator<<(std::ostream& os, const TestValue<T>& value) noexcept {
os << *value;
return os;
}
} // namespace execHelper::test::utils
#endif /* TEST_VALUE_INCLUDE */
<file_sep>#include "immediateExecutor.h"
#include <iostream>
#include "log/log.h"
#include "shell.h"
#include "task.h"
using std::move;
namespace execHelper::core {
ImmediateExecutor::ImmediateExecutor(std::shared_ptr<Shell> shell,
Callback callback) noexcept
: m_shell(move(shell)), m_callback(move(callback)) {
assert(m_shell != nullptr);
}
void ImmediateExecutor::execute(const Task& task) noexcept {
user_feedback_info("Executing " << task.toString());
Shell::ShellReturnCode returnCode = m_shell->execute(task);
if(!m_shell->isExecutedSuccessfully(returnCode)) {
m_callback(returnCode);
}
}
} // namespace execHelper::core
<file_sep>set(TEST_BIN_DIR test)
set(UNITTEST_BIN_DIR "${TEST_BIN_DIR}/unittest")
add_subdirectory(catch)
add_subdirectory(base-utils)
add_subdirectory(utils)
add_subdirectory(log)
add_subdirectory(config)
add_subdirectory(yaml)
add_subdirectory(core)
add_subdirectory(plugins)
add_subdirectory(commander)
<file_sep>#include <iostream>
#include <string>
#include <utility>
#include <vector>
#include "log/log.h"
#include "log/logLevel.h"
#define CATCH_CONFIG_RUNNER
#include "catch.h"
using std::cerr;
using std::cout;
using std::endl;
using std::move;
using std::string;
using std::vector;
using Catch::Session;
using Catch::clara::Opt;
using execHelper::log::InvalidLogLevel;
using execHelper::log::toLogLevel;
using execHelper::log::LogLevel::none;
namespace {
/**
 * \brief Catch listener that mirrors all test events to a console reporter
 *        bound to the original stdout, so progress remains visible even when
 *        another reporter takes over std::cout.
 */
struct ConsoleListener : Catch::TestEventListenerBase {
  public:
    explicit ConsoleListener(const Catch::ReporterConfig& config) noexcept
        : Catch::TestEventListenerBase::TestEventListenerBase(config),
          m_stream(
              std::cout
                  .rdbuf()), // Duplicate the current cout backend (should still be stdout at this point) to a separate stream, as std::cout itself can be taken over by a reporter
          m_reporterConfig(config.fullConfig(), m_stream),
          m_reporter(m_reporterConfig),
          m_groupInfo("unittest", 0, 1) {
        ;
    }
    // The remaining members simply forward every Catch test event to the
    // wrapped console reporter, framing the whole run as a single test group.
    void testRunStarting(Catch::TestRunInfo const& _testRunInfo) override {
        m_reporter.testRunStarting(_testRunInfo);
        m_reporter.testGroupStarting(m_groupInfo);
    }
    void testRunEnded(Catch::TestRunStats const& testRunStats) override {
        m_reporter.testGroupEnded(m_groupInfo);
        m_reporter.testRunEnded(testRunStats);
    }
    void skipTest(Catch::TestCaseInfo const& testInfo) override {
        m_reporter.skipTest(testInfo);
    }
    void testCaseStarting(Catch::TestCaseInfo const& testInfo) override {
        m_reporter.testCaseStarting(testInfo);
    }
    void testCaseEnded(Catch::TestCaseStats const& testCaseStats) override {
        m_reporter.testCaseEnded(testCaseStats);
    }
    void sectionStarting(Catch::SectionInfo const& sectionInfo) override {
        m_reporter.sectionStarting(sectionInfo);
    }
    void sectionEnded(Catch::SectionStats const& sectionStats) override {
        m_reporter.sectionEnded(sectionStats);
    }
    void assertionStarting(Catch::AssertionInfo const& assertionInfo) override {
        m_reporter.assertionStarting(assertionInfo);
    }
    bool assertionEnded(Catch::AssertionStats const& assertionStats) override {
        return m_reporter.assertionEnded(assertionStats);
    }

  private:
    std::ostream m_stream; // Stream bound to the original stdout buffer
    Catch::ReporterConfig m_reporterConfig;
    Catch::ConsoleReporter m_reporter;
    Catch::GroupInfo m_groupInfo; // All tests are reported as one group
};

CATCH_REGISTER_LISTENER(ConsoleListener)
} // namespace
/**
 * Test runner entry point: extends the Catch command line with a log-level
 * option, configures the project's log modules accordingly and runs the tests.
 */
int main(int argc, char** argv) {
    Session session;

    // Extend the Catch command line with a custom log-level option
    std::string logLevelString("none");
    auto cli = session.cli();
    cli |= Opt(logLevelString,
               "level")["-u"]["--debug"]("Set the log level of the binary");
    session.cli(cli);

    int returnCode = session.applyCommandLine(argc, argv);
    if(returnCode != 0) {
        cerr << "Error parsing command line parameters" << endl;
        return returnCode;
    }

    // Translate the string option into a log level, rejecting invalid input
    auto logLevel = none;
    try {
        logLevel = toLogLevel(logLevelString);
    } catch(InvalidLogLevel& e) {
        cerr << "Invalid log level string: " << logLevelString << std::endl;
        return EXIT_FAILURE;
    }

    // Apply the requested severity to every log module of the project
    static vector<string> logModules(
        {"log", "yaml", "config", "core", "plugins", "commander"});
    execHelper::log::LogInit logInit;
    for(const auto& logModule : logModules) {
        logInit.setSeverity(logModule, logLevel);
    }
    return session.run();
}
<file_sep>.. _exec-helper-config-patterns:
Patterns
********
Description
===========
Patterns are parts of the configuration that will be replaced by its value when evaluated by :program:`exec-helper`. The *patterns* keyword describes a list of patterns identified by their key. See the 'patterns' section for more information about how to define a pattern.
Patterns can be used to:
* add options to the :program:`exec-helper` command line.
* allow iterating over multiple configurations
* control the configurations to iterate over
Patterns
========
A pattern can contain the following fields:
.. describe:: default-values
A list of default values to use when no values have been defined.
.. describe:: short-option
The short option on the command line associated with this pattern
.. describe:: long-option
The long option on the command line associated with this pattern
Predefined patterns
===================
Exec-helper predefines some specific patterns for convenience:
* **EH_WORKING_DIR**: contains the working directory from where the :program:`exec-helper` executable is called.
Example configuration
=====================
.. literalinclude:: ../examples/exec-helper-config.example
:language: yaml
See also
========
See :ref:`exec-helper-config` (5) for information about the configuration file.
<file_sep>.. _exec-helper:
exec-helper
***********
Synopsis
========
**exec-helper** <*commands*> [*options*]
**eh** <*commands*> [*options*]
Description
===========
The :program:`exec-helper` utility is a meta-wrapper for executables, optimizing one of the slowest links in most workflows: you. It enables the user to optimize the existing workflow in multiple minor and major ways:
* It minimizes the amount of typing while eliminating redundancies
* It chains multiple commands, inserting patterns at specified places
* It avoids having to memorize or search for the right invocations for more complicated commands
* It allows to write your system- and project-specific plugins for more advanced optimizations
These optimizations enable efficient users to do what they like to do the most: hang around the coffee machine with peace of mind.
Options
=======
Mandatory arguments to long options are mandatory for short options too. Arguments to options can be specified by appending the option with '=ARG' or ' ARG'. This manual will further use the '=ARG' notation. Multiple arguments can be specified, if appropriate and without the need to repeat the option, by using spaces in between the arguments.
.. program:: exec-helper
.. option:: -h, --help
Display a usage message on standard output and exit successfully.
.. option:: -v, --verbose
Enable the verbose flag for the command if available.
.. option:: -z, --command=COMMAND
Execute one or more configured COMMANDs. This is an alias for the *<commands>* mandatory option above.
.. option:: -s, --settings-file[=FILE]
Use FILE as the settings file for the :program:`exec-helper` configuration. Default: *.exec-helper*.
Exec-helper will use the first file it finds with the given FILE name. It will search in order in the following locations:
1. The current working directory
2. The parent directories of the working directory. The parent directories are searched in *reversed* order, meaning that the direct parent of the current working directory is searched first, next the direct parent of the direct parent of the current working directory and so-forth until the root directory is reached.
3. The *HOME* directory of the caller.
.. option:: -j, --jobs[=JOBS]
Use the specified number of JOBS for each task (if supported). Use *auto* to let :program:`exec-helper` determine an appropriate number. Use a value of *1* for running jobs single-threaded. Default: *auto*.
.. option:: -n, --dry-run
Print the commands that would be executed, but do not execute them.
.. option:: -k, --keep-going
Execute all scheduled commands, even if one or more of them fail.
Configured options
==================
Additional command-line options for :program:`exec-helper` can be configured in the settings file. Refer to the :manpage:`exec-helper-config(5)` documentation for more information.
Exit status
===========
When :program:`exec-helper` is called improperly or its plugins are invoked improperly, :program:`exec-helper` will exit with a status of *one*. In other cases, it exits with the same status as the last failed command or *zero* if all commands are executed successfully.
Auto-completion
===============
Auto-completions are available for the Bash and Zsh shell. Package maintainers receive the tools to automatically enable these completions. If your installation package does not do this, you can enable them yourself by adding `source <install-directory>/share/exec-helper/completions/init-completion.sh` to your profile or bashrc.
See also
========
See :ref:`exec-helper-config` (5) for information about the configuration file.
See :ref:`exec-helper-plugins` (5) for the available plugins and their configuration options.
<file_sep>#ifndef CORE_GENERATORS_INCLUDE
#define CORE_GENERATORS_INCLUDE
#include <string>
#include <vector>
#include "unittest/rapidcheck.h"
#include "core/task.h"
namespace rc {
template <> struct Arbitrary<execHelper::core::Task> {
static Gen<execHelper::core::Task> arbitrary() {
return gen::construct<execHelper::core::Task>(
gen::arbitrary<std::vector<std::string>>());
};
};
} // namespace rc
#endif /* CORE_GENERATORS_INCLUDE */
<file_sep>#include "config/configFileSearcher.h"
#include "unittest/catch.h"
#include <filesystem>
#include <fstream>
using std::ofstream;
using std::string;
namespace filesystem = std::filesystem;
namespace execHelper::config::test {
// Tests the ConfigFileSearcher against a real (temporary) directory tree:
// the settings file must be found in each configured search path when it
// exists there, and the search must fail cleanly when it does not.
SCENARIO("Test the config file searcher", "[config][config-file-searcher]") {
    GIVEN("A few search paths and a file to search") {
        const Paths searchPaths({"dir1", "dir1/dir2", "dir1/dir2//dir3",
                                 "dir1/dir2//dir3//dir4/./"});
        const Path settingsFile("test-settings-file");
        ConfigFileSearcher configFileSearcher(searchPaths);
        WHEN("We write the settings file") {
            // Clear in case certain of these files are still lurking around
            for(const auto& searchPath : searchPaths) {
                Path fileToDelete(searchPath);
                fileToDelete /= settingsFile;
                remove(fileToDelete.c_str());
            }
            for(const auto& searchPath : searchPaths) {
                create_directories(searchPath);
            }
            // Write the settings file in each search path in turn and verify
            // it is found at that location
            for(const auto& searchPath : searchPaths) {
                ofstream file;
                filesystem::path filename(searchPath);
                filename /= settingsFile;
                file.open(filename, std::ios::out | std::ios::trunc);
                file << "test.exec-helper" << std::endl;
                file.close();
                THEN("It should find it") {
                    auto result = configFileSearcher.find(settingsFile);
                    REQUIRE(result != std::nullopt);
                    REQUIRE(result.value() == filename);
                }
            }
            // Clean up after ourselves
            for(const auto& searchPath : searchPaths) {
                remove_all(searchPath);
            }
        }
        WHEN("We do not write the settings file") {
            THEN("It should not find it") {
                auto result = configFileSearcher.find(settingsFile);
                REQUIRE(result == std::nullopt);
            }
        }
    }
}
} // namespace execHelper::config::test
<file_sep>.. _exec-helper-plugins-lcov:
Lcov plugin
***********
Description
===========
The lcov plugin is used for executing code coverage analysis using lcov.
Mandatory settings
==================
The configuration of the lcov plugin must contain the following settings:
.. program:: exec-helper-plugins-lcov
.. describe:: run-command
The :program:`exec-helper` command or plugin to use for running the binaries for which the coverage needs to be analyzed.
Optional settings
=================
The configuration of the lcov plugin may contain the following settings:
.. program:: exec-helper-plugins-lcov
.. include:: patterns.rst
.. include:: command-line.rst
.. describe:: info-file
The lcov .info file to use for the analysis. Default: :code:`lcov-plugin.info`.
.. describe:: base-directory
The base directory to use for the lcov analysis. Check the lcov documentation on the :code:`--base-directory` option for more information. Default: :code:`.` (the current working directory).
.. describe:: directory
Use the coverage data files in the given directory. Check the lcov documentation on the :code:`--directory` option for more information. Default: :code:`.` (the current working directory).
.. describe:: zero-counters
Set this option to *yes* to reset the coverage counters before starting the analysis. All other values are threated as *no*. Default: :code:`no`.
.. describe:: gen-html
Set this option to *yes* to enable HTML report generation of the coverage data. Default: :code:`no`.
.. describe:: gen-html-output
Set the output directory of the generated HTML report. Does nothing if **gen-html** is not enabled. Default: :code:`.` (the current working directory).
.. describe:: gen-html-title
Set the title of the generated HTML report. Does nothing if **gen-html** is not enabled. Default: :code:`Hello`.
.. describe:: excludes
A list of directories and files to exclude from the coverage report. The paths are relative to the current working directory. Default: an empty list.
Example
=======
Configuration
-------------
.. literalinclude:: ../examples/lcov.example
:language: yaml
Additional files
----------------
In order for the above example to work, the following file hierarchy needs to be created in the directory:
*Makefile*:
.. literalinclude:: ../examples/Makefile
:language: none
*hello.cpp:*
.. literalinclude:: ../examples/src/hello.cpp
:language: cpp
*world.cpp:*
.. literalinclude:: ../examples/src/world.cpp
:language: cpp
Usage
-----
Save the example to an :program:`exec-helper` configuration file and execute in the same directory:
.. code-block:: bash
eh example
See also
========
See :ref:`exec-helper` (1) for information about the usage of :program:`exec-helper`.
See :ref:`exec-helper-config` (5) for information about the configuration file format.
See :ref:`exec-helper-plugins` (5) for information about the configuration file format.
<file_sep>Custom modules
**************
.. highlight:: gherkin
.. literalinclude:: discovery.feature
.. literalinclude:: usage.feature
<file_sep>#include <optional>
#include <sstream>
#include <string>
#include "config/pattern.h"
#include "unittest/catch.h"
using std::make_optional;
using std::string;
using std::stringstream;
using execHelper::config::Pattern;
using execHelper::config::PatternValues;
namespace execHelper::core::test {
// Verifies that every constructor argument is reachable through the
// corresponding getter, across combinations of set/unset optional options.
SCENARIO("Test the pattern properties", "[patterns][Pattern]") {
    MAKE_COMBINATIONS("Some pattern properties") {
        const string key("key1");
        const PatternValues values({"value1a", "value1b"});
        auto shortOption = make_optional<char>();
        auto longOption = make_optional<string>();
        COMBINATIONS("Leave them as is") { ; }
        COMBINATIONS("Set the short option") { shortOption = '1'; }
        COMBINATIONS("Set the long option") { longOption = "long-option1"; }
        WHEN("We create a pattern") {
            Pattern pattern(key, values, shortOption, longOption);
            THEN("We should find these properties") {
                REQUIRE(pattern.getKey() == key);
                REQUIRE(pattern.getValues() == values);
                if(longOption) {
                    REQUIRE(pattern.getLongOption());
                    REQUIRE(pattern.getLongOption().value() == longOption);
                } else {
                    REQUIRE(pattern.getLongOption() == longOption);
                }
                if(shortOption) {
                    REQUIRE(pattern.getShortOption());
                    REQUIRE(pattern.getShortOption().value() == shortOption);
                } else {
                    REQUIRE(pattern.getShortOption() == shortOption);
                }
            }
        }
    }
    GIVEN("Empty pattern properties") {
        const string key;
        const PatternValues values({});
        const string longOption;
        WHEN("We create the pattern") {
            Pattern pattern(key, values, std::nullopt, longOption);
            THEN("We should find these properties") {
                REQUIRE(pattern.getKey() == key);
                REQUIRE(pattern.getValues() == values);
                REQUIRE(pattern.getLongOption() == longOption);
                REQUIRE(pattern.getShortOption() == std::nullopt);
            }
        }
    }
}
// Verifies copy construction, assignment and (in)equality semantics.
SCENARIO("Test the pattern facilities", "[patterns][Pattern]") {
    GIVEN("A test pattern") {
        const string key("test-key");
        const PatternValues values({"value1", "value2"});
        const char shortOption = 'o';
        const string longOption = "test-option";
        Pattern pattern(key, values, shortOption, longOption);
        WHEN("We copy the pattern") {
            Pattern
                other( // NOLINT(performance-unnecessary-copy-initialization)
                    pattern);
            THEN("We should get the same pattern") {
                REQUIRE(pattern == other);
                REQUIRE_FALSE(pattern != other);
            }
        }
        WHEN("We assign the pattern") {
            Pattern other("", {}, '\0', string("null"));
            other = pattern;
            THEN("We should get the same pattern") {
                REQUIRE(pattern == other);
                REQUIRE_FALSE(pattern != other);
            }
        }
        WHEN("We create similar patterns") {
            // Each 'other' pattern differs from the reference in exactly one
            // property, so equality must fail for each of them
            Pattern other1("other-key", values, shortOption, longOption);
            Pattern other2(key, {"value1"}, shortOption, longOption);
            Pattern other3(key, values, 'e', longOption);
            Pattern other4(key, values, shortOption, string("other-option"));
            THEN("They should not be equal") {
                REQUIRE(pattern != other1);
                REQUIRE(pattern != other2);
                REQUIRE(pattern != other3);
                REQUIRE(pattern != other4);
                REQUIRE_FALSE(pattern == other1);
                REQUIRE_FALSE(pattern == other2);
                REQUIRE_FALSE(pattern == other3);
                REQUIRE_FALSE(pattern == other4);
            }
        }
    }
}
// Verifies the streaming operator output, with and without a short option.
SCENARIO("Test the pattern streaming operator", "[patterns][Pattern]") {
    GIVEN("A configured object with a short option to stream") {
        Pattern pattern("pattern-key", {"value1", "value2"}, '1',
                        string("long-option"));
        stringstream stream;
        WHEN("We apply the streaming operator") {
            stream << pattern;
            THEN("We should get the stream") {
                stringstream expectedStream;
                expectedStream << "{" << pattern.getKey() << ": ";
                expectedStream
                    << "short option: " << pattern.getShortOption().value()
                    << ", ";
                expectedStream
                    << "long option: " << pattern.getLongOption().value()
                    << ", ";
                expectedStream << "values: {";
                for(const auto& value : pattern.getValues()) {
                    expectedStream << value << ";";
                }
                expectedStream << "}";
                expectedStream << "}";
                REQUIRE(stream.str() == expectedStream.str());
            }
        }
    }
    GIVEN("A configured object with no short option to stream") {
        Pattern pattern("pattern-key", {"value1", "value2"}, std::nullopt,
                        string("long-option"));
        stringstream stream;
        WHEN("We apply the streaming operator") {
            stream << pattern;
            THEN("We should get the stream") {
                stringstream expectedStream;
                expectedStream << "{" << pattern.getKey() << ": ";
                expectedStream
                    << "long option: " << pattern.getLongOption().value()
                    << ", ";
                expectedStream << "values: {";
                for(const auto& value : pattern.getValues()) {
                    expectedStream << value << ";";
                }
                expectedStream << "}";
                expectedStream << "}";
                REQUIRE(stream.str() == expectedStream.str());
            }
        }
    }
}
} // namespace execHelper::core::test
<file_sep>#ifndef PATH_NOT_FOUND_ERROR_INCLUDE
#define PATH_NOT_FOUND_ERROR_INCLUDE
#include <stdexcept>
namespace execHelper {
namespace core {
/**
* \brief Thrown when a path is not found on the system
*/
class PathNotFoundError : public std::runtime_error {
public:
/**
* Create an exception with the given message
*
* \param[in] msg A diagnostic error message
*/
explicit PathNotFoundError(const std::string& msg)
: std::runtime_error(msg) {
;
}
/*! @copydoc config::Argv::Argv(const config::Argv&)
*/
PathNotFoundError(const PathNotFoundError& other) = default;
/*! @copydoc config::Argv::Argv(config::Argv&&)
*/
PathNotFoundError(PathNotFoundError&& other) = default;
~PathNotFoundError() noexcept override = default;
/*! @copydoc config::Argv::operator=(const config::Argv&)
*/
PathNotFoundError& operator=(const PathNotFoundError& other) = default;
/*! @copydoc config::Argv::operator=(config::Argv&&)
*/
PathNotFoundError& operator=(PathNotFoundError&& other) = default;
};
} // namespace core
} // namespace execHelper
#endif /* PATH_NOT_FOUND_ERROR_INCLUDE */
<file_sep>class Pattern:
def __init__(self, id, default_values, long_options):
self._id = id
self._defaults = default_values
self._long_options = long_options
self._values = default_values
@property
def id(self):
return self._id
@property
def default_values(self):
return self._defaults
@property
def values(self):
return self._values
@property
def long_options(self):
return self._long_options
<file_sep>task:add_args({one(config['filename']) or 'bootstrap.sh'})
task:add_args(get_commandline())
register_task(task)
<file_sep>#include "unittest/catch.h"
#include <algorithm>
#include <optional>
#include <string>
#include "config/commandLineOptions.h"
#include "config/fleetingOptions.h"
#include "config/variablesMap.h"
#include "log/logLevel.h"
#include "config/generators.h"
#include "log/generators.h"
#include "unittest/rapidcheck.h"
#include "utils/testValue.h"
using std::back_inserter;
using std::string;
using std::transform;
using execHelper::log::LogLevel;
using execHelper::log::toString;
using execHelper::test::propertyTest;
using rc::AppendSearchPathValue;
using rc::DryRunValue;
using rc::HelpValue;
using rc::JobsValue;
using rc::KeepGoingValue;
using rc::ListPluginsValue;
using rc::VerbosityValue;
using rc::VersionValue;
namespace {
/**
 * Convert a collection of paths to their string representations
 *
 * \param[in] paths The paths to convert
 * \returns The string representation of each given path, in the same order
 */
inline auto toStrings(const execHelper::config::Paths& paths) noexcept
    -> std::vector<std::string> {
    std::vector<std::string> result;
    for(const auto& path : paths) {
        result.emplace_back(path.string());
    }
    return result;
}
} // namespace
namespace execHelper::config::test {
// Verifies that FleetingOptions::getDefault() produces exactly the expected
// default variables map.
SCENARIO("Test the fleeting options defaults", "[config][fleeting-options]") {
    GIVEN("The expected defaults") {
        VariablesMap expectedDefaults("exec-helper");
        REQUIRE(expectedDefaults.add(HELP_OPTION_KEY, "no"));
        REQUIRE(expectedDefaults.add(VERSION_KEY, "no"));
        REQUIRE(expectedDefaults.add(VERBOSE_KEY, "no"));
        REQUIRE(expectedDefaults.add(JOBS_KEY, "auto"));
        REQUIRE(expectedDefaults.add(DRY_RUN_KEY, "no"));
        REQUIRE(expectedDefaults.add(KEEP_GOING_KEY, "no"));
        REQUIRE(expectedDefaults.add(LOG_LEVEL_KEY, "none"));
        REQUIRE(expectedDefaults.add(LIST_PLUGINS_KEY, "no"));
        REQUIRE(expectedDefaults.add(APPEND_SEARCH_PATH_KEY,
                                     AppendSearchPathOption_t()));
        REQUIRE(expectedDefaults.add(COMMAND_KEY, CommandCollection()));
        REQUIRE(expectedDefaults.add(SETTINGS_FILE_KEY));
        WHEN("We request the defaults") {
            VariablesMap defaults = FleetingOptions::getDefault();
            THEN("They must match the expected ones") {
                REQUIRE(defaults == expectedDefaults);
            }
        }
    }
}
// Property-based test: every value written into the variables map must be
// retrievable through the corresponding FleetingOptions getter.
SCENARIO("Test the getters of the fleeting options",
         "[config][fleeting-options]") {
    propertyTest(
        "Test multiple configurations",
        [](const HelpValue& help, const VersionValue& version,
           const VerbosityValue& verbosity, const JobsValue& jobs,
           const DryRunValue& dryRun, const LogLevel& logLevel,
           const KeepGoingValue& keepGoing, const ListPluginsValue& listPlugins,
           const AppendSearchPathValue& appendedSearchPaths,
           const CommandCollection& commands) {
            VariablesMap variables = FleetingOptions::getDefault();
            REQUIRE(variables.replace(HELP_OPTION_KEY, help.config()));
            REQUIRE(variables.replace(VERSION_KEY, version.config()));
            REQUIRE(variables.replace(VERBOSE_KEY, verbosity.config()));
            REQUIRE(variables.replace(JOBS_KEY, jobs.config()));
            REQUIRE(variables.replace(DRY_RUN_KEY, dryRun.config()));
            REQUIRE(
                variables.replace(LOG_LEVEL_KEY, string{toString(logLevel)}));
            REQUIRE(variables.replace(KEEP_GOING_KEY, keepGoing.config()));
            REQUIRE(variables.replace(LIST_PLUGINS_KEY, listPlugins.config()));
            REQUIRE(variables.replace(APPEND_SEARCH_PATH_KEY,
                                      toStrings(appendedSearchPaths)));
            REQUIRE(variables.replace(COMMAND_KEY, commands));
            THEN_WHEN("We create fleeting options based on the variables map") {
                FleetingOptions fleetingOptions(variables);
                THEN_CHECK("The getters are as expected") {
                    REQUIRE(help == fleetingOptions.getHelp());
                    REQUIRE(version == fleetingOptions.getVersion());
                    REQUIRE(verbosity == fleetingOptions.getVerbosity());
                    REQUIRE(jobs == fleetingOptions.getJobs());
                    REQUIRE(dryRun == fleetingOptions.getDryRun());
                    REQUIRE(logLevel == fleetingOptions.getLogLevel());
                    REQUIRE(keepGoing == fleetingOptions.getKeepGoing());
                    REQUIRE(listPlugins == fleetingOptions.listPlugins());
                    REQUIRE(appendedSearchPaths ==
                            fleetingOptions.appendedSearchPaths());
                    REQUIRE(commands == fleetingOptions.getCommands());
                }
            }
        });
}
// Verifies copy construction and assignment preserve equality.
SCENARIO("Test copy constructor and assignment", "[patterns][Pattern]") {
    GIVEN("An object to copy") {
        VariablesMap variables = FleetingOptions::getDefault();
        REQUIRE(variables.replace(COMMAND_KEY, "command"));
        FleetingOptions originalFleetingOptions(variables);
        WHEN("We copy the fleeting options") {
            FleetingOptions
                copied( // NOLINT(performance-unnecessary-copy-initialization)
                    originalFleetingOptions);
            THEN("We should get the same fleeting options") {
                REQUIRE(originalFleetingOptions == copied);
                REQUIRE_FALSE(originalFleetingOptions != copied);
            }
        }
        WHEN("We assign the pattern") {
            FleetingOptions
                copied = // NOLINT(performance-unnecessary-copy-initialization)
                    originalFleetingOptions;
            THEN("We should get the same pattern") {
                REQUIRE(originalFleetingOptions == copied);
                REQUIRE_FALSE(originalFleetingOptions != copied);
            }
        }
    }
}
} // namespace execHelper::config::test
<file_sep>#ifndef __YAML_WRAPPER_H__
#define __YAML_WRAPPER_H__
#include <algorithm>
#include <iostream>
#include <numeric>
#include <string>
#include <yaml-cpp/node/node.h>
#include <yaml-cpp/yaml.h>
#include "config/path.h"
#include "config/settingsNode.h"
namespace execHelper {
namespace yaml {
/**
* \brief Wrapper for yaml-cpp
*/
class YamlWrapper {
  public:
    /**
     * Constructor
     *
     * \param[in] file The file to load
     * \throws YAML::BadFile If the given file does not exist
     */
    explicit YamlWrapper(const config::Path& file);

    /**
     * Constructor
     *
     * \param[in] yamlConfig The content to parse
     */
    explicit YamlWrapper(const std::string& yamlConfig);

    /*! @copydoc config::Argv::Argv(const Argv&)
     */
    YamlWrapper(const YamlWrapper& other);

    /*! @copydoc config::Argv::Argv(Argv&&)
     */
    YamlWrapper(YamlWrapper&& other) noexcept;

    ~YamlWrapper() = default;

    /*! @copydoc config::Argv::operator=(const Argv&)
     */
    YamlWrapper& operator=(const YamlWrapper& other);

    /*! @copydoc config::Argv::operator=(Argv&&)
     */
    YamlWrapper& operator=(YamlWrapper&& other) noexcept;

    /*! @copydoc config::Argv::swap(Argv&)
     */
    // NOTE(review): taking 'other' by const reference makes a genuine
    // two-sided member swap impossible -- verify the out-of-class definition
    // and consider changing the parameter to a non-const reference.
    void swap(const YamlWrapper& other) noexcept;

    /**
     * Returns the value associated with the keys as the given type
     *
     * \param[in] keys A collection of keys to follow
     * \returns The associated value
     */
    // Folds the key chain over a clone of the root node, then converts the
    // resulting node to T. NOTE(review): presumably node.as<T>() throws a
    // yaml-cpp conversion exception when the value cannot be converted --
    // confirm against the yaml-cpp version in use.
    template <typename T>
    T get(const std::initializer_list<config::SettingsKey>& keys) const {
        YAML::Node node = std::accumulate(
            keys.begin(), keys.end(), Clone(m_node),
            [](const auto& node, const auto& key) { return node[key]; });
        return node.as<T>();
    }

    /**
     * Returns the node below the given key structure
     *
     * \param[in] keys A collection of keys to follow
     * \returns The node associated with the given key structure
     */
    YAML::Node getSubNode(const std::initializer_list<std::string>& keys) const;

    // Convenience wrapper for parsing the whole tree
    /**
     * Parses the tree below the given keys and writes it to the given settings
     *
     * \param[in] keys The keys to write to the settings
     * \param[out] settings The settings to write the parsed structure to
     * \returns True If the parsing was successful
     *          False Otherwise
     */
    bool getTree(const std::initializer_list<std::string>& keys,
                 config::SettingsNode* settings) const noexcept;

    /**
     * Parses the given node and writes it to the given settings
     *
     * \param[in] rootNode The node to start from
     * \param[out] settings The settings to write the parsed structure to
     * \returns True If the parsing was successful
     *          False Otherwise
     */
    static bool getTree(const YAML::Node& rootNode,
                        config::SettingsNode* settings) noexcept;

  private:
    /**
     * Parse the given node and add it to the given settings under the given key
     * structure
     *
     * \param[in] node The node to parse
     * \param[out] yamlNode The root settings
     * \param[in] keys The key structure of the root settings under which to
     * write the parsed structure \returns True If the subtree was
     * successfully constructed and added False Otherwise
     */
    static bool getSubTree(const YAML::Node& node,
                           config::SettingsNode* yamlNode,
                           const config::SettingsKeys& keys) noexcept;

    // The root YAML node that all getters operate on.
    YAML::Node m_node;
};
} // namespace yaml
} // namespace execHelper
#endif /* __YAML_WRAPPER_H__ */
<file_sep>#!/bin/bash
echo "Mocking bootstrap..."
echo "$1 $2"
<file_sep>#include <catch.hpp>
#include <iostream>
#include <optional>
#include <sstream>
#include <string>
#include <utility>
#include <vector>
#include "config/settingsNode.h"
#include "config/generators.h"
#include "unittest/catch.h"
#include "unittest/rapidcheck.h"
#include "utils/utils.h"
using std::move;
using std::nullopt;
using std::ostream;
using std::string;
using std::stringstream;
using std::to_string;
using Catch::Matchers::VectorContains;
using execHelper::config::SettingsNode;
using execHelper::config::SettingsValues;
using execHelper::test::propertyTest;
namespace {
// Asserts that the given key of the given settings node parses to the expected
// boolean value, both via the optional getter and via the defaulted getter
// (using the opposite value as default so the stored value must win).
void assertBoolSetting(const SettingsNode& settings, const string& key,
                       bool expected) {
    const auto parsed = settings.get<bool>(key);
    REQUIRE(parsed != std::nullopt);
    REQUIRE(*parsed == expected);
    REQUIRE(settings.get<bool>(key, !expected) == expected);
}
const execHelper::config:: // NOLINT(fuchsia-statically-constructed-objects)
SettingsValue DEFAULT_VALUE("blaat");
const execHelper::config:: // NOLINT(fuchsia-statically-constructed-objects)
SettingsValues DEFAULT_VALUES({DEFAULT_VALUE});
} // namespace
namespace execHelper::config::test {
// Exercises the core SettingsNode API: adding values under plain and nested
// keys, containment checks, retrieving the stored keys, and getting values
// both as optionals and via defaulted getters.
SCENARIO("Basic addition and getting of values", "[config][settingsNode]") {
    GIVEN("A basic setup") {
        const SettingsKey rootKey("root-key");
        const SettingsValue testValue1("test-value1");
        const SettingsKey testKey2("test-key2");
        const SettingsValue testValue2("test-value2");
        const SettingsValues testValue3({"test-value3a", "test-value3b"});
        const SettingsKeys rootKeys({testValue1, testKey2});
        const std::vector<string> testValue4(
            {"test-value4a", "test-value4b",
             "test-value4c"}); // Note: due to the lifetime of an
                               // initializer_list in c++ 11, we can not use an
                               // initializer_list object here.
        const SettingsKey testKey5("test-key5");
        const SettingsValues testValue5({"test-value5a", "test-value5b"});
        const SettingsKey testKey6("test-key6");
        const std::vector<string> testValue6(
            {"test-value6a", "test-value6b"}); // Note: due to the lifetime of
                                               // an initializer_list in c++ 11,
                                               // we can not use an
                                               // initializer_list object here.
        SettingsNode settings(rootKey);
        WHEN("We get the key") {
            const auto& resultRootKey = settings.key();
            THEN("It should match") { REQUIRE(resultRootKey == rootKey); }
        }
        WHEN("We add values") {
            // Cover each add() overload: single value, keyed value, value
            // collection, initializer list and keyed collections.
            REQUIRE(settings.add(testValue1));
            REQUIRE(settings.add({testKey2}, testValue2));
            REQUIRE(settings.add(testValue3));
            REQUIRE(
                settings.add({"test-value4a", "test-value4b", "test-value4c"}));
            REQUIRE(settings.add({testKey5}, testValue5));
            REQUIRE(settings.add({testKey6}, {"test-value6a", "test-value6b"}));
            THEN("The settings should contain them") {
                REQUIRE(settings.contains(testValue1));
                REQUIRE(settings.contains({testKey2}));
                REQUIRE(settings[testKey2].contains(testValue2));
                for(const auto& key : testValue3) {
                    REQUIRE(settings.contains(key));
                }
                for(const auto& key : testValue4) {
                    REQUIRE(settings.contains(key));
                }
                REQUIRE(settings.contains(testKey5));
                for(const auto& value : testValue5) {
                    REQUIRE(settings[testKey5].contains(value));
                }
                REQUIRE(settings.contains(testKey6));
                for(const auto& value : testValue6) {
                    REQUIRE(settings[testKey6].contains(value));
                }
            }
            THEN("The keys should exist") {
                REQUIRE(settings[testValue1].key() == testValue1);
                REQUIRE(settings[testKey2].key() == testKey2);
                REQUIRE(settings[testKey2][testValue2].key() == testValue2);
                for(const auto& key : testValue3) {
                    REQUIRE(settings[key].key() == key);
                }
                for(const auto& key : testValue4) {
                    REQUIRE(settings[key].key() == key);
                }
                REQUIRE(settings[testKey5].key() == testKey5);
                for(const auto& key : testValue5) {
                    REQUIRE(settings[testKey5][key].key() == key);
                }
                REQUIRE(settings[testKey6].key() == testKey6);
                for(const auto& key : testValue6) {
                    REQUIRE(settings[testKey6][key].key() == key);
                }
            }
            // Leaf values are not keys themselves, so getting below a leaf
            // yields nullopt; values added at the root are retrieved with an
            // empty key chain.
            THEN("We should be able to get the optional values") {
                REQUIRE(settings.get<SettingsValues>(SettingsKeys()) !=
                        std::nullopt);
                REQUIRE_THAT(
                    settings.get<SettingsValues>(SettingsKeys()).value(),
                    VectorContains(testValue1));
                REQUIRE(settings.get<SettingsValues>({testValue1}) ==
                        std::nullopt);
                REQUIRE(settings.get<SettingsValues>({testKey2}) !=
                        std::nullopt);
                REQUIRE(settings.get<SettingsValues>({testKey2}).value() ==
                        SettingsValues({testValue2}));
                for(const auto& key : testValue3) {
                    REQUIRE(settings.get<SettingsValues>({key}) ==
                            std::nullopt);
                    REQUIRE_THAT(
                        settings.get<SettingsValues>(SettingsKeys()).value(),
                        VectorContains(key));
                }
                for(const auto& key : testValue4) {
                    REQUIRE(settings.get<SettingsValues>({key}) ==
                            std::nullopt);
                    REQUIRE_THAT(
                        settings.get<SettingsValues>(SettingsKeys()).value(),
                        VectorContains(key));
                }
                REQUIRE(settings.get<SettingsValues>({testKey5}) !=
                        std::nullopt);
                REQUIRE(settings.get<SettingsValues>({testKey5}).value() ==
                        testValue5);
                REQUIRE(settings[testKey5].get<SettingsValues>(
                            SettingsKeys()) != std::nullopt);
                REQUIRE(settings[testKey5]
                            .get<SettingsValues>(SettingsKeys())
                            .value() == testValue5);
                for(const auto& key : testValue5) {
                    REQUIRE(settings.get<SettingsValues>({testKey5, key}) ==
                            std::nullopt);
                    REQUIRE(settings[testKey5].get<SettingsValues>({key}) ==
                            std::nullopt);
                }
                REQUIRE(settings.get<SettingsValues>(
                            {testKey5, "non-existing-key"}) == std::nullopt);
                REQUIRE(settings.get<SettingsValues>({testKey6}) !=
                        std::nullopt);
                REQUIRE(settings.get<SettingsValues>({testKey6}).value() ==
                        SettingsValues(testValue6));
                REQUIRE(settings[testKey6].get<SettingsValues>(
                            SettingsKeys()) != std::nullopt);
                REQUIRE(settings[testKey6]
                            .get<SettingsValues>(SettingsKeys())
                            .value() == testValue6);
                for(const auto& key : testValue6) {
                    REQUIRE(settings.get<SettingsValues>({testKey6, key}) ==
                            std::nullopt);
                    REQUIRE(settings[testKey6].get<SettingsValues>({key}) ==
                            std::nullopt);
                }
            }
            // Same expectations through the defaulted getter: existing keys
            // return their stored values, missing ones return the default.
            THEN("We should be able to get the non-default values") {
                const SettingsValues DEFAULT_VALUE({"blaat"});
                REQUIRE_THAT(
                    settings.get<SettingsValues>(SettingsKeys(), DEFAULT_VALUE),
                    VectorContains(testValue1));
                REQUIRE(settings.get<SettingsValues>(
                            testValue1, DEFAULT_VALUE) == DEFAULT_VALUE);
                REQUIRE(
                    settings.get<SettingsValues>({testKey2}, DEFAULT_VALUE) ==
                    SettingsValues({testValue2}));
                for(const auto& key : testValue3) {
                    REQUIRE(settings.get<SettingsValues>(key, DEFAULT_VALUE) ==
                            DEFAULT_VALUE);
                    REQUIRE_THAT(settings.get<SettingsValues>(SettingsKeys(),
                                                              DEFAULT_VALUE),
                                 VectorContains(key));
                }
                for(const auto& key : testValue4) {
                    REQUIRE(settings.get<SettingsValues>(key, DEFAULT_VALUE) ==
                            DEFAULT_VALUE);
                    REQUIRE_THAT(settings.get<SettingsValues>(SettingsKeys(),
                                                              DEFAULT_VALUE),
                                 VectorContains(key));
                }
                REQUIRE(settings.get<SettingsValues>(
                            {testKey5}, DEFAULT_VALUE) == testValue5);
                REQUIRE(settings[testKey5].get<SettingsValues>(
                            SettingsKeys(), DEFAULT_VALUE) == testValue5);
                for(const auto& key : testValue5) {
                    REQUIRE(settings.get<SettingsValues>({testKey5, key},
                                                         DEFAULT_VALUE) ==
                            DEFAULT_VALUE);
                    REQUIRE(settings[testKey5].get<SettingsValues>(
                                {key}, DEFAULT_VALUE) == DEFAULT_VALUE);
                }
                REQUIRE(settings.get<SettingsValues>(
                            {testKey5, "non-existing-key"}, DEFAULT_VALUE) ==
                        DEFAULT_VALUE);
                REQUIRE(settings.get<SettingsValues>(
                            {testKey6}, DEFAULT_VALUE) == testValue6);
                REQUIRE(settings[testKey6].get<SettingsValues>(
                            SettingsKeys(), DEFAULT_VALUE) == testValue6);
                REQUIRE(
                    settings.get<SettingsValues>({testKey6}, DEFAULT_VALUE) ==
                    SettingsValues(testValue6));
                for(const auto& key : testValue6) {
                    REQUIRE(settings.get<SettingsValues>({testKey6, key},
                                                         DEFAULT_VALUE) ==
                            DEFAULT_VALUE);
                }
            }
        }
    }
}
// Verifies that SettingsNode::get<bool> accepts the supported textual boolean
// representations: integers ("0"/"1"), "true"/"false" and "yes"/"no".
SCENARIO("Test various ways to set a boolean value", "[config][settingsNode]") {
    GIVEN("A settings node") {
        const SettingsKey rootKey("root-key");
        SettingsNode settings(rootKey);
        WHEN("We set the boolean value as an integer") {
            const SettingsKey falseInt("false-int");
            const SettingsKey trueInt("true-int");
            REQUIRE(settings.add(falseInt, "0"));
            REQUIRE(settings.add(trueInt, "1"));
            THEN("The false int must map to false") {
                assertBoolSetting(settings, falseInt, false);
            }
            // Fix: the description previously said "must map to false",
            // contradicting the assertion below.
            THEN("The true int must map to true") {
                assertBoolSetting(settings, trueInt, true);
            }
        }
        WHEN("We set the boolean values as true or false") {
            const SettingsKey falseKey("false-key");
            const SettingsKey trueKey("true-key");
            REQUIRE(settings.add(falseKey, "false"));
            REQUIRE(settings.add(trueKey, "true"));
            THEN("The false key must map to false") {
                assertBoolSetting(settings, falseKey, false);
            }
            THEN("The true key must map to true") {
                assertBoolSetting(settings, trueKey, true);
            }
        }
        WHEN("We set the boolean values as yes or no") {
            const SettingsKey falseKey("false-key");
            const SettingsKey trueKey("true-key");
            REQUIRE(settings.add(falseKey, "no"));
            REQUIRE(settings.add(trueKey, "yes"));
            THEN("The false key must map to false") {
                assertBoolSetting(settings, falseKey, false);
            }
            THEN("The true key must map to true") {
                assertBoolSetting(settings, trueKey, true);
            }
        }
    }
}
// Exercises adding values under multi-level key chains, plus stress cases for
// very deep (long key chains) and very broad (many values per key) trees.
SCENARIO("Addition of multiple key values", "[config][settingsNode]") {
    GIVEN("A basic setup") {
        const SettingsKey rootKey("root-key");
        const SettingsKeys key1({"key1a", "key1b", "key1c", "key1d"});
        const SettingsValues value1({"value1a", "value1b", "value1c"});
        SettingsNode settings(rootKey);
        WHEN("We add the values") {
            REQUIRE(settings.add(key1, value1));
            THEN("It should contain it") {
                // Every prefix of the key chain must be reachable too.
                SettingsKeys searchKeys;
                for(const auto& key : key1) {
                    searchKeys.emplace_back(key);
                    REQUIRE(settings.contains(searchKeys));
                }
                REQUIRE(settings.contains(key1));
            }
            THEN("We should get the values using the [] operator") {
                // Walk the chain one level at a time via operator[].
                const SettingsNode* stageSettings = &settings;
                for(const auto& key : key1) {
                    stageSettings = &stageSettings->operator[](key);
                }
                REQUIRE(stageSettings->get<SettingsValues>(
                            SettingsKeys(), SettingsValues()) == value1);
            }
            THEN("We should get the values") {
                REQUIRE(settings.get<SettingsValues>(key1) != std::nullopt);
                REQUIRE(settings.get<SettingsValues>(key1).value() == value1);
            }
            THEN("We should get the values, not the default ones") {
                const SettingsValues DEFAULT_VALUE({"blaat"});
                REQUIRE(settings.get<SettingsValues>(key1, DEFAULT_VALUE) ==
                        value1);
            }
        }
    }
    GIVEN("A settingsnode with a deep hierarchy") {
        SettingsKeys veryLongKeys1;
        const SettingsValues value1({"value1a", "value1b"});
        SettingsKeys veryLongKeys2;
        const SettingsValues value2({"value2a", "value2b"});
        SettingsNode settings("root-key");
        const size_t TEST_SIZE_DEPTH = 8192U;
        for(size_t i = 0; i < TEST_SIZE_DEPTH; ++i) {
            veryLongKeys1.push_back("key1-" + to_string(i));
            veryLongKeys2.push_back("key2-" + to_string(i));
        }
        WHEN("We add the keys") {
            REQUIRE(settings.add(veryLongKeys1, value1));
            REQUIRE(settings.add(veryLongKeys2, value2));
            THEN("It should contain these") {
                REQUIRE(settings.get<SettingsValues>(veryLongKeys1) !=
                        std::nullopt);
                REQUIRE(settings.get<SettingsValues>(veryLongKeys1).value() ==
                        value1);
                REQUIRE(settings.get<SettingsValues>(veryLongKeys2) !=
                        std::nullopt);
                REQUIRE(settings.get<SettingsValues>(veryLongKeys2).value() ==
                        value2);
            }
        }
    }
    GIVEN("A settingsnode with a broad hierarchy") {
        const SettingsKeys key1({"key1a", "key1b"});
        const SettingsKeys key2({"key2a", "key2b"});
        SettingsNode settings("root-key");
        const size_t TEST_SIZE_BREADTH = 8192U;
        WHEN("We add the keys") {
            // Add the values one by one: repeated add() calls must append.
            SettingsValues value1;
            SettingsValues value2;
            for(size_t i = 0; i < TEST_SIZE_BREADTH; ++i) {
                const SettingsValue newValue1 = "value1-" + to_string(i);
                const SettingsValue newValue2 = "value2-" + to_string(i);
                REQUIRE(settings.add(key1, newValue1));
                REQUIRE(settings.add(key2, newValue2));
                value1.push_back(newValue1);
                value2.push_back(newValue2);
            }
            THEN("It should contain these") {
                REQUIRE(settings.get<SettingsValues>(key1) != std::nullopt);
                REQUIRE(settings.get<SettingsValues>(key1).value() == value1);
                REQUIRE(settings.get<SettingsValues>(key2) != std::nullopt);
                REQUIRE(settings.get<SettingsValues>(key2).value() == value2);
            }
        }
        WHEN("We add the keys as a whole") {
            // Same data, added via a single collection-based add() call.
            SettingsValues value1;
            SettingsValues value2;
            for(size_t i = 0; i < TEST_SIZE_BREADTH; ++i) {
                const SettingsValue newValue1 = "value1-" + to_string(i);
                const SettingsValue newValue2 = "value2-" + to_string(i);
                value1.push_back(newValue1);
                value2.push_back(newValue2);
            }
            REQUIRE(settings.add(key1, value1));
            REQUIRE(settings.add(key2, value2));
            THEN("It should contain these") {
                REQUIRE(settings.get<SettingsValues>(key1) != std::nullopt);
                REQUIRE(settings.get<SettingsValues>(key1).value() == value1);
                REQUIRE(settings.get<SettingsValues>(key2) != std::nullopt);
                REQUIRE(settings.get<SettingsValues>(key2).value() == value2);
            }
        }
    }
}
// Verifies SettingsNode's equality and inequality operators for equal trees,
// differing root keys, differing values, subsets, and empty nodes.
//
// Fixes: the two "almost similar" WHEN descriptions were identical (they now
// state which key is missing), and the empty-nodes THEN said "should not be
// equal" while asserting equality.
SCENARIO("Testing the (in)equality operator", "[config][settingsNode]") {
    GIVEN("A setup settings node") {
        const SettingsKey rootKey("root-key");
        const SettingsKeys key1({"key1a", "key1b", "key1c", "key1d"});
        const SettingsValues value1({"value1a", "value1b", "value1c"});
        const SettingsKeys key2(
            {"key2a", "key2b", "key2c", "key2d", "key2e", "key2f"});
        const SettingsValues value2(
            {"value2a", "value2b", "value2c", "value2d"});
        SettingsNode settings(rootKey);
        REQUIRE(settings.add(key1, value1));
        REQUIRE(settings.add(key2, value2));
        WHEN("We create a similar settings node") {
            SettingsNode similarSettings(rootKey);
            REQUIRE(similarSettings.add(key1, value1));
            REQUIRE(similarSettings.add(key2, value2));
            THEN("They should be equal") {
                REQUIRE(settings == similarSettings);
                REQUIRE_FALSE(settings != similarSettings);
            }
        }
        WHEN("We create a similar settings node with a different root key") {
            SettingsNode similarSettings("other-root-key");
            REQUIRE(similarSettings.add(key1, value1));
            REQUIRE(similarSettings.add(key2, value2));
            THEN("They should not be equal") {
                REQUIRE_FALSE(settings == similarSettings);
                REQUIRE(settings != similarSettings);
            }
        }
        WHEN("We create a similar settings node with a different value") {
            SettingsNode similarSettings("root-key");
            REQUIRE(similarSettings.add(key1, value2));
            THEN("They should not be equal") {
                REQUIRE_FALSE(settings == similarSettings);
                REQUIRE(settings != similarSettings);
            }
        }
        WHEN("We create an almost similar settings node missing the second "
             "key") {
            SettingsNode similarSettings(rootKey);
            REQUIRE(similarSettings.add(key1, value1));
            THEN("They should not be equal") {
                REQUIRE_FALSE(settings == similarSettings);
                REQUIRE(settings != similarSettings);
            }
        }
        WHEN("We create an almost similar settings node missing the first "
             "key") {
            SettingsNode similarSettings(rootKey);
            REQUIRE(similarSettings.add(key2, value2));
            THEN("They should not be equal") {
                REQUIRE_FALSE(settings == similarSettings);
                REQUIRE(settings != similarSettings);
            }
        }
        WHEN("We compare with an empty settings node") {
            SettingsNode similarSettings(rootKey);
            THEN("They should not be equal") {
                REQUIRE_FALSE(settings == similarSettings);
                REQUIRE(settings != similarSettings);
            }
        }
    }
    GIVEN("Two empty settings nodes") {
        const SettingsKey rootKey("root-key");
        SettingsNode similarSettings1(rootKey);
        SettingsNode similarSettings2(rootKey);
        WHEN("We compare two empty settings nodes") {
            // Fix: the description previously claimed inequality while the
            // assertions below check for equality.
            THEN("They should be equal") {
                REQUIRE(similarSettings1 == similarSettings2);
                REQUIRE_FALSE(similarSettings1 != similarSettings2);
            }
        }
    }
}
// Property-based check: copy construction yields an equal SettingsNode that
// lives at a different address (a true deep copy, not an alias).
SCENARIO("Test the copy constructor", "[settingsNode]") {
    propertyTest("A settings node to copy", [](const SettingsNode& expected) {
        THEN_WHEN("We copy the settings") {
            SettingsNode
                actual( // NOLINT(performance-unnecessary-copy-initialization)
                    expected);
            THEN_CHECK("The objects must be equal") {
                REQUIRE(actual == expected);
            }
            THEN_CHECK(
                "The objects must exist on a different place in memory") {
                REQUIRE(&expected != &actual);
                REQUIRE(&(expected.key()) != &(actual.key()));
            }
        }
    });
}
// Property-based check: copy assignment overwrites an unrelated node with an
// equal copy of the source, at a different address.
SCENARIO("Test the copy assignment operator", "[settingsNode]") {
    propertyTest("A settings node to copy", [](const SettingsNode& expected,
                                               std::string&& key) {
        THEN_WHEN("We copy the settings") {
            SettingsNode actual(
                key); // NOLINT(performance-unnecessary-copy-initialization)
            actual =
                expected; // NOLINT(performance-unnecessary-copy-initialization)
            AND_THEN("The objects must be equal") {
                REQUIRE(actual == expected);
            }
            AND_THEN("The objects must exist on a different place in memory") {
                REQUIRE(&expected != &actual);
                REQUIRE(&(expected.key()) != &(actual.key()));
            }
        }
    });
}
// Property-based check: move-constructing from a temporary copy yields a node
// equal to the original, at a different address.
//
// Fix: the step description said "We copy the settings" although this scenario
// exercises the move constructor.
SCENARIO("Test the move constructor", "[settingsNode]") {
    propertyTest("A settings node to move", [](const SettingsNode& expected) {
        THEN_WHEN("We move the settings") {
            // The explicit move of a temporary is intentional here (to force
            // the move constructor), so silence the pessimizing-move warning.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpessimizing-move"
            SettingsNode
                actual( // NOLINT(performance-unnecessary-copy-initialization)
                    move( // NOLINT(clang-diagnostic-pessimizing-move)
                        SettingsNode(expected)));
#pragma GCC diagnostic pop
            AND_THEN("The objects must be equal") {
                REQUIRE(actual == expected);
            }
            AND_THEN("The objects must exist on a different place in memory") {
                REQUIRE(&expected != &actual);
                REQUIRE(&(expected.key()) != &(actual.key()));
            }
        }
    });
}
// Property-based check: move-assigning an intermediate copy yields a node
// equal to the original, at a different address.
//
// Fix: the step description said "We copy the settings" although this scenario
// exercises the move assignment operator.
SCENARIO("Test the move assignment operator", "[settingsNode]") {
    propertyTest("A settings node to move", [](const SettingsNode& expected,
                                               std::string&& key) {
        THEN_WHEN("We move the settings") {
            SettingsNode
                intermediateCopy( // NOLINT(performance-unnecessary-copy-initialization)
                    expected);
            SettingsNode actual(key);
            actual = move(
                intermediateCopy); // NOLINT(performance-unnecessary-copy-initialization)
            AND_THEN("The objects must be equal") {
                REQUIRE(actual == expected);
            }
            AND_THEN("The objects must exist on a different place in memory") {
                REQUIRE(&expected != &actual);
                REQUIRE(&(expected.key()) != &(actual.key()));
            }
        }
    });
}
// Exercises SettingsNode::clear: clearing keys with and without values,
// clearing an empty key chain (must fail), clearing non-existing keys (must
// succeed), always checking that sibling keys are untouched.
SCENARIO("Testing the removal of values", "[config][settingsNode]") {
    GIVEN("A settings node setup with some values") {
        const SettingsKey rootKey("root-key");
        const SettingsKeys key1({"key1"});
        const SettingsKeys key2({"key2"});
        const SettingsKeys key3({"key3"});
        const SettingsValues value1({"value1a", "value1b"});
        const SettingsValues value2({"value2a", "value2b"});
        SettingsNode settings(rootKey);
        REQUIRE(settings.add(key1, value1));
        REQUIRE(settings.add(key2, value2));
        REQUIRE(settings.add(key3)); // key3 deliberately has no values
        WHEN("We clear a key with values") {
            bool returnCode = settings.clear(key1);
            THEN("It should succeed") { REQUIRE(returnCode); }
            THEN("They should be cleared") {
                REQUIRE(settings.get<SettingsValues>(key1) == std::nullopt);
                const SettingsValues DEFAULT_VALUE({"blaat"});
                REQUIRE(settings.get<SettingsValues>(key1, DEFAULT_VALUE) ==
                        DEFAULT_VALUE);
            }
            THEN("The other key should be untouched") {
                const SettingsValues DEFAULT_VALUE({"blaat"});
                REQUIRE(settings.get<SettingsValues>(key2, DEFAULT_VALUE) ==
                        value2);
                REQUIRE(settings.get<SettingsValues>(key3, DEFAULT_VALUE) ==
                        DEFAULT_VALUE);
            }
        }
        WHEN("We clear a key with no values") {
            bool returnCode = settings.clear(key3);
            THEN("It should succeed") { REQUIRE(returnCode); }
            THEN("They should be cleared") {
                REQUIRE(settings.get<SettingsValues>(key3) == std::nullopt);
                const SettingsValues DEFAULT_VALUE({"blaat"});
                REQUIRE(settings.get<SettingsValues>(key3, DEFAULT_VALUE) ==
                        DEFAULT_VALUE);
            }
            THEN("The other key should be untouched") {
                const SettingsValues DEFAULT_VALUE({"blaat"});
                REQUIRE(settings.get<SettingsValues>(key1, DEFAULT_VALUE) ==
                        value1);
                REQUIRE(settings.get<SettingsValues>(key2, DEFAULT_VALUE) ==
                        value2);
            }
        }
        WHEN("We clear with an empty key") {
            bool returnCode = settings.clear(SettingsKeys());
            THEN("It should fail") { REQUIRE_FALSE(returnCode); }
        }
        WHEN("We clear a non-existing key") {
            // Clearing something that was never there is treated as success.
            bool returnCode1 =
                settings.clear(SettingsKeys({"non-existing-key"}));
            bool returnCode2 = settings.clear(
                SettingsKeys({key1.front(), "non-existing-key"}));
            THEN("It should succeed") {
                REQUIRE(returnCode1);
                REQUIRE(returnCode2);
            }
            THEN("The other keys should be untouched") {
                const SettingsValues DEFAULT_VALUE({"blaat"});
                REQUIRE(settings.get<SettingsValues>(key1, DEFAULT_VALUE) ==
                        value1);
                REQUIRE(settings.get<SettingsValues>(key2, DEFAULT_VALUE) ==
                        value2);
                REQUIRE(settings.get<SettingsValues>(key3, DEFAULT_VALUE) ==
                        DEFAULT_VALUE);
            }
        }
    }
}
// Verifies operator<< by building the expected YAML-like textual rendering of
// a small tree by hand and comparing it against the streamed output.
SCENARIO("Test the settings node streaming operator",
         "[config][settingsNode]") {
    GIVEN("A configured settings object to assign and a stream") {
        const SettingsKey rootKey("root-key");
        const SettingsKey rootKey2("root-key2");
        const SettingsKeys key1({"key1"});
        const SettingsValues value1({"value1a", "value1b"});
        const SettingsKeys key2({"key2"});
        const SettingsValues value2({"value2a", "value2b"});
        const SettingsKeys key3({"key3a", "key3b"});
        const SettingsValues value3({"value3"});
        SettingsNode settings(rootKey);
        REQUIRE(settings.add(key1, value1));
        REQUIRE(settings.add(key2, value2));
        REQUIRE(settings.add(key3, value3));
        const SettingsNode constSettings =
            settings; // NOLINT(performance-unnecessary-copy-initialization)
        stringstream stream;
        WHEN("We apply the streaming operator to the stream") {
            stream << settings;
            THEN("We should get the stream") {
                // Each nesting level adds two spaces of indentation; every
                // entry is rendered as "- <key>:" or "- <value>".
                stringstream correctStream;
                correctStream << "- " << rootKey << ":" << std::endl;
                correctStream << "  - " << key1.front() << ":" << std::endl;
                for(const auto& value : value1) {
                    correctStream << "    - " << value << std::endl;
                }
                correctStream << "  - " << key2.front() << ":" << std::endl;
                for(const auto& value : value2) {
                    correctStream << "    - " << value << std::endl;
                }
                correctStream << "  - " << key3.front() << ":" << std::endl;
                correctStream << "    - " << key3.back() << ":" << std::endl;
                for(const auto& value : value3) {
                    correctStream << "      - " << value << std::endl;
                }
                REQUIRE(stream.str() == correctStream.str());
            }
        }
    }
}
// Property-based checks for add(): for arbitrary keys/values, the value is
// absent before the add (optional getter returns nullopt, defaulted getter
// returns the default) and present afterwards. Uses the file-local
// DEFAULT_VALUE/DEFAULT_VALUES constants as sentinels.
SCENARIO("Test adding values", "[config][settingsNode]") {
    propertyTest("Add one value with a simple key",
                 [](const SettingsKey& key, const SettingsValue& value) {
                     SettingsNode settings("Addition test");
                     REQUIRE(!settings.contains(key));
                     REQUIRE(settings.get<SettingsValue>(key) == std::nullopt);
                     REQUIRE(settings.get<SettingsValue>(key, DEFAULT_VALUE) ==
                             DEFAULT_VALUE);
                     REQUIRE(settings.add(key, value));
                     REQUIRE(settings.contains(key));
                     REQUIRE(settings.get<SettingsValue>(key) == value);
                     REQUIRE(settings.get<SettingsValue>(key, DEFAULT_VALUE) ==
                             value);
                 });
    propertyTest("Add one value with a key", [](const SettingsKeys& key,
                                                const SettingsValue& value) {
        SettingsNode settings("Addition test");
        REQUIRE(
            settings.contains(key) ==
            key.empty()); // An empty key will return true, as this points to the root settings node itself
        REQUIRE(settings.get<SettingsValue>(key) == std::nullopt);
        REQUIRE(settings.get<SettingsValue>(key, DEFAULT_VALUE) ==
                DEFAULT_VALUE);
        REQUIRE(settings.add(key, value));
        REQUIRE(settings.contains(key));
        REQUIRE(settings.get<SettingsValue>(key) == value);
        REQUIRE(settings.get<SettingsValue>(key, DEFAULT_VALUE) == value);
    });
    propertyTest("Add multiple values with a simple key",
                 [](const SettingsKey& key, const SettingsValues& values) {
                     SettingsNode settings("Addition test");
                     REQUIRE(!settings.contains(key));
                     REQUIRE(settings.get<SettingsValues>(key) == std::nullopt);
                     REQUIRE(settings.get<SettingsValues>(
                                 key, DEFAULT_VALUES) == DEFAULT_VALUES);
                     REQUIRE(settings.add(key, values));
                     REQUIRE(settings.contains(key));
                     REQUIRE(settings.get<SettingsValues>(key) == values);
                     REQUIRE(settings.get<SettingsValues>(
                                 key, DEFAULT_VALUES) == values);
                 });
    propertyTest("Add multiple elements with a key", [](const SettingsKeys& key,
                                                        const SettingsValues&
                                                            values) {
        SettingsNode settings("Addition test");
        REQUIRE(
            settings.contains(key) ==
            key.empty()); // An empty key will return true, as this points to the root settings node itself
        REQUIRE(settings.get<SettingsValues>(key) == std::nullopt);
        REQUIRE(settings.get<SettingsValues>(key, DEFAULT_VALUES) ==
                DEFAULT_VALUES);
        REQUIRE(settings.add(key, values));
        REQUIRE(settings.contains(key));
        REQUIRE(settings.get<SettingsValues>(key) == values);
        REQUIRE(settings.get<SettingsValues>(key, DEFAULT_VALUES) == values);
    });
}
} // namespace execHelper::config::test
<file_sep>#include <vector>
#include <gsl/string_span>
#include "config/pattern.h"
#include "config/variablesMap.h"
#include "core/task.h"
#include "plugins/commandLine.h"
#include "plugins/pluginUtils.h"
#include "plugins/pmd.h"
#include "plugins/verbosity.h"
#include "unittest/catch.h"
#include "utils/utils.h"
#include "executorStub.h"
#include "fleetingOptionsStub.h"
using std::string;
using std::vector;
using gsl::czstring;
using execHelper::config::Pattern;
using execHelper::config::Patterns;
using execHelper::config::VariablesMap;
using execHelper::core::Task;
using execHelper::core::TaskCollection;
using execHelper::plugins::COMMAND_LINE_KEY;
using execHelper::plugins::ExecuteCallback;
using execHelper::plugins::Pmd;
using execHelper::plugins::registerExecuteCallback;
using execHelper::plugins::VERBOSITY_KEY;
using execHelper::core::test::ExecutorStub;
using execHelper::test::FleetingOptionsStub;
using execHelper::test::utils::getExpectedTasks;
namespace {
const czstring<> PLUGIN_NAME = "pmd";
using Exec = string;
const czstring<> EXEC_KEY = "exec";
using Tool = string;
const czstring<> TOOL_KEY = "tool";
using MinimumTokens = string;
const czstring<> MINIMUM_TOKENS_KEY = "minimum-tokens";
using Files = vector<string>;
const czstring<> FILES_KEY = "files";
using Language = string;
const czstring<> LANGUAGE_KEY = "language";
} // namespace
namespace execHelper::plugins::test {
// Checks that Pmd::getVariablesMap produces the documented defaults (exec
// "pmd", tool "cpd", empty command line, verbosity off) and that the fleeting
// verbose flag flips the verbosity default to "yes".
SCENARIO("Obtaining the default variables map of the pmd plugin", "[pmd]") {
    MAKE_COMBINATIONS("The default fleeting options") {
        FleetingOptionsStub fleetingOptions;
        Pmd plugin;
        VariablesMap actualVariables(PLUGIN_NAME);
        REQUIRE(actualVariables.add(EXEC_KEY, PLUGIN_NAME));
        REQUIRE(actualVariables.add(TOOL_KEY, "cpd"));
        REQUIRE(actualVariables.add(COMMAND_LINE_KEY, CommandLineArgs()));
        REQUIRE(actualVariables.add(VERBOSITY_KEY, "no"));
        COMBINATIONS("Switch on verbosity") {
            fleetingOptions.m_verbose = true;
            REQUIRE(actualVariables.replace(VERBOSITY_KEY, "yes"));
        }
        THEN_WHEN("We request the variables map") {
            VariablesMap variables = plugin.getVariablesMap(fleetingOptions);
            THEN_CHECK("We should find the same ones") {
                REQUIRE(variables == actualVariables);
            }
        }
    }
}
// Combinatorially toggles pmd configuration options (exec, tool, minimum
// tokens, files, language, command line, verbosity), builds the task the
// plugin is expected to produce for each combination, and checks that
// Pmd::apply hands exactly those tasks to the executor.
SCENARIO("Make combinations of different configurations for the pmd plugin",
         "[pmd]") {
    MAKE_COMBINATIONS("Of several settings") {
        const Pattern pattern1("PATTERN1", {"value1a", "value1b"});
        const Pattern pattern2("PATTERN2", {"value2a", "value2b"});
        const Patterns patterns({pattern1, pattern2});
        Pmd plugin;
        Task task;
        VariablesMap variables = plugin.getVariablesMap(FleetingOptionsStub());
        Task expectedTask;
        TaskCollection minimumTokens;
        TaskCollection files;
        TaskCollection verbosity;
        TaskCollection language;
        TaskCollection commandLine;
        ExecutorStub executor;
        ExecuteCallback executeCallback = [&executor](const Task& task) {
            executor.execute(task);
        };
        registerExecuteCallback(executeCallback);
        COMBINATIONS("Switch exec") {
            REQUIRE(variables.replace(EXEC_KEY, "exec.sh"));
        }
        COMBINATIONS("Switch the tool") {
            REQUIRE(variables.replace(TOOL_KEY, "tool1"));
        }
        COMBINATIONS("Switch on minimum tokens") {
            REQUIRE(variables.replace(MINIMUM_TOKENS_KEY, "100"));
            // --minimum-tokens is only emitted for the cpd tool.
            if(variables.get<Tool>(TOOL_KEY) == Tool("cpd")) {
                minimumTokens.emplace_back("--minimum-tokens");
                minimumTokens.emplace_back(
                    variables.get<MinimumTokens>(MINIMUM_TOKENS_KEY).value());
            }
        }
        COMBINATIONS("Switch on files") {
            const Files newFiles({"file1", "file2", "file*"});
            REQUIRE(variables.replace(FILES_KEY, newFiles));
            // --files arguments are only emitted for the cpd tool.
            if(variables.get<Tool>(TOOL_KEY) == Tool("cpd")) {
                for(const auto& file : newFiles) {
                    files.emplace_back("--files");
                    files.emplace_back(file);
                }
            }
        }
        COMBINATIONS("Add a language") {
            REQUIRE(variables.replace(LANGUAGE_KEY, "language1"));
            language.emplace_back("--language");
            language.push_back(variables.get<Language>(LANGUAGE_KEY).value());
        }
        COMBINATIONS("Add a command line") {
            commandLine = {"{" + pattern1.getKey() + "}",
                           "{" + pattern2.getKey() + "}"};
            REQUIRE(variables.replace(COMMAND_LINE_KEY, commandLine));
        }
        COMBINATIONS("Switch on verbosity") {
            REQUIRE(variables.replace(VERBOSITY_KEY, "yes"));
            verbosity.emplace_back("-verbose");
        }
        // The binary is named "<exec>-<tool>"; arguments follow in the fixed
        // order the plugin emits them.
        string binaryName(variables.get<Exec>(EXEC_KEY).value());
        binaryName.append("-").append(variables.get<Tool>(TOOL_KEY).value());
        expectedTask.append(binaryName);
        expectedTask.append(language);
        expectedTask.append(verbosity);
        expectedTask.append(minimumTokens);
        expectedTask.append(files);
        expectedTask.append(commandLine);
        const ExecutorStub::TaskQueue expectedTasks =
            getExpectedTasks(expectedTask, patterns);
        bool returnCode = plugin.apply(task, variables, patterns);
        THEN_CHECK("It should succeed") { REQUIRE(returnCode); }
        THEN_CHECK("It called the right commands") {
            REQUIRE(expectedTasks == executor.getExecutedTasks());
        }
    }
}
} // namespace execHelper::plugins::test
<file_sep>#include <algorithm>
#include <array>
#include <map>
#include <memory>
#include <optional>
#include <stdexcept>
#include <string>
#include "config/pattern.h"
#include "config/settingsNode.h"
#include "config/variablesMap.h"
#include "core/task.h"
#include "plugins/executePlugin.h"
#include "plugins/luaPlugin.h"
#include "plugins/memory.h"
#include "config/generators.h"
#include "unittest/catch.h"
#include "unittest/config.h"
#include "unittest/rapidcheck.h"
#include "base-utils/nonEmptyString.h"
#include "utils/addToConfig.h"
#include "utils/addToTask.h"
#include "utils/commonGenerators.h"
#include "utils/utils.h"
#include "executorStub.h"
#include "fleetingOptionsStub.h"
#include "handlers.h"
using std::array;
using std::make_pair;
using std::make_shared;
using std::map;
using std::move;
using std::optional;
using std::pair;
using std::runtime_error;
using std::shared_ptr;
using std::static_pointer_cast;
using std::string;
using std::vector;
using gsl::not_null;
using execHelper::config::EnvironmentCollection;
using execHelper::config::Pattern;
using execHelper::config::Patterns;
using execHelper::config::SettingsNode;
using execHelper::config::VariablesMap;
using execHelper::core::Task;
using execHelper::core::TaskCollection;
using execHelper::plugins::ExecutePlugin;
using execHelper::core::test::ExecutorStub;
using execHelper::test::addToConfig;
using execHelper::test::addToTask;
using execHelper::test::FleetingOptionsStub;
using execHelper::test::NonEmptyString;
using execHelper::test::propertyTest;
using execHelper::test::utils::getExpectedTasks;
namespace filesystem = std::filesystem;
namespace {
// The two invocation modes supported by the docker plugin under test.
enum class Mode { Run, Exec };

// Lists every supported mode, for enumeration in the property-based tests.
constexpr auto getAllModes() {
    return std::array<Mode, 2>{Mode::Run, Mode::Exec};
}
// Stores the docker subcommand ("run" or "exec") that corresponds to the
// given mode under the given key in the configuration. Throws a
// runtime_error when the mode is unknown or when the key cannot be added.
inline void addToConfig(const execHelper::config::SettingsKeys& key,
                        const Mode mode, not_null<VariablesMap*> config) {
    const string command = [mode]() -> string {
        switch(mode) {
        case Mode::Run:
            return "run";
        case Mode::Exec:
            return "exec";
        }
        throw runtime_error("Invalid mode");
    }();
    if(!config->add(key, command)) {
        throw runtime_error("Failed to add key " + key.back() +
                            " with mode value to config");
    }
}
// Appends the docker subcommand ("run" or "exec") for the given mode to the
// expected task. Throws a runtime_error when the mode is unknown.
inline void addToTask(const Mode mode,
                      gsl::not_null<execHelper::core::Task*> task) {
    string command;
    if(mode == Mode::Run) {
        command = "run";
    } else if(mode == Mode::Exec) {
        command = "exec";
    } else {
        throw runtime_error("Invalid mode");
    }
    execHelper::test::addToTask(
        command, task,
        [](const string& value) -> TaskCollection { return {value}; });
}
} // namespace
namespace rc {
/** \brief rapidcheck generator specialization: yields an arbitrary docker Mode
 *         picked from getAllModes(). */
template <> struct Arbitrary<Mode> {
    static auto arbitrary() -> Gen<Mode> {
        return gen::elementOf(getAllModes());
    };
};
} // namespace rc
namespace execHelper::plugins::test {
// Property-based test: for any combination of docker configuration options,
// applying the plugin must produce exactly the docker command line that those
// options describe (checked via an executor stub, or via a memory plugin when
// a target is configured).
SCENARIO("Testing the configuration settings of the docker plugin",
         "[docker][successful]") {
    propertyTest(
        "",
        [](Mode mode, const NonEmptyString& image,
           const optional<filesystem::path>& workingDir,
           const optional<vector<string>>& commandLine,
           const optional<EnvironmentCollection>& environment,
           const NonEmptyString& container, const optional<bool>& interactive,
           const optional<bool>& tty, const optional<bool>& privileged,
           const optional<string>& user,
           const optional<pair<string, string>>&
               env, // Lua does not necessarily preserve the order of these, so we currently limit ourselves to one value
           const optional<vector<string>>& volumes,
           const optional<NonEmptyString>& target) {
            const Task task;
            Task expectedTask(task);
            Patterns patterns;
            VariablesMap config("docker-test");
            LuaPlugin plugin(std::string(PLUGINS_INSTALL_PATH) + "/docker.lua");
            ExecutorStub executor;
            ExecuteCallback executeCallback = [&executor](const Task& task) {
                executor.execute(task);
            };
            registerExecuteCallback(executeCallback);
            auto memory = make_shared<SpecialMemory>();
            // When a target is configured, register a memory plugin as the
            // endpoint so the task the docker plugin delegates can be
            // inspected afterwards.
            if(target) {
                SettingsNode generalSettings("docker-test");
                FleetingOptionsStub fleetingOptions;
                string key = **target;
                Plugins plugins;
                plugins.emplace(
                    make_pair(move(key), static_pointer_cast<Plugin>(memory)));
                ExecutePlugin::push(move(plugins));
                ExecutePlugin::push(
                    gsl::not_null<config::FleetingOptionsInterface*>(
                        &fleetingOptions));
                ExecutePlugin::push(SettingsNode("general-docker"));
                ExecutePlugin::push(Patterns(patterns));
            }
            // Build the configuration and the expected command line in
            // lockstep, option by option.
            expectedTask.append("docker");
            addToConfig("mode", mode, &config);
            addToTask(mode, &expectedTask);
            addToConfig("interactive", interactive, &config);
            addToTask(
                interactive, &expectedTask,
                [](const string& value) -> TaskCollection {
                    return {"--interactive=" + value};
                },
                false);
            addToConfig("tty", tty, &config);
            addToTask(
                tty, &expectedTask,
                [](const string& value) -> TaskCollection {
                    return {"--tty=" + value};
                },
                false);
            addToConfig("privileged", privileged, &config);
            addToTask(
                privileged, &expectedTask,
                [](const string& value) {
                    return value == "true" ? TaskCollection{"--privileged"}
                                           : TaskCollection();
                },
                false);
            addToConfig("user", user, &config);
            addToTask(user, &expectedTask,
                      [](const string& value) -> TaskCollection {
                          return {
                              "--user=\"" + value + "\"",
                          };
                      });
            addToConfig("env", env, &config);
            addToTask(env, &expectedTask,
                      [](const string& value) -> TaskCollection {
                          return {
                              "\"--env=" + value + "\"",
                          };
                      });
            if(commandLine) {
                handleCommandLine(*commandLine, config, expectedTask);
            }
            addToConfig("image", image, &config);
            addToConfig("container", container, &config);
            // Run mode targets an image (and supports volumes); exec mode
            // targets an already-running container.
            switch(mode) {
            case Mode::Run:
                addToConfig("volumes", volumes, &config);
                addToTask(volumes, &expectedTask,
                          [](const string& value) -> TaskCollection {
                              return {
                                  "\"--volume=" + value + "\"",
                              };
                          });
                expectedTask.append(*image);
                break;
            case Mode::Exec:
                expectedTask.append(*container);
                break;
            default:
                REQUIRE(false);
            }
            addToConfig("targets", target, &config);
            if(environment) {
                handleEnvironment(*environment, config, expectedTask);
            }
            if(workingDir) {
                handleWorkingDirectory(*workingDir, config, expectedTask);
            }
            ExecutorStub::TaskQueue expectedTasks =
                getExpectedTasks(expectedTask, patterns);
            THEN_WHEN("We apply the plugin") {
                bool returnCode = plugin.apply(task, config, patterns);
                THEN_CHECK("It should succeed") { REQUIRE(returnCode); }
                THEN_CHECK("It called the right commands") {
                    if(target) {
                        auto executions = memory->getExecutions();
                        REQUIRE(executions.size() == 1);
                        REQUIRE(expectedTasks.size() == executions.size());
                        auto expectedTask = expectedTasks.begin();
                        for(auto execution = executions.begin();
                            execution != executions.end();
                            ++execution, ++expectedTask) {
                            REQUIRE(execution->task == *expectedTask);
                            REQUIRE(execution->patterns.empty());
                        }
                    } else {
                        REQUIRE(expectedTasks == executor.getExecutedTasks());
                    }
                }
            }
            // Unwind the stacks pushed above when a target was configured.
            if(target) {
                ExecutePlugin::popFleetingOptions();
                ExecutePlugin::popSettingsNode();
                ExecutePlugin::popPatterns();
                ExecutePlugin::popPlugins();
            }
        });
}
// Checks that a "{pattern}" placeholder used as the target is expanded into
// the pattern's concrete values and that each value is executed once per
// occurrence in the pattern's value list.
SCENARIO("Use a pattern for the target", "[docker][successful]") {
    propertyTest(
        "A pattern to use as a target",
        [](Mode mode, const NonEmptyString& image,
           const NonEmptyString& container, const Pattern& pattern) {
            auto target = string("{").append(pattern.getKey()).append("}");
            LuaPlugin plugin(std::string(PLUGINS_INSTALL_PATH) + "/docker.lua");
            const Task task;
            Patterns patterns = {pattern};
            // Map every distinct pattern value onto its own memory plugin
            // instance so executions can be counted per value.
            map<std::string, shared_ptr<SpecialMemory>> memories;
            const auto& patternValues = pattern.getValues();
            transform(patternValues.begin(), patternValues.end(),
                      inserter(memories, memories.end()),
                      [](const auto& value) {
                          return make_pair(value, make_shared<SpecialMemory>());
                      });
            // Register each memories mapping as the endpoint for every target command
            Plugins plugins;
            transform(memories.begin(), memories.end(),
                      inserter(plugins, plugins.end()), [](const auto& memory) {
                          return make_pair(
                              memory.first,
                              static_pointer_cast<Plugin>(memory.second));
                      });
            ExecutePlugin::push(move(plugins));
            FleetingOptionsStub fleetingOptions;
            ExecutePlugin::push(
                gsl::not_null<config::FleetingOptionsInterface*>(
                    &fleetingOptions));
            ExecutePlugin::push(SettingsNode("general-docker"));
            ExecutePlugin::push(Patterns(patterns));
            VariablesMap config("docker-test");
            addToConfig("run", mode, &config);
            addToConfig("image", image, &config);
            addToConfig("container", container, &config);
            addToConfig("targets", target, &config);
            THEN_WHEN("We apply the plugin") {
                bool returnCode = plugin.apply(task, config, patterns);
                THEN_CHECK("It should succeed") { REQUIRE(returnCode); }
                THEN_CHECK("The pattern was unwrapped into its actual values") {
                    for(const auto& memory : memories) {
                        // Expected executions is 1 per memory, multiplied by the number of occurrences of the pattern value in all the pattern values
                        size_t nbOfExpectedExecutions =
                            count(patternValues.begin(), patternValues.end(),
                                  memory.first);
                        REQUIRE(memory.second->getExecutions().size() ==
                                nbOfExpectedExecutions);
                    }
                }
            }
            // Unwind the stacks pushed above.
            ExecutePlugin::popFleetingOptions();
            ExecutePlugin::popSettingsNode();
            ExecutePlugin::popPatterns();
            ExecutePlugin::popPlugins();
        });
}
// Error path: the docker plugin must fail when neither an image nor a
// container is configured, regardless of all other options.
SCENARIO("Not passing an image or container to the docker plugin",
         "[docker][error]") {
    propertyTest(
        "A docker plugin and a configuration configured for run mode",
        [](const Mode mode, const optional<bool>& interactive,
           const optional<bool>& tty, const optional<bool>& privileged,
           const optional<string>& user,
           const optional<pair<string, string>>&
               env, // Lua does not necessarily preserve the order of these, so we currently limit ourselves to one value
           const optional<vector<string>>& volumes) {
            LuaPlugin plugin(std::string(PLUGINS_INSTALL_PATH) + "/docker.lua");
            VariablesMap config("docker-test");
            addToConfig("run", mode, &config);
            addToConfig("interactive", interactive, &config);
            addToConfig("tty", tty, &config);
            addToConfig("privileged", privileged, &config);
            addToConfig("user", user, &config);
            addToConfig("env", env, &config);
            addToConfig("volumes", volumes, &config);
            THEN_WHEN("We call the docker plugin with this configuration") {
                bool returnCode = plugin.apply(Task(), config, Patterns());
                THEN_CHECK("It should fail") { REQUIRE_FALSE(returnCode); }
            }
        });
}
} // namespace execHelper::plugins::test
<file_sep>#include <deque>
#include <string>
#include <vector>
#include "core/permutationIterator.h"
#include "unittest/catch.h"
using std::deque;
using std::string;
using std::vector;
using execHelper::core::PermutationIterator;
namespace {
// Test fixture element: an immutable (string, uint8_t) pair used to verify
// that the permutation iterator yields combinations in the expected order.
class PermuteObjectElement {
  public:
    using Object1 = std::string;
    using Object2 = std::uint8_t;

    PermuteObjectElement(Object1 object1, Object2 object2)
        : m_object1(std::move(object1)), m_object2(object2) {}

    // Two elements are equal iff both components compare equal.
    auto operator==(const PermuteObjectElement& other) const noexcept -> bool {
        return m_object1 == other.m_object1 && m_object2 == other.m_object2;
    }

    auto operator!=(const PermuteObjectElement& other) const noexcept -> bool {
        return !(*this == other);
    }

  private:
    const Object1 m_object1;
    const Object2 m_object2;
};
/**
 * \brief Fixture holding two collections; iterating it yields every
 *        (Collection1, Collection2) combination through PermutationIterator.
 */
class PermuteObject {
  public:
    using Collection1 = vector<PermuteObjectElement::Object1>;
    using Collection2 = deque<PermuteObjectElement::Object2>;
    using iterator =
        PermutationIterator<PermuteObjectElement, Collection1, Collection2>;
    using const_iterator =
        PermutationIterator<const PermuteObjectElement, const Collection1,
                            const Collection2>;
    PermuteObject(Collection1 collection1, Collection2 collection2) noexcept
        : m_collection1(std::move(collection1)),
          m_collection2(std::move(collection2)) {
        ;
    }
    auto operator==(const PermuteObject& other) const noexcept -> bool {
        return m_collection1 == other.m_collection1 &&
               m_collection2 == other.m_collection2;
    }
    auto operator!=(const PermuteObject& other) const noexcept -> bool {
        return !(*this == other);
    }
    //! Read access to the first (outer) collection.
    [[nodiscard]] auto getCollection1() const noexcept -> const Collection1& {
        return m_collection1;
    }
    //! Read access to the second (inner) collection.
    [[nodiscard]] auto getCollection2() const noexcept -> const Collection2& {
        return m_collection2;
    }
    // begin()/end() hand both collections' begin and end iterators to the
    // PermutationIterator; an end iterator is one positioned at both ends.
    [[nodiscard]] auto begin() noexcept -> iterator {
        return {m_collection1.begin(), m_collection2.begin(),
                m_collection1.end(), m_collection2.end()};
    }
    [[nodiscard]] auto begin() const noexcept -> const_iterator {
        return {m_collection1.begin(), m_collection2.begin(),
                m_collection1.end(), m_collection2.end()};
    }
    [[nodiscard]] auto end() noexcept -> iterator {
        return {m_collection1.end(), m_collection2.end(), m_collection1.end(),
                m_collection2.end()};
    }
    [[nodiscard]] auto end() const noexcept -> const_iterator {
        return {m_collection1.end(), m_collection2.end(), m_collection1.end(),
                m_collection2.end()};
    }
  private:
    Collection1 m_collection1;
    Collection2 m_collection2;
};
// Builds the reference list of combinations in the order the permutation
// iterator is expected to produce them: collection1-major, collection2-minor.
auto getOrderedCombinations(const PermuteObject& permute)
    -> vector<PermuteObjectElement> {
    vector<PermuteObjectElement> expected;
    expected.reserve(permute.getCollection1().size() *
                     permute.getCollection2().size());
    for(const auto& first : permute.getCollection1()) {
        for(const auto& second : permute.getCollection2()) {
            expected.emplace_back(first, second);
        }
    }
    return expected;
}
// Verifies that range-based iteration over `permute` visits exactly the
// given combinations, in order, with no extras and none missing.
template <typename T>
inline void
testForeach(T& permute,
            const vector<PermuteObjectElement>& orderedCombinations) {
    size_t orderedCombinationsIndex = 0U;
    for(const auto& combination : permute) {
        REQUIRE(orderedCombinationsIndex < orderedCombinations.size());
        REQUIRE((combination == orderedCombinations[orderedCombinationsIndex]));
        ++orderedCombinationsIndex;
    }
    // Every expected combination must have been visited exactly once.
    REQUIRE((orderedCombinationsIndex == orderedCombinations.size()));
}
// Same check as testForeach, but using explicit iterators to exercise
// operator++, operator!= and operator* directly.
template <typename T>
inline void
testIterators(T& permute,
              const vector<PermuteObjectElement>& orderedCombinations) {
    size_t orderedCombinationsIndex = 0U;
    for(auto it = permute.begin(); it != permute.end();
        ++it) { // NOLINT(modernize-loop-convert)
        REQUIRE(orderedCombinationsIndex < orderedCombinations.size());
        REQUIRE((*it == orderedCombinations[orderedCombinationsIndex]));
        ++orderedCombinationsIndex;
    }
    // Every expected combination must have been visited exactly once.
    REQUIRE((orderedCombinationsIndex == orderedCombinations.size()));
}
} // namespace
namespace execHelper::core::test {
// Checks full iteration over const and non-const collections, plus the edge
// cases where one of the underlying collections is empty (in which case no
// combinations must be produced at all).
SCENARIO(
    "Test the permutation iterators when looping over the entire collection",
    "[permutationiterator]") {
    GIVEN("Some non-const collections to iterate over using permutations of "
          "its content and the ordered combinations") {
        PermuteObject permute({"object1", "object2"}, {1, 2});
        const vector<PermuteObjectElement> orderedCombinations =
            getOrderedCombinations(permute);
        WHEN("We iterate over them entirely") {
            THEN("We should be able to do so using a foreach loop") {
                testForeach(permute, orderedCombinations);
            }
            THEN("We should be able to do so using iterators") {
                testIterators(permute, orderedCombinations);
            }
        }
    }
    GIVEN("Some const collections to iterate over using permutations of its "
          "content and the ordered combinations") {
        const PermuteObject permute({"object1", "object2"}, {1, 2});
        const vector<PermuteObjectElement> orderedCombinations =
            getOrderedCombinations(permute);
        WHEN("We iterate over them entirely") {
            THEN("We should be able to do so using a foreach loop") {
                testForeach(permute, orderedCombinations);
            }
            THEN("We should be able to do so using iterators") {
                testIterators(permute, orderedCombinations);
            }
        }
    }
    GIVEN("Some collections that have empty collection on certain places") {
        WHEN("Trying to iterate over an empty inner iterator") {
            PermuteObject permute({"object1", "object2"}, {});
            const PermuteObject constPermute({"object1", "object2"}, {});
            THEN("We should get no elements to iterate over") {
                REQUIRE(permute.begin() == permute.end());
                REQUIRE(constPermute.begin() == constPermute.end());
            }
        }
        WHEN("Trying to iterate over an empty outer iterator") {
            PermuteObject permute({}, {1, 2});
            // Fixed: the const variant previously duplicated the empty-inner
            // setup above, so the const empty-outer case was never tested. It
            // must mirror the non-const object on the preceding line.
            const PermuteObject constPermute({}, {1, 2});
            THEN("We should get no elements to iterate over") {
                REQUIRE(permute.begin() == permute.end());
                REQUIRE(constPermute.begin() == constPermute.end());
            }
        }
    }
}
// Checks iterating over a sub-range of the permutations by constructing
// PermutationIterators directly from offset begin/end positions, for both
// non-const and const collections.
SCENARIO("Test the permutation iterators for partial iteration",
         "[permutationiterator]") {
    GIVEN("Some non-const collections to partially iterate over using "
          "permutations of its content and the ordered combinations") {
        const PermuteObject::Collection1 collection1{"object1", "object2",
                                                     "object3", "object4"};
        const PermuteObject::Collection2 collection2{1, 2, 3, 4, 5, 6, 7, 8};
        const PermuteObject permute(collection1, collection2);
        vector<PermuteObjectElement> orderedCombinations;
        // Iterate over everything except the first and last element of each
        // collection.
        const size_t beginIndexObject1 = 1U;
        const size_t endIndexObject1 = permute.getCollection1().size() - 1U;
        const size_t beginIndexObject2 = 1U;
        const size_t endIndexObject2 = permute.getCollection2().size() - 1U;
        for(size_t i = beginIndexObject1; i < endIndexObject1; ++i) {
            for(size_t j = beginIndexObject2; j < endIndexObject2; ++j) {
                PermuteObjectElement combination(permute.getCollection1()[i],
                                                 permute.getCollection2()[j]);
                orderedCombinations.push_back(combination);
            }
        }
        WHEN("We iterate over them partially") {
            THEN("We should get the same ordered combinations") {
                size_t orderedCombinationsIndex = 0U;
                for(auto it = PermutationIterator<PermuteObjectElement,
                                                  PermuteObject::Collection1,
                                                  PermuteObject::Collection2>(
                        permute.getCollection1().begin() + beginIndexObject1,
                        permute.getCollection2().begin() + beginIndexObject2,
                        permute.getCollection1().begin() + endIndexObject1,
                        permute.getCollection2().begin() + endIndexObject2);
                    it !=
                    PermutationIterator<PermuteObjectElement,
                                        PermuteObject::Collection1,
                                        PermuteObject::Collection2>(
                        permute.getCollection1().begin() + endIndexObject1,
                        permute.getCollection2().begin() + endIndexObject2,
                        permute.getCollection1().begin() + endIndexObject1,
                        permute.getCollection2().begin() + endIndexObject2);
                    ++it) {
                    REQUIRE(orderedCombinationsIndex <
                            orderedCombinations.size());
                    REQUIRE(*it ==
                            orderedCombinations[orderedCombinationsIndex]);
                    ++orderedCombinationsIndex;
                }
                REQUIRE(orderedCombinationsIndex == orderedCombinations.size());
            }
        }
    }
    GIVEN("Some const collections to partially iterate over using permutations "
          "of its content and the ordered combinations") {
        const PermuteObject::Collection1 collection1{"object1", "object2",
                                                     "object3", "object4"};
        const PermuteObject::Collection2 collection2{1, 2, 3, 4, 5, 6, 7, 8};
        const PermuteObject permute(collection1, collection2);
        vector<PermuteObjectElement> orderedCombinations;
        // Same sub-range as above.
        const size_t beginIndexObject1 = 1U;
        const size_t endIndexObject1 = collection1.size() - 1U;
        const size_t beginIndexObject2 = 1U;
        const size_t endIndexObject2 = collection2.size() - 1U;
        for(size_t i = beginIndexObject1; i < endIndexObject1; ++i) {
            for(size_t j = beginIndexObject2; j < endIndexObject2; ++j) {
                PermuteObjectElement combination(collection1[i],
                                                 collection2[j]);
                orderedCombinations.push_back(combination);
            }
        }
        WHEN("We iterate over them partially") {
            THEN("We should get the same ordered combinations") {
                size_t orderedCombinationsIndex = 0U;
                for(auto it =
                        PermutationIterator<const PermuteObjectElement,
                                            const PermuteObject::Collection1,
                                            const PermuteObject::Collection2>(
                            permute.getCollection1().begin() +
                                beginIndexObject1,
                            permute.getCollection2().begin() +
                                beginIndexObject2,
                            permute.getCollection1().begin() + endIndexObject1,
                            permute.getCollection2().begin() + endIndexObject2);
                    it !=
                    PermutationIterator<const PermuteObjectElement,
                                        const PermuteObject::Collection1,
                                        const PermuteObject::Collection2>(
                        permute.getCollection1().begin() + endIndexObject1,
                        permute.getCollection2().begin() + endIndexObject2,
                        permute.getCollection1().begin() + endIndexObject1,
                        permute.getCollection2().begin() + endIndexObject2);
                    ++it) {
                    REQUIRE(orderedCombinationsIndex <
                            orderedCombinations.size());
                    REQUIRE(*it ==
                            orderedCombinations[orderedCombinationsIndex]);
                    ++orderedCombinationsIndex;
                }
                REQUIRE(orderedCombinationsIndex == orderedCombinations.size());
            }
        }
    }
}
} // namespace execHelper::core::test
<file_sep>#ifndef COMMAND_UTILS_INCLUDE
#define COMMAND_UTILS_INCLUDE
#include <limits>
#include <string>
#include <vector>
#include <gsl/string_span>
#include "tmpFile.h"
namespace execHelper {
namespace test {
namespace baseUtils {
using ConfigFile = TmpFile;
const gsl::czstring<> EXEC_HELPER_BINARY = "exec-helper";
const gsl::czstring<> COMMAND_KEY = "commands";
const gsl::czstring<> COMMAND_LINE_COMMAND_KEY = "command-line-command";
const gsl::czstring<> COMMAND_LINE_COMMAND_LINE_KEY = "command-line";
using ReturnCode = int32_t;
static const ReturnCode SUCCESS = EXIT_SUCCESS;
static const ReturnCode RUNTIME_ERROR = std::numeric_limits<ReturnCode>::max();
} // namespace baseUtils
} // namespace test
} // namespace execHelper
#endif /* COMMAND_UTILS_INCLUDE */
<file_sep>option(BUILD_USAGE_DOCUMENTATION "Create and install the usage documentation (requires Sphinx)" ON)
option(BUILD_API_DOCUMENTATION "Create and install the API documentation (requires Doxygen)" OFF)
add_custom_target(docs ALL)
if(BUILD_USAGE_DOCUMENTATION)
message(STATUS "Building usage documentation")
add_subdirectory(usage)
endif()
if(BUILD_API_DOCUMENTATION)
message(STATUS "Building API documentation")
add_subdirectory(api)
endif()
<file_sep>#include <filesystem>
#include <sstream>
#include <string>
#include <vector>
#include <boost/algorithm/string.hpp>
#include "config/environment.h"
#include "config/path.h"
#include "core/task.h"
#include "unittest/catch.h"
#include "utils/utils.h"
using std::endl;
using std::move;
using std::string;
using std::stringstream;
using std::vector;
using boost::trim_right;
using execHelper::config::EnvironmentCollection;
using execHelper::config::EnvironmentValue;
using execHelper::config::Path;
using execHelper::core::TaskCollection;
using execHelper::test::utils::appendVectors;
namespace filesystem = std::filesystem;
namespace {
// Returns a copy of the given vector with its element order inverted.
template <typename T>
inline auto reverse(std::vector<T> items) -> std::vector<T> {
    std::reverse(items.begin(), items.end());
    return items;
}
// Joins all task parts into one space-separated string; any trailing
// whitespace is stripped from the final result.
auto toString(const TaskCollection& task) -> string {
    string joined;
    for(const auto& part : task) {
        joined += part;
        joined += ' ';
    }
    trim_right(joined);
    return joined;
}
} // namespace
namespace execHelper::core::test {
// Exercises every combination of Task mutators (append commands, set/append
// environment, set working directory) and verifies that all getters reflect
// the expected state afterwards.
SCENARIO("Test the getters and setters of a task", "[task]") {
    GIVEN("A task") {
        MAKE_COMBINATIONS(
            "Of getting and setting different parameters of a task") {
            Task task;
            TaskCollection actualCommands;
            Path actualWorkingDir = filesystem::current_path();
            EnvironmentCollection actualEnvironment;
            COMBINATIONS("Append a string") {
                const string taskPart = "const string task";
                actualCommands.push_back(taskPart);
                task.append(taskPart);
            }
            COMBINATIONS("Append a moved string") {
                string taskPart = "const string task";
                actualCommands.push_back(taskPart);
                task.append(move(taskPart));
            }
            COMBINATIONS("Append a task collection") {
                const TaskCollection taskPart = {
                    "taskcollection1", "taskcollection2", "taskcollection3"};
                appendVectors(actualCommands, taskPart);
                task.append(taskPart);
            }
            COMBINATIONS("Move a task collection") {
                TaskCollection taskPart = {"taskcollection1", "taskcollection2",
                                           "taskcollection3"};
                appendVectors(actualCommands, taskPart);
                task.append(move(taskPart));
            }
            // Strings containing spaces must stay single task parts.
            COMBINATIONS("Add a string with the space separator as part of the "
                         "argument") {
                const string taskPart = "This is one command in total";
                actualCommands.push_back(taskPart);
                task.append(taskPart);
            }
            COMBINATIONS("Add a collection with the space separator as part of "
                         "the argument") {
                const TaskCollection taskPart = {
                    "This is one command in total",
                    "This is another command that is one"};
                appendVectors(actualCommands, taskPart);
                task.append(taskPart);
            }
            COMBINATIONS("Set an environment") {
                const EnvironmentCollection environment = {{"ENV1", "value1"},
                                                           {"ENV2", "value2"}};
                actualEnvironment = environment;
                task.setEnvironment(environment);
            }
            COMBINATIONS("Set a moved environment") {
                EnvironmentCollection environment = {{"MOVE-ENV1", "value1"},
                                                     {"MOVE-ENV2", "value2"}};
                actualEnvironment = environment;
                task.setEnvironment(move(environment));
            }
            COMBINATIONS("Append an environment value") {
                EnvironmentValue value1("APPEND1", "append-value1");
                actualEnvironment.insert(value1);
                task.appendToEnvironment(move(value1));
            }
            // Appending the same key twice: the later value must win.
            COMBINATIONS(
                "Append an environment value twice with a different value") {
                EnvironmentValue value1("OVERWRITE1", "append-value1");
                task.appendToEnvironment(move(value1));
                EnvironmentValue value2("OVERWRITE1", "append-value2");
                actualEnvironment.insert(value2);
                task.appendToEnvironment(move(value2));
            }
            COMBINATIONS("Setting the work directory") {
                actualWorkingDir /= "tmp";
                task.setWorkingDirectory(actualWorkingDir);
            }
            COMBINATIONS("Append an environment collection") {
                EnvironmentCollection collection(
                    {{"APPENDCOLLECTION1", "append-collection1"},
                     {"APPENDCOLLECTION2", "append-collection2"}});
                for(const auto& value : collection) {
                    actualEnvironment.insert(value);
                }
                task.appendToEnvironment(move(collection));
            }
            // The task is expected to expose its working directory through a
            // PWD entry in its environment.
            actualEnvironment.emplace(make_pair(
                "PWD", filesystem::absolute(actualWorkingDir).string()));
            THEN_WHEN("We check the getters") {
                THEN_CHECK("The commands are correct") {
                    REQUIRE(task.getTask() == actualCommands);
                }
                THEN_CHECK("The command string is correct") {
                    REQUIRE(task.toString() == toString(actualCommands));
                }
                THEN_CHECK("The environment is correct") {
                    REQUIRE(task.getEnvironment() == actualEnvironment);
                }
                THEN_CHECK("The working directory is correct") {
                    REQUIRE(task.getWorkingDirectory() == actualWorkingDir);
                }
            }
        }
    }
}
// Checks Task's (in)equality semantics: commands, environment and working
// directory must all contribute to the comparison.
SCENARIO("Test the comparison operators", "[task]") {
    GIVEN("One task") {
        Task actualTask;
        actualTask.append("task1");
        actualTask.append("task2");
        WHEN("We compare it with itself") {
            THEN("It should be true") {
                REQUIRE(actualTask == actualTask);
                REQUIRE_FALSE(actualTask != actualTask);
            }
        }
    }
    GIVEN("Two tasks used for equality comparison") {
        MAKE_COMBINATIONS("For different equality situations") {
            Task task1;
            Task task2;
            COMBINATIONS("Append the same commands") {
                static const TaskCollection actualTask({"task1", "task2"});
                task1.append(actualTask);
                task2.append(actualTask);
            }
            COMBINATIONS("Add the same environment") {
                static const EnvironmentCollection env({{"ENV1", "value1"},
                                                        {"ENV2", "value2"},
                                                        {"ENV3", "value3"}});
                task1.setEnvironment(env);
                task2.setEnvironment(env);
            }
            COMBINATIONS("Add the same working directories") {
                static const Path workingDirectory("/tmp");
                task1.setWorkingDirectory(workingDirectory);
                task2.setWorkingDirectory(workingDirectory);
            }
            THEN_WHEN("We compare them") {
                THEN_CHECK("They should compare equal") {
                    REQUIRE(task1 == task2);
                    REQUIRE_FALSE(task1 != task2);
                }
            }
        }
    }
    GIVEN("Two tasks used for equality comparison") {
        MAKE_COMBINATIONS("For different inequality situations") {
            Task task1;
            Task task2;
            COMBINATIONS("Append to one a command") {
                task1.append("one-command");
            }
            COMBINATIONS("Different commands") {
                task1.append("command1");
                task2.append("command2");
            }
            COMBINATIONS("Different number of commands") {
                task1.append("command1");
                task2.append(TaskCollection({"command1", "command2"}));
            }
            COMBINATIONS("Commands in different order") {
                TaskCollection commands({"command1", "command2"});
                task1.append(commands);
                task2.append(reverse(commands));
            }
            COMBINATIONS("Append to one an additional environment variable") {
                task1.appendToEnvironment(EnvironmentValue("ONE-ENV", "env1"));
            }
            COMBINATIONS("Append different number of environment variables") {
                const EnvironmentValue value1("ENV1", "value1");
                EnvironmentValue moveValue1ForTask1(value1);
                EnvironmentValue moveValue1ForTask2(value1);
                const EnvironmentValue value2("ENV2", "value1");
                EnvironmentValue moveValue2ForTask2(value2);
                task1.appendToEnvironment(move(moveValue1ForTask1));
                task2.appendToEnvironment(move(moveValue1ForTask2));
                task2.appendToEnvironment(move(moveValue2ForTask2));
            }
            // Fixed: both values were previously appended to task1, so these
            // combinations never compared tasks differing by key or by value
            // as their descriptions claim. The second append now goes to
            // task2.
            COMBINATIONS("Append environment variables with different keys") {
                task1.appendToEnvironment(EnvironmentValue("key1", "value"));
                task2.appendToEnvironment(EnvironmentValue("key2", "value"));
            }
            COMBINATIONS("Append environment variables with different values") {
                task1.appendToEnvironment(EnvironmentValue("key", "value1"));
                task2.appendToEnvironment(EnvironmentValue("key", "value2"));
            }
            COMBINATIONS("Change the working directories") {
                task1.setWorkingDirectory(filesystem::current_path());
                task2.setWorkingDirectory(filesystem::current_path() / "tmp");
            }
            THEN_WHEN("We compare them") {
                THEN_CHECK("They should not compare equal") {
                    // Only assert inequality when at least one differing
                    // combination actually ran; otherwise both tasks are
                    // identical empty tasks.
                    COMBINATIONS_ONLY_IF_BRANCH_TAKEN {
                        REQUIRE(task1 != task2);
                        REQUIRE_FALSE(task1 == task2);
                    }
                }
            }
        }
    }
}
// Checks operator<< for Task: the stream remains valid and the output
// matches the format built below:
// "Task {Environment(n): {...} Command(n): {...} Working-dir: {...}}".
SCENARIO("Test the streaming operator", "[task]") {
    GIVEN("An empty stream and a task") {
        stringstream stream;
        stringstream actualStream;
        const Path actualWorkingDirectory("/tmp");
        const TaskCollection tasks({"task1", "task2"});
        Task task;
        task.append(tasks);
        // PWD is included explicitly so the expected environment matches the
        // task's working directory entry.
        const EnvironmentCollection env(
            {{"ENV1", "value1"},
             {"ENV2", "value2"},
             {"PWD", filesystem::absolute(actualWorkingDirectory).string()}});
        task.setEnvironment(env);
        task.setWorkingDirectory(actualWorkingDirectory);
        WHEN("We stream the task") {
            stream << task;
            THEN("The stream should be ok") { REQUIRE(stream.good()); }
            THEN("The stream should have the correct format") {
                actualStream << "Task {Environment(" << env.size() << "): {";
                for(const auto& envValue : env) {
                    actualStream << " " << envValue.first << ": "
                                 << envValue.second << ";";
                }
                actualStream << "} ";
                actualStream << "Command(" << tasks.size() << "): {";
                for(const auto& taskPart : tasks) {
                    actualStream << " " << taskPart;
                }
                actualStream << "} ";
                actualStream << "Working-dir: {"
                             << actualWorkingDirectory.native() << "}";
                actualStream << "}" << endl;
                REQUIRE(stream.str() == actualStream.str());
            }
        }
    }
}
} // namespace execHelper::core::test
<file_sep>[pytest]
bdd_features_base_dir = feature
cache_dir=../../build/
junit_family=xunit2
markers =
successful: Run all scenarios that test for successful cases
    error: Run all scenarios that test for error cases
execution_order: Run all tests having to do with order of execution of command lines
cmd_args: Run all tests concerning command line arguments
no_args: Run all tests concerning calling exec-helper without any command line options
invalid_args: Run all tests concerning calling exec-helper with invalid command line arguments
help_option: Run all tests concerning the 'help' command line option
version_option: Run all tests concerning the 'version' command line option
dry_run_option: Run all tests concerning the 'dry run' command line option
list_plugins_option: Run all tests concerning the 'list plugins' command line option
auto_complete_option: Run all tests concerning the 'auto complete' command line option
keep_going_option: Run all tests concerning the 'keep going' command line option
config: Run all tests concerning configuration options
environment: Run all tests concerning setting the environment in the configuration
custom_plugins: Run all tests related to custom plugins
custom_plugins_discovery: Run all tests related to custom plugin discovery
custom_plugins_usage: Run all tests related to custom plugin usage
working_dir: Run all tests related to the working directory of tasks
settings_file_location: Run all tests related to paths relative to the settings file
<file_sep>#ifndef FLEETING_OPTIONS_STUB_INCLUDE
#define FLEETING_OPTIONS_STUB_INCLUDE
#include "config/fleetingOptionsInterface.h"
namespace execHelper::test {
/**
 * \brief Stub implementation of FleetingOptionsInterface for tests: every
 *        getter simply returns the corresponding public member, which tests
 *        assign directly to control the stub's answers.
 */
class FleetingOptionsStub : public config::FleetingOptionsInterface {
  public:
    virtual ~FleetingOptionsStub() = default;
    config::HelpOption_t getHelp() const noexcept override { return m_help; }
    config::VersionOption_t getVersion() const noexcept override {
        return m_version;
    }
    config::VerboseOption_t getVerbosity() const noexcept override {
        return m_verbose;
    }
    config::DryRunOption_t getDryRun() const noexcept override {
        return m_dryRun;
    }
    auto getKeepGoing() const noexcept -> config::KeepGoingOption_t override {
        return m_keepGoing;
    }
    auto getLogLevel() const noexcept -> log::LogLevel override {
        return m_logLevel;
    }
    config::Jobs_t getJobs() const noexcept override { return m_jobs; }
    [[nodiscard]] auto listPlugins() const noexcept
        -> config::ListPluginsOption_t override {
        return m_listPlugins;
    }
    [[nodiscard]] auto appendedSearchPaths() const noexcept
        -> const config::Paths& override {
        return m_appendSearchPaths;
    }
    const config::CommandCollection& getCommands() const noexcept override {
        return m_commands;
    }
    auto getAutoComplete() const noexcept
        -> const std::optional<config::AutoCompleteOption_t>& override {
        return m_autocomplete;
    }
    // Public state backing the getters above; set these directly in tests.
    config::HelpOption_t m_help = {false};
    config::VersionOption_t m_version = {false};
    config::VerboseOption_t m_verbose = {false};
    config::DryRunOption_t m_dryRun = {false};
    config::KeepGoingOption_t m_keepGoing = {false};
    log::LogLevel m_logLevel = {log::warning};
    config::Jobs_t m_jobs = 1024U;
    config::ListPluginsOption_t m_listPlugins = {false};
    config::Paths m_appendSearchPaths = {};
    config::CommandCollection m_commands = {};
    std::optional<config::AutoCompleteOption_t> m_autocomplete;
};
} // namespace execHelper::test
#endif /* FLEETING_OPTIONS_STUB_INCLUDE */
<file_sep>.. _exec-helper-plugins-memory:
Memory plugin
*************
Description
===========
The memory plugin remembers the times and the parameters with which it was called. The plugin is mainly used for testing purposes.
Mandatory settings
==================
There are no mandatory settings for the memory plugin.
Optional settings
=================
There are no optional settings for the memory plugin.
See also
========
See :ref:`exec-helper` (1) for information about the usage of :program:`exec-helper`.
See :ref:`exec-helper-config` (5) for information about the configuration file format.
See :ref:`exec-helper-plugins` (5) for information about the available plugins and their configuration.
<file_sep>#ifndef THREADEDNESS_INCLUDE
#define THREADEDNESS_INCLUDE
#include <string>
#include <gsl/string_span>
#include "config/commandLineOptions.h"
#include "config/fleetingOptionsInterface.h"
#include "config/variablesMap.h"
#include "core/task.h"
namespace execHelper::plugins {
// Convenience alias for the configured job-count type.
using Jobs = config::Jobs_t;
// Configuration key under which the number of parallel jobs is stored.
const gsl::czstring<> JOBS_KEY = "jobs";
/**
 * \brief Extends the functionality to include the _jobs_ config parameter,
 * forwarding it to the wrapped command via the long `--jobs` flag
 */
struct JobsLong {
    /**
     * Adds the variables for this functionality to the given variables map
     *
     * @param[in] options The fleeting options to take into account
     * @param[out] variables The variables map to add the variables to
     */
    static void
    getVariables(config::VariablesMap& variables,
                 const config::FleetingOptionsInterface& options) noexcept;

    /**
     * Applies the given variables to the task
     *
     * @param[in] variables The variables map to use
     * @param[out] task The task with the given variables map applied to it
     */
    inline static void apply(core::Task& task,
                             const config::VariablesMap& variables) noexcept {
        const auto configuredJobs = variables.get<Jobs>(JOBS_KEY);
        task.append({"--jobs", std::to_string(*configuredJobs)});
    }
};
/**
 * \brief Extends the functionality to include the _jobs_ config parameter,
 * forwarding it to the wrapped command via the short `-j` flag
 */
struct JobsShort {
    /*! @copydoc JobsLong::getVariables(config::VariablesMap&, const config::FleetingOptionsInterface&)
     */
    inline static void
    getVariables(config::VariablesMap& variables,
                 const config::FleetingOptionsInterface& options) noexcept {
        // Variable handling is identical to the long-option variant.
        JobsLong::getVariables(variables, options);
    }

    /*! @copydoc JobsLong::apply(core::Task&, const config::VariablesMap&)
     */
    inline static void apply(core::Task& task,
                             const config::VariablesMap& variables) noexcept {
        const auto configuredJobs = variables.get<Jobs>(JOBS_KEY);
        task.append({"-j", std::to_string(*configuredJobs)});
    }
};
} // namespace execHelper::plugins
#endif /* THREADEDNESS_INCLUDE */
<file_sep>set(MODULE_NAME plugins)
set(LIBRARY_NAME ${PROJECT_NAME}-${MODULE_NAME})
set(SRCS
src/luaPlugin.cpp
src/pmd.cpp
src/pluginUtils.cpp
src/memory.cpp
src/commandPlugin.cpp
src/valgrind.cpp
src/executePlugin.cpp
src/plugin.cpp
src/commandLineCommand.cpp
src/lcov.cpp
src/logger.cpp
src/commandLine.cpp
src/verbosity.cpp
src/addEnvironment.cpp
src/threadedness.cpp
)
set(DEPENDENCIES
log
config
core
lua-wrapper
)
add_library(${LIBRARY_NAME} ${SRCS})
target_include_directories(${LIBRARY_NAME} PRIVATE include/plugins)
target_include_directories(${LIBRARY_NAME} PUBLIC include ${DEPENDENCIES})
target_link_libraries(${LIBRARY_NAME} PUBLIC ${DEPENDENCIES})
add_library(${MODULE_NAME} ALIAS ${LIBRARY_NAME})
install(TARGETS ${LIBRARY_NAME} DESTINATION ${LIB_DIRECTORY})
set(PLUGINS
src/scripts/make.lua
src/scripts/cmake.lua
src/scripts/docker.lua
src/scripts/ninja.lua
src/scripts/bootstrap.lua
src/scripts/scons.lua
src/scripts/clang-tidy.lua
src/scripts/cppcheck.lua
src/scripts/selector.lua
src/scripts/clang-static-analyzer.lua
)
install(FILES ${PLUGINS} DESTINATION share/exec-helper/plugins COMPONENT runtime)
<file_sep>task:add_args({'ninja'})
task:add_args({'-C', one(config['build-dir']) or '.'})
task:add_args(get_verbose('--verbose'))
task:add_args({'-j', one(config['jobs']) or jobs})
task:add_args(get_commandline())
register_task(task)
<file_sep>#include "executePlugin.h"
#include <algorithm>
#include <gsl/gsl_assert>
#include <gsl/string_span>
#include "config/fleetingOptionsInterface.h"
#include "config/settingsNode.h"
#include "config/variablesMap.h"
#include "core/task.h"
#include "log/assertions.h"
#include "commandLineCommand.h"
#include "lcov.h"
#include "logger.h"
#include "memory.h"
#include "pluginUtils.h"
#include "pmd.h"
#include "valgrind.h"
using std::find;
using std::make_shared;
using std::shared_ptr;
using std::string;
using std::vector;
using gsl::czstring;
using gsl::not_null;
using execHelper::config::Command;
using execHelper::config::CommandCollection;
using execHelper::config::FleetingOptionsInterface;
using execHelper::config::PatternKeys;
using execHelper::config::Patterns;
using execHelper::config::PatternsHandler;
using execHelper::config::SettingsKey;
using execHelper::config::SettingsKeys;
using execHelper::config::SettingsNode;
using execHelper::config::SettingsValues;
using execHelper::config::VariablesMap;
using execHelper::core::Task;
namespace {
/**
 * Collects the patterns configured under the patterns key of the given
 * variables map. Unknown pattern keys are reported to the user and skipped.
 */
auto getNextPatterns(const VariablesMap& variables,
                     const PatternsHandler& patterns) -> Patterns {
    Patterns result;
    const auto configuredKeys =
        variables.get<PatternKeys>(execHelper::plugins::getPatternsKey(), {});
    for(const auto& configuredKey : configuredKeys) {
        if(patterns.contains(configuredKey)) {
            result.push_back(patterns.getPattern(configuredKey));
        } else {
            user_feedback_error("Unknown pattern key '" << configuredKey
                                << "' is ignored.");
        }
    }
    return result;
}
} // namespace
// Stacks of contextual state shared by all ExecutePlugin instances. Each
// nesting level of execution pushes its own context via the push() members
// and pops it when done; back() is always the active context.
vector<gsl::not_null< // NOLINT(fuchsia-statically-constructed-objects)
    const FleetingOptionsInterface*>>
    execHelper::plugins::ExecutePlugin::m_fleeting;
vector<SettingsNode> // NOLINT(fuchsia-statically-constructed-objects)
    execHelper::plugins::ExecutePlugin::m_settings;
vector<PatternsHandler> // NOLINT(fuchsia-statically-constructed-objects)
    execHelper::plugins::ExecutePlugin::m_patterns;
vector<execHelper::plugins:: // NOLINT(fuchsia-statically-constructed-objects)
       Plugins>
    execHelper::plugins::ExecutePlugin::
        m_plugins; // NOLINT(fuchsia-statically-constructed-objects)
namespace execHelper::plugins {
/**
 * Creates an execute plugin for the given commands. Each command acts as its
 * own initial (originating) command.
 *
 * @param[in] commandsToExecute The commands to execute
 */
ExecutePlugin::ExecutePlugin(
    const CommandCollection& commandsToExecute) noexcept
    : m_commands(commandsToExecute), m_initialCommands(commandsToExecute) {
    ensures(m_commands.size() == m_initialCommands.size());
}

/**
 * Creates an execute plugin for the given commands, all resolved on behalf
 * of the same originating command.
 *
 * @param[in] commandsToExecute The commands to execute
 * @param[in] initialCommand The originating command of every entry
 */
ExecutePlugin::ExecutePlugin(const CommandCollection& commandsToExecute,
                             const Command& initialCommand) noexcept
    : m_commands(commandsToExecute),
      m_initialCommands(commandsToExecute.size(), initialCommand) {
    ensures(m_commands.size() == m_initialCommands.size());
}
/**
 * Returns the default variables map for this plugin. The execute plugin has
 * no configurable variables of its own.
 */
auto ExecutePlugin::getVariablesMap(
    const FleetingOptionsInterface& /*fleetingOptions*/) const noexcept
    -> VariablesMap {
    VariablesMap defaults("execute-plugin");
    return defaults;
}
/**
 * Copies the configuration found under the given key in the given settings
 * tree into the given variables map: the last subkey's values replace any
 * existing entry, and every child node at that level is copied verbatim.
 *
 * @param[out] variables The variables map to update
 * @param[in] settings The settings tree to read from
 * @param[in] key The full settings key to index; must not be empty
 */
inline void ExecutePlugin::index(VariablesMap* variables,
                                 const SettingsNode& settings,
                                 const SettingsKeys& key) noexcept {
    if(!settings.contains(key)) {
        return;
    }
    expects(!key.empty());
    if(!variables->replace(key.back(), *(settings.get<SettingsValues>(key)))) {
        LOG(error) << "Failed to replace key '" << key.back() << "'";
    }
    // Get current depth to the level of the given key
    const SettingsNode& currentDepth = std::accumulate(
        key.begin(), key.end(), static_cast<const SettingsNode&>(settings),
        [](const auto& node, const auto& subkey) { return node[subkey]; });
    // Copy each direct child of the indexed node into the variables map.
    for(const auto& depthKey :
        currentDepth.get<SettingsValues>(SettingsKeys(), SettingsValues())) {
        (*variables)[depthKey] = currentDepth[depthKey];
    }
}
/**
 * Merges the configuration found under each of the given keys in the given
 * settings tree into the given variables map.
 *
 * @param[out] variables The variables map to merge into
 * @param[in] keys The settings keys to look up
 * @param[in] rootSettings The settings tree to look the keys up in
 * @returns Always true
 */
inline auto ExecutePlugin::getVariablesMap(
    VariablesMap* variables, const vector<SettingsKeys>& keys,
    const SettingsNode& rootSettings) noexcept -> bool {
    for(const auto& settingsKey : keys) {
        if(!rootSettings.contains(settingsKey)) {
            continue;
        }
        index(variables, rootSettings, settingsKey);
    }
    return true;
}
/**
 * Resolves each configured command to its plugin (or to a nested command)
 * and applies it to a copy of the given task.
 *
 * @returns True when every command was resolved and applied successfully
 */
auto ExecutePlugin::apply(Task task, const VariablesMap& /*variables*/,
                          const Patterns& /*patterns*/) const noexcept -> bool {
    if(m_commands.empty()) {
        user_feedback_error("No commands configured to execute");
        LOG(warning) << "No commands configured to execute";
    }
    // m_commands and m_initialCommands are kept equally long by the
    // constructors, so both iterators advance in lockstep.
    auto initialCommand = m_initialCommands.begin();
    for(auto command = m_commands.begin(); command != m_commands.end();
        ++command, ++initialCommand) {
        auto plugin = getNextStep(*command, *initialCommand);
        if(!plugin) {
            user_feedback_error("Could not find a command or plugin called '"
                                << *command << "'");
            return false;
        }
        expects(!m_fleeting.empty());
        expects(!m_settings.empty());
        VariablesMap newVariablesMap =
            plugin->getVariablesMap(*(m_fleeting.back()));
        // Settings under '<command>' are refined by the more specific
        // '<command>/<initial command>' settings, in that order.
        const vector<SettingsKeys> keys(
            {{*command}, {*command, *initialCommand}});
        getVariablesMap(&newVariablesMap, keys, m_settings.back());
        expects(!m_patterns.empty());
        auto newPatterns = getNextPatterns(newVariablesMap, m_patterns.back());
        if(!plugin->apply(task, newVariablesMap, newPatterns)) {
            user_feedback_error("An error occured executing the '"
                                << *command << "' command");
            return false;
        }
    }
    return true;
}
/** @returns a short, human-readable description of this plugin */
auto ExecutePlugin::summary() const noexcept -> std::string {
    const std::string description("ExecutePlugin");
    return description;
}
/**
 * Resolves the given command name: a known plugin name yields that plugin,
 * while a name configured as a command yields a nested ExecutePlugin
 * wrapping the commands configured under it.
 *
 * @returns The resolved plugin, or nullptr when the name is unknown
 */
auto ExecutePlugin::getNextStep(const Command& command,
                                const Command& /*originalCommand*/) noexcept
    -> shared_ptr<const Plugin> {
    const auto pluginNames = getPluginNames();
    if(find(pluginNames.begin(), pluginNames.end(), command) !=
       pluginNames.end()) {
        LOG(trace) << "Retrieving plugin named '" << command << "'.";
        try {
            return getPlugin(command);
        } catch(const InvalidPlugin& e) {
            LOG(error) << "Unable to retrieve a plugin name '" << command
                       << "'.";
            return nullptr;
        }
    }
    LOG(trace) << "Checking whether '" << command << "' is a known command";
    SettingsNode& settings = m_settings.back();
    auto commandToExecuteOpt = settings.get<CommandCollection>(command);
    if(!commandToExecuteOpt) {
        LOG(warning)
            << "Execute plugin found an empty value collection for the '"
            << command << "' command";
        return nullptr;
    }
    auto commandsToExecute = *(commandToExecuteOpt);
    // Trace the resolved sub-commands for debugging purposes.
    for(const auto& commandToExecute : commandsToExecute) {
        LOG(trace) << command << " -> " << commandToExecute;
    }
    return make_shared<ExecutePlugin>(commandsToExecute, command);
}
/**
 * Lists the names of all resolvable plugins: the compiled-in plugins plus
 * every plugin registered on the active (top-of-stack) plugins map.
 */
auto ExecutePlugin::getPluginNames() noexcept -> std::vector<std::string> {
    Expects(!m_plugins.empty());
    // Compiled-in plugins that are always available.
    std::vector<std::string> plugins{"command-line-command", "memory",
                                     "valgrind", "pmd", "lcov"};
    transform(m_plugins.back().begin(), m_plugins.back().end(),
              back_inserter(plugins),
              [](const auto& plugin) { return plugin.first; });
    return plugins;
}
/**
 * Returns the plugin associated with the given name. Registered plugins are
 * looked up first; the compiled-in plugins are constructed on demand.
 *
 * @throws InvalidPlugin When no plugin with the given name exists
 */
auto ExecutePlugin::getPlugin(const string& pluginName)
    -> shared_ptr<const Plugin> {
    // Since calling a plugin does not change its internal state (note the
    // constness of the plugin interface), the registered instance can be
    // shared directly instead of constructing a fresh one.
    if(!m_plugins.empty() && m_plugins.back().count(pluginName) > 0) {
        return m_plugins.back()[pluginName];
    }
    if(pluginName == "command-line-command") {
        return make_shared<CommandLineCommand>();
    }
    if(pluginName == "memory") {
        return make_shared<Memory>();
    }
    if(pluginName == "valgrind") {
        return make_shared<Valgrind>();
    }
    if(pluginName == "pmd") {
        return make_shared<Pmd>();
    }
    if(pluginName == "lcov") {
        return make_shared<Lcov>();
    }
    throw InvalidPlugin(string("Plugin associated with name '")
                            .append(pluginName)
                            .append("' not found"));
}
/**
 * Pushes the given fleeting options on the fleeting-options context stack.
 *
 * @returns Always true
 */
auto ExecutePlugin::push(
    not_null<const config::FleetingOptionsInterface*> fleetingOptions) noexcept
    -> bool {
    m_fleeting.push_back(fleetingOptions);
    return true;
}

/**
 * Pushes the given settings on the settings context stack.
 *
 * @returns Always true
 */
auto ExecutePlugin::push(config::SettingsNode&& settings) noexcept -> bool {
    // Fix: a named rvalue reference is an lvalue, so the previous
    // emplace_back(settings) silently copied instead of moving.
    m_settings.emplace_back(std::move(settings));
    return true;
}

/**
 * Pushes the given patterns on the patterns context stack.
 *
 * @returns Always true
 */
auto ExecutePlugin::push(config::Patterns&& patterns) noexcept -> bool {
    m_patterns.emplace_back(std::move(patterns));
    return true;
}

/**
 * Pushes the given plugins on the plugins context stack.
 */
void ExecutePlugin::push(Plugins&& plugins) noexcept {
    m_plugins.emplace_back(std::move(plugins));
}
// The pop* members below unwind the corresponding context stacks. Each one
// expects its stack to be non-empty, i.e. every pop must pair with an
// earlier push.
void ExecutePlugin::popFleetingOptions() noexcept {
    Expects(!m_fleeting.empty());
    m_fleeting.pop_back();
}

void ExecutePlugin::popSettingsNode() noexcept {
    Expects(!m_settings.empty());
    m_settings.pop_back();
}

void ExecutePlugin::popPatterns() noexcept {
    Expects(!m_patterns.empty());
    m_patterns.pop_back();
}

void ExecutePlugin::popPlugins() noexcept {
    Expects(!m_plugins.empty());
    m_plugins.pop_back();
}
} // namespace execHelper::plugins
<file_sep>import asyncio
import os
import pickle
import random
import stat
from threading import Thread
from pathlib import Path
import tempfile
import uuid
class Run(object):
    """Records the observable characteristics of one binary invocation.

    A spawned test binary reports the environment variables and working
    directory it ran with; one Run instance stores one such report.
    """

    def __init__(self):
        # Filled in by the receiving server once a report arrives.
        self._environment = {}
        self._working_dir = None

    @property
    def working_dir(self):
        """The working directory the run was executed in (None until set)."""
        return self._working_dir

    @working_dir.setter
    def working_dir(self, new_value):
        self._working_dir = new_value

    @property
    def environment(self):
        """dict: the environment variables the run was executed with."""
        return self._environment

    @environment.setter
    def environment(self, new_value):
        self._environment = new_value
class Server(Thread):
    """TCP server, on its own thread, that collects run reports.

    Each spawned test binary connects back to this server and sends one
    pickled dictionary describing its execution; every report is stored as
    a Run object.
    """

    def __init__(self, host, port):
        Thread.__init__(self)
        self._host = host
        self._port = port
        self._runs = []
        # The asyncio event loop serving connections; set only while running.
        self._loop = None

    @property
    def host(self):
        return self._host

    @property
    def port(self):
        return self._port

    async def get_characteristics(self, reader, writer):
        # Connection handler: receives one pickled report per connection.
        print("hello")
        serialized = await reader.read(10000)
        # NOTE(review): unpickling socket data is only acceptable because the
        # peer is a binary generated by this test suite itself.
        data = pickle.loads(serialized)
        print("Received data!")
        run = Run()
        run.environment = data['env']
        run.working_dir = Path(data['working_dir'])
        self._runs.append(run)
        writer.close()
        await writer.wait_closed()

    @property
    def runs(self):
        # All reports received so far.
        return self._runs

    def run(self):
        # Thread entry point: serve connections until stop() halts the loop.
        # NOTE(review): asyncio.start_server picks up the loop when awaited;
        # confirm this pattern on the targeted Python version.
        self._loop = asyncio.new_event_loop()
        coro = asyncio.start_server(self.get_characteristics, self._host, self._port)
        server = self._loop.run_until_complete(coro)
        self._loop.run_forever()
        # stop() halted the loop; shut the listening socket down cleanly.
        server.close()
        self._loop.run_until_complete(server.wait_closed())
        self._loop.close()
        self._loop = None

    def stop(self):
        # Request loop shutdown; safe to call from another thread.
        if self._loop is not None:
            self._loop.call_soon_threadsafe(self._loop.stop)
class Command(object):
    """An exec-helper command backed by a generated reporting binary.

    The generated binary, when executed by exec-helper, connects back to a
    per-command Server and reports the environment and working directory it
    was run with, then exits with the configured return code.
    """

    # Naming scheme for the generated binary files.
    _prefix = 'binary-'
    _suffix = '.exec-helper'

    def __init__(self, id, plugin_id, directory, return_code = 0):
        self._id = id
        self._plugin_id = plugin_id
        # Unique path so multiple commands can share one directory.
        self._binary = Path(directory).joinpath(self._prefix + str(uuid.uuid4()) + self._suffix)
        self._env = dict()
        self._patterns = []
        self._return_code = return_code
        # NOTE(review): a random port in the dynamic range may already be in
        # use; collisions would make the server fail to bind.
        port = random.randint(49152,65535)
        self._server = Server('localhost', port)

    def __del__(self):
        self.stop()
        # self.remove()

    @property
    def id(self):
        return self._id

    @property
    def runs(self):
        # The reports collected for every execution of the binary so far.
        return self._server.runs

    @property
    def patterns(self):
        return self._patterns

    def set_environment(self, envs):
        self._env = envs

    def add_pattern(self, pattern):
        self._patterns.append(pattern)

    def to_dict(self):
        # Renders this command as the dictionary structure expected in an
        # exec-helper configuration file.
        result = dict()
        result[self._id] = self._plugin_id
        result[self._plugin_id] = dict()
        result[self._plugin_id][self._id] = dict()
        result[self._plugin_id][self._id]['command-line'] = [str(self._binary)]
        if self._env:
            result[self._plugin_id][self._id]['environment'] = self._env
        if self._patterns:
            result[self._plugin_id][self._id]['patterns'] = [pattern.id for pattern in self._patterns]
        return result

    def write_binary(self):
        # Generates the reporting binary (a small Python script) and starts
        # the server that will receive its reports.
        with open(self._binary, 'w') as f:
            f.write("#!/usr/bin/env python3\n")
            f.write("import asyncio\n")
            f.write("import os\n")
            f.write("import pickle\n")
            f.write("import sys\n")
            f.write("\n")
            f.write("async def set_characteristics(loop):\n")
            f.write("    reader,writer = await asyncio.open_connection('{host}', {port})\n".format(host = self._server.host, port = self._server.port))
            f.write("\n")
            f.write("    run_data = dict()\n")
            f.write("    run_data['env'] = { key: value for key, value in os.environ.items()}\n")
            f.write("    run_data['working_dir'] = f'{os.getcwd()}'\n")
            f.write("    serialized = pickle.dumps(run_data)\n")
            f.write("\n")
            f.write("    writer.write(serialized)\n")
            f.write("    writer.close()\n")
            f.write("    await writer.wait_closed()\n")
            f.write("\n")
            f.write("loop = asyncio.get_event_loop()\n")
            f.write("loop.run_until_complete(set_characteristics(loop))\n")
            f.write("loop.close()\n")
            f.write("sys.exit({RETURN_CODE})\n".format(RETURN_CODE = self._return_code))
        # Owner read + execute is sufficient for exec-helper to run it.
        os.chmod(self._binary, stat.S_IREAD | stat.S_IEXEC)
        self._server.start()

    def remove(self):
        self._binary.unlink()

    def stop(self):
        # Stops and joins the report server, if it is still running.
        if self._server.is_alive():
            self._server.stop()
            self._server.join()

    @staticmethod
    def _permutate_patterns(patterns):
        # Recursively yields every combination of pattern values as a dict
        # mapping pattern id to the chosen value. Patterns without values
        # are skipped.
        if not patterns:
            yield dict()
            return
        pattern = patterns[0]
        if not pattern.values:
            for permutation in Command._permutate_patterns(patterns[1:]):
                yield permutation
        for value in pattern.values:
            for permutation in Command._permutate_patterns(patterns[1:]):
                permutation[pattern.id] = value
                yield permutation

    def pattern_generator(self):
        # Yields every value combination of this command's patterns.
        for permutation in Command._permutate_patterns(self._patterns):
            yield permutation
<file_sep>#include <algorithm>
#include <array>
#include <filesystem>
#include <optional>
#include <string>
#include <string_view>
#include <utility>
#include "config/pattern.h"
#include "config/variablesMap.h"
#include "plugins/luaPlugin.h"
#include "unittest/catch.h"
#include "unittest/config.h"
#include "unittest/rapidcheck.h"
#include "utils/commonGenerators.h"
#include "utils/utils.h"
#include "executorStub.h"
#include "fleetingOptionsStub.h"
#include "handlers.h"
using std::array;
using std::optional;
using std::pair;
using std::string;
using std::string_view;
using std::vector;
using execHelper::config::EnvironmentCollection;
using execHelper::config::Jobs_t;
using execHelper::config::Patterns;
using execHelper::config::VariablesMap;
using execHelper::core::Task;
using execHelper::core::test::ExecutorStub;
using execHelper::test::propertyTest;
using execHelper::test::utils::getExpectedTasks;
namespace filesystem = std::filesystem;
namespace {
// The cmake plugin modes under test.
enum class Mode { Generate, Build, Install };

/**
 * Returns every supported cmake mode, enabling exhaustive generation in the
 * property-based tests below.
 */
constexpr auto getAllModes() noexcept {
    // Fix: dropped the stray semicolon that followed the function body
    // (-Wextra-semi); behavior is unchanged.
    return std::array<Mode, 3>({Mode::Generate, Mode::Build, Mode::Install});
}
} // namespace
namespace rc {
// Teaches rapidcheck how to generate an arbitrary cmake plugin Mode by
// picking uniformly from the full set of modes.
template <> struct Arbitrary<Mode> {
    static auto arbitrary() -> Gen<Mode> {
        return gen::elementOf(getAllModes());
    };
};
} // namespace rc
namespace execHelper::plugins::test {
// Property-based test: for every mode and every combination of (optional)
// configuration settings, the cmake plugin must produce exactly the expected
// cmake command line.
SCENARIO("Testing the configuration settings of the cmake plugin", "[cmake]") {
    propertyTest(
        "",
        [](Mode mode, const optional<filesystem::path>& sourcePath,
           const optional<filesystem::path>& buildPath,
           const optional<filesystem::path>& workingDir,
           const optional<vector<string>>& commandLine,
           const optional<EnvironmentCollection>& environment,
           const optional<bool> verbose, const optional<Jobs_t> jobs,
           const optional<string>& generator,
           const optional<pair<string, string>>&
               defines, // Problem: the order of a map is not necessarily preserved
           const optional<string>& target,
           const optional<filesystem::path>& prefix,
           const optional<string>& configuration,
           const optional<string>& component) {
            const Task task;
            Task expectedTask(task);
            Patterns patterns;

            VariablesMap config("cmake-test");

            LuaPlugin plugin(std::string(PLUGINS_INSTALL_PATH) + "/cmake.lua");

            // Capture the tasks the plugin executes.
            ExecutorStub executor;
            ExecuteCallback executeCallback = [&executor](const Task& task) {
                executor.execute(task);
            };
            registerExecuteCallback(executeCallback);

            expectedTask.append("cmake");

            // Set all configuration options, even if they are not relevant
            if(sourcePath) {
                REQUIRE(config.add("source-dir", sourcePath->string()));
            }
            if(buildPath) {
                REQUIRE(config.add("build-dir", buildPath->string()));
            }
            if(generator) {
                REQUIRE(config.add("generator", *generator));
            }
            if(defines) {
                REQUIRE(
                    config.add({"defines", defines->first}, defines->second));
            }
            if(target) {
                REQUIRE(config.add("target", *target));
            }
            if(jobs) {
                REQUIRE(config.add("jobs", std::to_string(*jobs)));
            }
            if(prefix) {
                REQUIRE(config.add("prefix", prefix->string()));
            }
            if(configuration) {
                REQUIRE(config.add("configuration", *configuration));
            }
            if(component) {
                REQUIRE(config.add("component", *component));
            }

            // Per mode, build the command line the plugin is expected to
            // emit; options that do not apply to a mode must be ignored.
            switch(mode) {
            case Mode::Generate:
                REQUIRE(config.add("mode", "generate"));
                expectedTask.append(
                    {"-S", sourcePath ? sourcePath->string() : "."});
                expectedTask.append(
                    {"-B", buildPath ? buildPath->string() : "."});
                if(generator) {
                    expectedTask.append({"-G", *generator});
                }
                if(defines) {
                    expectedTask.append({"-D", string("\"") + defines->first +
                                                   "=" + defines->second +
                                                   "\""});
                }
                if(verbose) {
                    handleVerbosity(*verbose, "--log-level=VERBOSE", config,
                                    expectedTask);
                }
                break;
            case Mode::Build:
                REQUIRE(config.add("mode", "build"));
                expectedTask.append(
                    {"--build", buildPath ? buildPath->string() : "."});
                if(target) {
                    expectedTask.append({"--target", *target});
                }
                if(configuration) {
                    expectedTask.append({"--config", *configuration});
                }
                expectedTask.append(
                    {"--parallel", jobs ? std::to_string(*jobs) : "1"});
                if(verbose) {
                    handleVerbosity(*verbose, "--verbose", config,
                                    expectedTask);
                }
                break;
            case Mode::Install:
                REQUIRE(config.add("mode", "install"));
                expectedTask.append(
                    {"--install", buildPath ? buildPath->string() : "."});
                if(configuration) {
                    expectedTask.append({"--config", *configuration});
                }
                if(prefix) {
                    expectedTask.append({"--prefix", prefix->string()});
                }
                if(component) {
                    expectedTask.append({"--component", *component});
                }
                if(verbose) {
                    handleVerbosity(*verbose, "--verbose", config,
                                    expectedTask);
                }
                break;
            default:
                REQUIRE(false);
            }

            // Options shared by all modes.
            if(environment) {
                handleEnvironment(*environment, config, expectedTask);
            }
            if(commandLine) {
                handleCommandLine(*commandLine, config, expectedTask);
            }
            if(workingDir) {
                handleWorkingDirectory(*workingDir, config, expectedTask);
            }

            ExecutorStub::TaskQueue expectedTasks =
                getExpectedTasks(expectedTask, patterns);

            THEN_WHEN("We apply the plugin") {
                bool returnCode = plugin.apply(task, config, patterns);
                THEN_CHECK("It should succeed") { REQUIRE(returnCode); }

                THEN_CHECK("It called the right commands") {
                    REQUIRE(expectedTasks == executor.getExecutedTasks());
                }
            }
        });
}
// Property-based test: any mode other than the three supported ones must
// make the plugin fail.
SCENARIO("Set a wrong mode", "[cmake]") {
    propertyTest("", [](const string& mode) {
        RC_PRE(mode != "generate");
        RC_PRE(mode != "build");
        RC_PRE(mode != "install");

        const Task task;
        Patterns patterns;

        VariablesMap config("cmake-test");

        LuaPlugin plugin(std::string(PLUGINS_INSTALL_PATH) + "/cmake.lua");

        ExecutorStub executor;
        ExecuteCallback executeCallback = [&executor](const Task& task) {
            executor.execute(task);
        };
        registerExecuteCallback(executeCallback);

        REQUIRE(config.add("mode", mode));

        THEN_WHEN("We apply the plugin") {
            bool returnCode = plugin.apply(task, config, patterns);
            THEN_CHECK("It should fail") { REQUIRE_FALSE(returnCode); }
        }
    });
}
} // namespace execHelper::plugins::test
<file_sep>set(MODULE_NAME luajit)
find_package(PkgConfig QUIET)
if(PKG_CONFIG_FOUND)
pkg_check_modules(${MODULE_NAME} luajit IMPORTED_TARGET)
endif()
# Attempt to find it if not configured in pkgconfig
if(NOT ${MODULE_NAME}_FOUND)
MESSAGE(STATUS "Looking manually")
set(${MODULE_NAME}_LIBRARIES luajit-5.1)
find_path(${MODULE_NAME}_INCLUDE_DIRS NAMES lua.hpp PATH_SUFFIXES luajit-2.0 luajit-2.1)
find_library(${MODULE_NAME}_LIBRARY_DIRS NAMES ${${MODULE_NAME}_LIBRARIES})
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(${MODULE_NAME}
FOUND_VAR ${MODULE_NAME}_FOUND
REQUIRED_VARS ${MODULE_NAME}_INCLUDE_DIRS ${MODULE_NAME}_LIBRARY_DIRS
)
mark_as_advanced(${MODULE_NAME}_INCLUDE_DIRS)
mark_as_advanced(${MODULE_NAME}_LIBRARIES)
mark_as_advanced(${MODULE_NAME}_LIBRARY_DIRS)
if(${MODULE_NAME}_FOUND)
add_library(${MODULE_NAME} SHARED IMPORTED GLOBAL)
target_include_directories(${MODULE_NAME} SYSTEM INTERFACE ${${MODULE_NAME}_INCLUDE_DIRS})
target_link_libraries(${MODULE_NAME} INTERFACE ${${MODULE_NAME}_LIBRARIES})
endif()
endif()
<file_sep>#include "utils.h"
#include <filesystem>
#include <iostream>
#include <sstream>
#include <gsl/gsl>
#include "config/pattern.h"
#include "config/settingsNode.h"
#include "plugins/pluginUtils.h"
#include "base-utils/configFileWriter.h"
#include "base-utils/yaml.h"
#include "logger.h"
using std::endl;
using std::initializer_list;
using std::map;
using std::pair;
using std::reference_wrapper;
using std::string;
using std::stringstream;
using std::vector;
using gsl::not_null;
using execHelper::config::EnvironmentCollection;
using execHelper::config::EnvironmentValue;
using execHelper::config::Path;
using execHelper::config::Pattern;
using execHelper::config::PatternCombinations;
using execHelper::config::PatternKey;
using execHelper::config::PatternKeys;
using execHelper::config::PatternValue;
using execHelper::config::PatternValues;
using execHelper::config::SettingsKeys;
using execHelper::config::SettingsNode;
using execHelper::config::SettingsValues;
using execHelper::core::Task;
using execHelper::plugins::PatternPermutator;
using execHelper::plugins::replacePatternCombinations;
using execHelper::core::test::ExecutorStub;
using execHelper::test::baseUtils::ConfigFileWriter;
using execHelper::test::baseUtils::YamlWriter;
namespace filesystem = std::filesystem;
namespace {
// Tokens used when hand-rolling YAML snippets in the convertToConfig
// helpers below.
const string YAML_CONFIG_KEY_DELIMITER(": ");
const string YAML_CONFIG_DELIMITER("\n");
const string YAML_CONFIG_NESTED_CHARACTER("  ");
const string YAML_CONFIG_OPTION_CHARACTER("- ");

// Root key under which patterns are configured.
const string rootPatternKey("patterns");
} // namespace
namespace execHelper {
namespace test {
namespace utils {
/**
 * Builds argc/argv-style main() arguments from the given argument list.
 *
 * The stored pointers alias the given strings' buffers (the const_cast only
 * satisfies main()'s signature), so `arguments` must outlive this object and
 * consumers must not modify the pointed-to data.
 */
MainVariables::MainVariables(const Arguments& arguments) {
    argc = static_cast<int>(arguments.size());
    argv.reset(new char*[argc]);

    for(size_t i = 0; i < arguments.size(); ++i) {
        argv.get()[i] = const_cast<char*>(arguments[i].c_str());
    }
}
// Convenience overloads that normalize their arguments and forward to
// convertToConfig(const vector<string>&, const vector<string>&,
// const string&) below.
string convertToConfig(const initializer_list<string>& keys,
                       const string& value, const string& prepend) noexcept {
    return convertToConfig(vector<string>(keys), {value}, prepend);
}

string convertToConfig(const initializer_list<string>& keys,
                       const initializer_list<string>& values,
                       const string& prepend) noexcept {
    return convertToConfig(vector<string>(keys), values, prepend);
}

string convertToConfig(const initializer_list<string>& keys,
                       const vector<string>& values,
                       const string& prepend) noexcept {
    return convertToConfig(vector<string>(keys), vector<string>(values),
                           prepend);
}

string convertToConfig(const vector<string>& keys,
                       const initializer_list<string>& values,
                       const string& prepend) noexcept {
    return convertToConfig(keys, vector<string>(values), prepend);
}
/**
 * Serializes the given nested key sequence and its list of values to a
 * YAML-formatted string fragment.
 *
 * @param[in] keys The key hierarchy, outermost key first
 * @param[in] values The values listed under the innermost key
 * @param[in] prepend A string prefixed to every emitted line
 * @returns The YAML fragment
 */
string convertToConfig(const vector<string>& keys, const vector<string>& values,
                       const string& prepend) noexcept {
    string result;
    string indentation;
    // Every key opens one deeper nesting level.
    for(const auto& currentKey : keys) {
        result.append(prepend)
            .append(indentation)
            .append(currentKey)
            .append(YAML_CONFIG_KEY_DELIMITER)
            .append(YAML_CONFIG_DELIMITER);
        indentation.append(YAML_CONFIG_NESTED_CHARACTER);
    }
    // The values form a YAML list at the innermost level.
    for(const auto& currentValue : values) {
        result.append(prepend)
            .append(indentation)
            .append(YAML_CONFIG_OPTION_CHARACTER)
            .append(currentValue)
            .append(YAML_CONFIG_DELIMITER);
    }
    result.append(YAML_CONFIG_DELIMITER);
    return result;
}
// Single-key convenience overload.
string convertToConfig(const string& key, const vector<string>& values,
                       const string& prepend) {
    return convertToConfig(vector<string>({key}), values, prepend);
}
string convertToConfig(const Patterns& patterns) noexcept {
stringstream stream;
if(!patterns.empty()) {
stream << rootPatternKey << YAML_CONFIG_KEY_DELIMITER
<< YAML_CONFIG_DELIMITER;
for(auto& pattern : patterns) {
string patternNestedCharacter = YAML_CONFIG_NESTED_CHARACTER;
stream << patternNestedCharacter << pattern.getKey()
<< YAML_CONFIG_KEY_DELIMITER << YAML_CONFIG_DELIMITER;
patternNestedCharacter += YAML_CONFIG_NESTED_CHARACTER;
const auto shortOption = pattern.getShortOption();
if(shortOption) {
stream << convertToConfig("short-option",
string(1U, shortOption.value()),
patternNestedCharacter);
}
const auto longOption = pattern.getLongOption();
if(longOption) {
stream << convertToConfig("long-option", longOption.value(),
patternNestedCharacter);
}
stream << convertToConfig("values", pattern.getValues(),
patternNestedCharacter);
}
}
return stream.str();
}
/**
 * Recursively serializes the given settings tree to a YAML fragment.
 *
 * A leaf becomes a list entry, a node with a single leaf child becomes a
 * `key: value` line, and any other node opens a nested mapping.
 */
string convertToConfig(const SettingsNode& settings,
                       const string& prepend) noexcept {
    stringstream stream;

    const string nestedCharacter(prepend);

    const auto settingsValues =
        settings.get<SettingsValues>(SettingsKeys(), SettingsValues());
    if(settingsValues.empty()) {
        // Leaf node: emit it as a list entry.
        stream << prepend << YAML_CONFIG_OPTION_CHARACTER << settings.key()
               << YAML_CONFIG_DELIMITER;
    } else if(settingsValues.size() == 1 &&
              settings
                  .get<SettingsValues>(settingsValues.back(), SettingsValues())
                  .empty()) {
        // Exactly one leaf child: emit the compact `key: value` form.
        stream << prepend << settings.key() << YAML_CONFIG_KEY_DELIMITER
               << settingsValues.back() << YAML_CONFIG_DELIMITER;
    } else {
        // Multiple or nested children: open a mapping and recurse.
        string valueNestedCharacter = nestedCharacter;
        stream << nestedCharacter << settings.key() << YAML_CONFIG_KEY_DELIMITER
               << YAML_CONFIG_DELIMITER;
        valueNestedCharacter += YAML_CONFIG_NESTED_CHARACTER;
        for(const auto& key : settingsValues) {
            stream << convertToConfig(settings[key], valueNestedCharacter);
        }
    }
    return stream.str();
}
/**
 * Serializes the given patterns followed by the given settings tree to one
 * YAML fragment.
 */
string convertToConfig(const SettingsNode& settings, const Patterns& patterns,
                       const string& prepend) noexcept {
    auto result = convertToConfig(patterns);
    result.append(convertToConfig(settings, prepend));
    return result;
}
// Emits a single `key: value` line prefixed with the given prepend string.
// NOTE(review): key and value are taken by value; changing them to const&
// would also require changing the matching header declaration.
string convertToConfig(string key, string value, const string& prepend) {
    return prepend + key + YAML_CONFIG_KEY_DELIMITER + value +
           YAML_CONFIG_DELIMITER;
}

// Forwards an initializer list of values as a vector.
string convertToConfig(const string& key,
                       const std::initializer_list<string>& values,
                       const string& prepend) {
    std::vector<string> vectorValues(values);
    return convertToConfig(key, vectorValues, prepend);
}
// NOTE(review): despite its name, this returns everything *before* the last
// path separator (the dirname/parent part), and the whole input when no
// separator is present (find_last_of yields npos). Callers may depend on
// this behavior -- confirm before renaming or changing it.
string basename(const string& file) {
    auto found = file.find_last_of("/\\");
    return file.substr(0, found);
}
/**
 * Converts the given settings tree and patterns to a YAML document.
 *
 * Patterns are emitted under the 'patterns' root key; the settings tree is
 * converted recursively, with leaf-only nodes becoming scalars or sequences.
 */
YamlWriter toYaml(const SettingsNode& settings,
                  const Patterns& patterns) noexcept {
    static string patternKey("patterns");

    YamlWriter yaml;
    try {
        for(const auto& pattern : patterns) {
            const auto longOption = pattern.getLongOption();
            if(longOption) {
                yaml[patternKey][pattern.getKey()]["long-option"] =
                    longOption.value();
            }
            yaml[patternKey][pattern.getKey()]["default-values"] =
                pattern.getValues();
            const auto shortOption = pattern.getShortOption();
            if(shortOption) {
                yaml[patternKey][pattern.getKey()]["short-option"] =
                    shortOption.value();
            }
        }
        const auto settingsValues =
            settings.get<SettingsValues>(SettingsKeys(), SettingsValues());
        for(const auto& subKey : settingsValues) {
            if(settings.get<SettingsValues>(subKey, SettingsValues()).empty()) {
                // Leaf child: a single one becomes a scalar, multiple ones
                // form a sequence.
                if(settingsValues.size() == 1U) {
                    yaml = subKey;
                } else {
                    yaml.push_back(subKey);
                }
            } else {
                // Nested child: recurse without re-emitting the patterns.
                yaml[subKey] = toYaml(settings[subKey], Patterns());
            }
        }
    } catch(const YAML::InvalidNode&) {
        LOG(error) << "Somehow the used YAML node is invalid";
        assert(false);
    }
    return yaml;
}
/**
 * Serializes the given settings and patterns to YAML and writes the result
 * through the given config file writer.
 */
void writeSettingsFile(not_null<ConfigFileWriter*> configFileWriter,
                       const SettingsNode& settings,
                       const Patterns& patterns) noexcept {
    configFileWriter->write(toYaml(settings, patterns));
}
// Forwards initializer lists to the vector-based overload below.
PatternCombinations createPatternCombination(
    const initializer_list<PatternKey>& keys,
    const initializer_list<PatternValue>& values) noexcept {
    return createPatternCombination(PatternKeys(keys), values);
}
/**
 * Zips the given keys and values into a pattern combination.
 *
 * @param[in] keys The pattern keys
 * @param[in] values The pattern values, in the same order as the keys
 * @returns The combination, or an empty combination when the sizes differ
 */
PatternCombinations
createPatternCombination(const PatternKeys& keys,
                         const PatternValues& values) noexcept {
    PatternCombinations result;
    if(keys.size() != values.size()) {
        return result;
    }
    auto value = values.begin();
    for(auto key = keys.begin(); key != keys.end(); ++key, ++value) {
        result[*key] = *value;
    }
    return result;
}
/**
 * Builds a permutator over all value combinations of the given patterns.
 */
PatternPermutator makePatternPermutator(const Patterns& patterns) noexcept {
    map<PatternKey, PatternValues> patternValuesMatrix;
    if(patterns.empty()) {
        // Insert a dummy single-valued pattern -- presumably so the
        // permutator still yields exactly one combination for the
        // pattern-less case; confirm against PatternPermutator's semantics.
        // The key is deliberately chosen to be unlikely to collide.
        patternValuesMatrix.emplace(
            make_pair(string("BLAATBLAATBLAATBLAAT"), vector<string>({"b"})));
    } else {
        for(const auto& pattern : patterns) {
            patternValuesMatrix.emplace(
                make_pair(pattern.getKey(), pattern.getValues()));
        }
    }
    return PatternPermutator(patternValuesMatrix);
}
/**
 * Expands the given task over every value combination of the given patterns.
 */
ExecutorStub::TaskQueue getExpectedTasks(const Task& task,
                                         const Patterns patterns) noexcept {
    ExecutorStub::TaskQueue result;
    for(const auto& combination : makePatternPermutator(patterns)) {
        result.emplace_back(replacePatternCombinations(task, combination));
    }
    return result;
}

/**
 * Expands every given task over every value combination of the given
 * patterns; combinations iterate in the outer loop, tasks in the inner one.
 */
ExecutorStub::TaskQueue getExpectedTasks(const ExecutorStub::TaskQueue& tasks,
                                         const Patterns patterns) noexcept {
    ExecutorStub::TaskQueue result;
    for(const auto& combination : makePatternPermutator(patterns)) {
        for(const auto& task : tasks) {
            result.emplace_back(replacePatternCombinations(task, combination));
        }
    }
    return result;
}
/**
 * Renders the given settings (sub)tree as an indented, human-readable list.
 *
 * @param[in] settings The settings subtree to render
 * @param[in] nbOfTabs The indentation level of the subtree's root
 * @returns The rendered representation
 */
string toString(const SettingsNode& settings, unsigned int nbOfTabs) noexcept {
    string prefix;
    for(unsigned int i = 0; i < nbOfTabs; ++i) {
        prefix += "  ";
    }
    string result;
    result += prefix + "- " + settings.key();
    auto settingsValues = settings.get<SettingsValues>(SettingsKeys());
    // Bug fix: the original condition was inverted -- a node *with* children
    // returned early, while a leaf fell through and dereferenced the empty
    // optional below. A leaf simply ends the line; a node with children gets
    // a ':' and its children rendered one level deeper.
    if(!settingsValues) {
        result += "\n";
        return result;
    }
    result += ":\n";
    for(const auto& value : settingsValues.value()) {
        result += toString(settings[value], nbOfTabs + 1);
    }
    return result;
}
// Name of the predefined pattern holding the inherited working directory.
std::string inheritWorkingDirKey() noexcept {
    return std::string{"EH_WORKING_DIR"};
}
/**
 * Returns the patterns that exec-helper predefines for every run; currently
 * only the inherited-working-directory pattern, seeded with the current
 * working directory.
 */
Patterns getPredefinedPatterns() noexcept {
    Patterns predefined;
    predefined.emplace_back(
        Pattern{inheritWorkingDirKey(), {filesystem::current_path().string()}});
    return predefined;
}
} // namespace utils
} // namespace test
} // namespace execHelper
<file_sep>#include <cstdio>
#include <filesystem>
#include <fstream>
#include <optional>
#include <string>
#include <utility>
#include <sys/stat.h>
#include <boost/lexical_cast.hpp>
#include <boost/process/search_path.hpp>
#include "base-utils/executionContent.h"
#include "base-utils/generators.h"
#include "base-utils/tmpFile.h"
#include "config/path.h"
#include "core/posixShell.h"
#include "core/task.h"
#include "unittest/catch.h"
#include "unittest/rapidcheck.h"
#include "utils/commonGenerators.h"
#include "utils/utils.h"
using std::ofstream;
using std::optional;
using std::string;
using boost::lexical_cast;
using execHelper::config::EnvironmentCollection;
using execHelper::config::Path;
using execHelper::test::propertyTest;
using execHelper::test::baseUtils::ExecutionContent;
using execHelper::test::baseUtils::IoService;
using execHelper::test::baseUtils::TmpFile;
namespace filesystem = std::filesystem;
namespace process = boost::process;
namespace this_process = boost::this_process;
namespace {
/**
* Searches for the given binary file in the current directory and the given path
*
* @param[in] binary The binary to search for
* @param[in] paths The current paths to search in
* @returns The full path to the binary
*/
auto searchBinary(const std::string& binary,
                  std::vector<boost::filesystem::path> paths)
    -> optional<filesystem::path> {
    // 'paths' is taken by value so the caller's list is not modified when we
    // append the current working directory as an extra search location
    paths.emplace_back(".");
    auto result = process::search_path(binary, paths).native();
    if(result.empty()) {
        // An empty result means the binary was not found in any location
        return std::nullopt;
    }
    return result;
}
/**
* Searches for the given binary file in the current directory and the given path
*
* @param[in] binary The binary to search for
* @returns The full path to the binary
*/
auto searchBinary(const std::string& binary) -> optional<filesystem::path> {
    // Delegate to the two-argument overload using this process' PATH entries
    return searchBinary(binary, this_process::path());
}
} // namespace
namespace execHelper::core::test {
// Property test: PosixShell::execute must propagate the return code of the
// executed command for any return code the stub execution engine exits with.
SCENARIO("Test that the right return code is returned", "[shell][posixshell]") {
    propertyTest("An execution engine and a shell",
                 [](PosixShell::ShellReturnCode expectedReturnCode) {
                     IoService ioService;
                     PosixShell shell;
                     // The stub engine reports back through this IO service
                     ExecutionContent::registerIoService(
                         gsl::not_null<IoService*>(&ioService));
                     // Stub command configured to exit with the given code
                     ExecutionContent executionEngine(expectedReturnCode);
                     ioService.run();
                     THEN_WHEN(
                         "We execute the command of the execution engine") {
                         Task task;
                         task.append(executionEngine.getConfigCommand());
                         auto actualReturnCode = shell.execute(task);
                         THEN_CHECK("The actual return code should equal the "
                                    "expected return code") {
                             REQUIRE(actualReturnCode == expectedReturnCode);
                         }
                     }
                 });
}
// Property test: executing the same task N times must produce exactly N
// recorded executions, each with an empty argument list.
SCENARIO("Test that the command is executed the expected number of times",
         "[shell][posixshell]") {
    propertyTest("An execution engine and a shell", [](uint8_t nbOfRepeats) {
        IoService ioService;
        PosixShell shell;
        ExecutionContent::registerIoService(
            gsl::not_null<IoService*>(&ioService));
        ExecutionContent executionEngine(0);
        ioService.run();
        THEN_WHEN("We execute the command of the execution engine the expected "
                  "number of times") {
            for(uint8_t i = 0; i < nbOfRepeats; ++i) {
                Task task;
                task.append(executionEngine.getConfigCommand());
                shell.execute(task);
            }
            THEN_CHECK("We should find the expected number of executions") {
                REQUIRE(executionEngine.getNumberOfExecutions() == nbOfRepeats);
            }
            THEN_CHECK(
                "We should find the expected number of empty data members") {
                const auto& data = executionEngine.getReceivedData();
                REQUIRE(data.size() == nbOfRepeats);
                // No arguments were appended to the command, so every
                // recorded execution should carry an empty argument list
                for(const auto& entry : data) {
                    REQUIRE(entry.args.empty());
                }
            }
        }
    });
}
// Property test: executing a path that does not exist must throw
// PathNotFoundError rather than failing silently.
SCENARIO("Test non-existing binaries", "[shell][posixshell]") {
    propertyTest("A non-existing file and a shell", [](const TmpFile& file) {
        PosixShell shell;
        THEN_WHEN("We try to execute the given file") {
            // Assumes the generated TmpFile path is not created on disk --
            // TODO confirm TmpFile's generator semantics
            Task task({file.toString()});
            THEN_CHECK("An exception should be thrown during execution") {
                REQUIRE_THROWS_AS(shell.execute(task),
                                  execHelper::core::PathNotFoundError);
            }
        }
    });
}
// Property test: the shell must perform command substitution ($(...)) on the
// task arguments before the executed binary receives them.
SCENARIO("Test the shell for shell expansion", "[shell][posixshell]") {
    propertyTest(
        "An execution engine and a shell", [](const uint32_t randomExpansion) {
            IoService ioService;
            PosixShell shell;
            ExecutionContent::registerIoService(
                gsl::not_null<IoService*>(&ioService));
            ExecutionContent executionEngine(0);
            ioService.run();
            THEN_WHEN("We execute the command using shell expansion") {
                Task task;
                task.append(executionEngine.getConfigCommand());
                // The quoted $(echo ...) should be substituted by the shell
                task.append("\"$(echo " + std::to_string(randomExpansion) +
                            ")\"");
                shell.execute(task);
                THEN_CHECK("The execution engine should have received the "
                           "expanded value") {
                    auto data = executionEngine.getReceivedData();
                    REQUIRE(data.size() == 1U);
                    REQUIRE(data.front().args.size() == 1U);
                    const auto receivedArgument =
                        lexical_cast<uint32_t>(data.front().args.front());
                    REQUIRE(receivedArgument == randomExpansion);
                }
            }
        });
}
// Property test: the shell must expand environment variables (${VAR}) from
// the task's environment before the executed binary receives them.
SCENARIO("Test the shell for word expansion", "[shell][posixshell]") {
    propertyTest(
        "An execution engine and a shell",
        [](const std::string& shellExpansion) {
            // Empty strings would expand to no argument at all, so skip them
            RC_PRE(!shellExpansion.empty());
            IoService ioService;
            EnvironmentCollection env = {{"EXPANSION", shellExpansion}};
            PosixShell shell;
            ExecutionContent::registerIoService(
                gsl::not_null<IoService*>(&ioService));
            ExecutionContent executionEngine(0);
            ioService.run();
            THEN_WHEN("We execute the command using word expansion") {
                Task task;
                task.setEnvironment(env);
                task.append(executionEngine.getConfigCommand());
                task.append("\"${EXPANSION}\"");
                shell.execute(task);
                THEN_CHECK("The execution engine should have received the "
                           "expanded value") {
                    auto data = executionEngine.getReceivedData();
                    REQUIRE(data.size() == 1U);
                    REQUIRE(data.front().args.size() == 1U);
                    REQUIRE(data.front().args.front() == shellExpansion);
                }
            }
        });
}
// Property test: the shell must execute binaries that are given as an
// absolute path rather than a bare name looked up on the PATH.
SCENARIO("Test the shell for binaries prefixed with an absolute path",
         "[shell][posixshell]") {
    propertyTest("An execution engine and a shell", []() {
        IoService ioService;
        PosixShell shell;
        ExecutionContent::registerIoService(
            gsl::not_null<IoService*>(&ioService));
        ExecutionContent executionEngine(0);
        ioService.run();
        THEN_WHEN("We replace the binary to execute with its absolute path") {
            auto command = executionEngine.getConfigCommand();
            // Resolve the stub binary to its absolute location (searchBinary
            // also considers the current working directory)
            auto binary = searchBinary(command[0]);
            REQUIRE(binary);
            command[0] = binary->string();
            Task task;
            task.append(command);
            auto actualReturnCode = shell.execute(task);
            THEN_CHECK("The actual return code should equal the expected "
                       "return code") {
                REQUIRE(shell.isExecutedSuccessfully(actualReturnCode));
            }
            THEN_CHECK("The command should be properly executed") {
                REQUIRE(executionEngine.getNumberOfExecutions() == 1U);
            }
        }
    });
}
// Property test: the shell must execute binaries that are given as a path
// relative to the current working directory.
SCENARIO("Test the shell for binaries prefixed with a relative path",
         "[shell][posixshell]") {
    propertyTest("An execution engine and a shell", []() {
        IoService ioService;
        PosixShell shell;
        ExecutionContent::registerIoService(
            gsl::not_null<IoService*>(&ioService));
        ExecutionContent executionEngine(0);
        ioService.run();
        THEN_WHEN("We replace the binary to execute with its relative path") {
            auto command = executionEngine.getConfigCommand();
            auto binary = searchBinary(command[0]);
            REQUIRE(binary);
            // Rewrite the absolute location as a path relative to the cwd
            command[0] =
                filesystem::relative(*binary, filesystem::current_path())
                    .native();
            Task task;
            task.append(command);
            shell.execute(task);
            THEN_CHECK("The command should be properly executed") {
                REQUIRE(executionEngine.getNumberOfExecutions() == 1U);
            }
        }
    });
}
// Property test: the shell must resolve bare binary names using the PATH set
// in the *task's* environment, not the test process' own PATH.
SCENARIO("Test the shell for binaries found in the task environment path",
         "[shell][posixshell]") {
    propertyTest("An execution engine and a shell", []() {
        IoService ioService;
        PosixShell shell;
        ExecutionContent::registerIoService(
            gsl::not_null<IoService*>(&ioService));
        ExecutionContent executionEngine(0);
        ioService.run();
        THEN_WHEN("We add the absolute path of the binary as the one entry of "
                  "the PATH environment variable") {
            auto command = executionEngine.getConfigCommand();
            auto binary = searchBinary(command[0]);
            REQUIRE(binary);
            auto newPath = *binary;
            newPath.remove_filename();
            REQUIRE(
                newPath !=
                newPath
                    .parent_path()); // The test will not test this property properly if the parent path equals the current path
            // PATH contains only the directory holding the stub binary
            EnvironmentCollection env = {{"PATH", newPath.native()}};
            Task task;
            task.append(command);
            task.setEnvironment(env);
            // Work from the parent dir so the binary is NOT in the cwd
            task.setWorkingDirectory(newPath.parent_path().native());
            shell.execute(task);
            THEN_CHECK("The command should be properly executed") {
                REQUIRE(executionEngine.getNumberOfExecutions() == 1U);
            }
        }
    });
}
// Property test: with an empty PATH, the shell must still find binaries that
// live in the task's working directory.
SCENARIO("Test the shell for binaries found in the working directory but not "
         "in the path",
         "[shell][posixshell]") {
    propertyTest("An execution engine and a shell", []() {
        IoService ioService;
        PosixShell shell;
        ExecutionContent::registerIoService(
            gsl::not_null<IoService*>(&ioService));
        ExecutionContent executionEngine(0);
        ioService.run();
        THEN_WHEN("We add the absolute path of the binary as the one entry of "
                  "the PATH environment variable") {
            auto command = executionEngine.getConfigCommand();
            auto binary = searchBinary(command[0]);
            REQUIRE(binary);
            auto workingDir =
                binary
                    ->parent_path(); // The binary will be found in the working directory
            EnvironmentCollection env = {
                {"PATH",
                 ""}}; // Make sure that the PATH is empty, so it will not be found there
            Task task;
            task.append(command);
            task.setEnvironment(env);
            task.setWorkingDirectory(workingDir);
            shell.execute(task);
            THEN_CHECK("The command should be properly executed") {
                REQUIRE(executionEngine.getNumberOfExecutions() == 1U);
            }
        }
    });
}
// Property test: per POSIX convention, the shell must overwrite any pre-set
// PWD environment variable with the task's working directory.
SCENARIO("The shell should properly set the PWD environment variable",
         "[shell][posixShell]") {
    propertyTest(
        "An execution engine, a shell, a working dir and an initial value",
        [](const Path& workingDir, std::string&& initialValue) {
            IoService ioService;
            PosixShell shell;
            ExecutionContent::registerIoService(
                gsl::not_null<IoService*>(&ioService));
            ExecutionContent executionEngine(0);
            ioService.run();
            THEN_WHEN(
                "We set the PWD environment variable to an initial value and "
                "the working directory to the given directory and run it") {
                Task task;
                // Explicitly qualified: the previous unqualified move() only
                // resolved through argument-dependent lookup
                task.setEnvironment({{"PWD", std::move(initialValue)}});
                task.setWorkingDirectory(workingDir);
                task.append(executionEngine.getConfigCommand());
                shell.execute(task);
                THEN_CHECK(
                    "That the PWD variable is set to the working directory") {
                    REQUIRE(executionEngine.getReceivedData().size() == 1);
                    auto env = executionEngine.getReceivedData().back().env;
                    REQUIRE(env.count("PWD") > 0);
                    REQUIRE(env["PWD"] == workingDir.string());
                }
            }
        });
}
} // namespace execHelper::core::test
<file_sep>.. _exec-helper-plugins-pmd:
Pmd plugin
**********
Description
===========
The pmd plugin is used for executing the pmd static code analyzer tool suite.
Mandatory settings
==================
There are no mandatory settings for this plugin.
Optional settings
=================
The configuration of the pmd plugin may contain the following settings:
.. program:: exec-helper-plugins-pmd
.. include:: patterns.rst
.. include:: environment.rst
.. include:: command-line.rst
.. include:: working-dir.rst
.. describe:: exec
The path to the pmd-run executable. The path can either be an absolute path or a relative path from the current working directory. Default: :code:`pmd`.
.. describe:: tool
The pmd tool to use. The currently supported tools are:
* cpd
Default: :code:`cpd`
.. describe:: language
Specify the language PMD is analyzing. Check the :code:`--language` option of the pmd documentation for more information. Default: no explicit language parameter is passed.
Cpd specific settings
---------------------
.. describe:: minimum-tokens
The minimum token length to be considered a duplicate. Check the :code:`--minimum-tokens` option of the cpd documentation for more information. Default: no explicit minimum tokens parameter is passed.
.. describe:: files
A list of files to check for duplicated code. Check the :code:`--files` option of the cpd documentation for more information. Default: no explicit files parameter is passed.
Example
=======
Configuration
-------------
.. literalinclude:: ../examples/pmd.example
:language: yaml
Additional files
----------------
In order for the above example to work, the following file hierarchy needs to be created in the directory:
*Makefile*:
.. literalinclude:: ../examples/Makefile
:language: none
*hello.cpp:*
.. literalinclude:: ../examples/src/hello.cpp
:language: cpp
*world.cpp:*
.. literalinclude:: ../examples/src/world.cpp
:language: cpp
Usage
-----
Save the example to an :program:`exec-helper` configuration file and execute in the same directory:
.. code-block:: bash
eh example
.. include:: see-also.rst
<file_sep>.. _exec-helper-plugins-selector:
Selector plugin
***************
Description
===========
The selector plugin is used for selecting a configuration path based on the value(s) of a target, typically one from a pattern value.
Mandatory settings
==================
The configuration of the selector plugin must contain the following settings:
.. program:: exec-helper-plugins-selector
.. include:: patterns.rst
.. describe:: targets
The targets to select on. Note that if patterns are used in this list, they must be listed using the *patterns* configuration, as is the case for every plugin.
The runtime value(s) associated with the pattern key must resolve either to an existing (configured) plugin or a configured command.
Optional settings
=================
There are no optional settings for the selector plugin.
Example
=======
Configuration
-------------
.. literalinclude:: ../examples/selector.example
:language: yaml
Usage
-----
Save the example to an :program:`exec-helper` configuration file and execute in the same directory:
.. code-block:: bash
eh example
.. include:: see-also.rst
<file_sep># Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
import sphinx_rtd_theme

# -- Project information -----------------------------------------------------

project = 'Exec-helper'
copyright = 'Exec-helper is Copyright (C) 2016-2020 under the Berkeley Software Distribution 3-clause (BSD 3-clause).'
author = '<NAME>'
# Default man page section for this documentation
section = '5'

# The full version, including alpha/beta/rc tags
release = '0.5.0'

# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    # 'recommonmark',
    'sphinx_rtd_theme'
]

# The document that holds the root toctree
master_doc = 'index'
templates_path = ['docs/usage/_templates']

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'docs/sphinx']

# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['docs/usage/_static']

# Most literal blocks in this documentation are exec-helper YAML configs
highlight_language = 'yaml'

# -- Options for man output --
# One entry per generated man page:
# (source start file, name, description, authors, manual section)
man_pages = [
    ('src/applications/exec-helper', 'exec-helper', 'Or How To Get Your Coffee In Peace: A meta-wrapper for executables', '', 1),
    ('src/applications/exec-helper', 'eh', 'Or How To Get Your Coffee In Peace: A meta-wrapper for executables', '', 1),
    ('src/config/docs/exec-helper-config', 'exec-helper-config', 'Exec-helper configuration documentation', '', 5),
    ('src/config/docs/exec-helper-config-patterns', 'exec-helper-config-patterns', 'Using patterns', '', 5),
    ('src/config/docs/exec-helper-config-environment', 'exec-helper-config-environment', 'Configuring environments', '', 5),
    ('src/plugins/docs/exec-helper-plugins', 'exec-helper-plugins', 'A list of included plugins', '', 5),
    ('src/plugins/docs/exec-helper-custom-plugins', 'exec-helper-custom-plugins', 'Writing custom plugins', '', 5),
    ('src/plugins/docs/exec-helper-plugins-bootstrap', 'exec-helper-plugins-bootstrap', 'An overview of the bootstrap plugin configuration settings', '', 5),
    ('src/plugins/docs/exec-helper-plugins-clang-static-analyzer', 'exec-helper-plugins-clang-static-analyzer', 'An overview of the clang-static-analyzer plugin configuration settings', '', 5),
    ('src/plugins/docs/exec-helper-plugins-clang-tidy', 'exec-helper-plugins-clang-tidy', 'An overview of the clang-tidy plugin configuration settings', '', 5),
    ('src/plugins/docs/exec-helper-plugins-cmake', 'exec-helper-plugins-cmake', 'An overview of the cmake plugin configuration settings', '', 5),
    ('src/plugins/docs/exec-helper-plugins-command-line-command', 'exec-helper-plugins-command-line-command', 'An overview of the command-line-command plugin configuration settings', '', 5),
    ('src/plugins/docs/exec-helper-plugins-command-plugin', 'exec-helper-plugins-command-plugin', 'An overview of the command-plugin plugin configuration settings', '', 5),
    ('src/plugins/docs/exec-helper-plugins-cppcheck', 'exec-helper-plugins-cppcheck', 'An overview of the cppcheck plugin configuration settings', '', 5),
    ('src/plugins/docs/exec-helper-plugins-docker', 'exec-helper-plugins-docker', 'An overview of the docker plugin configuration settings', '', 5),
    ('src/plugins/docs/exec-helper-plugins-execute-plugin', 'exec-helper-plugins-execute-plugin', 'An overview of the execute-plugin plugin configuration settings', '', 5),
    ('src/plugins/docs/exec-helper-plugins-lcov', 'exec-helper-plugins-lcov', 'An overview of the lcov plugin configuration settings', '', 5),
    ('src/plugins/docs/exec-helper-plugins-make', 'exec-helper-plugins-make', 'An overview of the make plugin configuration settings', '', 5),
    ('src/plugins/docs/exec-helper-plugins-memory', 'exec-helper-plugins-memory', 'An overview of the memory plugin configuration settings', '', 5),
    ('src/plugins/docs/exec-helper-plugins-ninja', 'exec-helper-plugins-ninja', 'An overview of the ninja plugin configuration settings', '', 5),
    ('src/plugins/docs/exec-helper-plugins-pmd', 'exec-helper-plugins-pmd', 'An overview of the pmd plugin configuration settings', '', 5),
    ('src/plugins/docs/exec-helper-plugins-scons', 'exec-helper-plugins-scons', 'An overview of the scons plugin configuration settings', '', 5),
    ('src/plugins/docs/exec-helper-plugins-selector', 'exec-helper-plugins-selector', 'An overview of the selector plugin configuration settings', '', 5),
    ('src/plugins/docs/exec-helper-plugins-valgrind', 'exec-helper-plugins-valgrind', 'An overview of the valgrind plugin configuration settings', '', 5),
]
<file_sep>#include <string>
#include <gsl/string_span>
#include "config/commandLineOptions.h"
#include "config/pattern.h"
#include "config/settingsNode.h"
#include "config/variablesMap.h"
#include "core/task.h"
#include "plugins/commandPlugin.h"
#include "plugins/executePlugin.h"
#include "plugins/memory.h"
#include "plugins/pluginUtils.h"
#include "unittest/catch.h"
#include "utils/utils.h"
#include "fleetingOptionsStub.h"
using std::shared_ptr;
using std::string;
using gsl::czstring;
using execHelper::config::Command;
using execHelper::config::CommandCollection;
using execHelper::config::Patterns;
using execHelper::config::SettingsNode;
using execHelper::config::VariablesMap;
using execHelper::core::Task;
using execHelper::core::test::ExecutorStub;
using execHelper::test::FleetingOptionsStub;
namespace {
const czstring<> PLUGIN_NAME = "commands";
const czstring<> MEMORY_KEY = "memory";
} // namespace
namespace execHelper::plugins::test {
// Checks that the command plugin derives its default variables map directly
// from the commands configured in the fleeting options.
SCENARIO("Obtaining the default variables map of the command-plugin",
         "[command-plugin]") {
    MAKE_COMBINATIONS("The default fleeting options") {
        FleetingOptionsStub fleetingOptions;
        CommandPlugin plugin;
        // Build the variables map we expect the plugin to produce
        VariablesMap actualVariables(PLUGIN_NAME);
        REQUIRE(actualVariables.add(PLUGIN_NAME, CommandCollection()));
        COMBINATIONS("Add a command") {
            const Command command = "command1";
            fleetingOptions.m_commands = {command};
            REQUIRE(actualVariables.replace(PLUGIN_NAME, command));
        }
        COMBINATIONS("Add multiple commands") {
            const CommandCollection commands = {"command1a", "command1b"};
            fleetingOptions.m_commands = commands;
            REQUIRE(actualVariables.replace(PLUGIN_NAME, commands));
        }
        THEN_WHEN("We request the variables map") {
            VariablesMap variables = plugin.getVariablesMap(fleetingOptions);
            THEN_CHECK("We should find the same ones") {
                REQUIRE(variables == actualVariables);
            }
        }
    }
}
// Checks that the command plugin executes every configured command exactly
// once, passing the incoming task through to the executed plugin.
SCENARIO("Test the commandPlugin plugin", "[command-plugin]") {
    MAKE_COMBINATIONS("Of several options and configurations") {
        MemoryHandler memory;
        Task task;
        CommandPlugin plugin;
        VariablesMap variables = plugin.getVariablesMap(FleetingOptionsStub());
        ExecutorStub::TaskQueue expectedTasks;
        ExecutorStub executor;
        ExecuteCallback executeCallback = [&executor](const Task& task) {
            executor.execute(task);
        };
        registerExecuteCallback(executeCallback);
        FleetingOptionsStub fleetingOptions;
        // Push the execution context used by the nested execute-plugin;
        // popped again in reverse below
        ExecutePlugin::push(
            gsl::not_null<config::FleetingOptionsInterface*>(&fleetingOptions));
        ExecutePlugin::push(SettingsNode(PLUGIN_NAME));
        ExecutePlugin::push(Patterns());
        ExecutePlugin::push(
            Plugins({{"Memory",
                      shared_ptr<Plugin>(new execHelper::plugins::Memory())}}));
        COMBINATIONS("Add a command to execute") {
            REQUIRE(variables.add(PLUGIN_NAME, MEMORY_KEY));
            expectedTasks.emplace_back(Task());
        }
        COMBINATIONS("Add multiple commands to execute") {
            const unsigned int NB_OF_COMMANDS = 5U;
            for(unsigned int i = 0U; i < NB_OF_COMMANDS; ++i) {
                REQUIRE(variables.add(PLUGIN_NAME, MEMORY_KEY));
                expectedTasks.emplace_back(Task());
            }
        }
        THEN_WHEN("We apply the plugin") {
            bool return_code = plugin.apply(task, variables, Patterns());
            THEN_CHECK("The call should succeed") { REQUIRE(return_code); }
            THEN_CHECK("All expected actions should be executed") {
                const Memory::Memories& memories =
                    MemoryHandler::getExecutions();
                REQUIRE(memories.size() == expectedTasks.size());
                auto taskIterator = expectedTasks.begin();
                for(auto memoryIterator = memories.begin();
                    memoryIterator != memories.end();
                    ++memoryIterator, ++taskIterator) {
                    // Fix: compare each execution against its matching
                    // expected task. The previous code compared every memory
                    // against the outer 'task', leaving 'taskIterator'
                    // effectively unused (both are default Tasks here, so the
                    // observable behavior is unchanged).
                    REQUIRE(memoryIterator->task == *taskIterator);
                }
            }
        }
        ExecutePlugin::popPlugins();
        ExecutePlugin::popFleetingOptions();
        ExecutePlugin::popSettingsNode();
        ExecutePlugin::popPatterns();
    }
}
} // namespace execHelper::plugins::test
<file_sep>.. _exec-helper-plugins-cppcheck:
Cppcheck plugin
***************
Description
===========
The cppcheck plugin is used for executing the cppcheck static code analysis tool.
Mandatory settings
==================
There are no mandatory settings for the cppcheck plugin.
Optional settings
=================
The configuration of the cppcheck plugin may contain the following settings:
.. program:: exec-helper-plugins-cppcheck
.. include:: patterns.rst
.. include:: environment.rst
.. include:: command-line.rst
.. include:: working-dir.rst
.. describe:: enable-checks
A list of checks that should be enabled or disabled. Check the documentation of cppcheck for a list of all the available checks. Default: *all*.
.. describe:: src-dir
The base directory containing all the files to check. Default: :code:`.` (the current working directory).
Example
=======
Configuration
-------------
.. literalinclude:: ../examples/cppcheck.example
:language: yaml
Additional files
----------------
In order for the above example to work, the following files need to be created in the *src* directory:
*hello.cpp:*
.. literalinclude:: ../examples/src/hello.cpp
:language: cpp
*world.cpp:*
.. literalinclude:: ../examples/src/world.cpp
:language: cpp
Usage
-----
Save the example to an :program:`exec-helper` configuration file and execute in the same directory:
.. code-block:: bash
eh example
See also
========
See :ref:`exec-helper` (1) for information about the usage of :program:`exec-helper`.
See :ref:`exec-helper-config` (5) for information about the configuration file format.
See :ref:`exec-helper-plugins` (5) for information about the configuration file format.
<file_sep>#ifndef EXEC_HELPER_CONFIG_INCLUDE
#define EXEC_HELPER_CONFIG_INCLUDE
#include <stdexcept>
#include "log/logLevel.h"
#include "commandLineOptions.h"
#include "variablesMap.h"
#include "fleetingOptionsInterface.h"
namespace execHelper {
namespace config {
/**
* \brief Options that are as fleeting as the wind
*/
class FleetingOptions : public FleetingOptionsInterface {
  public:
    /**
     * Create fleeting options
     *
     * \param[in] optionsMap The variables map to get the fleeting options from
     */
    explicit FleetingOptions(const VariablesMap& optionsMap) noexcept;

    /*! @copydoc config::Argv::Argv(const config::Argv& other)
     */
    FleetingOptions(const FleetingOptions& other) = default;

    /*! @copydoc config::Argv::Argv(config::Argv&& other)
     */
    FleetingOptions(FleetingOptions&& other) = default;
    virtual ~FleetingOptions() = default;

    /*! @copydoc config::Argv::operator=(const config::Argv& other)
     */
    FleetingOptions& operator=(const FleetingOptions& other) = delete;

    /*! @copydoc config::Argv::operator=(config::Argv&& other)
     */
    FleetingOptions& operator=(FleetingOptions&& other) = delete;

    /*! @copydoc config::Argv::operator==(const config::Argv& other) const
     */
    auto operator==(const FleetingOptions& other) const -> bool;

    /*! @copydoc config::Argv::operator!=(const config::Argv& other) const
     */
    auto operator!=(const FleetingOptions& other) const -> bool;

    // Accessors for the individual fleeting options; see
    // FleetingOptionsInterface for their contracts
    auto getHelp() const noexcept -> HelpOption_t override;
    auto getVersion() const noexcept -> VersionOption_t override;
    auto getVerbosity() const noexcept -> VerboseOption_t override;
    auto getDryRun() const noexcept -> DryRunOption_t override;
    auto getKeepGoing() const noexcept -> KeepGoingOption_t override;
    auto getJobs() const noexcept -> Jobs_t override;
    auto listPlugins() const noexcept -> ListPluginsOption_t override;
    [[nodiscard]] auto appendedSearchPaths() const noexcept
        -> const Paths& override;

    /**
     * Get the value of the log level option
     *
     * \returns The log level associated with the log level option
     */
    auto getLogLevel() const noexcept -> log::LogLevel override;
    auto getCommands() const noexcept -> const CommandCollection& override;
    auto getAutoComplete() const noexcept
        -> const std::optional<AutoCompleteOption_t>& override;

    /**
     * Returns the default variables for the fleeting options
     *
     * \returns The default variables
     */
    static auto getDefault() noexcept -> VariablesMap;

  private:
    // Members are initialized once from the variables map in the constructor
    const HelpOption_t m_help;
    const VersionOption_t m_version;
    const VerboseOption_t m_verbose;
    const DryRunOption_t m_dryRun;
    const KeepGoingOption_t m_keepGoing;
    // Non-const, unlike the other options -- presumably adjusted after
    // construction (e.g. auto-detected job count); TODO confirm in the .cpp
    Jobs_t m_jobs;
    const LogLevelOption_t m_logLevel;
    const ListPluginsOption_t m_listPlugins;
    const Paths m_appendSearchPaths;
    // Non-const: commands may be amended after construction -- TODO confirm
    CommandCollection m_commands;
    const std::optional<config::AutoCompleteOption_t> m_autocomplete;
};
} // namespace config
} // namespace execHelper
#endif /* EXEC_HELPER_CONFIG_INCLUDE */
<file_sep>#include "unittest/catch.h"
#include "config/commandLineOptions.h"
namespace execHelper::config::test {
// Pins the exact string values of the command-line option keys: these are
// part of the user-facing CLI contract and must not change silently.
SCENARIO("Test the command line keys", "[config][command-line-options]") {
    REQUIRE(HELP_OPTION_KEY == "help");
    REQUIRE(VERBOSE_KEY == "verbose");
    REQUIRE(DRY_RUN_KEY == "dry-run");
    REQUIRE(JOBS_KEY == "jobs");
    REQUIRE(SETTINGS_FILE_KEY == "settings-file");
    REQUIRE(COMMAND_KEY == "command");
    REQUIRE(LOG_LEVEL_KEY == "debug");
}
} // namespace execHelper::config::test
<file_sep>#include "consoleLogger.h"
#include <boost/log/keywords/filter.hpp>
#include <boost/log/utility/setup/console.hpp>
using boost::log::expressions::smessage;
using boost::log::expressions::stream;
using boost::log::keywords::filter;
using boost::log::keywords::format;
namespace execHelper::log {
// Creates a console logger writing to the given stream. The sink's initial
// filter is 'severity >= none && false', i.e. always false, so nothing is
// emitted until setSeverity() installs a channel filter.
ConsoleLogger::ConsoleLogger(std::ostream& logStream)
    : m_logMessageFilter(
          boost::log::expressions::channel_severity_filter(channel, severity)),
      m_consoleSink(add_console_log(
          logStream, filter = severity >= none && false,
          format = (stream << timestamp << " <" << severity << "> [" << channel
                           << "] " << fileLog << ":" << lineLog << " "
                           << smessage))) {}
ConsoleLogger::~ConsoleLogger() {
    if(m_consoleSink) {
        // Detach the sink from the logging core first so no new records
        // arrive while the remaining ones are flushed
        boost::log::core::get()->remove_sink(
            m_consoleSink); // NOLINT(fuchsia-default-arguments-calls)
        m_consoleSink->flush();
        m_consoleSink.reset();
    }
}
// Enables log records on the given channel at or above the given severity.
// Always reports success.
auto ConsoleLogger::setSeverity(const Channel& channel,
                                LogLevel severity) noexcept -> bool {
    m_logMessageFilter[channel] = severity;
    // '|| false' — presumably keeps records from unconfigured channels
    // filtered out; confirm against the channel_severity_filter defaults
    m_consoleSink->set_filter(m_logMessageFilter || false);
    return true;
}
} // namespace execHelper::log
<file_sep>#ifndef __VALGRIND_H__
#define __VALGRIND_H__
#include "plugin.h"
namespace execHelper {
namespace plugins {
/**
* \brief Plugin for running valgrind
*/
class Valgrind : public Plugin {
  public:
    // Returns the default configuration variables for this plugin, derived
    // from the given fleeting options
    config::VariablesMap
    getVariablesMap(const config::FleetingOptionsInterface& fleetingOptions)
        const noexcept override;
    // Extends the given task with the valgrind invocation and executes it;
    // returns whether the plugin was applied successfully
    bool apply(core::Task task, const config::VariablesMap& variables,
               const config::Patterns& patterns) const noexcept override;
    // Short, human-readable description of the plugin
    std::string summary() const noexcept override;
};
} // namespace plugins
} // namespace execHelper
#endif /* __VALGRIND_H__ */
<file_sep>#ifndef COMMAND_LINE_INCLUDE
#define COMMAND_LINE_INCLUDE
#include <string>
#include <vector>
#include <gsl/string_span>
#include "config/fleetingOptionsInterface.h"
#include "config/variablesMap.h"
#include "core/task.h"
namespace execHelper {
namespace plugins {
using CommandLineArg = std::string;
using CommandLineArgs = std::vector<CommandLineArg>;
static const gsl::czstring<> COMMAND_LINE_KEY = "command-line";
/**
* \brief Extends the functionality to include the _command-line_ config parameter and processes this parameter
*/
struct CommandLine {
    /*! @copydoc AddEnvironment::getVariables(config::VariablesMap&, const config::FleetingOptionsInterface&)
     */
    static void
    getVariables(config::VariablesMap& variables,
                 const config::FleetingOptionsInterface& options) noexcept;

    /*! @copydoc JobsLong::apply(core::Task&, const config::VariablesMap&)
     */
    inline static void apply(core::Task& task,
                             const config::VariablesMap& variables) noexcept {
        // NOTE(review): dereferences the optional returned by get() without
        // checking it. This assumes COMMAND_LINE_KEY was registered earlier
        // (getVariables() adds an empty default) — confirm that all callers
        // go through getVariables() first, since the function is noexcept.
        task.append(*(variables.get<CommandLineArgs>(COMMAND_LINE_KEY)));
    }
};
} // namespace plugins
} // namespace execHelper
#endif /* COMMAND_LINE_INCLUDE */
<file_sep>include(generators.CMakeLists.txt)
# Unit test executable for the 'core' module
set(EXE_NAME ${PROJECT_NAME}-core-unittest)
# Test sources
set(SRCS
    src/permutationIteratorTest.cpp
    src/posixShellTest.cpp
    src/taskTest.cpp
    src/immediateExecutorTest.cpp
   )
# Targets/libraries the test executable depends on (used both as include
# directories and as link libraries below)
set(DEPENDENCIES
        core-generators
        boost-program-options
        filesystem
        core
        test-utils
        unittest
        rpcheck
    )
add_executable(${EXE_NAME} ${SRCS})
target_include_directories(${EXE_NAME} PRIVATE ${DEPENDENCIES})
target_link_libraries(${EXE_NAME} PRIVATE ${DEPENDENCIES})
# Register with CTest and install alongside the other unit tests
add_test(${EXE_NAME} ${EXE_NAME})
install(TARGETS ${EXE_NAME} DESTINATION ${UNITTEST_BIN_DIR} COMPONENT core)
<file_sep>set(MODULE_NAME Catch)
set(MODULE_INCLUDES catch.hpp)

# Try pkg-config first, then fall back to searching the include path directly
# (the header may live in a 'catch2' subdirectory)
find_package(PkgConfig)
pkg_check_modules(${MODULE_NAME} QUIET ${MODULE_NAME})
find_path(${MODULE_NAME}_INCLUDE_DIR NAMES ${MODULE_INCLUDES} PATH_SUFFIXES catch2)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(${MODULE_NAME} DEFAULT_MSG
    ${MODULE_NAME}_INCLUDE_DIR)
mark_as_advanced(${MODULE_NAME}_INCLUDE_DIR)
set(${MODULE_NAME}_INCLUDE_DIRS ${${MODULE_NAME}_INCLUDE_DIR} )
# Catch is header-only; 'dl' is listed here — presumably required by this
# project's Catch configuration. TODO confirm
set(${MODULE_NAME}_LIBRARIES dl)
set(${MODULE_NAME}_LIBRARY_DIRS ${${MODULE_NAME}_LIBRARY})
<file_sep>.. _exec-helper-plugins-make:
Make plugin
***********
Description
===========
The make plugin is used for executing Makefiles.
Mandatory settings
==================
There are no mandatory settings for this plugin.
Optional settings
=================
The configuration of the make plugin may contain the following settings:
.. program:: exec-helper-plugins-make
.. include:: patterns.rst
.. include:: environment.rst
.. include:: command-line.rst
.. include:: working-dir.rst
.. describe:: build-dir
The path to the Makefile. This is either an absolute path are a path relative to the location of this file. Default: :code:`.` (the current working directory).
Example
=======
Configuration
-------------
.. literalinclude:: ../examples/make.example
:language: yaml
Additional files
----------------
In order for the above example to work, the following file hierarchy needs to be created in the directory:
*Makefile*:
.. literalinclude:: ../examples/Makefile
:language: none
*hello.cpp:*
.. literalinclude:: ../examples/src/hello.cpp
:language: cpp
*world.cpp:*
.. literalinclude:: ../examples/src/world.cpp
:language: cpp
Usage
-----
Save the example to an :program:`exec-helper` configuration file and execute in the same directory:
.. code-block:: bash
eh example
.. include:: see-also.rst
<file_sep>.. _exec-helper-config-environment:
Environment
***********
Description
===========
Environment variables can be configured in the configuration file. They will only be set for the particular command(s) defined by the relevant section of the configuration.
Environment variables can *not* be set directly in a command line command. The :program:`environment` configuration key needs to be used for this. See section 'environment'.
Environment
===========
The :program:`environment` keyword can be set for every plugin that supports the env configuration setting. Check the documentation on a specific plugin to check whether the plugin supports this configuration setting.
The :program:`environment` keyword must contain a *map* of key-value pairs, where the key is the name of the :program:`environment` variable and the value is the value associated with the specified :program:`environment` variable. :ref:`exec-helper-config-patterns` can be used for the :program:`environment` these variable values too.
**Note**: The *PWD* environment variable, following POSIX convention, is set by the application to the working directory of the task. Therefore, its value cannot be overridden in the configuration.
Example configuration
=====================
.. literalinclude:: ../examples/exec-helper-config.example
:language: yaml
See also
========
See :ref:`exec-helper-config` (5) for information about the configuration file.
<file_sep>#include "addEnvironment.h"
#include "config/environment.h"
#include "logger.h"
namespace execHelper::plugins {
/**
 * Registers the 'environment' configuration key, with an empty collection as
 * its default value, in the given variables map.
 */
void AddEnvironment::getVariables(
    config::VariablesMap& variables,
    const config::FleetingOptionsInterface& /*options*/) noexcept {
    const bool added =
        variables.add(config::ENVIRONMENT_KEY, config::EnvArgs());
    if(!added) {
        // A fresh variables map should always accept the key; log for diagnosis
        LOG(error) << "Failed to add key '" << config::ENVIRONMENT_KEY << "'";
    }
}
} // namespace execHelper::plugins
<file_sep>#include <algorithm>
#include <string>
#include <vector>
#include <gsl/string_span>
#include "config/environment.h"
#include "config/path.h"
#include "config/pattern.h"
#include "config/variablesMap.h"
#include "core/task.h"
#include "plugins/commandLine.h"
#include "plugins/commandLineCommand.h"
#include "unittest/catch.h"
#include "utils/utils.h"
#include "executorStub.h"
#include "fleetingOptionsStub.h"
using std::string;
using std::vector;
using gsl::czstring;
using execHelper::config::EnvArgs;
using execHelper::config::ENVIRONMENT_KEY;
using execHelper::config::EnvironmentValue;
using execHelper::config::Path;
using execHelper::config::Pattern;
using execHelper::config::Patterns;
using execHelper::config::SettingsKey;
using execHelper::config::SettingsKeys;
using execHelper::config::VariablesMap;
using execHelper::core::Task;
using execHelper::core::TaskCollection;
using execHelper::plugins::COMMAND_LINE_KEY;
using execHelper::core::test::ExecutorStub;
using execHelper::test::FleetingOptionsStub;
using execHelper::test::utils::getExpectedTasks;
namespace {
// Constants shared by the scenarios below
const czstring<> PLUGIN_NAME("command-line-command");
const czstring<> WORKING_DIR_KEY("working-dir");
} // namespace
namespace execHelper::plugins::test {
// Checks that a freshly constructed plugin exposes exactly the expected
// default configuration keys (an empty command line and an empty environment)
SCENARIO(
    "Obtaining the default variables map of the command-line-command plugin",
    "[command-line-command]") {
    GIVEN("The default fleeting options") {
        FleetingOptionsStub fleetingOptions;
        CommandLineCommand plugin;
        VariablesMap actualVariables(PLUGIN_NAME);
        REQUIRE(actualVariables.add(COMMAND_LINE_KEY, CommandLineArgs()));
        REQUIRE(actualVariables.add(ENVIRONMENT_KEY, EnvArgs()));
        WHEN("We request the variables map") {
            VariablesMap variables = plugin.getVariablesMap(fleetingOptions);
            THEN("We should find the same ones") {
                REQUIRE(variables == actualVariables);
            }
        }
    }
}
// Exercises the plugin against every combination of the optional settings
// (multiple command lines, environment variables, working directory) and
// verifies the executed tasks after pattern substitution
SCENARIO(
    "Testing the configuration settings of the command-line-command plugin",
    "[command-line-command]") {
    MAKE_COMBINATIONS("Of several settings") {
        const Pattern pattern1("PATTERN1", {"value1a", "value1b"});
        const Pattern pattern2("PATTERN2", {"value2a", "value2b"});
        const Pattern pattern3("PATTERN3", {"value3a"});
        const Patterns patterns({pattern1, pattern2, pattern3});
        const string commandKey("command1");
        const CommandLineArgs command1({"command1"});
        CommandLineCommand plugin;
        VariablesMap variables = plugin.getVariablesMap(FleetingOptionsStub());
        Task expectedTask;
        std::vector<TaskCollection> commandLines;
        REQUIRE(variables.add(COMMAND_LINE_KEY, command1));
        commandLines.push_back(command1);
        // Capture every task the plugin executes so it can be compared below
        ExecutorStub executor;
        ExecuteCallback executeCallback = [&executor](const Task& task) {
            executor.execute(task);
        };
        registerExecuteCallback(executeCallback);
        COMBINATIONS("Set multiple command lines") {
            const CommandLineArgs multipleCommand1({"multiple-commandA"});
            const CommandLineArgs multipleCommand2(
                {"{" + pattern1.getKey() + "}/{" + pattern2.getKey() + "}",
                 "{" + pattern3.getKey() + "}/{" + pattern1.getKey() + "}"});
            // Replace the single default command line with two named ones
            variables.clear(COMMAND_LINE_KEY);
            REQUIRE(variables.add(
                SettingsKeys({COMMAND_LINE_KEY, "multiple-commandA"}),
                multipleCommand1));
            REQUIRE(variables.add({COMMAND_LINE_KEY, "multiple-commandB"},
                                  multipleCommand2));
            commandLines.clear();
            commandLines.push_back(multipleCommand1);
            commandLines.push_back(multipleCommand2);
        }
        COMBINATIONS("Set environment") {
            EnvironmentValue ENV1("VAR1", "environmentValue{" +
                                              pattern1.getKey() + "}");
            EnvironmentValue ENV2("VAR2", "environmentValue2");
            REQUIRE(variables.add({ENVIRONMENT_KEY, ENV1.first}, ENV1.second));
            REQUIRE(variables.add({ENVIRONMENT_KEY, ENV2.first}, ENV2.second));
            expectedTask.appendToEnvironment(move(ENV1));
            expectedTask.appendToEnvironment(move(ENV2));
        }
        COMBINATIONS("Set the working directory") {
            REQUIRE(variables.replace(WORKING_DIR_KEY,
                                      "{" + pattern2.getKey() + "}/{" +
                                          pattern3.getKey() + "}"));
            expectedTask.setWorkingDirectory(
                variables.get<Path>(WORKING_DIR_KEY).value());
        }
        // One expected task per configured command line, then expand patterns
        ExecutorStub::TaskQueue unreplacedTasks;
        for(const auto& commandLine : commandLines) {
            Task newTask = expectedTask;
            newTask.append(commandLine);
            unreplacedTasks.emplace_back(newTask);
        }
        const ExecutorStub::TaskQueue expectedTasks =
            getExpectedTasks(unreplacedTasks, patterns);
        Task task;
        bool returnCode = plugin.apply(task, variables, patterns);
        THEN_CHECK("It should succeed") { REQUIRE(returnCode); }
        THEN_CHECK("It called the right commands") {
            REQUIRE(expectedTasks == executor.getExecutedTasks());
        }
    }
}
// Checks that applying the plugin without a usable command line fails
SCENARIO("Testing erroneous configuration conditions for the "
         "commandLineCommand plugin",
         "[command-line-command]") {
    MAKE_COMBINATIONS("Of erroneous setups") {
        Task task;
        CommandLineCommand plugin;
        VariablesMap variables = plugin.getVariablesMap(FleetingOptionsStub());
        COMBINATIONS("Add command line key without value") {
            REQUIRE(variables.add(COMMAND_LINE_KEY));
        }
        THEN_WHEN("We add no parameter and apply") {
            bool return_code = plugin.apply(task, variables, Patterns());
            THEN_CHECK("The call should not succeed") {
                REQUIRE_FALSE(return_code);
            }
        }
    }
}
} // namespace execHelper::plugins::test
<file_sep>#include <exception>
#include <iostream>
#include <string>
#include <string_view>
#include "base-utils/commandUtils.h"
#include "base-utils/executionContent.h"
#include "base-utils/path.h"
using std::cerr;
using std::endl;
using std::exception;
using std::string;
using std::string_view;
using execHelper::test::baseUtils::ExecutionContentClient;
using execHelper::test::baseUtils::ExecutionContentData;
using execHelper::test::baseUtils::Path;
using execHelper::test::baseUtils::RUNTIME_ERROR;
int main(int argc, char** argv, char** envp) {
if(argc < 2) {
cerr << "Insufficient arguments" << endl;
return RUNTIME_ERROR;
}
ExecutionContentData data;
// Ignore the first two arguments, as they are used for instructing this binary itself
for(int i = 2; i < argc; ++i) {
data.args.emplace_back(argv[i]);
}
constexpr string_view DELIMITER("=");
auto index = 0U;
char* envValue;
while((envValue = envp[index]) != nullptr) {
string newEnv(envValue);
size_t pos = newEnv.find_first_of(DELIMITER);
string key = newEnv.substr(0, pos);
string value = newEnv.substr(pos + DELIMITER.size(), newEnv.npos);
data.env.emplace(make_pair(key, value));
++index;
}
Path endpoint(argv[1]);
try {
ExecutionContentClient client(endpoint);
return client.addExecution(data);
} catch(const exception& e) {
cerr << "Caught exception: " << e.what() << endl;
return RUNTIME_ERROR;
}
}
<file_sep>#ifndef ADD_ENVIRONMENT_INCLUDE
#define ADD_ENVIRONMENT_INCLUDE
#include "config/environment.h"
#include "config/fleetingOptionsInterface.h"
#include "config/path.h"
#include "config/variablesMap.h"
#include "core/task.h"
#include "pluginUtils.h"
namespace execHelper::plugins {
/**
* \brief Extends the functionality to add environment variables to the environment of the process to execute, using the _environment_ key in the configuration
*/
struct AddEnvironment {
    /**
     * Adds the variables for this functionality to the given variables map
     *
     * @param[out] variables The variables map to add the variables to
     * @param[in] options The fleeting options to consider
     */
    static void
    getVariables(config::VariablesMap& variables,
                 const config::FleetingOptionsInterface& options) noexcept;

    /*! @copydoc JobsLong::apply(core::Task&, const config::VariablesMap&)
     */
    inline static void apply(core::Task& task,
                             const config::VariablesMap& variables) noexcept {
        // Merge the environment configured under the 'environment' key into
        // the environment of the task to execute
        task.appendToEnvironment(getEnvironment(variables));
    }
};
} // namespace execHelper::plugins
#endif /* ADD_ENVIRONMENT_INCLUDE */
<file_sep>#include <string>
#include <vector>
#include <gsl/string_span>
#include "config/path.h"
#include "config/settingsNode.h"
#include "config/variablesMap.h"
#include "core/task.h"
#include "plugins/executePlugin.h"
#include "plugins/lcov.h"
#include "plugins/memory.h"
#include "unittest/catch.h"
#include "utils/utils.h"
#include "fleetingOptionsStub.h"
using std::shared_ptr;
using std::string;
using std::vector;
using gsl::czstring;
using execHelper::config::Command;
using execHelper::config::CommandCollection;
using execHelper::config::Path;
using execHelper::config::Pattern;
using execHelper::config::Patterns;
using execHelper::config::SettingsNode;
using execHelper::config::VariablesMap;
using execHelper::core::Task;
using execHelper::plugins::ExecutePlugin;
using execHelper::plugins::Lcov;
using execHelper::core::test::ExecutorStub;
using execHelper::test::FleetingOptionsStub;
using execHelper::test::utils::getExpectedTasks;
namespace {
// Configuration keys and defaults used by the lcov plugin scenarios below
const czstring<> PLUGIN_NAME = "lcov";
const czstring<> RUN_COMMAND = "run-command";
const czstring<> INFO_FILE_KEY = "info-file";
const czstring<> BASE_DIR_KEY = "base-directory";
const czstring<> DIR_KEY = "directory";
const czstring<> ZERO_COUNTERS_KEY = "zero-counters";
const czstring<> GEN_HTML_KEY = "gen-html";
const czstring<> GEN_HTML_OUTPUT_KEY = "gen-html-output";
const czstring<> GEN_HTML_TITLE_KEY = "gen-html-title";
const czstring<> GEN_HTML_COMMAND_LINE_KEY = "gen-html-command-line";
const czstring<> EXCLUDES_KEY = "excludes";
const czstring<> MEMORY_KEY = "memory";
} // namespace
namespace execHelper::plugins::test {
// Checks that a freshly constructed lcov plugin exposes exactly the expected
// default configuration keys and values
SCENARIO("Obtaining the default variables map of the lcov plugin", "[lcov]") {
    GIVEN("The default fleeting options") {
        FleetingOptionsStub fleetingOptions;
        Lcov plugin;
        VariablesMap actualVariables(PLUGIN_NAME);
        REQUIRE(actualVariables.add(COMMAND_LINE_KEY, CommandLineArgs()));
        REQUIRE(actualVariables.add(INFO_FILE_KEY, "lcov-plugin.info"));
        REQUIRE(actualVariables.add(BASE_DIR_KEY, "."));
        REQUIRE(actualVariables.add(DIR_KEY, "."));
        REQUIRE(actualVariables.add(ZERO_COUNTERS_KEY, "no"))
;
        REQUIRE(actualVariables.add(GEN_HTML_KEY, "no"));
        REQUIRE(actualVariables.add(GEN_HTML_OUTPUT_KEY, "."));
        REQUIRE(actualVariables.add(GEN_HTML_TITLE_KEY, "Hello"));
        REQUIRE(
            actualVariables.add(GEN_HTML_COMMAND_LINE_KEY, vector<string>()));
        REQUIRE(actualVariables.add(EXCLUDES_KEY, vector<string>()));
        WHEN("We request the variables map") {
            VariablesMap variables = plugin.getVariablesMap(fleetingOptions);
            THEN("We should find the same ones") {
                REQUIRE(variables == actualVariables);
            }
        }
    }
}
// Exercises the lcov plugin against every combination of its settings and
// verifies the generated lcov/genhtml invocations after pattern substitution
SCENARIO("Test multiple configurations of the lcov plugin", "[lcov]") {
    MAKE_COMBINATIONS("Of several configurations") {
        const Pattern pattern1("PATTERN1", {"value1a", "value1b"});
        const Pattern pattern2("PATTERN2", {"value2a", "value2b"});
        const Patterns patterns({pattern1, pattern2});
        Lcov plugin;
        Task task;
        VariablesMap variables = plugin.getVariablesMap(FleetingOptionsStub());
        CommandCollection runCommands({MEMORY_KEY});
        // Defaults that the COMBINATIONS below may override
        auto infoFile = variables.get<Path>(INFO_FILE_KEY).value();
        auto baseDir = variables.get<Path>(BASE_DIR_KEY).value();
        auto dir = variables.get<Path>(DIR_KEY).value();
        CommandLineArgs commandLine;
        bool zeroCounters = false;
        bool genHtml = false;
        Path genHtmlOutput = variables.get<Path>(GEN_HTML_OUTPUT_KEY).value();
        string genHtmlTitle("Hello");
        CommandLineArgs genHtmlCommandLine;
        vector<string> excludes;
        // Capture every task the plugin executes so it can be compared below
        ExecutorStub executor;
        ExecuteCallback executeCallback = [&executor](const Task& task) {
            executor.execute(task);
        };
        registerExecuteCallback(executeCallback);
        // Provide the nested run-command ('memory') with a plugin registry
        FleetingOptionsStub fleetingOptions;
        ExecutePlugin::push(
            gsl::not_null<config::FleetingOptionsInterface*>(&fleetingOptions));
        ExecutePlugin::push(SettingsNode(PLUGIN_NAME));
        ExecutePlugin::push(Patterns(patterns));
        ExecutePlugin::push(
            Plugins({{"Memory",
                      shared_ptr<Plugin>(new execHelper::plugins::Memory())}}));
        COMBINATIONS("Change the info file") {
            infoFile = "/tmp";
            REQUIRE(variables.replace(INFO_FILE_KEY, infoFile.native()));
        }
        COMBINATIONS("Change the base directory") {
            baseDir /= "tmp";
            REQUIRE(variables.replace(BASE_DIR_KEY, baseDir.native()));
        }
        COMBINATIONS("Change the directory") {
            dir /= "..";
            REQUIRE(variables.replace(DIR_KEY, dir.native()));
        }
        COMBINATIONS("Add a command line") {
            commandLine = {"{" + pattern1.getKey() + "}",
                           "{" + pattern2.getKey() + "}"};
            REQUIRE(variables.add(COMMAND_LINE_KEY, commandLine));
        }
        COMBINATIONS("Switch on zero counters") {
            zeroCounters = true;
            REQUIRE(variables.replace(ZERO_COUNTERS_KEY, "yes"));
        }
        COMBINATIONS("Switch on html generation") {
            genHtml = true;
            REQUIRE(variables.replace(GEN_HTML_KEY, "yes"));
        }
        COMBINATIONS("Set html output") {
            genHtmlOutput = "/tmp/blaat";
            REQUIRE(
                variables.replace(GEN_HTML_OUTPUT_KEY, genHtmlOutput.native()));
        }
        COMBINATIONS("Set html title") {
            genHtmlTitle = "World!";
            REQUIRE(variables.replace(GEN_HTML_TITLE_KEY, genHtmlTitle));
        }
        COMBINATIONS("Set gen html command line") {
            genHtmlCommandLine = {"{" + pattern1.getKey() + "}",
                                  "{" + pattern2.getKey() + "}"};
            REQUIRE(variables.replace(GEN_HTML_COMMAND_LINE_KEY,
                                      genHtmlCommandLine));
        }
        COMBINATIONS("Set excludes") {
            excludes = {"exclude1", "{" + pattern1.getKey() + "}"};
            REQUIRE(variables.replace(EXCLUDES_KEY, excludes));
        }
        // Build the expected task sequence: zerocounters, capture, remove
        // (excludes) and genhtml, in that order
        ExecutorStub::TaskQueue expectedTasks;
        if(zeroCounters) {
            Task zeroCountersTask({PLUGIN_NAME, "--base-directory",
                                   baseDir.native(), "--directory",
                                   dir.native(), "--zerocounters"});
            zeroCountersTask.append(commandLine);
            expectedTasks.emplace_back(zeroCountersTask);
        }
        for(const auto& command : runCommands) {
            REQUIRE(variables.add(RUN_COMMAND, command));
        }
        Task captureTask({PLUGIN_NAME, "--base-directory", baseDir.native(),
                          "--directory", dir.native(), "--capture", "--output",
                          infoFile.native()});
        captureTask.append(commandLine);
        expectedTasks.emplace_back(captureTask);
        if(!excludes.empty()) {
            Task excludeTask({PLUGIN_NAME, "--remove", infoFile.native()});
            for(const auto& exclude : excludes) {
                // Excludes are passed to lcov as quoted patterns
                excludeTask.append(
                    string(R"(")").append(exclude).append(R"(")"));
            }
            excludeTask.append({"--output-file", infoFile.native()});
            excludeTask.append(commandLine);
            expectedTasks.emplace_back(excludeTask);
        }
        if(genHtml) {
            // NOTE(review): this Task deliberately shadows the bool 'genHtml'
            // in the inner scope -- consider renaming for readability
            Task genHtml({"genhtml", "--output-directory",
                          genHtmlOutput.native(), "--title", genHtmlTitle});
            genHtml.append(genHtmlCommandLine);
            genHtml.append(infoFile.native());
            expectedTasks.emplace_back(genHtml);
        }
        const ExecutorStub::TaskQueue replacedTasks =
            getExpectedTasks(expectedTasks, patterns);
        THEN_WHEN("We apply the plugin") {
            bool returnCode = plugin.apply(task, variables, patterns);
            THEN_CHECK("It should succeed") { REQUIRE(returnCode); }
            THEN_CHECK("It called the right commands") {
                REQUIRE(replacedTasks == executor.getExecutedTasks());
            }
        }
        // Undo the pushed global state so scenarios stay independent
        ExecutePlugin::popPlugins();
        ExecutePlugin::popFleetingOptions();
        ExecutePlugin::popSettingsNode();
        ExecutePlugin::popPatterns();
    }
}
} // namespace execHelper::plugins::test
<file_sep>targets = list(config['targets'])
if targets == nil then
input_error('Undefined selector target: you must define at least one target to select using the "targets" keyword.')
end
run_target(task, targets)
<file_sep>#ifndef WORKING_DIRECTORY_INCLUDE
#define WORKING_DIRECTORY_INCLUDE
#include <string>
#include <gsl/string_span>
#include "config/commandLineOptions.h"
#include "config/variablesMap.h"
#include "core/task.h"
namespace execHelper {
namespace config {
class FleetingOptionsInterface;
} // namespace config
} // namespace execHelper
namespace execHelper {
namespace plugins {
using WorkingDir = std::string;
const gsl::czstring<> WORKING_DIR_KEY = "working-dir";
/**
* \brief Extends the functionality to add the working directory configuration variables of the process to execute, using the _working-dir_ key in the configuration
*/
struct WorkingDirectory {
/**
* Adds the variables for this functionality to the given variables map
*/
inline static void
getVariables(config::VariablesMap& /*variables*/,
const config::FleetingOptionsInterface& /*options*/) noexcept {
;
}
/*! @copydoc JobsLong::apply(core::Task&, const config::VariablesMap&)
*/
inline static void apply(core::Task& task,
const config::VariablesMap& variables) noexcept {
auto workingDir = variables.get<WorkingDir>(WORKING_DIR_KEY);
if(workingDir) {
task.setWorkingDirectory(*workingDir);
}
}
};
} // namespace plugins
} // namespace execHelper
#endif /* WORKING_DIRECTORY_INCLUDE */
<file_sep>#ifndef __TASK_H__
#define __TASK_H__
#include <filesystem>
#include <map>
#include <string>
#include <vector>
#include "config/environment.h"
#include "config/path.h"
namespace execHelper {
namespace core {
// A task is represented as an ordered list of command line arguments
using TaskCollection = std::vector<std::string>;

/**
 * \brief Represents a task to execute
 *
 * A task consists of its command line arguments, the environment to execute
 * it in and the working directory to execute it from.
 */
class Task {
  public:
    /**
     * Create a task
     *
     */
    Task() noexcept : Task(std::vector<std::string>()) {}

    /**
     * Create a task
     *
     * \param[in] subtasks The task subdivided in separate arguments
     */
    explicit Task(const std::initializer_list<std::string>& subtasks) noexcept
        : Task(std::vector<std::string>(subtasks)) {}

    /**
     * Create a task
     *
     * \param[in] subtasks The task subdivided in separate arguments
     */
    explicit Task(const std::vector<std::string>& subtasks) noexcept
        : Task(subtasks, {}, std::filesystem::current_path()) {}

    /**
     * Create a task
     *
     * \param[in] subtasks The task subdivided in separate arguments
     * \param[in] environment The initial environment to use for the task
     * \param[in] workingDirectory The working directory from which to execute the task
     */
    Task(std::vector<std::string> subtasks,
         config::EnvironmentCollection environment,
         config::Path workingDirectory) noexcept;

    /**
     * Returns the task
     *
     * \returns An ordered collection of all the arguments that are contained in
     * the task
     */
    const TaskCollection& getTask() const noexcept;

    /**
     * Returns the task as a string
     *
     * \returns The task as a string
     */
    std::string toString() const;

    /**
     * Returns the environment in which to execute the task
     *
     * \returns A collection of the environment
     */
    const config::EnvironmentCollection& getEnvironment() const noexcept;

    /**
     * Sets the working directory of the task
     *
     * \param[in] workingDirectory The new working directory to set
     */
    void setWorkingDirectory(const config::Path& workingDirectory) noexcept;

    /**
     * Gets the working directory of the task
     *
     * \returns The working directory associated with this task
     */
    const config::Path& getWorkingDirectory() const noexcept;

    /**
     * Append to this task
     *
     * \param[in] taskPart The part to add to the task
     * \returns True If the part was successfully added to the task
     *          False Otherwise
     */
    bool append(const std::string& taskPart) noexcept;

    /*! @copydoc append(const std::string&)
     */
    bool append(std::string&& taskPart) noexcept;

    /*! @copydoc append(const std::string&)
     */
    bool append(const TaskCollection& taskPart) noexcept;

    /*! @copydoc append(const std::string&)
     */
    bool append(TaskCollection&& taskPart) noexcept;

    /**
     * Set the environment of the task. Replaces the existing environment for the task.
     *
     * \param[in] env The environment to set for the task
     * \returns True If the new environment was successfully set
     *          False Otherwise
     */
    bool setEnvironment(const config::EnvironmentCollection& env) noexcept;

    /*! @copydoc setEnvironment(const config::EnvironmentCollection&)
     */
    bool setEnvironment(config::EnvironmentCollection&& env) noexcept;

    /**
     * Add or replace an additional value to the environment of the task.
     * If the new key already exists in the environment, the existing value for
     * the key is overwritten.
     *
     * \param[in] newValue The new value to add or replace to the task
     * \returns True If the new value was successfully appended to the task
     *          False Otherwise
     */
    bool appendToEnvironment(config::EnvironmentValue&& newValue) noexcept;

    /*! @copydoc appendToEnvironment(config::EnvironmentValue&&)
     */
    bool appendToEnvironment(config::EnvironmentCollection&& newValue) noexcept;

    /**
     * Checks whether other instance equals this instance of the object
     *
     * \param[in] other The other instance to compare with
     * \returns True If the other instance is equal to this instance of the
     * object False Otherwise
     */
    bool operator==(const Task& other) const noexcept;

    /**
     * Checks whether other instance does not equal this instance of the object
     *
     * \param[in] other The other instance to compare with
     * \returns ! \ref operator==(const Task&) const
     */
    bool operator!=(const Task& other) const noexcept;

  private:
    TaskCollection m_task;                // The ordered command line arguments
    config::EnvironmentCollection m_env;  // Environment to execute the task in
    config::Path m_workingDirectory;      // Directory to execute the task from
};

using Tasks = std::vector<Task>;

/**
 * Adds the details of the Task object to the given stream
 *
 * \param[in] os The stream to add the details to
 * \param[in] task The task to add the details from
 * \returns The given stream expanded with the details of the given task
 */
std::ostream& operator<<(std::ostream& os, const Task& task) noexcept;
} // namespace core
} // namespace execHelper
#endif /* __TASK_H__ */
<file_sep>#ifndef __PATTERN_H__
#define __PATTERN_H__
#include <map>
#include <optional>
#include <string>
#include <vector>
namespace execHelper::config {
// Type aliases describing a pattern: a key, its candidate values and the
// command line options that can be used to select values for it
using PatternKey = std::string;
using PatternValue = std::string;
using PatternKeys = std::vector<PatternKey>;
using PatternValues = std::vector<PatternValue>;
using ShortOption = std::optional<char>;
using LongOption = std::optional<std::string>;
// One concrete choice of a value for each pattern key
using PatternCombinations = std::map<PatternKey, PatternValue>;

/**
 * \brief A pattern that can be used in expressions
 */
class Pattern {
  public:
    /**
     * Constructor
     *
     * \param[in] patternKey The key of the pattern
     * \param[in] values The initial values for the pattern
     * \param[in] shortOption The short option associated with the pattern
     * \param[in] longOption The long option associated with the pattern
     */
    explicit Pattern(PatternKey patternKey, PatternValues values = {},
                     ShortOption shortOption = std::nullopt,
                     LongOption longOption = std::nullopt) noexcept;

    /**
     * Equality operator
     *
     * \param[in] other The other object to compare with
     * \returns True If the other object is considered equal
     *          False Otherwise
     */
    auto operator==(const Pattern& other) const noexcept -> bool;

    /**
     * Inequality operator
     *
     * \param[in] other The other object to compare with
     * \returns ! \ref operator==(const Pattern& other) const
     *
     */
    auto operator!=(const Pattern& other) const noexcept -> bool;

    /**
     * Getter for the key
     *
     * \returns The key
     */
    [[nodiscard]] auto getKey() const noexcept -> const PatternKey&;

    /**
     * Getter for the values
     *
     * \returns The values
     */
    [[nodiscard]] auto getValues() const noexcept -> const PatternValues&;

    /**
     * Set the values for this pattern
     *
     * \param[in] values The new values
     * \returns True if the values were successfully set
     *          False otherwise
     */
    [[nodiscard]] auto setValues(PatternValues values) noexcept -> bool;

    /**
     * Getter for the short option
     *
     * \returns The short option
     */
    [[nodiscard]] auto getShortOption() const noexcept -> const ShortOption&;

    /**
     * Getter for the long option
     *
     * \returns The long option
     */
    [[nodiscard]] auto getLongOption() const noexcept -> const LongOption&;

  private:
    PatternKey m_key;          // The identifier of the pattern
    PatternValues m_values;    // The candidate values for the pattern
    ShortOption m_shortOption; // Optional short command line option
    LongOption m_longOption;   // Optional long command line option
};

using Patterns = std::vector<Pattern>;

/**
 * Streaming operator for pattern objects
 *
 * \param os The stream to stream to
 * \param pattern The pattern to stream
 * \returns os
 */
auto operator<<(std::ostream& os, const Pattern& pattern) noexcept
    -> std::ostream&;
} // namespace execHelper::config
#endif /* __PATTERN__H__ */
<file_sep>#ifndef __YAML_H__
#define __YAML_H__
#include <memory>
#include <string>
#include <vector>
#include "config/configInputFile.h"
#include "config/path.h"
#include "config/settingsNode.h"
#include "yamlWrapper.h"
namespace execHelper {
namespace yaml {
/**
 * \brief Interface to reading YAML files
 *
 * Wraps a YamlWrapper instance and exposes the parsed content either as
 * single values, collections of values or whole configuration subtrees.
 */
class Yaml : public config::ConfigInputFile {
  public:
    /**
     * Constructor
     *
     * \param[in] file Path to the file to parse
     */
    explicit Yaml(const config::Path& file);

    /**
     * Constructor
     *
     * \param[in] yamlConfig The content to parse
     */
    explicit Yaml(const std::string& yamlConfig);

    /*! @copydoc config::Argv::Argv(const Argv&)
     */
    Yaml(const Yaml& other) = delete;

    /*! @copydoc config::Argv::Argv(Argv&&)
     */
    Yaml(Yaml&& other) noexcept = delete;

    ~Yaml() override = default;

    /*! @copydoc config::Argv::operator=(const Argv&)
     */
    Yaml& operator=(const Yaml& other) = delete;

    /*! @copydoc config::Argv::operator=(Argv&&)
     */
    Yaml& operator=(Yaml&& other) noexcept = delete;

    /**
     * Returns the value associated with the given key list
     *
     * \param[in] keys A collection of keys to follow
     * \returns The associated value
     */
    std::string getValue(const std::initializer_list<std::string>& keys);

    /**
     * Returns the collection of values associated with the given key list
     *
     * \param[in] keys A collection of keys to follow
     * \return The associated values
     */
    std::vector<std::string>
    getValueCollection(const std::initializer_list<std::string>& keys);

    bool getTree(const std::initializer_list<std::string>& keys,
                 config::SettingsNode* settings) const noexcept override;

  private:
    YamlWrapper m_yaml; // The wrapped YAML document
};
} // namespace yaml
} // namespace execHelper
#endif /*__YAML_H__*/
<file_sep>#ifndef __TEST_UTILS_H__
#define __TEST_UTILS_H__
// BDD-style section markers: both macros expand to nothing, so the block
// following THEN_WHEN(...)/THEN_CHECK(...) always executes unconditionally
// and the description serves as documentation only
#define THEN_WHEN(x)
#define THEN_CHECK(x)
#include <initializer_list>
#include <memory>
#include <string>
#include <vector>
#include <gsl/gsl>
#include <gsl/string_span>
#include "base-utils/yaml.h"
#include "config/path.h"
#include "config/pattern.h"
#include "plugins/pluginUtils.h"
#include "executorStub.h"
// Forward declarations to avoid pulling in the full headers
namespace execHelper {
namespace config {
class SettingsNode;
} // namespace config
namespace test {
namespace baseUtils {
class ConfigFileWriter;
} // namespace baseUtils
} // namespace test
} // namespace execHelper

// NOTE(review): adding operator<< overloads to namespace std is formally
// undefined behavior per the C++ standard (only specializations of templates
// on user-defined types are allowed) -- consider moving these overloads into
// a project namespace
namespace std {
// Prints a vector as a comma-separated list (trailing separator included)
template <typename T>
std::ostream& operator<<(std::ostream& os, const std::vector<T>& stream) {
    for(const auto& element : stream) {
        os << element << ", ";
    }
    return os;
}

// Prints a pair as "first: second"
template <typename T, typename U>
std::ostream& operator<<(std::ostream& os, const std::pair<T, U>& stream) {
    os << stream.first << ": " << stream.second;
    return os;
}
} // namespace std

namespace execHelper {
namespace test {
namespace utils {
using Patterns = std::vector<config::Pattern>;
using Arguments = std::vector<std::string>;

// Owns an argc/argv pair built from a list of string arguments, for calling
// main-style entry points in tests
struct MainVariables {
    int argc;
    std::unique_ptr<char*[]> argv;

    explicit MainVariables(const Arguments& arguments);
};

// Appends the content of appendFrom to appendTo (appendTo is modified)
template <typename T> void appendVectors(T& appendTo, const T& appendFrom) {
    appendTo.insert(std::end(appendTo), std::begin(appendFrom),
                    std::end(appendFrom));
}

// Serialization helpers: turn settings and patterns into YAML/config text
baseUtils::YamlWriter toYaml(const config::SettingsNode& settings,
                             const config::Patterns& patterns) noexcept;
void writeSettingsFile(
    gsl::not_null<baseUtils::ConfigFileWriter*> configFileWriter,
    const config::SettingsNode& settings,
    const config::Patterns& patterns) noexcept;
std::string convertToConfig(const Patterns& patterns) noexcept;
std::string
convertToConfig(const config::SettingsNode& rootSettings,
                const std::string& prepend = std::string()) noexcept;
std::string
convertToConfig(const config::SettingsNode& settings, const Patterns& patterns,
                const std::string& prepend = std::string()) noexcept;
std::string convertToConfig(std::string key, std::string value,
                            const std::string& prepend = std::string());
std::string convertToConfig(const std::string& key,
                            const std::initializer_list<std::string>& values,
                            const std::string& prepend = std::string());
std::string convertToConfig(const std::string& key,
                            const std::vector<std::string>& values,
                            const std::string& prepend = std::string());
std::string
convertToConfig(const std::initializer_list<std::string>& keys,
                const std::string& value,
                const std::string& prepend = std::string()) noexcept;
std::string
convertToConfig(const std::initializer_list<std::string>& keys,
                const std::initializer_list<std::string>& values,
                const std::string& prepend = std::string()) noexcept;
std::string
convertToConfig(const std::initializer_list<std::string>& keys,
                const std::vector<std::string>& values,
                const std::string& prepend = std::string()) noexcept;
std::string
convertToConfig(const std::vector<std::string>& keys,
                const std::initializer_list<std::string>& values,
                const std::string& prepend = std::string()) noexcept;
std::string
convertToConfig(const std::vector<std::string>& keys,
                const std::vector<std::string>& values,
                const std::string& prepend = std::string()) noexcept;

std::string basename(const std::string& file);

// Pattern helpers for constructing and permuting pattern combinations
config::PatternCombinations createPatternCombination(
    const std::initializer_list<config::PatternKey>& keys,
    const std::initializer_list<config::PatternValue>& values) noexcept;
config::PatternCombinations
createPatternCombination(const config::PatternKeys& keys,
                         const config::PatternValues& values) noexcept;
plugins::PatternPermutator
makePatternPermutator(const config::Patterns& patterns) noexcept;

// Expands the given task(s) over every pattern combination
core::test::ExecutorStub::TaskQueue
getExpectedTasks(const core::Task& task,
                 const config::Patterns patterns) noexcept;
core::test::ExecutorStub::TaskQueue
getExpectedTasks(const core::test::ExecutorStub::TaskQueue& tasks,
                 const config::Patterns patterns) noexcept;

std::string toString(const config::SettingsNode& settings,
                     unsigned int nbOfTabs = 0) noexcept;
std::string inheritWorkingDirKey() noexcept;
Patterns getPredefinedPatterns() noexcept;
} // namespace utils
} // namespace test
} // namespace execHelper
#endif /* __TEST_UTILS_H__ */
<file_sep>#include "fleetingOptions.h"
#include <algorithm>
#include <thread>
#include <boost/lexical_cast.hpp>
#include "commandLineOptions.h"
#include "log/logLevel.h"
#include "logger.h"
using std::back_inserter;
using std::string;
using std::thread;
using std::transform;
using boost::lexical_cast;
using execHelper::log::LogLevel;
namespace {
/**
 * Converts a collection of strings to the corresponding collection of paths.
 *
 * \param[in] toConvert The strings to convert
 * \returns The paths, in the same order as the input strings
 */
inline auto toPaths(const std::vector<std::string>& toConvert) noexcept
    -> execHelper::config::Paths {
    execHelper::config::Paths converted;
    for(const auto& entry : toConvert) {
        converted.emplace_back(entry);
    }
    return converted;
}
} // namespace
namespace execHelper::config {
/**
 * Construct the fleeting options from a parsed options map.
 *
 * Every option falls back to a documented default when it is absent from the
 * map. The number of jobs is resolved afterwards: "auto" maps to the hardware
 * concurrency, any other value is parsed as a number (keeping the current
 * value and logging a warning on a parse failure).
 */
FleetingOptions::FleetingOptions(const VariablesMap& optionsMap) noexcept
    : m_help(optionsMap.get<HelpOption_t>(HELP_OPTION_KEY).value_or(false)),
      m_version(optionsMap.get<VersionOption_t>(VERSION_KEY).value_or(false)),
      m_verbose(optionsMap.get<VerboseOption_t>(VERBOSE_KEY).value_or(false)),
      m_dryRun(optionsMap.get<DryRunOption_t>(DRY_RUN_KEY).value_or(false)),
      m_keepGoing(
          optionsMap.get<KeepGoingOption_t>(KEEP_GOING_KEY).value_or(false)),
      m_jobs(1U),
      m_logLevel(
          optionsMap.get<LogLevelOption_t>(LOG_LEVEL_KEY).value_or("warning")),
      m_listPlugins(optionsMap.get<ListPluginsOption_t>(LIST_PLUGINS_KEY)
                        .value_or(false)),
      m_appendSearchPaths(toPaths(
          optionsMap.get<AppendSearchPathOption_t>(APPEND_SEARCH_PATH_KEY)
              .value_or(AppendSearchPathOption_t()))),
      m_commands(optionsMap.get<CommandCollection>(COMMAND_KEY)
                     .value_or(CommandCollection())),
      m_autocomplete(
          optionsMap.get<AutoCompleteOption_t>(string(AUTO_COMPLETE_KEY))) {
    auto jobs = optionsMap.get<JobsOption_t>(JOBS_KEY).value_or("auto");
    if(jobs == "auto") {
        // Use one job per available hardware thread
        m_jobs = thread::hardware_concurrency();
    } else {
        try {
            m_jobs = lexical_cast<Jobs_t>(jobs);
        } catch(const boost::bad_lexical_cast&) {
            // Keep the initial value of m_jobs when the input is not numeric
            LOG(warning)
                << "Bad lexical cast for the number of jobs. Using default.";
        }
    }
}
/**
 * Equality operator: compares all fleeting option members.
 *
 * Note: previously m_version, m_keepGoing, m_listPlugins, m_appendSearchPaths
 * and m_autocomplete were omitted from the comparison, so two option sets
 * differing only in those fields compared equal. All members initialized by
 * the constructor are now taken into account.
 *
 * \param[in] other The other instance to compare with
 * \returns True if both instances hold the same options, false otherwise
 */
auto FleetingOptions::operator==(const FleetingOptions& other) const -> bool {
    return m_help == other.m_help && m_version == other.m_version &&
           m_verbose == other.m_verbose && m_dryRun == other.m_dryRun &&
           m_keepGoing == other.m_keepGoing && m_jobs == other.m_jobs &&
           m_logLevel == other.m_logLevel &&
           m_listPlugins == other.m_listPlugins &&
           m_appendSearchPaths == other.m_appendSearchPaths &&
           m_commands == other.m_commands &&
           m_autocomplete == other.m_autocomplete;
}

/**
 * Inequality operator
 *
 * \param[in] other The other instance to compare with
 * \returns The negation of \ref operator==(const FleetingOptions&) const
 */
auto FleetingOptions::operator!=(const FleetingOptions& other) const -> bool {
    return !(*this == other);
}
// Trivial accessors for the parsed fleeting (command-line) options.
auto FleetingOptions::getHelp() const noexcept -> HelpOption_t {
    return m_help;
}
auto FleetingOptions::getVersion() const noexcept -> VersionOption_t {
    return m_version;
}
auto FleetingOptions::getVerbosity() const noexcept -> VerboseOption_t {
    return m_verbose;
}
auto FleetingOptions::getDryRun() const noexcept -> DryRunOption_t {
    return m_dryRun;
}
auto FleetingOptions::getKeepGoing() const noexcept -> KeepGoingOption_t {
    return m_keepGoing;
}
auto FleetingOptions::getJobs() const noexcept -> Jobs_t { return m_jobs; }
auto FleetingOptions::getCommands() const noexcept -> const CommandCollection& {
    return m_commands;
}
// Convert the stored log level string to a LogLevel value. Falls back to
// log::none (and logs a warning) when the stored string does not name a
// valid log level.
auto FleetingOptions::getLogLevel() const noexcept -> LogLevel {
    try {
        return log::toLogLevel(m_logLevel);
    } catch(const log::InvalidLogLevel&) {
        LOG(warning) << "Invalid log level given. Using default.";
        return log::none;
    }
}
auto FleetingOptions::listPlugins() const noexcept -> ListPluginsOption_t {
    return m_listPlugins;
}
auto FleetingOptions::appendedSearchPaths() const noexcept -> const Paths& {
    return m_appendSearchPaths;
}
// Build a VariablesMap containing the default value for every fleeting
// option. Failures to add a default are logged but otherwise ignored, so the
// returned map may be incomplete if SettingsNode::add ever fails.
auto FleetingOptions::getDefault() noexcept -> VariablesMap {
    VariablesMap defaults("exec-helper");
    if(!defaults.add(HELP_OPTION_KEY, "no")) {
        LOG(error) << "Failed to add help default option value";
    }
    if(!defaults.add(VERSION_KEY, "no")) {
        LOG(error) << "Failed to add version default option value";
    }
    if(!defaults.add(VERBOSE_KEY, "no")) {
        LOG(error) << "Failed to add verbose default option value";
    }
    if(!defaults.add(DRY_RUN_KEY, "no")) {
        LOG(error) << "Failed to add dry run default option value";
    }
    if(!defaults.add(KEEP_GOING_KEY, "no")) {
        LOG(error) << "Failed to add keep going default option value";
    }
    if(!defaults.add(JOBS_KEY, "auto")) {
        LOG(error) << "Failed to add jobs default option value";
    }
    // Added without a value: presumably the settings file is resolved
    // elsewhere when this key is empty — TODO confirm.
    if(!defaults.add(SETTINGS_FILE_KEY)) {
        LOG(error) << "Failed to add settings file default option value";
    }
    // NOTE(review): the default here is "none", while the constructor falls
    // back to "warning" when the key is missing — confirm this is intended.
    if(!defaults.add(LOG_LEVEL_KEY, "none")) {
        LOG(error) << "Failed to add log level default option value";
    }
    if(!defaults.add(LIST_PLUGINS_KEY, "no")) {
        LOG(error) << "Failed to add 'list plugins' default option value";
    }
    if(!defaults.add(APPEND_SEARCH_PATH_KEY, AppendSearchPathOption_t())) {
        LOG(error) << "Failed to add 'append-search-path' default option value";
    }
    if(!defaults.add(COMMAND_KEY, CommandCollection())) {
        LOG(error) << "Failed to add commands default option value";
    }
    return defaults;
}
// Returns the shell auto-completion request, if one was given on the
// command line (std::nullopt otherwise).
auto FleetingOptions::getAutoComplete() const noexcept
    -> const std::optional<AutoCompleteOption_t>& {
    return m_autocomplete;
}
} // namespace execHelper::config
<file_sep>import pytest
from py.xml import html
def pytest_html_report_title(report):
    """pytest-html hook: set the title of the generated HTML test report."""
    title = "Exec-helper: System test report"
    report.title = title
<file_sep>#ifndef CONFIG_FILE_SEARCHER_INCLUDE
#define CONFIG_FILE_SEARCHER_INCLUDE
#include <optional>
#include <vector>
#include "path.h"
namespace execHelper {
namespace config {
/**
 * \brief Searches the config in the order of the given search paths
 */
class ConfigFileSearcher {
  public:
    /**
     * Constructs a config file searcher
     *
     * \param[in] searchPaths The paths to search for ordered by which paths to
     * look in first.
     */
    explicit ConfigFileSearcher(Paths searchPaths) noexcept;
    /**
     * Find the given filename
     *
     * \param[in] filename The filename to look for in the given search paths
     * \returns The path in search path with the highest importance under which
     * the filename was found. std::nullopt Otherwise
     */
    auto find(const Path& filename) noexcept -> std::optional<Path>;
  private:
    const Paths m_searchPaths; //!< Search paths, highest priority first
};
} // namespace config
} // namespace execHelper
#endif /* CONFIG_FILE_SEARCHER_INCLUDE */
<file_sep>#include <array>
#include <sstream>
#include <vector>
#include <gsl/string_span>
#include "config/argv.h"
#include "unittest/catch.h"
using std::array;
using std::move;
using std::string;
using std::stringstream;
using std::vector;
using gsl::czstring;
namespace execHelper::config::test {
// Verifies that an Argv constructed either from a raw (argc, argv) pair or
// from a collection of strings exposes: the same argument count, a
// null-terminated argv array, and element access via operator[] — for both
// mutable and const instances.
SCENARIO("Test argv construction", "[config][argv]") {
    GIVEN("An array of string literals") {
        const int argc = 3U;
        // One extra slot so argv[argc] exists (mirrors the C runtime layout).
        array<czstring<>, argc + 1U> argv = {{"Hello", "world", "!!!", ""}};
        WHEN("We create the argv") {
            Argv args(argc, &(argv[0]));
            THEN("The number of arguments should match") {
                REQUIRE(argc == args.getArgc());
            }
            THEN("The associated argv char array should be returned") {
                char** returnedArgv = args.getArgv();
                for(int i = 0U; i < argc; ++i) {
                    REQUIRE(
                        returnedArgv // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                            [i] == args[i]);
                }
            }
            THEN("argv[argc] should be a nullptr") {
                char** returnedArgv = args.getArgv();
                REQUIRE(
                    returnedArgv // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                        [argc] == nullptr);
            }
            THEN(
                "We should find the right elements using the access operator") {
                for(size_t i = 0U; i < argc; ++i) {
                    REQUIRE(
                        string(args[i]) ==
                        string(
                            argv[i])); // NOLINT(cppcoreguidelines-pro-bounds-constant-array-index)
                }
            }
        }
        WHEN("We create the const argv") {
            const Argv args(argc, &argv[0]);
            THEN("The number of arguments should match") {
                REQUIRE(argc == args.getArgc());
            }
            THEN("The associated argv char array should be returned") {
                const char* const* returnedArgv = args.getArgv();
                for(int i = 0U; i < argc; ++i) {
                    REQUIRE(
                        returnedArgv // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                            [i] == args[i]);
                }
            }
            THEN("argv[argc] should be a nullptr") {
                const char* const* returnedArgv = args.getArgv();
                REQUIRE(
                    returnedArgv // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                        [argc] == nullptr);
            }
            THEN(
                "We should find the right elements using the access operator") {
                for(size_t i = 0U; i < argc; ++i) {
                    REQUIRE(
                        string(args[i]) ==
                        string(
                            argv[i])); // NOLINT(cppcoreguidelines-pro-bounds-constant-array-index)
                }
            }
        }
    }
    GIVEN("A taskcollection to take the arguments from") {
        const vector<string> args({"arg1", "arg2", "arg3"});
        WHEN("We create the argv") {
            Argv argv(args);
            THEN("The number of arguments should match") {
                REQUIRE(args.size() == argv.getArgc());
            }
            THEN("The associated argv char array should be returned") {
                char** returnedArgv = argv.getArgv();
                for(size_t i = 0U; i < args.size(); ++i) {
                    REQUIRE(
                        returnedArgv // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                            [i] == args[i]);
                }
            }
            THEN("argv[argc] should be a nullptr") {
                char** returnedArgv = argv.getArgv();
                REQUIRE(
                    returnedArgv // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                        [args.size()] == nullptr);
            }
            THEN(
                "We should find the right elements using the access operator") {
                for(size_t i = 0U; i < args.size(); ++i) {
                    REQUIRE(args[i] == argv[i]);
                }
            }
        }
        WHEN("We create the const argv") {
            const Argv argv(args);
            THEN("The number of arguments should match") {
                REQUIRE(args.size() == argv.getArgc());
            }
            THEN("The associated argv char array should be returned") {
                const char* const* returnedArgv = argv.getArgv();
                for(size_t i = 0U; i < args.size(); ++i) {
                    REQUIRE(
                        returnedArgv // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                            [i] ==
                        args[i]);
                }
            }
            THEN("argv[argc] should be a nullptr") {
                const char* const* returnedArgv = argv.getArgv();
                REQUIRE(
                    returnedArgv // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                        [args.size()] == nullptr);
            }
            THEN(
                "We should find the right elements using the access operator") {
                for(size_t i = 0U; i < args.size(); ++i) {
                    REQUIRE(args[i] == argv[i]);
                }
            }
        }
    }
}
// Verifies the special member functions of Argv: the copy constructor and
// copy assignment perform a deep copy (new pointer values, same contents),
// while move construction/assignment and swap transfer contents.
SCENARIO("Test the argv copy and move constructor, assignment operators and "
         "the swap operator",
         "[config][argv]") {
    GIVEN("An argv object to copy") {
        const vector<string> args({"arg1", "arg2", "arg3", "arg4"});
        Argv argv(args);
        WHEN("We copy the given object") {
            Argv copy( // NOLINT(performance-unnecessary-copy-initialization)
                argv);
            // Save the pointers to the arguments
            vector<char*> argvAddresses;
            for(size_t i = 0U; i < argv.getArgc(); ++i) {
                argvAddresses.push_back(argv[i]);
            }
            THEN("The values of the pointers must be equal") {
                REQUIRE(args.size() == copy.getArgc());
                for(size_t i = 0U; i < args.size(); ++i) {
                    REQUIRE(args[i] == copy[i]);
                }
            }
            THEN("It must have been a deep copy/the actual pointer must be "
                 "different") {
                REQUIRE(argvAddresses.size() == copy.getArgc());
                for(size_t i = 0U; i < argvAddresses.size(); ++i) {
                    REQUIRE(argvAddresses[i] != copy[i]);
                }
            }
        }
        WHEN("We copy assign the given object") {
            Argv assign(vector<string>({"copy-assign1", "copy-assign2"}));
            assign = argv;
            // Save the pointers to the arguments
            vector<char*> argvAddresses;
            for(size_t i = 0U; i < argv.getArgc(); ++i) {
                argvAddresses.push_back(argv[i]);
            }
            THEN("They must be equal") {
                REQUIRE(args.size() == assign.getArgc());
                for(size_t i = 0U; i < args.size(); ++i) {
                    REQUIRE(args[i] == assign[i]);
                }
            }
            THEN("It must have been a deep copy") {
                REQUIRE(argvAddresses.size() == assign.getArgc());
                for(size_t i = 0U; i < argvAddresses.size(); ++i) {
                    REQUIRE(argvAddresses[i] != assign[i]);
                }
            }
        }
        WHEN("We move the given object") {
            Argv copied(argv); // Make a copy that can be moved
            Argv moved(move(copied));
            THEN("We must find the expected content") {
                REQUIRE(args.size() == moved.getArgc());
                for(size_t i = 0U; i < args.size(); ++i) {
                    REQUIRE(args[i] == moved[i]);
                }
            }
        }
        WHEN("We move assign the given object") {
            Argv copied(argv); // Make a copy that can be moved
            Argv assign(vector<string>({"move-assign1", "move-assign2"}));
            assign = move(copied); // NOLINT(hicpp-invalid-access-moved)
            THEN("We must find the expected content") {
                REQUIRE(args.size() == assign.getArgc());
                for(size_t i = 0U; i < args.size(); ++i) {
                    REQUIRE(args[i] == assign[i]);
                }
            }
        }
        WHEN("We swap the given object") {
            const vector<string> swappedContent({"swap1", "swap2", "swap3"});
            Argv swapped(swappedContent);
            argv.swap(swapped); // NOLINT(hicpp-invalid-access-moved)
            THEN("We must find the expected content for the first object") {
                REQUIRE(argv.getArgc() == swappedContent.size());
                for(size_t i = 0U; i < swappedContent.size(); ++i) {
                    REQUIRE(argv[i] == swappedContent[i]);
                }
            }
            THEN("We must find the swapped content for the second object") {
                REQUIRE(args.size() == swapped.getArgc());
                for(size_t i = 0U; i < args.size(); ++i) {
                    REQUIRE(args[i] == swapped[i]);
                }
            }
        }
    }
}
// Verifies Argv's equality semantics: equality is identity-based (comparing
// the underlying pointers), so only an object compared with itself is equal;
// deep copies and independently-constructed equals compare unequal.
SCENARIO("Test the argv equality operators", "[config][argv]") {
    GIVEN("An argv object to compare with") {
        const vector<string> args({"eq1", "eq2"});
        const Argv argv(args);
        WHEN("We compare it with itself") {
            THEN("It should compare equal") {
                REQUIRE(argv == argv);
                REQUIRE_FALSE(argv != argv);
            }
        }
        WHEN("We copy the object and compare") {
            const Argv
                copy( // NOLINT(performance-unnecessary-copy-initialization)
                    argv);
            THEN("They should not be equal") {
                // Note: this is due to the fact that we expect the copy
                // constructor to make a deep copy
                REQUIRE(argv != copy);
                REQUIRE_FALSE(argv == copy);
            }
        }
        WHEN("We create an object with the same parameters") {
            const Argv same(args);
            THEN("They should not compare equal") {
                REQUIRE(argv != same);
                REQUIRE_FALSE(argv == same);
            }
        }
    }
}
// Verifies that streaming an Argv produces a comma-separated list of its
// arguments ("stream1, stream2, stream3").
SCENARIO("Test the argv streaming operator", "[config][argv]") {
    GIVEN("An argv object to stream") {
        const vector<string> args({"stream1", "stream2", "stream3"});
        const Argv argv(args);
        WHEN("We stream the argv object") {
            stringstream stream;
            stream << argv;
            THEN("We must find the expected one") {
                // Build the reference string: ", "-separated arguments.
                stringstream correctStream;
                for(const auto& arg : args) {
                    if(arg != args.front()) {
                        correctStream << ", ";
                    }
                    correctStream << arg;
                }
                REQUIRE(stream.str() == correctStream.str());
            }
        }
    }
}
// Verifies that accessing one past the last argument yields nullptr rather
// than undefined behaviour.
// NOTE(review): the GIVEN text says "An empty object", but the object holds
// three arguments — the description looks stale.
SCENARIO("Error conditions for the access operator", "[config][argv]") {
    GIVEN("An empty object") {
        Argv argv(vector<string>({"arg1", "arg2", "arg3"}));
        WHEN("We access an out of bounds index") {
            auto* receivedArg = argv[argv.getArgc()];
            THEN("We should receive a nullptr") {
                REQUIRE(receivedArg == nullptr);
            }
        }
    }
}
} // namespace execHelper::config::test
<file_sep>task:add_args({'clang-tidy'})
-- Assemble the clang-tidy invocation: compilation database directory,
-- configured checks, warnings-as-errors, extra command-line options and the
-- source file globs; then register the resulting task.
task:add_args({'-p', one(config['build-dir']) or '.'})
checks = list(config['checks'])
if checks then
    task:add_args({'-checks=' .. table.concat(checks, ',')})
end
warning_as_errors = list(config['warning-as-errors'])
if warning_as_errors then
    -- Fix: clang-tidy's flag is '-warnings-as-errors=' (plural); the previous
    -- '-warning-as-errors=' is not recognized by clang-tidy.
    if warning_as_errors[1] == 'auto' then
        -- 'auto' mirrors the configured checks. Guard against 'auto' being
        -- used while no checks are configured: 'checks' would be nil and
        -- table.concat(nil, ...) raises an error.
        if checks then
            task:add_args({'-warnings-as-errors=' .. table.concat(checks, ',')})
        end
    else
        task:add_args({'-warnings-as-errors=' .. table.concat(warning_as_errors, ',')})
    end
end
task:add_args(get_commandline())
task:add_args(list(config['sources']) or {"*.cpp"})
register_task(task)
<file_sep>#ifndef HANDLERS_INCLUDE
#define HANDLERS_INCLUDE
#include <algorithm>
#include <filesystem>
#include "config/environment.h"
#include "config/settingsNode.h"
#include "core/task.h"
#include "plugins/commandLine.h"
#include "unittest/catch.h"
namespace execHelper::plugins::test {
// Registers every (name, value) pair of the given environment collection in
// the plugin configuration and mirrors the whole collection on the expected
// task, so tests can compare against it later.
inline void handleEnvironment(const config::EnvironmentCollection& environment,
                              config::SettingsNode& config,
                              core::Task& expectedTask) noexcept {
    for(const auto& [name, value] : environment) {
        REQUIRE(config.add({"environment", name}, value));
    }
    expectedTask.setEnvironment(environment);
}
// Registers the given command line under the "command-line" config key and
// appends the same arguments to the expected task.
inline void handleCommandLine(const CommandLineArgs& commandLine,
                              config::SettingsNode& config,
                              core::Task& expectedTask) noexcept {
    REQUIRE(config.add("command-line", commandLine));
    expectedTask.append(commandLine);
}
// Registers the given working directory under the "working-dir" config key
// and sets the same directory on the expected task.
inline void handleWorkingDirectory(const std::filesystem::path& workingDir,
                                   config::SettingsNode& config,
                                   core::Task& expectedTask) noexcept {
    REQUIRE(config.add("working-dir", workingDir));
    expectedTask.setWorkingDirectory(workingDir);
}
// Registers the verbosity setting ("yes"/"no") in the configuration; when
// verbose, the given verbosity flag is also appended to the expected task.
inline void handleVerbosity(bool verbose, const std::string& flag,
                            config::SettingsNode& config,
                            core::Task& expectedTask) noexcept {
    const auto* configured = verbose ? "yes" : "no";
    REQUIRE(config.add("verbose", configured));
    if(verbose) {
        expectedTask.append(flag);
    }
}
} // namespace execHelper::plugins::test
#endif /* HANDLERS_INCLUDE */
<file_sep>#include <memory>
#include <optional>
#include <string>
#include <string_view>
#include <vector>
#include "config/commandLineOptions.h"
#include "config/environment.h"
#include "config/pattern.h"
#include "config/settingsNode.h"
#include "config/variablesMap.h"
#include "core/task.h"
#include "plugins/executePlugin.h"
#include "plugins/luaPlugin.h"
#include "plugins/memory.h"
#include "base-utils/nonEmptyString.h"
#include "unittest/catch.h"
#include "unittest/config.h"
#include "unittest/rapidcheck.h"
#include "utils/commonGenerators.h"
#include "executorStub.h"
#include "fleetingOptionsStub.h"
#include "handlers.h"
using std::optional;
using std::shared_ptr;
using std::string;
using std::string_view;
using std::vector;
using execHelper::config::Command;
using execHelper::config::EnvironmentCollection;
using execHelper::config::Patterns;
using execHelper::config::SettingsNode;
using execHelper::config::VariablesMap;
using execHelper::core::Task;
using execHelper::plugins::ExecutePlugin;
using execHelper::plugins::MemoryHandler;
using execHelper::core::test::ExecutorStub;
using execHelper::test::FleetingOptionsStub;
using execHelper::test::NonEmptyString;
using execHelper::test::propertyTest;
namespace filesystem = std::filesystem;
namespace {
// Configuration key under which the build command(s) are listed.
constexpr string_view buildCommandConfigKey{"build-command"};
// Path to the Lua script implementing the clang-static-analyzer plugin.
// PLUGINS_INSTALL_PATH is not defined in this file — presumably a
// compile-time definition supplied by the build system.
auto scriptPath() noexcept -> std::string {
    return string(PLUGINS_INSTALL_PATH) + "/clang-static-analyzer.lua";
}
} // namespace
namespace execHelper::plugins::test {
// Property-based test: for arbitrary combinations of working directory,
// environment, build commands, command line and verbosity, applying the
// clang-static-analyzer plugin must succeed and must execute one
// "scan-build" task per configured build command.
SCENARIO(
    "Testing the configuration settings of the clang-static-analyzer plugin",
    "[clang-static-analyzer][success]") {
    propertyTest("", [](const optional<filesystem::path>& workingDir,
                        const optional<EnvironmentCollection>& environment,
                        const std::vector<NonEmptyString>& buildCommand,
                        const optional<vector<string>>& commandLine,
                        const optional<bool> verbose) {
        RC_PRE(!buildCommand.empty());
        Patterns patterns;
        const Task task;
        Task expectedTask(task);
        VariablesMap config("clang-static-analyzer-test");
        MemoryHandler memory;
        LuaPlugin plugin(scriptPath());
        Plugins plugins;
        // Every configured build command is routed to the Memory stub plugin
        // so executions can be inspected afterwards.
        auto memoryPlugin = shared_ptr<Plugin>(new Memory());
        for(const auto& command : buildCommand) {
            REQUIRE(config.add(string(buildCommandConfigKey), *command));
            plugins[*command] = memoryPlugin;
        }
        expectedTask.append("scan-build");
        if(workingDir) {
            handleWorkingDirectory(*workingDir, config, expectedTask);
        }
        if(environment) {
            handleEnvironment(*environment, config, expectedTask);
        }
        if(verbose) {
            handleVerbosity(*verbose, "-v", config, expectedTask);
        }
        if(commandLine) {
            handleCommandLine(*commandLine, config, expectedTask);
        }
        ExecutorStub::TaskQueue expectedTasks(buildCommand.size(),
                                              expectedTask);
        FleetingOptionsStub fleetingOptions;
        ExecutePlugin::push(std::move(plugins));
        ExecutePlugin::push(
            gsl::not_null<config::FleetingOptionsInterface*>(&fleetingOptions));
        ExecutePlugin::push(SettingsNode("clang-static-analyzer-test"));
        ExecutePlugin::push(Patterns(patterns));
        THEN_WHEN("We apply the plugin") {
            Task task;
            bool returnCode = plugin.apply(task, config, patterns);
            THEN_CHECK("It should succeed") { REQUIRE(returnCode); }
            THEN_CHECK("It called the right commands") {
                const Memory::Memories& memories =
                    MemoryHandler::getExecutions();
                REQUIRE(memories.size() == expectedTasks.size());
                auto expectedTask = expectedTasks.begin();
                for(auto memory = memories.begin(); memory != memories.end();
                    ++memory, ++expectedTask) {
                    REQUIRE(memory->task == *expectedTask);
                    REQUIRE(memory->patterns.empty());
                }
            }
        }
        // Undo the pushed state in reverse so the next property run starts
        // from a clean slate.
        ExecutePlugin::popFleetingOptions();
        ExecutePlugin::popSettingsNode();
        ExecutePlugin::popPatterns();
        ExecutePlugin::popPlugins();
    });
}
// Verifies that the plugin fails cleanly (and executes nothing) when no
// build command is configured, or when the build-command key has no value.
SCENARIO("Testing invalid configurations", "[clang-static-analyzer][error]") {
    GIVEN("An empty setup") {
        LuaPlugin plugin(scriptPath());
        VariablesMap variables = plugin.getVariablesMap(FleetingOptionsStub());
        Task task;
        MemoryHandler memory;
        WHEN("We apply the plugin") {
            bool returnCode = plugin.apply(task, variables, Patterns());
            THEN("It should fail") { REQUIRE_FALSE(returnCode); }
            THEN("Nothing should have been executed") {
                const Memory::Memories& memories =
                    MemoryHandler::getExecutions();
                REQUIRE(memories.empty());
            }
        }
        WHEN("We add the build-command as a specific config key without a "
             "value") {
            REQUIRE(variables.add(string(buildCommandConfigKey)));
            bool returnCode = plugin.apply(task, variables, Patterns());
            THEN("It should fail") { REQUIRE_FALSE(returnCode); }
        }
    }
}
} // namespace execHelper::plugins::test
<file_sep>#include "luaPlugin.h"
#include <algorithm>
#include <fstream>
#include <iterator>
#include <optional>
#include <stdexcept>
#include <unordered_map>
#include <utility>
#include <LuaContext.hpp>
#ifdef MSVC
#include <misc/exception.hpp>
#endif
#include <boost/optional.hpp>
#include "config/pattern.h"
#include "core/task.h"
#include "addEnvironment.h"
#include "executePlugin.h"
#include "logger.h"
#include "pluginUtils.h"
#include "threadedness.h"
#include "verbosity.h"
#include "workingDirectory.h"
using std::ifstream;
using std::make_pair;
using std::move;
using std::pair;
using std::string;
using std::to_string;
using std::unordered_map;
using std::vector;
using boost::optional;
using execHelper::config::CommandCollection;
using execHelper::config::FleetingOptionsInterface;
using execHelper::config::Path;
using execHelper::config::PatternKey;
using execHelper::config::Patterns;
using execHelper::config::PatternValues;
using execHelper::config::VariablesMap;
using execHelper::core::Task;
using execHelper::core::TaskCollection;
using execHelper::core::Tasks;
namespace {
// Adapter exposing a VariablesMap to Lua. get() flattens one configuration
// key into a string->string map: nested key/value settings keep their keys,
// while plain value lists are returned with stringified indices ("0", "1",
// ...) as keys so Lua can distinguish the two shapes.
class Config {
  public:
    explicit Config(const VariablesMap& config) noexcept : m_config(config) {}
    // Returns boost::none when the key is absent; an empty map when the key
    // exists but has no values.
    auto get(const string& key) noexcept
        -> optional<unordered_map<string, string>> {
        if(!m_config.contains(key)) {
            LOG(debug) << "key '" << key << "' does not exist in config"
                       << std::endl;
            return boost::none;
        }
        const auto& subnode = m_config[key];
        std::unordered_map<std::string, std::string> result;
        if(!subnode.values()) {
            LOG(debug) << key << " has no values";
            return result;
        }
        auto values = *(subnode.values());
        if(values.empty()) {
            LOG(debug) << key << " is empty";
            return result;
        }
        // Probe the first child: if it has values of its own, the node is a
        // map of key/value pairs; otherwise it is a plain list.
        const auto& subsubnode = subnode[(*subnode.values()).front()];
        if(subsubnode.values()) {
            // Construct a map
            for(const auto& value : values) {
                result[value] = *(subnode.get<std::string>(value));
            }
        } else {
            // Construct an array with indices as keys
            for(size_t i = 0; i < values.size(); ++i) {
                result[to_string(i)] = values[i];
            }
        }
        return result;
    }
  private:
    const VariablesMap& m_config; // borrowed; must outlive this wrapper
};
// Adapter exposing the configured patterns to Lua. get() returns the values
// of the pattern with the given key as (index, value) pairs so LuaContext
// converts them into a Lua array.
class PatternWrapper {
  public:
    explicit PatternWrapper(const Patterns& patterns) : m_patterns(patterns) {}
    auto get(const string& key) noexcept
        -> optional<vector<pair<int, string>>> {
        auto found = find_if(
            m_patterns.begin(), m_patterns.end(),
            [&key](const auto& pattern) { return key == pattern.getKey(); });
        if(found == m_patterns.end()) {
            // Key not found
            return boost::none;
        }
        vector<pair<int, string>> result;
        auto index = 1; // Lua arrays start at index 1
        auto values = found->getValues();
        transform(values.begin(), values.end(), back_inserter(result),
                  [&index](const auto& value) {
                      auto convertedValue = make_pair(index, value);
                      ++index;
                      return convertedValue;
                  });
        return result;
    }
  private:
    const Patterns& m_patterns; // borrowed; must outlive this wrapper
};
} // namespace
namespace execHelper::plugins {
/// Store the path to the Lua script that implements this plugin.
LuaPlugin::LuaPlugin(Path script) noexcept : m_script(std::move(script)) {}
// Build the default configuration for a Lua plugin: verbosity and job-count
// defaults are seeded from the fleeting (command-line) options.
auto LuaPlugin::getVariablesMap(const FleetingOptionsInterface& fleetingOptions)
    const noexcept -> VariablesMap {
    VariablesMap variables{"luaPlugin"};
    VerbosityLong::getVariables(variables, fleetingOptions);
    JobsLong::getVariables(variables, fleetingOptions);
    return variables;
}
// Execute the plugin's Lua script: set up the Lua environment (config, task,
// patterns, helper functions), run the script, then register every task the
// script produced, once per pattern permutation. Returns false on any
// setup, syntax, or execution error.
auto LuaPlugin::apply(Task task, const VariablesMap& config,
                      const Patterns& patterns) const noexcept -> bool {
    Tasks tasks;
    LuaContext lua;
    // Apply the working directory and environment settings to the base task
    // before it is handed to the script.
    WorkingDirectory::apply(task, config);
    AddEnvironment::apply(task, config);
    try {
        // Globals available to every plugin script.
        lua.writeVariable("verbose",
                          config.get<Verbosity>(VERBOSITY_KEY, false));
        lua.writeVariable("jobs", config.get<Jobs>(JOBS_KEY, 1U));
        // Convenience Lua helpers built on top of 'config', 'one' and 'list'.
        lua.executeCode("function get_commandline() "
                        "return list(config['command-line']) or {} "
                        "end");
        lua.executeCode("function get_environment() "
                        "return config['environment'] or {} "
                        "end");
        // An explicit 'verbose: yes' config value wins; otherwise the global
        // 'verbose' flag decides whether the verbosity flag is returned.
        lua.executeCode("function get_verbose(verbose_command)"
                        "if one(config['verbose']) "
                        "then "
                        "if one(config['verbose']) == 'yes' "
                        "then "
                        "return {verbose_command} "
                        "end "
                        "else "
                        "if verbose "
                        "then "
                        "return {verbose_command} "
                        "end "
                        "end "
                        "return {} "
                        "end");
        // Define the Config class
        Config configWrapper(config);
        lua.writeVariable("config", configWrapper);
        lua.writeFunction<optional<unordered_map<string, string>>(
            Config&, const std::string&)>(
            "config", LuaContext::Metatable, "__index",
            [](Config& config, const std::string& key) {
                return config.get(key);
            });
        // Define the Task class
        lua.writeVariable("task", task);
        lua.registerFunction<Task, Task()>(
            "new", [](const Task& /*task*/) { return Task(); });
        lua.registerFunction<Task, Task()>(
            "copy", [](const Task& task) { return Task(task); });
        lua.registerFunction<Task, void(const vector<pair<int, string>>&)>(
            "add_args", [](Task& task, const vector<pair<int, string>>& args) {
                std::for_each(
                    args.begin(), args.end(),
                    [&task](const auto& arg) { task.append(arg.second); });
            });
        // Define the Pattern class
        PatternWrapper patternWrapper(patterns);
        lua.writeVariable("patterns", patternWrapper);
        lua.writeFunction<optional<vector<pair<int, string>>>(PatternWrapper&,
                                                              const string&)>(
            "patterns", LuaContext::Metatable, "__index",
            [](PatternWrapper& wrapper, const string& key) {
                return wrapper.get(key);
            });
        // Scripts call register_task() for every task that should be run.
        lua.writeFunction("register_task", [&tasks](const Task& task) {
            tasks.push_back(task);
        });
        // run_target: execute the given commands immediately (rather than
        // registering them), once per pattern permutation.
        lua.writeFunction<bool(const Task&, const vector<pair<int, string>>&)>(
            "run_target",
            [&patterns](const Task& task,
                        const vector<pair<int, string>>& commands) {
                for(const auto& combination : makePatternPermutator(patterns)) {
                    CommandCollection commandsToExecute;
                    commandsToExecute.reserve(commands.size());
                    transform(commands.begin(), commands.end(),
                              back_inserter(commandsToExecute),
                              [&combination](const auto& command) {
                                  return replacePatternCombinations(
                                      command.second, combination);
                              });
                    ExecutePlugin executePlugin(commandsToExecute);
                    core::Task newTask =
                        replacePatternCombinations(task, combination);
                    if(!executePlugin.apply(move(newTask),
                                            VariablesMap("subtask"),
                                            Patterns())) {
                        return false;
                    }
                }
                return true;
            });
        // one: return the single value of a config entry.
        // NOTE(review): at("0") assumes the list shape produced by
        // Config::get; a map-shaped entry would throw here — confirm this
        // cannot be reached with map-shaped config.
        lua.writeFunction<optional<string>(
            const optional<unordered_map<string, string>>&)>(
            "one",
            [](const optional<unordered_map<string, string>>& values)
                -> optional<string> {
                if(!values) {
                    return boost::none;
                }
                return (*values).at("0");
            });
        lua.writeFunction("input_error", [](const string& message) {
            throw std::runtime_error(message);
        });
        // NOTE(review): 'user_feedback' is bound to user_feedback_error as
        // well — confirm this aliasing is intentional.
        lua.writeFunction("user_feedback", [](const string& message) {
            user_feedback_error(message);
        });
        lua.writeFunction("user_feedback_error", [](const string& message) {
            user_feedback_error(message);
        });
        // list: convert an index-keyed config entry into a Lua array
        // ((1-based index, value) pairs).
        lua.writeFunction(
            "list",
            [](const optional<unordered_map<string, string>>& values)
                -> optional<std::vector<pair<int, string>>> {
                if(!values) {
                    return boost::none;
                }
                vector<pair<int, string>> result;
                result.reserve(values->size());
                auto index = 1;
                for(size_t i = 0U; i < values->size(); ++i) {
                    auto listValue =
                        make_pair(index, (*values).at(std::to_string(i)));
                    result.push_back(listValue);
                    ++index;
                }
                return result;
            });
    } catch(std::exception& e) {
        user_feedback_error("Internal error");
        LOG(error) << "Internal error: '" << e.what() << "'";
        return false;
    }
    // Run the plugin script itself, translating the various Lua failure
    // modes into user feedback plus a false return value.
    try {
        ifstream executionCode(m_script);
        lua.executeCode(executionCode);
    } catch(const LuaContext::SyntaxErrorException& e) {
        user_feedback_error("Syntax error detected in lua file '" +
                            m_script.string() + "': " + e.what());
        LOG(error) << "Syntax error detected in lua file '" +
                          m_script.string() + "': " + e.what();
        return false;
    } catch(const LuaContext::ExecutionErrorException& e) {
        // input_error() throws a nested runtime_error: unwrap it so the
        // script's own message reaches the user.
        try {
            std::rethrow_if_nested(e);
        } catch(const std::runtime_error& e) {
            user_feedback_error(e.what());
            LOG(error) << e.what();
        } catch(...) {
            // NOTE(review): log message formatting differs from the user
            // feedback above (colon placement/missing separator).
            user_feedback_error("Module '" + m_script.string() +
                                "' reported an error: " + e.what());
            LOG(error) << "Module '" + m_script.string() +
                              "': reported an error" + e.what();
        }
        return false;
    } catch(const std::runtime_error& e) {
        LOG(error) << e.what();
        return false;
    }
    // Expand every registered task over all pattern permutations and hand
    // them to the executor.
    for(const auto& task : tasks) {
        for(const auto& combination : makePatternPermutator(patterns)) {
            core::Task newTask = replacePatternCombinations(task, combination);
            if(!registerTask(newTask)) {
                return false;
            }
        }
    }
    return true;
}
// Human-readable one-line description of this plugin instance.
auto LuaPlugin::summary() const noexcept -> std::string {
    std::string description("Lua plugin for module ");
    description += m_script.string();
    return description;
}
} // namespace execHelper::plugins
<file_sep>task:add_args({'make'})
-- Run make in the configured build directory with the configured verbosity,
-- job count and extra command-line arguments, then register the task.
task:add_args({'--directory', one(config['build-dir']) or '.'})
task:add_args(get_verbose('--debug'))
-- 'jobs' (global) is the default supplied by exec-helper; an explicit
-- 'jobs' config value overrides it.
task:add_args({'--jobs', one(config['jobs']) or jobs})
task:add_args(get_commandline())
register_task(task)
<file_sep>#ifndef VARIABLES_MAP_INCLUDE
#define VARIABLES_MAP_INCLUDE
#include "settingsNode.h"
namespace execHelper {
namespace config {
// A VariablesMap is simply a SettingsNode: the (hierarchical) configuration
// variables associated with a single plugin or command.
using VariablesMap = SettingsNode;
} // namespace config
} // namespace execHelper
#endif /* VARIABLES_MAP_INCLUDE */
<file_sep>#include "threadedness.h"
#include "logger.h"
namespace execHelper::plugins {
// Store the configured number of parallel jobs (taken from the fleeting
// options) under JOBS_KEY in the given variables map. A failure to add the
// key is logged but not propagated.
void JobsLong::getVariables(
    config::VariablesMap& variables,
    const config::FleetingOptionsInterface& options) noexcept {
    if(!variables.add(JOBS_KEY, std::to_string(options.getJobs()))) {
        LOG(error) << "Failed to add key '" << JOBS_KEY << "'";
    }
}
} // namespace execHelper::plugins
<file_sep>#ifndef LOGGER_INCLUDE
#define LOGGER_INCLUDE
#include <gsl/string_span>
#include "log/log.h"
// Global logger instance shared by all test sources.
BOOST_LOG_GLOBAL_LOGGER(exec_helper_test_logger, execHelper::log::LoggerType);
// Channel name under which all test log records are emitted.
static const gsl::czstring<> LOG_CHANNEL = "test";
// Emit a log record with severity 'x' on the test channel, annotated with
// the source file and line number of the call site.
#define LOG(x)                                                                 \
    BOOST_LOG_STREAM_CHANNEL_SEV(exec_helper_test_logger::get(), LOG_CHANNEL,  \
                                 execHelper::log::x)                           \
        << boost::log::add_value(fileLog, __FILE__)                            \
        << boost::log::add_value(lineLog, __LINE__)
#endif /* LOGGER_INCLUDE */
<file_sep>#ifndef CAST_INCLUDE
#define CAST_INCLUDE
#include <optional>
namespace execHelper {
namespace config {
namespace detail {
/**
 * \brief Cast the given type U to an optional of type T
 */
template <typename T, typename U> class Cast {
  public:
    /**
     * Cast the given values to type T
     *
     * \param[in] values The values to cast from
     * \returns T if the values could be casted
     *          std::nullopt otherwise
     */
    static std::optional<T> cast(const U& values) noexcept;
};
} // namespace detail
} // namespace config
} // namespace execHelper
#endif /* CAST_INCLUDE */
<file_sep>set(MODULE_NAME Gsl)
set(MODULE_INCLUDES gsl)
find_package(PkgConfig)
pkg_check_modules(${MODULE_NAME} QUIET ${MODULE_NAME})
if(NOT ${MODULE_NAME}_FOUND)
find_path(${MODULE_NAME}_INCLUDE_DIR NAMES ${MODULE_INCLUDES} PATH_SUFFIXES gsl)
# This module is header only
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(${MODULE_NAME} DEFAULT_MSG
${MODULE_NAME}_INCLUDE_DIR)
mark_as_advanced(${MODULE_NAME}_INCLUDE_DIR)
endif()
<file_sep>set(EXE_NAME exec-helper)
set(SRCS
exec-helper.cpp
logger.cpp
)
set(DEPENDENCIES
#dl
filesystem
log
core
commander
)
set(VERSION_FILE version.h)
set(BINARY_NAME ${PROJECT_NAME})
set(VERSION "DEVELOPER-VERSION" CACHE STRING "Set the version to show in the version message")
set(COPYRIGHT "COPYRIGHT (c)" CACHE STRING "Set the copyright message to show in the version message")
configure_file(${VERSION_FILE}.in ${VERSION_FILE})
set(VERSION_INCLUDE_DIR ${CMAKE_CURRENT_BINARY_DIR})
add_executable(${EXE_NAME} ${SRCS} ${VERSION_FILE})
target_include_directories(${EXE_NAME} PRIVATE ${DEPENDENCIES} ${VERSION_INCLUDE_DIR})
target_link_libraries(${EXE_NAME} PRIVATE ${DEPENDENCIES})
if(MSVC)
set(SHORT_EXE_NAME eh.exe)
add_custom_command(TARGET ${EXE_NAME} POST_BUILD
COMMAND cmake -E copy ${EXE_NAME}.exe ${SHORT_EXE_NAME}
MAIN_DEPENDENCY ${EXE_NAME}
COMMENT "Linking ${SHORT_EXE_NAME} -> ${EXE_NAME}"
)
else()
set(SHORT_EXE_NAME eh)
add_custom_command(TARGET ${EXE_NAME} POST_BUILD
COMMAND ${CMAKE_COMMAND} -E create_symlink ${EXE_NAME} ${SHORT_EXE_NAME}
MAIN_DEPENDENCY ${EXE_NAME}
COMMENT "Linking ${SHORT_EXE_NAME} -> ${EXE_NAME}"
)
endif()
install(TARGETS ${EXE_NAME}
DESTINATION ${BIN_DIRECTORY}
COMPONENT runtime
)
install(FILES ${CMAKE_CURRENT_BINARY_DIR}/${SHORT_EXE_NAME}
DESTINATION ${BIN_DIRECTORY}
COMPONENT runtime
)
<file_sep>#include <map>
#include <sstream>
#include <string>
#include <string_view>
#include <boost/core/null_deleter.hpp>
#include <boost/lexical_cast.hpp>
#include <boost/log/sinks/sync_frontend.hpp>
#include <boost/log/sinks/text_ostream_backend.hpp>
#include <boost/log/sources/logger.hpp>
#include <boost/xpressive/xpressive.hpp>
#include "log/generators.h"
#include "log/logLevel.h"
#include "log/logger.h"
#include "unittest/catch.h"
#include "unittest/rapidcheck.h"
using std::cout;
using std::endl;
using std::exception;
using std::map;
using std::move;
using std::ostream;
using std::string;
using std::string_view;
using std::stringbuf;
using std::stringstream;
using std::terminate;
using boost::xpressive::smatch;
using boost::xpressive::sregex;
using execHelper::log::all;
using execHelper::log::debug;
using execHelper::log::error;
using execHelper::log::fatal;
using execHelper::log::info;
using execHelper::log::none;
using execHelper::log::trace;
using execHelper::log::warning;
using execHelper::log::Channel;
using execHelper::log::getLogLevels;
using execHelper::log::InvalidLogLevel;
using execHelper::log::LogLevel;
using execHelper::log::toLogLevel;
using execHelper::log::toString;
using execHelper::test::propertyTest;
namespace {
// Canonical LogLevel -> string mapping used by these tests. The map is
// constructed once (function-local static); if that construction ever
// throws, the error is printed and the process is terminated, since the
// tests cannot run without the mapping.
auto getLogLevelStringMapping() -> const map<LogLevel, string_view>& {
    try {
        using namespace std::literals;
        static const map<LogLevel, string_view> LOG_LEVEL_STRINGS(
            {{none, "none"sv},
             {fatal, "fatal"sv},
             {error, "error"sv},
             {warning, "warning"sv},
             {info, "info"sv},
             {debug, "debug"sv},
             {trace, "trace"sv},
             {all, "all"sv}});
        return LOG_LEVEL_STRINGS;
    } catch(exception& e) {
        cout << e.what() << endl;
    }
    terminate();
}
// Immutable value object holding one fully-parsed log line: timestamp parts,
// channel, severity, source location and message text.
class LogMessage {
  public:
    LogMessage(string date, string time, Channel channel, LogLevel level,
               string file, unsigned int lineNumber, string message)
        : m_date(move(date)),
          m_time(move(time)),
          m_channel(move(channel)),
          m_level(level),
          m_file(move(file)),
          m_lineNumber(lineNumber),
          m_message(move(message)) {
        ;
    }
    [[nodiscard]] auto getDate() const noexcept -> const string& {
        return m_date;
    }
    [[nodiscard]] auto getTime() const noexcept -> const string& {
        return m_time;
    }
    [[nodiscard]] auto getChannel() const noexcept -> const Channel& {
        return m_channel;
    }
    [[nodiscard]] auto getLevel() const noexcept -> const LogLevel& {
        return m_level;
    }
    [[nodiscard]] auto getFile() const noexcept -> const string& {
        return m_file;
    }
    [[nodiscard]] auto getLineNumber() const noexcept -> unsigned int {
        return m_lineNumber;
    }
    [[nodiscard]] auto getMessage() const noexcept -> const string& {
        return m_message;
    }
  private:
    string m_date;
    string m_time;
    Channel m_channel;
    LogLevel m_level;
    string m_file;
    unsigned int m_lineNumber;
    string m_message;
};
// Parse a raw log line into a LogMessage. Lines that do not match the
// expected format yield a LogMessage with empty fields and severity 'none'.
auto toMessage(const string& message) -> LogMessage {
    const sregex logLineRegex = sregex::compile(
        R"((?P<date>\S*)\s(?P<time>\S*)\s<(?P<severity>\S*)>\s\[(?P<channel>\S*)\]\s(?P<file>.*):(?P<lineNumber>\d*)\s(?P<message>.*)\s$)");
    smatch parsed;
    if(!regex_search(message, parsed, logLineRegex)) {
        return LogMessage("", "", "", none, "", 0U, "");
    }
    // Using lexical_cast to a non-string type on the match type directly
    // results in undefined behaviour, so convert each sub-match to an
    // intermediate string first. The severity is converted before building
    // the result, mirroring the original evaluation order.
    const string severityString = parsed["severity"];
    const LogLevel level = toLogLevel(severityString);
    const string lineNumberString = parsed["lineNumber"];
    return LogMessage(parsed["date"], parsed["time"], parsed["channel"], level,
                      parsed["file"],
                      boost::lexical_cast<unsigned int>(lineNumberString),
                      parsed["message"]);
}
// Shift the given log level by 'relativeLevel' steps, saturating at the
// 'all' and 'none' boundaries of the LogLevel range.
auto getRelativeLogLevel(LogLevel currentLogLevel, int relativeLevel) noexcept
    -> LogLevel {
    auto shifted = static_cast<int>(currentLogLevel) + relativeLevel;
    if(shifted < all) {
        shifted = all;
    } else if(shifted > none) {
        shifted = none;
    }
    return static_cast<LogLevel>(shifted);
}
} // namespace
namespace execHelper::config::test {
// Property test: every LogLevel must map onto its canonical string, both via
// toString() and via streaming the level into an ostream.
SCENARIO("Test the conversion of correct a log level to a string", "[log]") {
// Sanity check: the local string mapping must cover every known log level.
REQUIRE(
getLogLevels().size() ==
getLogLevelStringMapping()
.size()); // Make sure the log level mappings have at least the same size
propertyTest(
"A log level should be converted to the correct string",
[](LogLevel severity) {
const auto& logLevelMapping = getLogLevelStringMapping();
REQUIRE(logLevelMapping.count(severity) == 1U);
THEN_WHEN("We convert the severity to a string") {
THEN_CHECK("We should get the right string") {
REQUIRE(toString(severity) == logLevelMapping.at(severity));
}
}
THEN_WHEN("We stream the severity") {
std::stringstream stream;
stream << severity;
THEN_CHECK("We should get the right string") {
REQUIRE(stream.str() == logLevelMapping.at(severity));
}
}
});
}
// Property test: converting the canonical string of every LogLevel back via
// toLogLevel() must yield the original level.
SCENARIO("Test the conversion of a correct string to a log level", "[log]") {
// Sanity check: the local string mapping must cover every known log level.
REQUIRE(
getLogLevels().size() ==
getLogLevelStringMapping()
.size()); // Make sure the log level mappings have at least the same size
propertyTest("A log level string should be converted to the correct level",
[](LogLevel severity) {
const auto& logLevelMapping = getLogLevelStringMapping();
REQUIRE(logLevelMapping.count(severity) == 1U);
THEN_WHEN("We convert the string to a severity") {
auto actualSeverity =
toLogLevel(logLevelMapping.at(severity));
THEN_CHECK("We should get the right severity") {
REQUIRE(severity == actualSeverity);
}
}
});
}
// Property test: strings that do not match any known log level string must
// make toLogLevel() throw an InvalidLogLevel exception.
SCENARIO("Test the conversion of a wrong string to a log level", "[log]") {
REQUIRE(
getLogLevels().size() ==
getLogLevelStringMapping()
.size()); // Make sure the log level mappings have at least the same size
propertyTest(
"A wrong log level string should not be converted to a log level",
[](const std::string& logLevelString) {
// Make sure the received string is invalid
for(const auto& logLevelPair : getLogLevelStringMapping()) {
RC_PRE(logLevelPair.second != logLevelString);
}
THEN_WHEN("We convert the string to a log level") {
THEN_CHECK("We should get an exception") {
REQUIRE_THROWS_AS(toLogLevel(logLevelString),
InvalidLogLevel);
}
}
});
}
// Property test: a message logged on a channel whose severity threshold is
// enabled must appear in the log stream with all its fields intact.
SCENARIO("Write a log message with the severity enabled", "[log]") {
propertyTest("An enabled log level should give the right output",
[](LogLevel severity) {
stringbuf logBuffer;
ostream logStream(&logBuffer);
execHelper::log::LogInit logInit(logStream);
const string message1("Hello world!!!");
THEN_WHEN("We switch on the right severity") {
logInit.setSeverity(LOG_CHANNEL, severity);
// The other channel is set one level stricter, so the message can
// only appear via LOG_CHANNEL.
logInit.setSeverity("other-channel",
getRelativeLogLevel(severity, 1));
// Switch off clang-format, since it will put the line assignment
// on a separate line, annihilating the purpose of the test
// clang-format off
LOG(severity) << message1; const unsigned int line = __LINE__;
// clang-format on
THEN_CHECK(
"We should find the message in the stream") {
// Parse the raw log output back into its fields and verify each
// one against the values used when logging.
LogMessage result = toMessage(logBuffer.str());
REQUIRE(result.getChannel() == LOG_CHANNEL);
REQUIRE(result.getLevel() == severity);
REQUIRE(result.getFile() == __FILE__);
REQUIRE(result.getLineNumber() == line);
REQUIRE(result.getMessage() == message1);
}
}
});
}
// Property test: a message logged at a severity below the channel's
// threshold must not appear in the log stream at all.
SCENARIO("Write a log message with the severity disabled", "[log]") {
propertyTest(
"A disabled log level should have no output", [](LogLevel severity) {
// 'none' is excluded: getRelativeLogLevel saturates at 'none', so it
// can not be made any stricter.
RC_PRE(severity != none);
stringbuf logBuffer;
ostream logStream(&logBuffer);
execHelper::log::LogInit logInit(logStream);
const string message1("Hello world!!!");
THEN_WHEN("We disable the right severity") {
// LOG_CHANNEL is set one level stricter than the logged severity;
// the matching severity is only enabled on an unrelated channel.
logInit.setSeverity(LOG_CHANNEL,
getRelativeLogLevel(severity, 1));
logInit.setSeverity("other-channel", severity);
LOG(severity) << message1;
THEN_CHECK("We should not find the message in the stream") {
REQUIRE(logBuffer.in_avail() == 0);
REQUIRE(logBuffer.str().empty());
}
}
});
}
} // namespace execHelper::config::test
<file_sep>#include "patternsHandler.h"
#include <string>
#include <utility>

#include <log/assertions.h>

#include "logger.h"
#include "variablesMap.h"
using std::string;
using std::optional;
namespace execHelper::config {
/**
 * Create a handler that manages a copy of every pattern in the given
 * collection, indexed by its pattern key.
 */
PatternsHandler::PatternsHandler(const Patterns& other) {
    for(const auto& newPattern : other) {
        m_patterns.emplace(newPattern.getKey(), newPattern);
    }
}
/**
 * Create a handler by taking over the patterns of the given collection,
 * indexed by their pattern key.
 *
 * Fix: the rvalue overload previously copied every pattern, defeating its
 * purpose. The patterns are now moved into the map. The key is copied
 * before the move, since the evaluation order of emplace's arguments is
 * unspecified and the key must not be read from a moved-from pattern.
 *
 * \param[in] other The patterns to take over
 */
PatternsHandler::PatternsHandler(Patterns&& other) noexcept {
    for(auto&& pattern : other) {
        auto key = pattern.getKey();
        m_patterns.emplace(std::move(key), std::move(pattern));
    }
}
/**
 * Equality operator: two handlers are equal when they manage identical
 * pattern collections.
 */
auto PatternsHandler::operator==(const PatternsHandler& other) const noexcept
-> bool {
return m_patterns == other.m_patterns;
}
/**
 * Inequality operator: the exact complement of operator==.
 */
auto PatternsHandler::operator!=(const PatternsHandler& other) const noexcept
    -> bool {
    return m_patterns != other.m_patterns;
}
// Register the given pattern under its key.
// NOTE(review): map::emplace is a no-op when a pattern with the same key is
// already registered -- the existing pattern is kept. Confirm this is the
// intended behaviour.
void PatternsHandler::addPattern(const Pattern& pattern) noexcept {
m_patterns.emplace(pattern.getKey(), pattern);
}
/**
 * Check whether a pattern with the given key has been registered.
 */
auto PatternsHandler::contains(const PatternKey& key) const noexcept -> bool {
    return m_patterns.find(key) != m_patterns.end();
}
/**
 * Look up the pattern registered under the given key.
 *
 * Precondition: a pattern with the given key must exist (check with
 * contains() first); this is enforced by the ensures() assertion below.
 */
auto PatternsHandler::getPattern(const PatternKey& key) const noexcept
-> const Pattern& {
const PatternCollection& constPatterns = m_patterns;
ensures(constPatterns.count(key) > 0U);
return constPatterns.at(key);
}
// Create the default (empty) variables map for a pattern with the given key.
auto PatternsHandler::getDefaultPatternMap(const PatternKey& key) noexcept
-> config::VariablesMap {
return VariablesMap(key);
}
/**
 * Construct a Pattern with the given key from the given variables map.
 *
 * The map must define a non-empty 'default-values' entry; 'short-option' and
 * 'long-option' are optional.
 *
 * \returns The constructed pattern, or std::nullopt (after reporting an
 * error to the user) when the default values are missing or empty.
 */
auto PatternsHandler::toPattern(const PatternKey& key,
const VariablesMap& patternMap) noexcept
-> optional<Pattern> {
static const string DEFAULT_VALUES_KEY("default-values");
static const string SHORT_OPTION_KEY("short-option");
static const string LONG_OPTION_KEY("long-option");
auto defaultValues = patternMap.get<PatternValues>(DEFAULT_VALUES_KEY);
if(defaultValues == std::nullopt || defaultValues.value().empty()) {
user_feedback_error(
"The default values must be defined and can not be empty");
return std::nullopt;
}
auto shortOption = patternMap.get<char>(SHORT_OPTION_KEY);
auto longOptionOpt = patternMap.get<string>(LONG_OPTION_KEY);
return Pattern(key, *defaultValues, shortOption, longOptionOpt);
}
} // namespace execHelper::config
<file_sep>#ifndef TEST_COMMAND_INCLUDE
#define TEST_COMMAND_INCLUDE
#include <memory>
#include <string>
#include <utility>
#include <vector>

#include <gsl/gsl>

#include "statement.h"
#include "yaml.h"
namespace execHelper {
namespace test {
namespace baseUtils {
using CommandKey = std::string;
using CommandKeys = std::vector<CommandKey>;
using Statements = std::vector<std::shared_ptr<Statement>>;
/**
 * \brief Represents a single test command together with the statements it
 * consists of, used for generating test configurations.
 */
class TestCommand {
public:
// Construct a command with the given key and an optional initial set of
// statements.
TestCommand(CommandKey commandKey,
Statements initialStatements = {}) noexcept;
// Access the statement at the given index.
// NOTE(review): declared noexcept despite indexing -- confirm the behaviour
// for out-of-range indices in the implementation.
std::shared_ptr<Statement> operator[](size_t index) const noexcept;
Statements::const_iterator begin() const noexcept;
Statements::const_iterator end() const noexcept;
// Number of registered statements.
size_t size() const noexcept;
// The command key string.
std::string get() const noexcept;
unsigned int getNbOfStatements() const noexcept;
unsigned int getNumberOfStatementExecutions()
const noexcept; // Returns the sum of executions of all statements
// Append a statement to this command.
void add(std::shared_ptr<Statement> statement) noexcept;
// Reset the execution bookkeeping -- presumably clears the per-statement
// execution counters; confirm in the implementation.
void resetExecutions() noexcept;
// Write this command to the given YAML configuration writer.
void write(gsl::not_null<YamlWriter*> yaml) const noexcept;
private:
std::string m_command;
Statements m_statements;
};
using Commands = std::vector<TestCommand>;
/**
 * Create a statement of the concrete type T and return it as a generic
 * Statement pointer.
 *
 * Fix: the constructor arguments are now perfectly forwarded to T instead
 * of being taken by value, avoiding unnecessary copies and supporting
 * move-only arguments. Existing callers are unaffected.
 */
template <typename T, typename... Args>
inline std::shared_ptr<Statement> createStatement(Args&&... args) noexcept {
    return std::static_pointer_cast<Statement>(
        std::make_shared<T>(std::forward<Args>(args)...));
}
} // namespace baseUtils
} // namespace test
} // namespace execHelper
#endif /* TEST_COMMAND_INCLUDE */
<file_sep>set(MODULE_NAME Yaml-cpp)
# Find module for the yaml-cpp library.
# Strategy: query pkg-config first; when that is unavailable or fails, fall
# back to a manual search for the header and the library.
find_package(PkgConfig QUIET)
if(${PKG_CONFIG_FOUND})
pkg_check_modules(${MODULE_NAME} yaml-cpp)
endif()
# Attempt to find it if not configured in pkgconfig
if(NOT ${MODULE_NAME}_FOUND)
MESSAGE(STATUS "Looking manually")
set(${MODULE_NAME}_LIBRARIES yaml-cpp)
find_path(${MODULE_NAME}_INCLUDE_DIRS NAMES yaml.h PATH_SUFFIXES yaml-cpp)
find_library(${MODULE_NAME}_LIBRARY_DIRS NAMES ${${MODULE_NAME}_LIBRARIES})
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(${MODULE_NAME}
FOUND_VAR ${MODULE_NAME}_FOUND
REQUIRED_VARS ${MODULE_NAME}_INCLUDE_DIRS ${MODULE_NAME}_LIBRARY_DIRS
)
# Hide the search results from the default cmake-gui/ccmake view.
mark_as_advanced(${MODULE_NAME}_INCLUDE_DIRS)
mark_as_advanced(${MODULE_NAME}_LIBRARIES)
mark_as_advanced(${MODULE_NAME}_LIBRARY_DIRS)
endif()
<file_sep>set(LIBRARY_NAME ${PROJECT_NAME}-catch-main)
# Library bundling the shared test entry point (src/catch.cpp) and test
# logger used by the unit tests.
set(SRCS src/catch.cpp
src/logger.cpp
)
set(DEPENDENCIES
catch
log
#core
)
# Generate the unittest configuration header from its template.
configure_file(include/unittest/config.h.in include/unittest/config.h)
add_library(${LIBRARY_NAME} ${SRCS})
target_include_directories(${LIBRARY_NAME} PRIVATE include/unittest)
# Expose both the source headers and the generated config header to users.
target_include_directories(${LIBRARY_NAME} PUBLIC include ${CMAKE_CURRENT_BINARY_DIR}/include ${DEPENDENCIES})
target_link_libraries(${LIBRARY_NAME} PUBLIC ${DEPENDENCIES})
# Short alias so test targets can simply link against 'unittest'.
add_library(unittest ALIAS ${LIBRARY_NAME})
install(TARGETS ${LIBRARY_NAME} DESTINATION ${LIB_DIRECTORY})
<file_sep>#include "envp.h"
#include <cstring>
#include <iostream>
#include <string>
#include <string_view>
#include <gsl/gsl>
#include "log/assertions.h"
using std::string;
using gsl::czstring;
using gsl::owner;
using gsl::span;
using namespace std::literals;
namespace execHelper::config {
/**
 * Build a nullptr-terminated, envp-style array of owned "KEY=value" C strings
 * from the given environment collection.
 */
Envp::Envp(const EnvironmentCollection& env) noexcept {
static const string DELIMITER("=");
m_envp.reserve(env.size() + 1);
for(const auto& envVar : env) {
string envVarString(envVar.first);
envVarString.append(DELIMITER).append(envVar.second);
auto* newVar = // NOLINT(cppcoreguidelines-owning-memory)
new char[envVarString.size() + 1U];
strncpy(newVar, envVarString.c_str(), envVarString.size() + 1U);
m_envp.emplace_back(newVar);
}
// Class invariant: the array always ends with a nullptr entry (see size()).
m_envp.emplace_back(nullptr);
}
// Copy constructor: performs a deep copy of every owned environment string.
Envp::Envp(const Envp& other) noexcept {
m_envp.reserve(other.m_envp.size());
deepCopy(other);
}
/**
 * Move constructor: takes over the content of the given object.
 *
 * Fix: after the swap the moved-from object held an empty vector, violating
 * the class invariant relied upon by size() and getEnvp() (m_envp always
 * contains at least the terminating nullptr). The invariant is restored by
 * re-adding the terminator to the moved-from object.
 */
Envp::Envp(Envp&& other) noexcept {
    swap(other);
    // 'other' now holds our previously default-constructed (empty) vector;
    // restore its nullptr-terminator invariant.
    other.m_envp.emplace_back(nullptr);
}
// Destructor: releases every owned environment string.
Envp::~Envp() noexcept { clear(); }
/**
 * Copy assignment: deep-copies the content of the given object.
 * Self-assignment safe.
 */
auto Envp::operator=(const Envp& other) noexcept -> Envp& {
if(this != &other) {
m_envp.reserve(other.m_envp.size());
deepCopy(other);
}
return *this;
}
// Move assignment: exchanges content with the given object; the previous
// content is released when 'other' is destructed. Self-move safe.
auto Envp::operator=(Envp&& other) noexcept -> Envp& {
swap(other);
return *this;
}
// Exchange content with the given object.
void Envp::swap(Envp& other) noexcept { m_envp.swap(other.m_envp); }
// Number of environment entries, excluding the terminating nullptr.
auto Envp::size() const noexcept -> size_t {
ensures(!m_envp.empty());
return m_envp.size() - 1U;
}
// Raw envp-style array, suitable for exec-like APIs. Relies on the
// invariant that m_envp is never empty.
auto Envp::getEnvp() noexcept -> char** { return &m_envp.at(0); }
auto Envp::getEnvp() const noexcept -> const char* const* {
return &m_envp.at(0);
}
// Release every owned string and empty the array. Note: the nullptr
// terminator is NOT re-added here; callers must restore the invariant.
void Envp::clear() noexcept {
for(const auto& arg : m_envp) {
delete[] arg; // NOLINT(cppcoreguidelines-owning-memory)
}
m_envp.clear();
}
/**
 * Replace the current content with a deep copy of the given object's content.
 * Clears the current content first, copies every entry up to (excluding) the
 * nullptr terminator, and re-adds the terminator afterwards.
 */
void Envp::deepCopy(const Envp& other) noexcept {
clear(); // Clear the current content first
for(const auto& otherElement : other.m_envp) {
if(otherElement == nullptr) {
break;
}
size_t length = strlen(otherElement) + 1U;
auto* newArg = // NOLINT(cppcoreguidelines-owning-memory)
new char[length];
strncpy(newArg, otherElement, length);
m_envp.emplace_back(newArg);
}
m_envp.emplace_back(nullptr);
}
// Stream the environment as a comma-separated list of its entries.
auto operator<<(std::ostream& os, const Envp& envp) noexcept -> std::ostream& {
const span<const czstring<>> envs(envp.getEnvp(), envp.size());
bool firstIteration = true;
for(const auto& env : envs) {
// Separator goes before every entry except the first one.
if(!firstIteration) {
os << ", "sv;
} else {
firstIteration = false;
}
os << env;
}
return os;
}
} // namespace execHelper::config
<file_sep># this one is important
# Cross-compilation toolchain template; @TRIPLET@, @COMPILER_FAMILY@ and
# @SYSROOT@ are substituted during configuration.
# this one is important
SET(CMAKE_SYSTEM_NAME Linux)
#this one not so much
SET(CMAKE_SYSTEM_VERSION 1)
set(TRIPLET @TRIPLET@)
set(COMPILER_FAMILY @COMPILER_FAMILY@)
set(CMAKE_C_COMPILER_TARGET ${TRIPLET})
set(CMAKE_CXX_COMPILER_TARGET ${TRIPLET})
# Determine C compiler from CXX compiler
if(${COMPILER_FAMILY} STREQUAL "gcc")
SET(CMAKE_C_COMPILER "${TRIPLET}${COMPILER_FAMILY}")
SET(CMAKE_CXX_COMPILER "${TRIPLET}g++")
elseif(${COMPILER_FAMILY} STREQUAL "clang")
SET(CMAKE_C_COMPILER "${TRIPLET}${COMPILER_FAMILY}")
SET(CMAKE_CXX_COMPILER "${TRIPLET}clang++")
else()
message(ERROR "The compiler family '${COMPILER_FAMILY}' is not supported")
endif()
# where is the target environment
SET(CMAKE_SYSROOT @SYSROOT@)
SET(CMAKE_FIND_ROOT_PATH ${CMAKE_SYSROOT})
if(CMAKE_SYSTEM_NAME MATCHES "^(Linux|kFreeBSD|GNU)$" AND EXISTS "/etc/debian_version")
# The combination of Debian triplets and cross compilation is not completely waterproof. This is due to a conflict between the cross compiler not being aware of its associated triplet, while native systems (like the sysroot) will be aware of its triplet. Therefore we try to add it manually
INCLUDE(GNUInstallDirs)
SET(CMAKE_CXX_FLAGS "-isystem ${CMAKE_SYSROOT}/usr/${CMAKE_INSTALL_LIBDIR}")
endif()
# search for programs in the build host directories
SET(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
# for libraries and headers in the target directories
SET(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
SET(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
<file_sep>set(CHANGELOG_SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/../.gitchangelog.rc)
# Generate and install the changelog via gitchangelog, when it is available.
set(CHANGELOG_OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/exec-helper.changelog)
find_package(Gitchangelog)
if(Gitchangelog_FOUND)
# Regenerate the changelog whenever the gitchangelog configuration changes.
add_custom_command(OUTPUT ${CHANGELOG_OUTPUT}
COMMAND ${Gitchangelog} > ${CHANGELOG_OUTPUT}
MAIN_DEPENDENCY ${CHANGELOG_SOURCES}
VERBATIM)
add_custom_target(changelog
SOURCES ${CHANGELOG_OUTPUT}
COMMENT "Generating changelog"
VERBATIM)
install(FILES ${CHANGELOG_OUTPUT} DESTINATION share/doc/exec-helper COMPONENT changelog)
# Hook changelog generation into the default 'all' build.
add_custom_target(all-changelog ALL)
add_dependencies(all-changelog changelog)
else()
MESSAGE(STATUS "Disabled 'changelog' target")
endif()
<file_sep>#ifndef ENVIRONMENT_INCLUDE
#define ENVIRONMENT_INCLUDE
#include <map>
#include <string>
#include <vector>
#include <gsl/string_span>
namespace execHelper {
namespace config {
// A single environment variable value
using EnvArg = std::string;
// A collection of environment variable values
using EnvArgs = std::vector<EnvArg>;
// Maps environment variable names to their values
using EnvironmentCollection = std::map<std::string, EnvArg>;
// A single name/value environment entry
using EnvironmentValue = std::pair<std::string, EnvArg>;
// Configuration key under which the environment is defined
static const gsl::czstring<> ENVIRONMENT_KEY = "environment";
} // namespace config
} // namespace execHelper
#endif /* ENVIRONMENT_INCLUDE */
<file_sep>#include <algorithm>
#include <string>
#include <gsl/string_span>
#include "config/config.h"
#include "config/pattern.h"
#include "base-utils/configFileWriter.h"
#include "unittest/catch.h"
#include "utils/utils.h"
using gsl::czstring;
using std::find;
using std::string;
using std::to_string;
using execHelper::config::SettingsNode;
using execHelper::test::baseUtils::ConfigFileWriter;
using execHelper::test::utils::writeSettingsFile;
namespace execHelper::config::test {
// Round-trip test: write generated settings and patterns to a configuration
// file, parse the file back, and check that the parsed result matches the
// original input.
SCENARIO("Parse properly written settings files", "[config][config-config]") {
MAKE_COMBINATIONS("Of several settings") {
SettingsNode settings("exec-helper");
REQUIRE(settings.add("dummy-key", "dummy-value"));
Patterns patterns;
COMBINATIONS("Add multiple keys") {
const unsigned int SIZE = 100U;
for(auto i = 0U; i < SIZE; ++i) {
SettingsKey key("key-value-key");
key.append(to_string(i));
SettingsValue value("key-value-value");
value.append(to_string(i));
REQUIRE(settings.add(key, value));
}
}
COMBINATIONS("Add multiple arrays") {
const unsigned int KEY_SIZE = 20U;
const unsigned int VALUE_SIZE = KEY_SIZE;
for(auto i = 0U; i < KEY_SIZE; ++i) {
SettingsKey key("map-key");
key.append(to_string(i));
SettingsValues values;
values.reserve(VALUE_SIZE);
for(auto j = 0U; j < VALUE_SIZE; ++j) {
SettingsValue newValue("map-value");
newValue.append(to_string(j));
values.emplace_back(newValue);
}
REQUIRE(settings.add(key, values));
}
}
// NOTE(review): disabled combination kept for reference.
// COMBINATIONS("Add a big tree") {
// const unsigned int SIZE = 4U;
// const unsigned int VALUE_SIZE = SIZE;
// SettingsKeys keys({"tree-key"});
// for(auto i = 0U; i < SIZE; ++i) {
// SettingsKey key("key");
// key.append(to_string(i));
// keys.emplace_back(key);
// settings.add(keys, string("some-value").append(to_string(i)));
// SettingsValues values;
// for(auto j = 0U; j < VALUE_SIZE; ++j) {
// SettingsValue newValue("value");
// newValue.append(to_string(j));
// values.emplace_back(newValue);
//}
// settings.add(keys, values);
//}
//}
COMBINATIONS("Add a pattern") {
patterns.emplace_back(Pattern("pattern1", {"value1a", "value1b"}));
}
COMBINATIONS("Add multiple patterns") {
const unsigned int SIZE = 50U;
for(auto i = 0U; i < SIZE; ++i) {
PatternKey newKey("multiple-pattern");
newKey.append(to_string(i));
PatternValue newValue("multiple-pattern-value");
newValue.append(to_string(i));
patterns.emplace_back(
Pattern(newKey, {newValue}, std::nullopt, newKey));
}
}
// Write the generated configuration to a temporary file and parse it back.
ConfigFileWriter configFile;
writeSettingsFile(gsl::not_null<ConfigFileWriter*>(&configFile),
settings, patterns);
auto parsedSettingsFile = parseSettingsFile(configFile.getPath());
THEN_CHECK("It must succeed") {
REQUIRE(parsedSettingsFile != std::nullopt);
}
THEN_CHECK("We must find the proper settings and patterns") {
auto foundPatterns = parsedSettingsFile.value().first;
auto foundSettings = parsedSettingsFile.value().second;
REQUIRE(foundSettings == settings);
// Patterns are vectors, but their order is not necessarily
// preserved by the config reader and writer, so we can not simply
// use the comparison operator to compare them.
REQUIRE(foundPatterns.size() == patterns.size());
for(const auto& pattern : patterns) {
REQUIRE(find(foundPatterns.begin(), foundPatterns.end(),
pattern) != foundPatterns.end());
}
}
}
}
<file_sep>#ifndef __COMMAND_LINE_COMMAND_HPP
#define __COMMAND_LINE_COMMAND_HPP
#include "plugin.h"
namespace execHelper::plugins {
/**
 * \brief Plugin for executing arbitrary command lines
 */
class CommandLineCommand : public Plugin {
public:
// The default variables map for this plugin, derived from the given
// fleeting options.
config::VariablesMap
getVariablesMap(const config::FleetingOptionsInterface& fleetingOptions)
const noexcept override;
// Run the plugin for the given task, configuration and patterns; returns
// whether applying succeeded.
bool apply(core::Task task, const config::VariablesMap& variables,
const config::Patterns& patterns) const noexcept override;
// Short, human-readable description of this plugin.
std::string summary() const noexcept override;
};
} // namespace execHelper::plugins
#endif /* __COMMAND_LINE_COMMAND_HPP */
<file_sep>#ifndef __LCOV_H__
#define __LCOV_H__
#include "config/path.h"
#include "core/task.h"
#include "commandLine.h"
#include "plugin.h"
namespace execHelper {
namespace plugins {
/**
 * \brief Plugin for running the lcov code coverage analysis tool
 */
class Lcov : public Plugin {
public:
// The default variables map for this plugin, derived from the given
// fleeting options.
config::VariablesMap
getVariablesMap(const config::FleetingOptionsInterface& fleetingOptions)
const noexcept override;
// Run the plugin for the given task, configuration and patterns; returns
// whether applying succeeded.
bool apply(core::Task task, const config::VariablesMap& variables,
const config::Patterns& patterns) const noexcept override;
// Short, human-readable description of this plugin.
std::string summary() const noexcept override;
private:
using BaseDir = config::Path;
using Dir = config::Path;
using InfoFile = config::Path;
using Excludes = std::vector<std::string>;
/**
* Add the command to generate html
*
* \param[in] infoFile The infofile to use
* \param[in] variables The variables map
* \param[in] task The current task
* \returns A copy of the given task, extended with the html generation
* command
*/
static core::Task generateGenHtmlTask(const InfoFile& infoFile,
const config::VariablesMap& variables,
const core::Task& task) noexcept;
/**
* Add the command to zero all counters
*
* \param[in] baseDirectory The base directory to use. See lcov manpage
* \param[in] directory The directory to use. See lcov manpage
* \param[in] commandLine The additional command line arguments to add
* \param[in] task The current task
* \returns A copy of the given task, extended with the zero-counters
* command
*/
static core::Task
generateZeroCountersTask(const BaseDir& baseDirectory, const Dir& directory,
const CommandLineArgs& commandLine,
const core::Task& task) noexcept;
/**
* Get the excludes
*
* \param[in] variables The variables map
* \returns A collection of directories to exclude from the analysis
*/
static Excludes getExcludes(const config::VariablesMap& variables) noexcept;
/**
* Add the command to exclude directories
*
* \param[in] variables The variables map
* \param[in] infoFile The info file associated with the analysis
* \param[in] commandLine The additional command line arguments to add
* \param[in] task The current task
* \returns A copy of the given task, extended with the exclusion command
*/
static core::Task generateExcludeTask(const config::VariablesMap& variables,
const InfoFile& infoFile,
const CommandLineArgs& commandLine,
const core::Task& task) noexcept;
/**
* Add the command to capture the coverage
*
* \param[in] baseDirectory The base directory to use. See lcov manpage
* \param[in] directory The directory to use. See lcov manpage
* \param[in] infoFile The info file associated with the analysis
* \param[in] commandLine The additional command line arguments to add
* \param[in] task The current task
* \returns A copy of the given task, extended with the capture command
*/
static core::Task generateCaptureTask(const BaseDir& baseDirectory,
const Dir& directory,
const InfoFile& infoFile,
const CommandLineArgs& commandLine,
const core::Task& task) noexcept;
};
} // namespace plugins
} // namespace execHelper
#endif /* __LCOV_H__ */
<file_sep>#ifndef EXECUTE_INCLUDE
#define EXECUTE_INCLUDE
#include <filesystem>
#include <string>
#include <vector>
#include <gsl/string_span>
#include "path.h"
namespace execHelper {
namespace test {
namespace baseUtils {
namespace execution {
// A command line as a sequence of its separate arguments
using CommandLine = std::vector<std::string>;
// Execute the given command line with the given working directory (defaults
// to the current working directory). Returns an integer result code --
// presumably the exit code of the process; confirm in the implementation.
int execute(const CommandLine& commandLine,
const Path& workingDir = std::filesystem::current_path()) noexcept;
} // namespace execution
} // namespace baseUtils
} // namespace test
} // namespace execHelper
#endif /* EXECUTE_INCLUDE */
<file_sep>.. describe:: working-dir
The working directory of the command. Can be an absolute path or a relative one w.r.t. the path to the considered configuration file. Commands that should be executed relative to the current working dir can use the :code:`{EH_WORKING_DIR}` pattern.
<file_sep>#ifndef __LOG_H__
#define __LOG_H__
#include <iostream>
#include <ostream>
#include <string>
#include "consoleLogger.h"
#include "logLevel.h"
#include "logMessage.h"
namespace execHelper {
namespace log {
/**
 * \brief Class for initializing and destructing the logging system. Must be created and kept alive while the logging system is being used.
 *
 * RAII object for initializing and deinitializing the logging system
 */
class LogInit {
public:
/**
 * Initialize logging to the default output stream
 */
LogInit() noexcept;
/**
 * Initialize logging
 *
 * \param[in] logStream The output stream to log to
 */
explicit LogInit(std::ostream& logStream) noexcept;
~LogInit();
/**
 * Set the minimum severity for which log messages on the given channel are
 * shown
 *
 * \param[in] channel The channel to set the severity for
 * \param[in] severity The minimal severity to show messages for
 */
void setSeverity(const Channel& channel, LogLevel severity);
private:
/**
 * Common init script initializing the logging system
 *
 * \param[in] logStream The output stream to log to
 */
void init(std::ostream& logStream) noexcept;
std::unique_ptr<ConsoleLogger> m_consoleLogger;
};
} // namespace log
namespace color {
// Color codes used to colorize terminal output; the values match the ANSI
// SGR foreground (3x) and background (4x) color parameters.
enum ColorCode {
FG_RED = 31,
FG_GREEN = 32,
FG_BLUE = 34,
FG_DEFAULT = 39,
BG_RED = 41,
BG_GREEN = 42,
BG_BLUE = 44,
BG_DEFAULT = 49
};
/**
 * \brief Modifies the command line output format
 */
struct Modifier {
ColorCode code; //!< The color code to use for the modifier
/**
 * The modifier to use
 *
 * \param[in] cCode The color code to use
 */
explicit Modifier(ColorCode cCode) noexcept : code(cCode) {}
};
// Streams the modifier -- presumably as the corresponding ANSI escape
// sequence; implemented elsewhere.
std::ostream& operator<<(std::ostream& os, const Modifier& mod);
// Ready-made modifiers. Note: declared 'static', so every translation unit
// including this header gets its own copy.
static Modifier def(FG_DEFAULT);
static Modifier blue(FG_BLUE);
static Modifier red(FG_RED);
} // namespace color
} // namespace execHelper
// Print the given message verbatim to standard output.
#define user_feedback(x) \
    (std::cout << x << std::endl) // NOLINT(misc-macro-parentheses)

// Print the given message to standard output, highlighted in blue as an
// informational message.
// Fix: the expansions of the info/error macros previously ended with a
// semicolon, which produced double semicolons at call sites and broke
// unbraced if/else statements; callers provide their own ';'.
#define user_feedback_info(x) \
    (std::cout << execHelper::color::blue << x << execHelper::color::def \
               << std::endl) // NOLINT(misc-macro-parentheses)

// Print the given message to standard error, highlighted in red as an error.
#define user_feedback_error(x) \
    (std::cerr << execHelper::color::red << x << execHelper::color::def \
               << std::endl) // NOLINT(misc-macro-parentheses)
#endif /* __LOG_H__ */
<file_sep>#ifndef EXECUTION_CONTENT_INCLUDE
#define EXECUTION_CONTENT_INCLUDE
#include <atomic>
#include <map>
#include <string>
#include <thread>
#include <vector>
#include <boost/asio.hpp>
#include <boost/serialization/map.hpp>
#include <boost/serialization/vector.hpp>
#include <gsl/gsl>
#include "base-utils/commandUtils.h"
#include "base-utils/execution.h"
#include "base-utils/tmpFile.h"
namespace execHelper {
namespace test {
namespace baseUtils {
/**
 * \brief Non-copyable wrapper around a boost::asio io_context that owns the
 * thread member used for running it.
 */
class IoService {
public:
IoService() = default;
IoService(const IoService& other) = delete;
IoService(IoService&& other) = delete;
~IoService() noexcept;
IoService& operator=(const IoService& other) = delete;
IoService& operator=(IoService&& other) = delete;
// Start processing -- presumably on the owned m_thread; confirm in the
// implementation.
void start() noexcept;
// Stop processing.
void stop() noexcept;
// Process events on the calling thread.
void run() noexcept;
// Access the underlying io service.
boost::asio::io_service& get() noexcept;
private:
boost::asio::io_context m_service;
std::atomic<bool> m_isRunning = {false};
std::thread m_thread;
};
/**
 * \brief Data reported for a single execution: the command line arguments and
 * the environment of the reporting process.
 */
struct ExecutionContentData {
std::vector<std::string> args;
std::map<std::string, std::string> env;
// Boost.Serialization hook: serializes both the arguments and environment.
template <typename Archive>
void serialize(Archive& ar, const unsigned int /*version*/) {
ar& args& env;
}
};
/**
 * \brief Reply sent back to the reporting client, carrying the return code it
 * should use.
 */
struct ExecutionContentDataReply {
ReturnCode returnCode;
// NOTE(review): implicit single-argument constructor; consider marking it
// 'explicit'. Left as is, since callers may rely on implicit conversion.
ExecutionContentDataReply(ReturnCode returnCode) noexcept
: returnCode(returnCode) {
;
}
};
/**
 * \brief Test server that records the execution data reported over a local
 * stream socket and replies with a configurable return code.
 */
class ExecutionContentServer {
public:
using ConfigCommand = std::vector<std::string>;
// Create a server that replies with the given return code.
ExecutionContentServer(ReturnCode returnCode) noexcept;
~ExecutionContentServer() noexcept;
ExecutionContentServer(const ExecutionContentServer& other) = delete;
ExecutionContentServer(ExecutionContentServer&& other) noexcept;
ExecutionContentServer&
operator=(const ExecutionContentServer& other) = delete;
ExecutionContentServer& operator=(ExecutionContentServer&& other) noexcept;
void swap(ExecutionContentServer& other) noexcept;
// The command a test configuration should use to report an execution to
// this server.
ConfigCommand getConfigCommand() const noexcept;
// Number of executions reported so far.
unsigned int getNumberOfExecutions() const noexcept;
// All execution data received so far.
const std::vector<ExecutionContentData>& getReceivedData() const noexcept;
// Discard the recorded executions.
void clear() noexcept;
// Register the io service shared by all server instances.
static void registerIoService(gsl::not_null<IoService*> ioService) noexcept;
private:
/**
* Opens the acceptor explicitly rather than using the appropriate constructor for it. This is to work around
* the fact that the appropriate constructor will throw, causing an exception to leak out of the interface
*
* \throws boost::system::system_error If the acceptor can not be opened
*/
void openAcceptor();
void init() noexcept;
void accept() noexcept;
// Record the given execution data and build the reply for the client.
ExecutionContentDataReply addData(ExecutionContentData data) noexcept;
std::vector<ExecutionContentData> m_receivedData;
uint32_t m_numberOfExecutions = {0};
ReturnCode m_returnCode = {SUCCESS};
TmpFile m_file; // Backing file for the local stream socket endpoint
boost::asio::local::stream_protocol::endpoint m_endpoint;
boost::asio::local::stream_protocol::socket m_socket;
boost::asio::local::stream_protocol::acceptor m_acceptor;
static IoService* m_ioService;
};
// The server is the default 'execution content' type used by the tests.
using ExecutionContent = ExecutionContentServer;
/**
 * \brief Client side: connects to a server endpoint and reports a single
 * execution.
 */
class ExecutionContentClient {
public:
// Connect to the server endpoint backed by the given file.
ExecutionContentClient(const Path& file);
// Report one execution; returns the return code dictated by the server.
ReturnCode addExecution(const ExecutionContentData& data);
private:
boost::asio::local::stream_protocol::endpoint m_endpoint;
};
} // namespace baseUtils
} // namespace test
} // namespace execHelper
#endif /* EXECUTION_CONTENT_INCLUDE */
<file_sep>set(MODULE_NAME commander)
set(LIBRARY_NAME ${PROJECT_NAME}-${MODULE_NAME})
set(SRCS
src/commander.cpp
src/logger.cpp
)
set(DEPENDENCIES
log
core
plugins
)
set(LIBRARY_LINKED_LIBS
${${PROJECT_NAME}-plugins_LIBRARIES}
${${PROJECT_NAME}-core_LIBRARIES}
${${PROJECT_NAME}-log_LIBRARIES}
)
include_directories(
include/commander
)
add_library(${LIBRARY_NAME} ${SRCS})
target_include_directories(${LIBRARY_NAME} PRIVATE include/commander)
target_include_directories(${LIBRARY_NAME} PUBLIC include ${DEPENDENCIES})
target_link_libraries(${LIBRARY_NAME} PUBLIC ${DEPENDENCIES})
add_library(${MODULE_NAME} ALIAS ${LIBRARY_NAME})
install(TARGETS ${LIBRARY_NAME} DESTINATION ${LIB_DIRECTORY})
<file_sep>Import('env')
# Build each example program and register an alias with the same name, so a
# single program can be built via e.g. 'scons hello'.
hello = env.Program('hello.cpp')
env.Alias('hello', hello)
world = env.Program('world.cpp')
env.Alias('world', world)
<file_sep>#include "verbosity.h"
#include "logger.h"
namespace execHelper::plugins {
/**
 * Store the verbosity fleeting option under VERBOSITY_KEY in the given
 * variables map: "yes" when verbose, "no" otherwise. Logs an error when the
 * key can not be added.
 */
void VerbosityLong::getVariables(
    config::VariablesMap& variables,
    const config::FleetingOptionsInterface& options) noexcept {
    if(!variables.add(VERBOSITY_KEY, options.getVerbosity() ? "yes" : "no")) {
        LOG(error) << "Failed to add key '" << VERBOSITY_KEY << "'";
    }
}
} // namespace execHelper::plugins
<file_sep>#include "cast-impl.h"
using std::string;
using std::vector;
namespace execHelper::config::detail {
// Explicit instantiations of the Cast conversion helper for every target
// type used by the configuration layer, so the template definitions can
// stay in cast-impl.h instead of a public header.
template class Cast<string, vector<string>>;
template class Cast<vector<string>, vector<string>>;
template class Cast<bool, vector<string>>;
template class Cast<Path, vector<string>>;
template class Cast<char, vector<string>>;
template class Cast<uint32_t, vector<string>>;
} // namespace execHelper::config::detail
<file_sep>#include <algorithm>
#include <filesystem>
#include <string>
#include <string_view>
#include <vector>
#include "config/environment.h"
#include "config/pattern.h"
#include "config/variablesMap.h"
#include "plugins/luaPlugin.h"
#include "unittest/catch.h"
#include "unittest/config.h"
#include "unittest/rapidcheck.h"
#include "utils/commonGenerators.h"
#include "utils/utils.h"
#include "executorStub.h"
#include "fleetingOptionsStub.h"
#include "handlers.h"
using std::for_each;
using std::optional;
using std::string;
using std::string_view;
using std::vector;
using execHelper::config::EnvArgs;
using execHelper::config::EnvironmentCollection;
using execHelper::config::Path;
using execHelper::config::Patterns;
using execHelper::config::VariablesMap;
using execHelper::core::Task;
using execHelper::core::test::ExecutorStub;
using execHelper::test::propertyTest;
using execHelper::test::utils::getExpectedTasks;
namespace filesystem = std::filesystem;
namespace execHelper::plugins::test {
SCENARIO("Testing the configuration settings of the clang-tidy plugin",
         "[clang-tidy]") {
    // Property-based test: every combination of optional configuration
    // settings must map onto the expected clang-tidy command line.
    propertyTest("", [](const optional<filesystem::path>& buildDir,
                        const optional<filesystem::path>& workingDir,
                        const optional<vector<filesystem::path>>& sources,
                        const optional<vector<string>>& checks,
                        const optional<vector<string>>& warningAsErrors,
                        const optional<vector<string>>& commandLine,
                        const optional<EnvironmentCollection>& environment) {
        const Task task;
        Task expectedTask(task);
        Patterns patterns;
        VariablesMap config("clang-tidy-test");
        LuaPlugin plugin(std::string(PLUGINS_INSTALL_PATH) + "/clang-tidy.lua");

        // Record every task the plugin executes so it can be compared
        // against the expected tasks afterwards
        ExecutorStub executor;
        ExecuteCallback executeCallback = [&executor](const Task& task) {
            executor.execute(task);
        };
        registerExecuteCallback(executeCallback);

        expectedTask.append("clang-tidy");

        const string directoryOption("-p");
        if(buildDir) {
            REQUIRE(config.add("build-dir", buildDir->string()));
            expectedTask.append({directoryOption, buildDir->string()});
        } else {
            // Without an explicit build dir the plugin defaults to '.'
            expectedTask.append({directoryOption, "."});
        }
        if(workingDir) {
            handleWorkingDirectory(*workingDir, config, expectedTask);
        }
        if(checks) {
            // Checks are joined into one comma-separated -checks= option
            string checkString{"-checks="};
            REQUIRE(config.add("checks", *checks));
            for(auto it = checks->begin(); it != checks->end(); ++it) {
                if(it != checks->begin()) {
                    checkString.append(",");
                }
                checkString.append(*it);
            }
            // NOTE(review): unqualified 'move' resolves to std::move via ADL
            expectedTask.append(move(checkString));
        }
        if(warningAsErrors) {
            REQUIRE(config.add("warning-as-errors", *warningAsErrors));
            string warningAsErrorString{"-warning-as-errors="};
            for(auto it = warningAsErrors->begin();
                it != warningAsErrors->end(); ++it) {
                if(it != warningAsErrors->begin()) {
                    warningAsErrorString.append(",");
                }
                warningAsErrorString.append(*it);
            }
            expectedTask.append(move(warningAsErrorString));
        } else if(checks) {
            // Set warning-as-errors to auto and expect the checks to be mirrored
            REQUIRE(config.add("warning-as-errors", "auto"));
            string warningAsErrorString{"-warning-as-errors="};
            for(auto it = checks->begin(); it != checks->end(); ++it) {
                if(it != checks->begin()) {
                    warningAsErrorString.append(",");
                }
                warningAsErrorString.append(*it);
            }
            expectedTask.append(move(warningAsErrorString));
        }
        if(commandLine) {
            handleCommandLine(*commandLine, config, expectedTask);
        }
        if(sources) {
            if(sources->empty()) {
                // An explicitly empty sources list is added as a bare key
                REQUIRE(config.add("sources"));
            } else {
                for_each(sources->begin(), sources->end(),
                         [&expectedTask, &config](const auto& source) {
                             expectedTask.append(source.string());
                             REQUIRE(config.add("sources", source.string()));
                         });
            }
        } else {
            // Default source glob when no sources are configured
            expectedTask.append("*.cpp");
        }
        if(environment) {
            handleEnvironment(*environment, config, expectedTask);
        }

        ExecutorStub::TaskQueue expectedTasks =
            getExpectedTasks(expectedTask, patterns);

        THEN_WHEN("We apply the plugin") {
            bool returnCode = plugin.apply(task, config, patterns);

            THEN_CHECK("It should succeed") { REQUIRE(returnCode); }

            THEN_CHECK("It called the right commands") {
                REQUIRE(expectedTasks == executor.getExecutedTasks());
            }
        }
    });
}
<file_sep>set(EXE_NAME execution-content)
# Common dependencies for the integration-test helper binary
set(DEPENDENCIES test-base-utils boost-serialization)

add_executable(${EXE_NAME} execution-content.cpp)
target_include_directories(${EXE_NAME} PRIVATE ${DEPENDENCIES})
target_link_libraries(${EXE_NAME} PRIVATE ${DEPENDENCIES})

install(TARGETS ${EXE_NAME} DESTINATION ${INTEGRATION_TEST_BIN_DIR} COMPONENT integration)
<file_sep>#ifndef CONFIG_INCLUDE
#define CONFIG_INCLUDE
#include <optional>
#include <string>
#include <utility>
#include "path.h"
#include "pattern.h"
#include "settingsNode.h"
namespace execHelper {
namespace config {
class OptionDescriptions;
}
} // namespace execHelper
namespace execHelper {
namespace config {
// Pairing of the patterns discovered in a settings file with the parsed
// settings tree itself
using PatternSettingsPair = std::pair<Patterns, config::SettingsNode>;

/**
 * Parse the settings file at the given path
 *
 * \param[in] file  The path of the settings file to parse
 * \returns The discovered patterns and the parsed settings on success,
 *          std::nullopt otherwise
 */
auto parseSettingsFile(const Path& file) noexcept
    -> std::optional<PatternSettingsPair>;
} // namespace config
} // namespace execHelper
#endif /* CONFIG_INCLUDE */
<file_sep>#ifndef EXECUTION_HANDLER_INCLUDE
#define EXECUTION_HANDLER_INCLUDE
#include <map>
#include <gsl/gsl>
#include "executionContent.h"
namespace execHelper {
namespace test {
namespace baseUtils {
/**
 * \brief Registers expected execution content per key and hands it out
 *        per test iteration
 */
class ExecutionHandler {
  public:
    /**
     * Register the given execution content under the given key
     *
     * \param[in] key      The key to register the content under
     * \param[in] content  The execution content to register
     */
    void add(const std::string& key, ExecutionContent&& content) noexcept;

    /**
     * Look up the execution content registered under the given key
     *
     * NOTE(review): declared noexcept while a typical map lookup of a
     * missing key throws -- verify against the implementation that a
     * missing key cannot occur (or terminates deliberately).
     *
     * \param[in] key  The key to look up
     * \returns The execution content associated with the given key
     */
    const ExecutionContent& at(const std::string& key) const noexcept;

  private:
    using ExecutionContentCollection = std::map<std::string, ExecutionContent>;

    /**
     * \brief Used for handling an execution iteration
     */
    class ExecutionHandlerIterationRAII {
      public:
        /**
         * \param[in] outputs  The registered execution content this
         *                     iteration operates on (not owned)
         */
        explicit ExecutionHandlerIterationRAII(
            gsl::not_null<ExecutionContentCollection*> outputs);
        ~ExecutionHandlerIterationRAII();

        ExecutionHandlerIterationRAII(
            const ExecutionHandlerIterationRAII& other) = default;
        ExecutionHandlerIterationRAII(ExecutionHandlerIterationRAII&& other) =
            default;

        ExecutionHandlerIterationRAII&
        operator=(const ExecutionHandlerIterationRAII& other) = default;
        ExecutionHandlerIterationRAII&
        operator=(ExecutionHandlerIterationRAII&& other) noexcept =
            default; // NOLINT(misc-noexcept-move-constructor)

        /**
         * Look up the execution content registered under the given key
         *
         * \param[in] key  The key to look up
         * \returns The execution content associated with the given key
         */
        const ExecutionContent& at(const std::string& key) const noexcept;

      private:
        gsl::not_null<ExecutionContentCollection*>
            m_outputs; //!< The registered outputs handled by this iteration
    };

    ExecutionContentCollection m_outputs; //!< All registered execution content

  public:
    /**
     * Start a new execution iteration
     *
     * \returns An RAII object scoped to the started iteration
     */
    ExecutionHandlerIterationRAII startIteration() noexcept;
};
} // namespace baseUtils
} // namespace test
} // namespace execHelper
#endif /* EXECUTION_HANDLER_INCLUDE */
<file_sep>install(FILES init-completion.sh completion.bash completion.zsh DESTINATION share/exec-helper/completions COMPONENT runtime)
# Install the bash completion script once per program alias ('eh' and
# 'exec-helper').
# NOTE(review): bash-completion normally auto-loads scripts from
# share/bash-completion/completions/ -- verify the destination is intended.
install(FILES completion.bash RENAME eh DESTINATION share/bash-completion COMPONENT runtime)
install(FILES completion.bash RENAME exec-helper DESTINATION share/bash-completion COMPONENT runtime)
# Zsh completion functions for both aliases
install(FILES _eh _exec-helper DESTINATION share/zsh/functions/Completion/Unix COMPONENT runtime)
<file_sep>#include "logger.h"
// Defines and initializes the global logger instance for this component
// (named 'exec_helper_yaml_logger'). Messages routed through it are tagged
// with LOG_CHANNEL so they can be filtered per channel.
BOOST_LOG_GLOBAL_LOGGER_INIT( // NOLINT(modernize-use-trailing-return-type)
    exec_helper_yaml_logger, execHelper::log::LoggerType) {
    return execHelper::log::LoggerType(
        boost::log::keywords::
            channel = // NOLINT(fuchsia-default-arguments-calls)
            LOG_CHANNEL);
}
<file_sep>#include <memory>
#include <string>
#include "core/immediateExecutor.h"
#include "unittest/catch.h"
#include "shellStub.h"
using std::make_shared;
using std::static_pointer_cast;
using std::string;
using execHelper::core::ImmediateExecutor;
using execHelper::core::Shell;
namespace {
ImmediateExecutor::Callback // NOLINT(fuchsia-statically-constructed-objects)
IGNORE_CALLBACK = [](Shell::ShellReturnCode /* returnCode */) noexcept {
}; // NOLINT(cert-err58-cpp)
} // namespace
namespace execHelper::core::test {
SCENARIO("Test the execution of the immediateExecutor",
         "[ExecutorInterface][ImmediateExecutor]") {
    GIVEN("Some tasks we want to execute and an executor") {
        Task task1;
        task1.append("task1");
        Task task2;
        task2.append("task2");
        Task task3;
        task3.append("task3");

        // The order in which the tasks are expected to reach the shell
        ShellStub::TaskQueue actualTasks = {task1, task2, task3};

        auto shell = make_shared<ShellStub>();
        ImmediateExecutor executor(static_pointer_cast<Shell>(shell),
                                   IGNORE_CALLBACK);

        WHEN("We schedule each task and run the executor") {
            // The immediate executor runs each task synchronously on execute()
            executor.execute(task1);
            executor.execute(task2);
            executor.execute(task3);

            THEN("We should get the same tasks again") {
                ShellStub::TaskQueue executedTasks = shell->getExecutedTasks();
                REQUIRE(executedTasks == actualTasks);
            }
        }
    }
}
SCENARIO("Test the failing of the execution",
         "[ExecutorInterface][ImmediateExecutor]") {
    GIVEN("A shell that fails to execute and a task to execute") {
        Task task1;
        task1.append("task1");

        const Shell::ShellReturnCode expectedReturnCode = 42U;
        Shell::ShellReturnCode reportedReturnCode = 0U;

        // Capture the return code the executor reports through its
        // failure callback
        ImmediateExecutor::Callback callback =
            [&reportedReturnCode](Shell::ShellReturnCode returnCode) {
                reportedReturnCode = returnCode;
            };

        auto shell = make_shared<ShellStub>(expectedReturnCode);
        ImmediateExecutor executor(static_pointer_cast<Shell>(shell), callback);

        WHEN("We schedule the task for execution") {
            executor.execute(task1);

            THEN("We should receive the failed return code") {
                REQUIRE(reportedReturnCode == expectedReturnCode);
            }
        }
    }
}
} // namespace execHelper::core::test
<file_sep>#include "commander.h"
#include <filesystem>
#include <iostream>
#include <utility>
#include <gsl/gsl>
#include "config/fleetingOptionsInterface.h"
#include "config/pattern.h"
#include "config/settingsNode.h"
#include "config/variablesMap.h"
#include "core/task.h"
#include "plugins/executePlugin.h"
#include "logger.h"
using std::move;
using gsl::czstring;
using gsl::not_null;
using execHelper::config::EnvironmentCollection;
using execHelper::config::FleetingOptionsInterface;
using execHelper::config::Path;
using execHelper::config::Pattern;
using execHelper::config::Patterns;
using execHelper::config::SettingsNode;
using execHelper::config::VariablesMap;
using execHelper::core::Task;
using execHelper::plugins::ExecutePlugin;
using execHelper::plugins::Plugins;
namespace filesystem = std::filesystem;
namespace {
const czstring<> WORKING_DIR_PATTERN_KEY = "EH_WORKING_DIR";
/**
 * Extend the given patterns with the patterns that exec-helper predefines
 * itself (currently the working-directory pattern).
 */
inline auto addPredefinedPatterns(Patterns patterns) -> Patterns {
    const Pattern workingDirPattern(
        WORKING_DIR_PATTERN_KEY, {filesystem::current_path().string()});
    patterns.push_back(workingDirPattern);
    return patterns;
}
} // namespace
namespace execHelper {
namespace commander {
auto Commander::run(const FleetingOptionsInterface& fleetingOptions,
SettingsNode settings, Patterns patterns,
const Path& workingDirectory,
const EnvironmentCollection& env,
Plugins&& plugins) noexcept -> bool {
patterns = addPredefinedPatterns(patterns);
ExecutePlugin::push(
not_null<const FleetingOptionsInterface*>(&fleetingOptions));
ExecutePlugin::push(move(settings));
ExecutePlugin::push(move(patterns));
ExecutePlugin::push(move(plugins));
Task task({}, env, workingDirectory);
auto commands = fleetingOptions.getCommands();
if(commands.empty()) {
user_feedback_error("You must define at least one command");
return false;
}
ExecutePlugin plugin(move(commands));
auto returnCode = plugin.apply(task, VariablesMap("commands"), patterns);
ExecutePlugin::popPlugins();
ExecutePlugin::popFleetingOptions();
ExecutePlugin::popSettingsNode();
ExecutePlugin::popPatterns();
return returnCode;
}
} // namespace commander
} // namespace execHelper
<file_sep>.. describe:: patterns
A list of patterns to apply on the command line. See :ref:`exec-helper-config-patterns` (5).
<file_sep>#include "memory.h"
#include <gsl/string_span>
#include "config/fleetingOptionsInterface.h"
#include "config/variablesMap.h"
using std::string;
using gsl::czstring;
using execHelper::config::FleetingOptionsInterface;
using execHelper::config::Patterns;
using execHelper::config::VariablesMap;
using execHelper::core::Task;
using namespace std::string_literals;
namespace {
const czstring<> PLUGIN_NAME = "memory";
} // namespace
namespace execHelper::plugins {
Memory::Memories // NOLINT(fuchsia-statically-constructed-objects)
Memory::m_executions = {}; // NOLINT(readability-redundant-declaration)
bool Memory::m_returnCode = true; // NOLINT(readability-redundant-declaration)
/**
 * The memory plugin takes no configuration: always returns an empty
 * variables map rooted at the plugin name.
 */
auto Memory::getVariablesMap(
    const FleetingOptionsInterface& /*fleetingOptions*/) const noexcept
    -> VariablesMap {
    return VariablesMap(PLUGIN_NAME);
}
/**
 * Record this invocation in the static execution log and report the
 * configured return code.
 *
 * \param[in] task       The task this plugin was applied to
 * \param[in] variables  The configuration of this plugin invocation
 * \param[in] patterns   The patterns associated with this invocation
 * \returns The return code configured via setReturnCode() (true by default)
 */
auto Memory::apply(Task task, const VariablesMap& variables,
                   const Patterns& patterns) const noexcept -> bool {
    // Construct the record in place instead of copying a local temporary
    // into the container.
    m_executions.emplace_back(task, variables, patterns);
    return m_returnCode;
}
// Human-readable summary used when presenting this plugin to the user
auto Memory::summary() const noexcept -> std::string {
    return "Memory (internal)";
}

// Read-only access to all recorded apply() invocations
auto Memory::getExecutions() noexcept -> const Memory::Memories& {
    return m_executions;
}

// Clear all recorded executions and restore the default (successful)
// return code
void Memory::reset() noexcept {
    m_executions.clear();
    m_returnCode = true;
}

// Configure the value that subsequent apply() calls will return
void Memory::setReturnCode(bool returnCode) noexcept {
    m_returnCode = returnCode;
}

// RAII helper: resets the static Memory state at both construction and
// destruction so tests are isolated from each other
MemoryHandler::MemoryHandler() {
    // Reset the memory here too, just to be sure
    Memory::reset();
}

MemoryHandler::~MemoryHandler() { Memory::reset(); }

// Convenience forwarder to Memory::getExecutions()
auto MemoryHandler::getExecutions() noexcept -> const Memory::Memories& {
    return Memory::getExecutions();
}

// Convenience forwarder to Memory::reset()
void MemoryHandler::reset() noexcept { Memory::reset(); }

// Convenience forwarder to Memory::setReturnCode()
void MemoryHandler::setReturnCode(bool returnCode) noexcept {
    Memory::setReturnCode(returnCode);
}
// Construct a SpecialMemory whose apply() reports the given return code
SpecialMemory::SpecialMemory(bool returnCode) noexcept
    : m_returnCode(returnCode) {
    ;
}

// The plugin takes no configuration: always returns an empty variables map
// rooted at the plugin name
auto SpecialMemory::getVariablesMap(
    const config::FleetingOptionsInterface& /*fleetingOptions*/) const noexcept
    -> config::VariablesMap {
    return VariablesMap(PLUGIN_NAME);
}
/**
 * Record this invocation in this instance's execution log and report the
 * configured return code.
 *
 * \param[in] task       The task this plugin was applied to
 * \param[in] variables  The configuration of this plugin invocation
 * \param[in] patterns   The patterns associated with this invocation
 * \returns The return code this instance was constructed with
 */
auto SpecialMemory::apply(core::Task task,
                          const config::VariablesMap& variables,
                          const config::Patterns& patterns) const noexcept
    -> bool {
    // Construct the record in place instead of copying a local temporary
    // into the container (consistent with Memory::apply).
    m_executions.emplace_back(task, variables, patterns);
    return m_returnCode;
}
// Human-readable summary used when presenting this plugin to the user
auto SpecialMemory::summary() const noexcept -> string {
    return "SpecialMemory (internal)";
}

// Read-only access to the apply() invocations recorded by this instance
auto SpecialMemory::getExecutions() noexcept -> const Memories& {
    return m_executions;
}
} // namespace execHelper::plugins
<file_sep>set(LIB_NAME log-generators)
# Header-only generator helpers shared by the log tests
add_library(${LIB_NAME} INTERFACE)
target_include_directories(${LIB_NAME} INTERFACE include)

# Unit-test runner for the log component
set(EXE_NAME ${PROJECT_NAME}-log-unittest)
set(DEPENDENCIES boost-log log core rpcheck unittest log-generators)

add_executable(${EXE_NAME} src/logTest.cpp)
target_include_directories(${EXE_NAME} PRIVATE ${DEPENDENCIES})
target_link_libraries(${EXE_NAME} PRIVATE ${DEPENDENCIES})

add_test(${EXE_NAME} ${EXE_NAME})
install(TARGETS ${EXE_NAME} DESTINATION ${UNITTEST_BIN_DIR})
<file_sep>include(CMakeDependentOption)
# HTML and XML API docs can be toggled individually, but only when
# BUILD_API_DOCUMENTATION is enabled.
# NOTE(review): the XML option description says "MAN API documentation" --
# looks like a copy/paste slip; verify the intended wording.
CMAKE_DEPENDENT_OPTION(BUILD_API_HTML_DOCUMENTATION "Create and install the HTML API documentation (requires Doxygen)" ON "BUILD_API_DOCUMENTATION" OFF)
CMAKE_DEPENDENT_OPTION(BUILD_API_XML_DOCUMENTATION "Create and install the MAN API documentation (requires Doxygen)" ON "BUILD_API_DOCUMENTATION" OFF)

find_package(Doxygen 1.8.15 REQUIRED)

# Mirror the build's verbosity setting in the Doxygen output
if(CMAKE_VERBOSE_MAKEFILE)
    set(DOXYGEN_QUIET NO)
else()
    set(DOXYGEN_QUIET YES)
endif()

if(ENABLE_WERROR)
    set(WARN_AS_ERROR YES)
else()
    set(WARN_AS_ERROR NO)
endif()

# GENERATE_HTML / GENERATE_XML are substituted into the Doxyfile below
if(BUILD_API_HTML_DOCUMENTATION)
    add_custom_target(docs-api-html ALL)
    set(GENERATE_HTML "YES")
else()
    set(GENERATE_HTML "NO")
endif()
if(BUILD_API_XML_DOCUMENTATION)
    add_custom_target(docs-api-xml ALL)
    set(GENERATE_XML "YES")
else()
    set(GENERATE_XML "NO")
endif()

set(doxyfile_in Doxyfile.in)
set(doxyfile ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile)
configure_file(${doxyfile_in} ${doxyfile} @ONLY)

# Single Doxygen invocation generates all enabled output formats
add_custom_target(docs-api
    SOURCES ${doxyfile_in}
    COMMAND ${DOXYGEN_EXECUTABLE} ${doxyfile}
    WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
    COMMENT "Generating API documentation with Doxygen"
    VERBATIM)

install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/api/html DESTINATION ${CMAKE_INSTALL_DOCDIR}/api COMPONENT docs-api-html)
install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/api/xml DESTINATION ${CMAKE_INSTALL_DOCDIR}/api COMPONENT docs-api-xml)

add_dependencies(docs docs-api)
if(BUILD_API_HTML_DOCUMENTATION)
    add_dependencies(docs-api-html docs-api)
endif()
if(BUILD_API_XML_DOCUMENTATION)
    add_dependencies(docs-api-xml docs-api)
endif()
<file_sep>import os
from pathlib import Path
from command import Command
class CustomPlugin():
    """Creates a temporary custom exec-helper lua plugin on disk.

    The plugin file is written on construction and removed again when the
    object is destroyed (or earlier via an explicit remove() call).
    """

    _suffix = '.lua'

    def __init__(self, name, directory):
        # The target directory must already exist: the plugin file is
        # created directly inside it.
        assert directory.is_dir()
        self._id = name
        self._file = Path(directory).joinpath(self._id + self._suffix)
        self.write(self._id, self._file)

    def __del__(self):
        # Best-effort cleanup; the file may already have been removed via
        # an explicit remove() call.
        self.remove()

    @property
    def id(self):
        """The name under which this plugin is registered."""
        return self._id

    @staticmethod
    def write(id, file):
        """Write the lua plugin body: forward the configured command line."""
        with open(file, 'w') as f:
            f.write('task:add_args(get_commandline())\n')
            f.write('register_task(task)\n')

    def remove(self):
        """Remove the plugin file from disk.

        Idempotent: missing_ok avoids FileNotFoundError when the file was
        already removed (previously __del__ raised after an explicit
        remove()).
        """
        self._file.unlink(missing_ok=True)
<file_sep>#ifndef __CREATE_OBJECT_H__
#define __CREATE_OBJECT_H__
#include <utility>
namespace execHelper {
namespace core {
namespace detail {
/**
* \brief Create an object using the values in the given tuple as the arguments
* for the constructor of the new object
*/
// NOTE(review): since C++14/C++17 this recursive index-building technique
// can be replaced by std::index_sequence / std::apply (or
// std::make_from_tuple) -- consider modernizing if the toolchain allows.
template <typename ReturnType, typename Tup, bool end, int Total, int... N>
struct createObject {
    /**
     * Create the object
     *
     * \param[in] tuple The tuple containing the arguments for creating the
     * object \returns The created object
     */
    static ReturnType getObject(Tup&& tuple) {
        // Recursively index the list for the number of arguments in N, so that
        // we can use std::get<1,2,3..Total>() on the tuple
        // once the whole tuple has been indexed
        return createObject<ReturnType, Tup, Total == 1 + sizeof...(N), Total,
                            N...,
                            sizeof...(N)>::getObject(std::forward<Tup>(tuple));
    }
};

/*! @copydoc createObject
 *
 * Partial template specialization: End condition for the indexing.
 * See \ref createObject for the documentation.
 */
template <typename ReturnType, typename Tup, int Total, int... N>
struct createObject<ReturnType, Tup, true, Total, N...> {
    /**
     * This is a partial template specialization. Please check the generic
     * template declaration for the documentation.
     *
     * \param[in] tuple A tuple containing the arguments to pass to the
     * constructor of the ReturnType \returns A ReturnType object constructed
     * using the given arguments
     */
    static ReturnType getObject(Tup&& tuple) {
        // Convert the tuple to a function call with arguments using
        // std::get<1,2,3..Total>()
        return ReturnType(std::get<N>(std::forward<Tup>(tuple))...);
    }
};
} // namespace detail
} // namespace core
} // namespace execHelper
#endif /* __CREATE_OBJECT_H__ */
<file_sep>import os
from pathlib import Path
import pytest
from pytest_bdd import scenarios, given, when, then
from scenarios_run import *
scenarios('../feature/working-dir', example_converters=dict(command = str, start_working_dir = Path, expected_working_dir = Path))
@given('a current working directory of <start_working_dir>')
def working_dir(run_environment, start_working_dir):
    """Create <start_working_dir>, set it as the working directory and flag
    the configuration as external."""
    path = Path(start_working_dir)
    path.mkdir(parents=True, exist_ok=True)
    run_environment.set_working_dir(path)
    run_environment.config_is_external()
    return path


@then('the working directory should be the environment root dir')
def check_working_dir(run_environment, command):
    """Assert that the last run of <command> used the environment root dir
    as its working directory."""
    expected_working_dir = run_environment.root_dir
    last_run = run_environment.config.commands[command].runs[-1]
    assert(last_run.working_dir == expected_working_dir)


@then('the PWD environment variable should be the environment root dir')
def check_pwd(run_environment, command):
    """Assert that PWD was set to the environment root dir for the last
    run of <command>."""
    expected_working_dir = run_environment.root_dir
    last_run = run_environment.config.commands[command].runs[-1]
    key = 'PWD'
    assert(key in last_run.environment)
    assert(Path(last_run.environment[key]) == expected_working_dir)
<file_sep>#include "reportingExecutor.h"
#include <iostream>
#include "log/log.h"
#include "shell.h"
#include "task.h"
namespace execHelper::core {
ReportingExecutor::ReportingExecutor() noexcept {
    // Nothing to initialize
}

void ReportingExecutor::execute(const Task& task) noexcept {
    // Only report the task that would run; this executor never executes it
    user_feedback_info("Executing " << task.toString());
}
} // namespace execHelper::core
<file_sep>set(EXE_NAME ${PROJECT_NAME}-commander-unittest)
# Unit-test runner for the commander component
set(DEPENDENCIES commander test-utils unittest)

add_executable(${EXE_NAME} src/commanderTest.cpp)
target_include_directories(${EXE_NAME} PRIVATE ${DEPENDENCIES})
target_link_libraries(${EXE_NAME} PRIVATE ${DEPENDENCIES})

add_test(${EXE_NAME} ${EXE_NAME})
install(TARGETS ${EXE_NAME} DESTINATION ${UNITTEST_BIN_DIR})
<file_sep>.. _exec-helper-custom-plugins:
Custom plugins
**************
Where to put your plugins
=========================
Exec-helper searches dynamically for (most of) its plugins in all the plugin search paths. It searches in the following locations (earlier listed locations take precedence over later listed locations for plugins with the same name):
1. Using the :code:`--additional-search-path` command-line option. Multiple paths can be passed to it using multiple arguments. Earlier mentioned paths take precedence over later mentioned paths. The paths can be absolute or relative w.r.t. the used :program:`exec-helper` configuration file. E.g.::
exec-helper build --additional-search-path blaat /tmp
will add the relative path *blaat* and the absolute path */tmp* to the plugin search paths.
2. Using the :code:`additional-search-paths` key in the :program:`exec-helper` configuration file. The key takes an ordered list containing absolute or relative (w.r.t. the :program:`exec-helper` configuration file it is mentioned in) paths. Earlier listed elements take precedence over lower listed elements. E.g.::
additional-search-paths:
- blaat
- /tmp
3. The system plugin paths. These paths contain (most of) the default modules bundled with :program:`exec-helper`. It is not recommended to add your custom plugins to any of these paths.
Listing the modules
===================
Exec-helper lists the modules it currently finds by using the :code:`--list-plugins` command-line option.
Writing a lua plugin
====================
Exec-helper supports luaJIT 2.0.5. LuaJIT is a Lua 5.1 implementation with some additional features from Lua 5.2. All LuaJIT functionality is embedded in the :program:`exec-helper` binary, no LuaJIT install is required for running the plugin.
Exec-helper treats all files in the plugin search paths with a *lua* suffix as a compatible lua plugin. The name of the module is derived from the rest of the filename.
The interface
=============
A lua plugin is called within a wider (lua) context containing some objects and (convenience) functions.
Exec-helper specific functions
------------------------------
The following :program:`exec-helper` specific functions are available next to the lua 5.1 functions:
.. highlight:: lua
.. describe:: get_commandline()
Returns a list of the command-line arguments set by the :code:`command-line` key in the configuration. Use this to allow users of your plugin to freely set additional, plugin-specific command-line settings that can not be set by other configuration options. These additional command-line settings must be added explicitly by this plugin in the right position. E.g::
task:add_args(get_commandline())
.. describe:: get_environment()
Returns a two-level Lua table containing the environment in which the task will be executed. The plugin can read and modify this environment. Values set by the `environment` key in the configuration are added automatically to this list before this plugin is called, there is no need to do this explicitly.
**Note**: The *PWD* environment variable, following POSIX convention, is set by the application to the working directory of the task. Therefore, its value cannot be overridden in a custom module.
.. describe:: get_verbose(string arg)
Add :code:`arg` to the current tasks' command line if verbose mode is activated. This function does nothing if verbose mode is not activated. E.g.::
task:add_args(get_verbose('--debug'))
.. describe:: register_task(Task task)
Registers the given :code:`task` as a task to execute by the executor(s). Patterns associated with the task will be automatically permutated and substituted. E.g.::
register_task(task)
.. describe:: user_feedback_error(string message)
Show the given :code:`message` as an error to the user. E.g.::
user_feedback_error('You should not do that!')
.. describe:: input_error(string message)
Show the given :code:`message` as an error to the user and stop execution of this module. E.g.::
input_error('Cowardly refusing to perform that action!')
Exec-helper specific types
--------------------------
The following types (classes) are available in your module:
.. describe:: Config
Behaves like an ordinary lua table. Only reading from it using the access operator (`[key]`) is allowed. The access operator takes a string and returns a Lua table.
.. describe:: Task
Contains the task that is being built. It has the following member functions:
* :code:`add_args(array<string> args)`: Append the given arguments to this task.
* :code:`new(Task task)`: Create a new, default task with an empty command line.
* :code:`copy(Task task)`: Returns a copy of the given task.
Pre-defined objects
-------------------
The following pre-defined objects are automatically present when your module is called:
.. describe:: verbose
A boolean indicating whether the verbose command-line flag was set for this invocation.
.. describe:: jobs
Integer indicating the number of jobs to use for executing this plugin, if the plugin supports parallel job execution. Ignore this if this is not the case.
Example::
task:add_args({'--jobs', jobs})
Adds :code:`--jobs \<value\>` to the command line of the given task where `<value>` is the value of the configured number of jobs.
.. describe:: config
A pure Lua table containing the configuration of the particular :program:`exec-helper` configuration into one easy-to-navigate syntax tree. The tree may contain multiple levels. Accessing a table value in Lua returns a new Lua table. Use the :code:`one()` and :code:`list()` function to convert the table to a single value or list respectively. These functions will return `nil` when the given key has no value. The functions distinguish between no value (`nil`) and an empty value (e.g. an empty list).
Example::
task:add_args({'--directory', one(config['build-dir']) or '.'})
Adds :code:`--directory \<value\>` to the task command line, where `<value>` is one value set by the `build-dir` key or `.` when no such key exists in the configuration of this plugin.
.. describe:: task
A Task object containing the current context for executing the task, this may include prefixes from other plugins. It is *not* possible to erase these prefixes. If your module requires pre- or post-tasks, you can create one or more new tasks and register it. Similarly, it is possible to create new tasks with the same context as the given *task* variable by copy constructing it. Use the Lua *:* operator for calling member functions of a task.
For example, to create a module that calls `echo hello` on its invocation, use::
task:add_args({'echo', 'hello'})
.. describe:: patterns
A Lua table of all patterns and resolved pattern values for all *configured* patterns for the module. Note that patterns can be left as-is or added to a task as a pattern and do *NOT* have to be replaced or permutated by the plugin itself. This object is mainly useful for use cases where subcommands are called based on a pattern, for example the :ref:`exec-helper-plugins-selector`::
pattern_key = one(config['patterns'])
if pattern_key == nil then
input_error('Undefined selector pattern: you must define a pattern to select using the "patterns" keyword.')
end
values = patterns[pattern_key]
if values == nil then
input_error('Selector: Inactive or unknown select pattern used')
end
run_target(task, values)
Example
=======
A module for a directly callable tool
-------------------------------------
Let's implement a simple module for calling `make` called :code:`make`:
*make.lua*:
.. literalinclude:: ../src/scripts/make.lua
:language: lua
This module adds `make` with some additional arguments from the config and the options to the existing :code:`task` task. At the end, it registers the task for execution.
The relevant section in the users' :program:`exec-helper` configuration may look like:
.. code-block:: yaml
commands:
build: Build the project
patterns:
MODE:
default-values:
- debug
- release
short-option: m
long-option: mode
build:
- make
make:
patterns:
- MODE
build:
build-dir: "build/{MODE}"
jobs: 3
command-line: [ --dry-run, --keep-going]
Running :code:`eh build --mode release --verbose` will execute the command-line:
.. code-block:: bash
make --directory build/release --debug --jobs 3 --dry-run --keep-going
A module calling another command
---------------------------------
Let's implement a simple module for `clang-static-analyzer`. Per the docs, this analyzer is used by prepending :code:`scan-build <options> <build command>` to the build command line. Obviously, users will already have configured a command (e.g. `build`) for building the project without any analysis. For maintenance and convenience purposes, we do not want the user to replicate this build command for this plugin, but rather, we want our plugin to add some arguments to the tasks' command line and call the configured build-command for extending the task with the actual build configuration.
Let's implement this module, called under the name :code:`some-analyzer`:
*some-analyzer.lua*:
.. literalinclude:: ../src/scripts/clang-static-analyzer.lua
:language: lua
This module adds :code:`scan-build` and some additional arguments to the command line of the task. Next, it takes the :code:`build-command` configuration values, does some validity checks on it, and requests :program:`exec-helper` to extend the command with the arguments of the given command values.
The relevant section in the users' :program:`exec-helper` configuration (combined with the module above for implementing the build command) may look like:
.. code-block:: yaml
build:
- make
make:
build-dir: build
some-analyzer:
build-command: build
command-line:
- --keep-going
Running :code:`eh some-analyzer --jobs 4` would execute the command line:
.. code-block:: bash
scan-build --keep-going make --directory build --jobs 4
<file_sep>#ifndef BASE_UTILS_GENERATORS_INCLUDE
#define BASE_UTILS_GENERATORS_INCLUDE
#include "unittest/rapidcheck.h"
#include "tmpFile.h"
// NOTE: Must be in rc namespace!
namespace rc {
/**
 * \brief Rapidcheck generator for arbitrary TmpFile instances
 */
template <> struct Arbitrary<execHelper::test::baseUtils::TmpFile> {
    static Gen<execHelper::test::baseUtils::TmpFile> arbitrary() {
        // Generated instances are simply default-constructed TmpFiles
        return gen::construct<execHelper::test::baseUtils::TmpFile>();
    };
};
} // namespace rc
#endif /* BASE_UTILS_GENERATORS_INCLUDE */
<file_sep>#ifndef COMMAND_LINE_OPTIONS_INCLUDE
#define COMMAND_LINE_OPTIONS_INCLUDE
#include <string>
#include <string_view>
#include <vector>
namespace execHelper::config {
// Configuration keys for the supported command-line options, each paired
// with the type that option maps onto.

const std::string HELP_OPTION_KEY{"help"}; // Show usage information
using HelpOption_t = bool;

const std::string VERSION_KEY{"version"}; // Show version information
using VersionOption_t = bool;

const std::string VERBOSE_KEY{"verbose"}; // Enable verbose output
using VerboseOption_t = bool;

const std::string DRY_RUN_KEY{"dry-run"}; // Report tasks without running them
using DryRunOption_t = bool;

const std::string KEEP_GOING_KEY{"keep-going"}; // Continue after failures
using KeepGoingOption_t = bool;

const std::string JOBS_KEY{"jobs"};
using JobsOption_t =
    std::string; // Must be string, since the 'auto' keyword is also supported
using Jobs_t = uint32_t; // The resolved number of jobs

const std::string SETTINGS_FILE_KEY{"settings-file"}; // Config file to use
using SettingsFileOption_t = std::string;

const std::string COMMAND_KEY{"command"}; // The command(s) to execute
using Command = std::string;
using CommandCollection = std::vector<Command>;

const std::string LIST_PLUGINS_KEY{"list-plugins"}; // List discovered plugins
using ListPluginsOption_t = bool;

// Extra plugin search paths; earlier entries take precedence
const std::string APPEND_SEARCH_PATH_KEY{"additional-search-path"};
using AppendSearchPathOption_t = std::vector<std::string>;

const std::string LOG_LEVEL_KEY("debug"); // Log level selection
using LogLevelOption_t = std::string;

const std::string_view AUTO_COMPLETE_KEY{"auto-complete"}; // Shell completion
using AutoCompleteOption_t = std::string;
} // namespace execHelper::config
#endif /* COMMAND_LINE_OPTIONS_INCLUDE */
<file_sep>#ifndef NON_EMPTY_STRING_INCLUDE
#define NON_EMPTY_STRING_INCLUDE
#include <string>
#include "unittest/rapidcheck.h"
namespace execHelper {
namespace test {
/**
 * \brief Wrapper around std::string for use in property-based tests.
 *
 * The class itself does not enforce non-emptiness: the rc::Arbitrary
 * specialization below only ever generates non-empty strings.
 */
class NonEmptyString {
  public:
    /**
     * Wrap the given string
     *
     * \param[in] string The string to wrap
     */
    NonEmptyString(const std::string& string) : m_string(string) { ; }

    //! Access the wrapped string
    const std::string& operator*() const noexcept { return m_string; }

    //! Compare the wrapped strings for equality
    bool operator==(const NonEmptyString& other) const noexcept {
        return m_string == other.m_string;
    }

    bool operator!=(const NonEmptyString& other) const noexcept {
        return !(*this == other);
    }

    //! Lexicographic ordering on the wrapped strings
    bool operator<(const NonEmptyString& other) const noexcept {
        return m_string < other.m_string;
    }

  private:
    std::string m_string; //!< The wrapped string
};

//! Stream the wrapped string
inline std::ostream& operator<<(std::ostream& os,
                                const NonEmptyString& obj) noexcept {
    os << *obj;
    return os;
}
} // namespace test
} // namespace execHelper

namespace rc {
//! Rapidcheck generator producing NonEmptyStrings from non-empty
//! arbitrary strings
template <> struct Arbitrary<execHelper::test::NonEmptyString> {
    static Gen<execHelper::test::NonEmptyString> arbitrary() {
        return gen::construct<execHelper::test::NonEmptyString>(
            gen::nonEmpty(gen::string<std::string>()));
    };
};
} // namespace rc
<file_sep>#ifndef __EXECUTE_PLUGIN_H__
#define __EXECUTE_PLUGIN_H__
#include <memory>
#include <optional>
#include <stdexcept>
#include <string>
#include <vector>
#include <gsl/gsl>
#include "config/commandLineOptions.h"
#include "config/fleetingOptionsInterface.h"
#include "config/pattern.h"
#include "config/patternsHandler.h"
#include "config/settingsNode.h"
#include "plugin.h"
namespace execHelper::plugins {
using Plugins = std::map<std::string, std::shared_ptr<const Plugin>>;
/**
* \brief Exception thrown when the requested plugin is invalid
*
* Exception thrown when the requested plugin is invalid e.g. due to the fact that it can not be found
*/
struct InvalidPlugin : public std::runtime_error {
public:
/**
* Create an invalid plugin
*
* \param[in] msg A message detailing the specifics of the exception
*/
inline explicit InvalidPlugin(const std::string& msg)
: std::runtime_error(msg) {}
/*! @copydoc InvalidPlugin(const std::string&)
*/
inline explicit InvalidPlugin(const char* msg) : std::runtime_error(msg) {}
};
/**
* \brief Plugin for executing arbitrary configured commands and/or plugins
*
* The ExecutePlugin handles the context of and calls all plugins. It uses the prototype pattern for retrieving a map of plugins it can call.
*/
class ExecutePlugin : public Plugin {
public:
/**
* Create an executePlugin instance
*
* \param[in] commandsToExecute The commands to execute with this plugin
* instance
*/
explicit ExecutePlugin(
const config::CommandCollection& commandsToExecute) noexcept;
/**
* \param[in] commandsToExecute The commands to execute
* \param[in] initialCommand The initial command that is being executed
*/
ExecutePlugin(const config::CommandCollection& commandsToExecute,
const config::Command& initialCommand) noexcept;
config::VariablesMap
getVariablesMap(const config::FleetingOptionsInterface& fleetingOptions)
const noexcept override;
bool apply(core::Task task, const config::VariablesMap& variables,
const config::Patterns& patterns) const noexcept override;
std::string summary() const noexcept override;
/**
* Returns a list with the names of all known plugins
*
* @returns A list of plugin names
*/
static auto getPluginNames() noexcept -> std::vector<std::string>;
/**
* Returns an instance of the plugin associated with the given name
*
* \param[in] pluginName The plugin to get the associated instance from
* \returns A pointer to the new instance
* \throws InvalidPlugin When no plugin associated with the given pluginName is found
*/
static std::shared_ptr<const Plugin>
getPlugin(const std::string& pluginName);
/**
* Push the given fleeting options on the stack
*
* \param[in] fleetingOptions The fleeting options to use. The last option
* on the stack will be used for calling the commands. \returns True if
* the options were successfully pushed False otherwise
*/
static bool push(gsl::not_null<const config::FleetingOptionsInterface*>
fleetingOptions) noexcept;
/**
* Push the given settings node on the stack
*
* \param[in] settings The settings node to use. The last settings node on
* the stack will be used for calling the commands. \returns True if the
* settings node was successfully pushed False otherwise
*/
static bool push(config::SettingsNode&& settings) noexcept;
/**
* Push the given patterns on the stack
*
* \param[in] patterns The patterns to use. The last patterns on the stack
* will be used for calling the commands.
* \returns True if the patterns were successfully pushed
* False otherwise
*/
static bool push(config::Patterns&& patterns) noexcept;
/**
* Push the plugin prototypes to the stack
*
* \param[in] plugins Mapping of discovered plugin prototypes
*/
static void push(Plugins&& plugins) noexcept;
/**
* Pop the last fleeting options from the stack
*
*/
static void popFleetingOptions() noexcept;
/**
* Pop the last settings node from the stack
*/
static void popSettingsNode() noexcept;
/**
* Pop the last patterns from the stack
*/
static void popPatterns() noexcept;
/**
* Pop the last plugin prototypes from the stack
*/
static void popPlugins() noexcept;
private:
static auto getNextStep(const config::Command& command,
const config::Command& originalCommand) noexcept
-> std::shared_ptr<const Plugin>;
static bool
getVariablesMap(config::VariablesMap* variables,
const std::vector<config::SettingsKeys>& keys,
const config::SettingsNode& rootSettings) noexcept;
static void index(config::VariablesMap* variables,
const config::SettingsNode& settings,
const config::SettingsKeys& key) noexcept;
const config::CommandCollection m_commands;
const config::CommandCollection m_initialCommands;
static std::vector<gsl::not_null<const config::FleetingOptionsInterface*>>
m_fleeting;
static std::vector<config::SettingsNode> m_settings;
static std::vector<config::PatternsHandler> m_patterns;
static std::vector<Plugins> m_plugins;
};
} // namespace execHelper::plugins
#endif /* __EXECUTE_PLUGIN_H__ */
<file_sep>#include "generateRandom.h"
#include <algorithm>
#include <cassert>
#include <limits>
using std::string;
using std::vector;
namespace {
auto NB_OF_VALID_RANDOM_CHARS = 62U;
inline char convertToAlphaNumeric(uint8_t index) {
if(index < 10U) {
return index + '0';
}
index -= 10U;
if(index < 26U) {
return index + 'a';
}
index -= 26U;
return index + 'A';
}
} // namespace
namespace execHelper {
namespace test {
namespace baseUtils {
char generateRandomChar() noexcept {
return convertToAlphaNumeric(
generateRandomInt<uint8_t>(0, NB_OF_VALID_RANDOM_CHARS - 1U));
}
vector<char> generateRandomChar(size_t length) noexcept {
vector<char> result;
for(size_t i = 0U; i < length; ++i) {
result.emplace_back(generateRandomChar());
}
return result;
}
string generateRandomString(size_t length) noexcept {
auto randomChars = generateRandomChar(length);
return string(randomChars.begin(), randomChars.end());
}
} // namespace baseUtils
} // namespace test
} // namespace execHelper
<file_sep>set(MODULE_NAME test-base-utils)
set(LIBRARY_NAME ${PROJECT_NAME}-${MODULE_NAME})
set(SRCS
src/executionContent.cpp
src/tmpFile.cpp
src/configFileWriter.cpp
src/generateRandom.cpp
src/executionHandler.cpp
src/execution.cpp
src/configBuilder.cpp
src/testCommand.cpp
src/indirectStatement.cpp
)
set(DEPENDENCIES
filesystem
gsl
boost-system
boost-filesystem
boost-serialization
yaml-3rd
unittest
pthread
)
add_library(${LIBRARY_NAME} ${SRCS})
target_include_directories(${LIBRARY_NAME} PRIVATE include/base-utils)
target_include_directories(${LIBRARY_NAME} PUBLIC ../stubs include ${DEPENDENCIES})
target_link_libraries(${LIBRARY_NAME} PUBLIC ${DEPENDENCIES})
add_library(${MODULE_NAME} ALIAS ${LIBRARY_NAME})
install(TARGETS ${LIBRARY_NAME} DESTINATION ${LIB_DIRECTORY})
add_subdirectory(application)
<file_sep>#ifndef TMP_FILE_INCLUDE
#define TMP_FILE_INCLUDE
#include "path.h"
namespace execHelper {
namespace test {
namespace baseUtils {
class TmpFile {
public:
explicit TmpFile(const std::string& model = "exec-helper.tmp.%%%%");
TmpFile(const TmpFile& other) = delete;
TmpFile(TmpFile&& other) =
default; // NOLINT(misc-noexcept-move-constructor)
~TmpFile() noexcept;
TmpFile& operator=(const TmpFile& other) = delete;
TmpFile& operator=(TmpFile&& other) =
default; // NOLINT(misc-noexcept-move-constructor)
bool exists() const noexcept;
bool create(const std::string& content = "") const noexcept;
bool createDirectories() const noexcept;
void clear() noexcept;
Path getPath() const noexcept;
std::string toString() const noexcept;
std::string getFilename() const noexcept;
std::string getParentDirectory() const noexcept;
private:
Path m_path;
};
} // namespace baseUtils
} // namespace test
} // namespace execHelper
#endif /* TMP_FILE_INCLUDE */
<file_sep>.. _exec-helper-plugins-clang-static-analyzer:
Clang-static-analyzer plugin
********************************
Description
===========
The clang-static-analyzer plugin is used for executing the clang-static-analyzer static code analysis tool.
Mandatory settings
==================
The configuration of the clang-static-analyzer plugin must contain the follwing settings:
.. program:: exec-helper-plugins-clang-static-analyzer
.. describe:: build-command
The :program:`exec-helper` build target command or plugin to execute for the analysis.
Optional settings
=================
The configuration of the clang-static-analyzer plugin may contain the following settings:
.. program:: exec-helper-plugins-clang-static-analyzer
.. include:: patterns.rst
.. include:: command-line.rst
Example
=======
Configuration
-------------
.. literalinclude:: ../examples/clang-static-analyzer.example
:language: yaml
Additional files
----------------
In order for the above example to work, the following file hierarchy needs to be created in the directory:
*Makefile*:
.. literalinclude:: ../examples/Makefile
:language: make
*src/hello.cpp:*
.. literalinclude:: ../examples/src/hello.cpp
:language: cpp
*src/world.cpp:*
.. literalinclude:: ../examples/src/world.cpp
:language: cpp
Usage
-----
Save the example to an :program:`exec-helper` configuration file and execute in the same directory:
.. code-block:: bash
eh example
See also
--------
See :ref:`exec-helper` (1) for information about the usage of :program:`exec-helper`.
See :ref:`exec-helper-config` (5) for information about the configuration file format.
See :ref:`exec-helper-plugins` (5) for information about the configuration file format.
<file_sep>import pytest
from pytest_bdd import scenarios, given, when, then
from scenarios_run import *
scenarios('../feature/config', example_converters=dict(command = str, environment = Environment, pattern = PatternType))
@given('the <environment> is configured for <command> command in the configuration')
def add_environment_to_command(config, command, environment):
config.commands[command].set_environment(environment)
@given('the <pattern> is configured for <command> command in the configuration')
def add_pattern_to_command(config, command, pattern):
config.commands[command].add_pattern(pattern)
@then('the runtime environment for <command> should contain the given <environment>')
def check_environment(run_environment, command, environment):
runs = run_environment.config.commands[command].runs
actual_env = runs[-1].environment
assert(len(runs) > 0)
for key,value in environment.items():
for permutation in run_environment.config.commands[command].pattern_generator():
replaced_key = key
replaced_value = value
for pattern_key, pattern_value in permutation.items():
replaced_key = replaced_key.replace('{' + pattern_key + '}', pattern_value)
replaced_value = replaced_value.replace('{' + pattern_key + '}', pattern_value)
assert(replaced_key in actual_env)
assert(replaced_value == actual_env[replaced_key])
<file_sep>.. _exec-helper-plugins-docker:
Docker plugin
*************
Description
===========
The Docker plugin is used for running or attaching to a Docker container.
Mandatory settings
==================
Mandatory settings change depending on which mode is selected. See *mode* for more information.
Optional settings
=================
The configuration of the make plugin may contain the following settings:
Settings for all modes
----------------------
.. program:: exec-helper-plugins-docker
.. include:: patterns.rst
.. include:: environment.rst
.. include:: command-line.rst
.. include:: working-dir.rst
.. describe:: mode
Set the mode of the Docker call for the specific command. Default: :code:`exec`.
Supported modes are:
- **run**: Create a new container based on the given *image* and runs the given command. Note: use :code:`--rm` as an additional command line argument to automatically clean up the created container.
- **exec**: Run the command in the given, actively running, *container*.
.. describe:: env
A map of environment key/value pairs set *inside* the container. Default: an empty map.
.. describe:: interactive
Boolean indicating whether to run interactively inside the container. Check the Docker documentation for more information. Default: same as the used Docker default.
.. describe:: tty
Boolean indicating whether to use a pseudo-tty inside the container. Check the Docker documentation for more information. Default: same as the used Docker default.
.. describe:: privileged
Boolean indicating whether to run the container in privileged mode. Check the Docker documentation for more information. Default: :code:`no`.
.. describe:: user
Set the given user *inside* the container. Check the Docker documentation for more information. Default: the container default.
Settings for the *run* mode
---------------------------
.. program:: exec-helper-plugins-docker
.. describe:: volumes
List of volumes to be mounted into the container. Eeach value maps directly to a Docker volume configuration. Check the Docker documentation for all the options and formats that can be used. Default: an empty list.
.. describe:: image
The Docker *image* to use as the base image for creating a new container. This configuration option is *mandatory* when the plugin is in *run* mode.
Settings for the *exec* mode
----------------------------
.. program:: exec-helper-plugins-docker
.. describe:: container
The Docker *container* to execute the command in. Note that the container *must* already be running when this command is called. This configuration option is *mandatory* when the plugin is in *exec* mode.
Example
=======
Configuration
-------------
.. literalinclude:: ../examples/docker.example
:language: yaml
Usage
-----
Save the example to an :program:`exec-helper` configuration file and execute in the same directory:
.. code-block:: bash
eh example
See also
========
See :ref:`exec-helper` (1) for information about the usage of :program:`exec-helper`.
See :ref:`exec-helper-config` (5) for information about the configuration file format.
See :ref:`exec-helper-plugins` (5) for information about the configuration file format.
<file_sep># Use this script to add completions to your shell
# Usage: Add
# source "<full path to this script>/init-completion.sh"
# to your profile or bashrc
#
# Note: package maintainers can install the completion files directly to the right folders in the OS (if supported by the OS) to enable auto discovery of the completion files.
if [ -n "$BASH_VERSION" ]; then
root="$(dirname "${BASH_SOURCE[0]}")"
source "$root/completion.bash"
elif [ -n "$ZSH_VERSION" ]; then
root="$(dirname "$0")"
source "$root/completion.zsh"
fi
<file_sep>#ifndef __COMMANDER_H__
#define __COMMANDER_H__
#include <map>
#include <memory>
#include <string>
#include "config/environment.h"
#include "config/path.h"
#include "config/pattern.h"
#include "core/task.h"
namespace execHelper {
namespace config {
class FleetingOptionsInterface;
class SettingsNode;
} // namespace config
namespace core {
class Options;
}
namespace plugins {
class Plugin;
using Plugins = std::map<std::string, std::shared_ptr<const Plugin>>;
} // namespace plugins
} // namespace execHelper
namespace execHelper {
namespace commander {
/**
* \brief Reads the commands to be run and kicks all the plugins related to
* these commands
*/
class Commander {
public:
/**
* Creates a commander
*
* \param[in] fleetingOptions The fleeting options
* \param[in] settings The settings node context to use
* \param[in] patterns The patterns context to use
* \param[in] workingDirectory The working directory for the commander
* \param[in] env The environment to apply the plugins in
* \param[in] plugins A map of plugin prototypes where each key is associated with a certain plugin
* \returns True If the command was run successfully
* False Otherwise
*/
auto run(const config::FleetingOptionsInterface& fleetingOptions,
config::SettingsNode settings, config::Patterns patterns,
const config::Path& workingDirectory,
const config::EnvironmentCollection& env,
plugins::Plugins&& plugins) noexcept -> bool;
};
} // namespace commander
} // namespace execHelper
#endif /* __COMMANDER_H__ */
<file_sep>#include <locale>
#include <string>
#include <utility>
#include <vector>
#include <boost/algorithm/string/replace.hpp>
#include "unittest/catch.h"
#include "config/environment.h"
#include "config/variablesMap.h"
#include "core/task.h"
#include "plugins/pluginUtils.h"
#include "base-utils/nonEmptyString.h"
#include "log/generators.h"
#include "unittest/rapidcheck.h"
#include "utils/utils.h"
using std::make_pair;
using std::string;
using execHelper::config::ENVIRONMENT_KEY;
using execHelper::config::EnvironmentCollection;
using execHelper::config::EnvironmentValue;
using execHelper::config::PatternCombinations;
using execHelper::config::VariablesMap;
using execHelper::core::TaskCollection;
using execHelper::test::NonEmptyString;
using execHelper::test::propertyTest;
using NonEmptyPatternCombinations = std::map<NonEmptyString, std::string>;
namespace {
inline auto toReplacementPattern(const std::string& pattern) noexcept
-> std::string {
return std::string("{").append(pattern).append("}");
}
} // namespace
namespace execHelper::plugins::test {
SCENARIO("Test the patterns key", "[plugin-utils]") {
GIVEN("The correct patterns key") {
string correctPatternsKey("patterns");
WHEN("We get the patterns key") {
const string& actualPatternsKey = getPatternsKey();
THEN("They should match") {
REQUIRE(actualPatternsKey == correctPatternsKey);
}
}
}
}
SCENARIO("Test getting the environment", "[plugin-utils]") {
GIVEN("Some configured environment variables") {
const EnvironmentValue ENVIRONMENT_VALUE1 =
make_pair("ENVIRONMENT_KEY1", "ENVIRONMENT_VALUE1");
const EnvironmentValue ENVIRONMENT_VALUE2 =
make_pair("ENVIRONMENT_KEY2", "ENVIRONMENT_VALUE2");
const EnvironmentCollection ENVIRONMENT_VALUES(
{ENVIRONMENT_VALUE1, ENVIRONMENT_VALUE2});
VariablesMap variables("test");
for(const auto& env : ENVIRONMENT_VALUES) {
REQUIRE(variables.add({ENVIRONMENT_KEY, env.first}, env.second));
}
WHEN("We get the environment") {
const EnvironmentCollection result = getEnvironment(variables);
THEN("We should find the right environment") {
REQUIRE(ENVIRONMENT_VALUES == result);
}
}
}
}
SCENARIO("Test the working directory key", "[plugin-utils]") {
GIVEN("The correct working directory key") {
string correctWorkingDirKey("working-dir");
WHEN("We get the working directory key") {
const string& actualWorkingDirKey = getWorkingDirKey();
THEN("They should match") {
REQUIRE(actualWorkingDirKey == correctWorkingDirKey);
}
}
}
}
SCENARIO("Test replacing patterns in the environment", "[plugin-utils]") {
propertyTest(
"Replace a pattern in the environment",
[](const NonEmptyPatternCombinations& nonEmptyPatternCombinations,
const EnvironmentCollection& startEnvironment) {
// Replace '{' and '}' with '%'
PatternCombinations patternCombinations;
std::transform(
nonEmptyPatternCombinations.begin(),
nonEmptyPatternCombinations.end(),
std::inserter(patternCombinations, patternCombinations.end()),
[](const auto& combination) {
auto key = boost::algorithm::replace_all_copy(
*(combination.first), "{", "%");
boost::algorithm::replace_all(key, "}", "%");
auto value = boost::algorithm::replace_all_copy(
combination.second, "{", "%");
boost::algorithm::replace_all(value, "}", "%");
return make_pair(key, value);
});
// Replace '{' and '}' with '%'
EnvironmentCollection replacedEnvironment;
std::transform(
startEnvironment.begin(), startEnvironment.end(),
std::inserter(replacedEnvironment, replacedEnvironment.end()),
[](const auto& env) {
auto key =
boost::algorithm::replace_all_copy(env.first, "{", "%");
boost::algorithm::replace_all(key, "}", "%");
auto value = boost::algorithm::replace_all_copy(env.second,
"{", "%");
boost::algorithm::replace_all(value, "}", "%");
return make_pair(key, value);
});
EnvironmentCollection inputEnvironment(replacedEnvironment);
EnvironmentCollection expected(replacedEnvironment);
for(const auto& combination : patternCombinations) {
auto replacementPattern =
toReplacementPattern(combination.first);
inputEnvironment.emplace(
make_pair("key-" + replacementPattern,
"value-" + replacementPattern));
expected.emplace(make_pair("key-" + combination.second,
"value-" + combination.second));
}
THEN_WHEN("We replace the patterns") {
auto actual = replacePatternsInEnvironment(inputEnvironment,
patternCombinations);
THEN_CHECK("The actual task equals the expected task") {
if(actual != expected) {
std::for_each(patternCombinations.begin(),
patternCombinations.end(),
[](const auto& combination) {
std::cout << combination.first
<< std::endl;
});
}
REQUIRE(actual == expected);
}
}
});
}
} // namespace execHelper::plugins::test
<file_sep>#include "patterns.h"
#include <string>
#include <boost/algorithm/string/replace.hpp>
#include "logger.h"
using std::string;
namespace execHelper::core {
auto replacePatterns(const string& subject, const string& pattern,
const string& replacement) noexcept -> string {
const auto needle = std::string("{").append(pattern).append("}");
return boost::algorithm::replace_all_copy(subject, needle, replacement);
}
} // namespace execHelper::core
<file_sep>#ifndef FLEETING_OPTIONS_INTERFACE_INCLUDE
#define FLEETING_OPTIONS_INTERFACE_INCLUDE
#include <exception>
#include <optional>
#include <string>
#include "log/logLevel.h"
#include "commandLineOptions.h"
#include "path.h"
namespace execHelper::config {
/**
* \brief Class for collecting the values of variables that are expected to
* change often
*/
class FleetingOptionsInterface {
public:
virtual ~FleetingOptionsInterface() = default;
/**
* Returns the status of the help setting
*
* \returns True If the help setting was set
* False Otherwise
*/
virtual HelpOption_t getHelp() const noexcept = 0;
/**
* Returns whether the version option has been set
*
* \returns True If the version option was set
* False Otherwise
*/
virtual VersionOption_t getVersion() const noexcept = 0;
/**
* Returns the status of the verbose setting
*
* \returns True If verbose mode is set
* False Otherwise
*/
virtual VerboseOption_t getVerbosity() const noexcept = 0;
/**
* Get the value of the log level option
*
* \returns The log level associated with the log level option
*/
virtual auto getLogLevel() const noexcept -> log::LogLevel = 0;
/**
* Returns whether dry run mode is set
*
* \returns True If dry run mode is set
* False Otherwise
*/
virtual DryRunOption_t getDryRun() const noexcept = 0;
/**
* Returns whether keep going mode is set
*
* \returns True If keep going mode is set
* False Otherwise
*/
virtual auto getKeepGoing() const noexcept -> KeepGoingOption_t = 0;
/**
* Returns the maximum number of jobs to use for a task
*
* \returns The number of jobs to use for a task
*/
virtual Jobs_t getJobs() const noexcept = 0;
/**
* Return whether the plugins must be listed
*
* \returns True if the plugins must be listed
* False otherwise
*/
virtual ListPluginsOption_t listPlugins() const noexcept = 0;
/**
* Return the additional search paths
*
* \returns A list of additional search paths
*/
[[nodiscard]] virtual const Paths& appendedSearchPaths() const noexcept = 0;
/**
* Returns the commands that were set on the command line
*
* \returns A collection of commands given on the command line
*/
virtual const CommandCollection& getCommands() const noexcept = 0;
/**
*
* Returns the autocomplete option value
*
* \returns string The given value associated with the autocomplete option
* none If the option is not set
*/
virtual auto getAutoComplete() const noexcept
-> const std::optional<AutoCompleteOption_t>& = 0;
protected:
FleetingOptionsInterface() = default;
/*! @copydoc config::Argv::Argv(const config::Argv&)
*/
FleetingOptionsInterface(const FleetingOptionsInterface& other) = default;
/*! @copydoc config::Argv::Argv(config::Argv&&)
*/
FleetingOptionsInterface(FleetingOptionsInterface&& other) = default;
/*! @copydoc config::Argv::operator=(const config::Argv&)
*/
FleetingOptionsInterface&
operator=(const FleetingOptionsInterface& other) = default;
/*! @copydoc config::Argv::operator=(config::Argv&&)
*/
FleetingOptionsInterface&
operator=(FleetingOptionsInterface&& other) = default;
};
} // namespace execHelper::config
#endif /* FLEETING_OPTIONS_INTERFACE_INCLUDE */
<file_sep>#include "pluginUtils.h"
#include <map>
#include <utility>
#include <vector>
#include "config/environment.h"
#include "config/path.h"
#include "config/pattern.h"
#include "config/settingsNode.h"
#include "config/variablesMap.h"
#include "core/patterns.h"
#include "commandLineCommand.h"
#include "logger.h"
using std::map;
using std::ostream;
using std::string;
using std::vector;
using execHelper::config::Command;
using execHelper::config::ENVIRONMENT_KEY;
using execHelper::config::EnvironmentCollection;
using execHelper::config::Path;
using execHelper::config::PatternCombinations;
using execHelper::config::PatternKey;
using execHelper::config::PatternKeys;
using execHelper::config::Patterns;
using execHelper::config::PatternValue;
using execHelper::config::PatternValues;
using execHelper::config::SettingsKeys;
using execHelper::config::VariablesMap;
using execHelper::core::replacePatterns;
using execHelper::core::Task;
using execHelper::core::TaskCollection;
using execHelper::plugins::PatternPermutator;
namespace execHelper::plugins {
auto getPatternsKey() noexcept -> const PatternKey& {
static const PatternKey key("patterns");
return key;
}
auto makePatternPermutator(const Patterns& patterns) noexcept
-> PatternPermutator {
std::map<PatternKey, PatternValues> patternValuesMap;
if(patterns.empty()) {
// If no patterns were given, iterate once
patternValuesMap.emplace("NO-KEY", PatternValues({"NO-VALUE"}));
}
for(const auto& pattern : patterns) {
patternValuesMap.emplace(pattern.getKey(), pattern.getValues());
}
return plugins::PatternPermutator(patternValuesMap);
}
auto replacePatternsInEnvironment(
const EnvironmentCollection& env,
const PatternCombinations& patternCombinations) noexcept
-> EnvironmentCollection {
EnvironmentCollection replaced;
for(const auto& keyValue : env) {
auto key = keyValue.first;
auto value = keyValue.second;
for(const auto& pattern : patternCombinations) {
key = replacePatterns(key, pattern.first, pattern.second);
value = replacePatterns(value, pattern.first, pattern.second);
}
replaced.emplace(std::make_pair(key, value));
}
return replaced;
}
auto replacePatternCombinations(
const Task& task, const PatternCombinations& patternCombinations) noexcept
-> Task {
Task replacedTask;
replacedTask.setEnvironment(replacePatternsInEnvironment(
task.getEnvironment(), patternCombinations));
auto newWorkingDir = replacePatternCombinations(
task.getWorkingDirectory().string(), patternCombinations);
replacedTask.setWorkingDirectory(move(newWorkingDir));
for(auto argument : task.getTask()) {
argument = replacePatternCombinations(argument, patternCombinations);
replacedTask.append(move(argument));
}
return replacedTask;
}
auto replacePatternCombinations(
std::string element,
const config::PatternCombinations& patternCombinations) noexcept
-> std::string {
for(const auto& pattern : patternCombinations) {
element = replacePatterns(element, pattern.first, pattern.second);
}
return element;
}
auto getEnvironment(const VariablesMap& variables) noexcept
-> EnvironmentCollection {
EnvironmentCollection result;
SettingsKeys key({ENVIRONMENT_KEY});
auto environmentOpt = variables.get<vector<string>>(key);
for(auto variableName : environmentOpt.value_or(vector<string>())) {
auto variableValueOpt =
variables.get<string>({ENVIRONMENT_KEY, variableName});
if(!variableValueOpt) {
LOG(warning) << "Environment variable '" << variableName
<< "' does not have an associated value. Ignoring it.";
continue;
}
result.emplace(variableName, move(variableValueOpt.value()));
}
return result;
}
auto getWorkingDirKey() noexcept -> const string& {
static const string workingDirKey("working-dir");
return workingDirKey;
}
auto toString(const PatternKeys& values) noexcept -> string {
string result;
if(values.empty()) {
return result;
}
result.append(values.front());
for(auto it = values.begin() + 1; it != values.end(); ++it) {
result.append(", ").append(*it);
}
return result;
}
} // namespace execHelper::plugins
<file_sep>set(EXE_NAME ${PROJECT_NAME}-plugins-unittest)
set(SRCS
src/genericPluginTest.cpp
src/clangStaticAnalyzerTest.cpp
src/clangTidyTest.cpp
src/pmdTest.cpp
src/commandLineCommandTest.cpp
src/commandPluginTest.cpp
src/cppcheckTest.cpp
src/pluginUtilsTest.cpp
src/bootstrapTest.cpp
src/executePluginTest.cpp
src/valgrindTest.cpp
src/selectorTest.cpp
src/sconsTest.cpp
src/makeTest.cpp
src/lcovTest.cpp
src/ninjaTest.cpp
src/cmakeTest.cpp
src/dockerTest.cpp
)
set(DEPENDENCIES
plugins
test-utils
unittest
log-generators
config-generators
core-generators
rpcheck
)
add_executable(${EXE_NAME} ${SRCS})
target_include_directories(${EXE_NAME} PRIVATE ${DEPENDENCIES} include/plugins)
target_link_libraries(${EXE_NAME} PRIVATE ${DEPENDENCIES})
add_test(${EXE_NAME} ${EXE_NAME})
install(TARGETS ${EXE_NAME} DESTINATION ${UNITTEST_BIN_DIR})
<file_sep>#ifndef __CONFIG_INPUT_FILE_H__
#define __CONFIG_INPUT_FILE_H__
#include <string>
#include "config/settingsNode.h"
namespace execHelper {
namespace config {
/**
* \brief Interface for configuration files
*/
class ConfigInputFile {
public:
/*! @copydoc config::Argv::Argv(const Argv&)
* \note Deleted
*/
ConfigInputFile(const ConfigInputFile& other) = delete;
/*! @copydoc config::Argv::Argv(Argv&&)
* \note Deleted
*/
ConfigInputFile(ConfigInputFile&& other) noexcept = delete;
virtual ~ConfigInputFile() = default;
/*! @copydoc config::Argv::operator=(const Argv&)
* \note Deleted
*/
ConfigInputFile& operator=(const ConfigInputFile& other) = delete;
/*! @copydoc config::Argv::operator=(Argv&&)
* \note Deleted
*/
ConfigInputFile& operator=(ConfigInputFile&& other) noexcept = delete;
/**
* Returns the settings under the node defined by keys
*
* \param[in] keys The path to the root node to get the tree from
* \param[in] settings The settings node to fill
* \return True If the settings node was successfully filled
* False Otherwise
*/
virtual bool getTree(const std::initializer_list<std::string>& keys,
SettingsNode* settings) const noexcept = 0;
protected:
ConfigInputFile() = default;
};
} // namespace config
} // namespace execHelper
#endif /* __CONFIG_INPUT_FILE_H__ */
| 4ebb8e816f12d440eb068b4f8ede97658b83d84a | [
"CMake",
"reStructuredText",
"Lua",
"Markdown",
"Makefile",
"INI",
"Python",
"Text",
"C++",
"C",
"Dockerfile",
"Shell"
] | 279 | C | bverhagen/exec-helper | 8869989a59b352f340406ae8859958bf343be776 | 00a772a982b2a7592b00b581fb9fd49cadc5dbfe |
refs/heads/master | <repo_name>rscircus/GeomLabCode<file_sep>/src/geomlab/covidloader.py
import pandas as pd
import numpy as np
import re
from datetime import datetime
# Matches the raw JHU column-header dates, e.g. "1/22/20" (m/d/yy format).
date_pattern = re.compile(r"\d{1,2}/\d{1,2}/\d{2}")
def reformat_dates(col_name: str) -> str:
"""Date colums are rewritten to day/month/year format."""
try:
return date_pattern.sub(
datetime.strptime(col_name, "%m/%d/%y").strftime("%d/%m/%Y"),
col_name,
count=1,
)
except ValueError:
return col_name
print()
print("Downloading most recent COVID-19 data...")
print()
# Get most recent confirmed cases, recovered and deaths
# (time-series CSVs maintained by Johns Hopkins CSSE on GitHub)
confirmed_url = "https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_time_series/time_series_covid19_confirmed_global.csv"
recovered_url = "https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_time_series/time_series_covid19_recovered_global.csv"
deaths_url = "https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_time_series/time_series_covid19_deaths_global.csv"
# normalise the JHU column names to our snake_case names
renamed_columns_map = {
    "Country/Region": "country",
    "Province/State": "location",
    "Lat": "latitude",
    "Long": "longitude",
}
# province-level detail is not needed for the country aggregation below
cols_to_drop = ["location", "latitude", "longitude"]
print("1/3: All confirmed cases...")
confirmed_cases_df = (
    pd.read_csv(confirmed_url)
    .rename(columns=renamed_columns_map)
    .rename(columns=reformat_dates)
    .drop(columns=cols_to_drop)
)
print("2/3: All death cases...")
deaths_df = (
    pd.read_csv(deaths_url)
    .rename(columns=renamed_columns_map)
    .rename(columns=reformat_dates)
    .drop(columns=cols_to_drop)
)
print("3/3: Recovered cases...")
recovered_df = (
    pd.read_csv(recovered_url)
    .rename(columns=renamed_columns_map)
    .rename(columns=reformat_dates)
    .drop(columns=cols_to_drop)
)
print()
print("✨ COVID-19 downloads from John Hopkins University successful. ✨")
print()
# one row per country; join the ISO alpha-3 code used for map plotting
geo_data_df = confirmed_cases_df[["country"]].drop_duplicates()
country_codes_df = pd.read_csv(
    "data/country_code_mapping.csv",
    usecols=["country", "alpha-3_code"],
    index_col="country",
)
geo_data_df = geo_data_df.join(country_codes_df, how="left", on="country").set_index(
    "country"
)
# all dd/mm/yyyy columns produced by reformat_dates
dates_list = deaths_df.filter(regex=r"(\d{2}/\d{2}/\d{4})", axis=1).columns.to_list()
# create a mapping of date -> DataFrame, where each df holds the daily counts
# of cases and deaths per country.
# Fix: the loop previously recomputed `date_df` each iteration and discarded
# it, so the mapping promised by this comment was never actually built; the
# per-date frames are now stored.
daily_summary_by_date = {}
for date in dates_list:
    # single-date slice, renamed to a fixed column name
    confirmed_cases_day_df = confirmed_cases_df.filter(like=date, axis=1).rename(
        columns=lambda col: "confirmed_cases"
    )
    deaths_day_df = deaths_df.filter(like=date, axis=1).rename(
        columns=lambda col: "deaths"
    )
    cases_df = confirmed_cases_day_df.join(deaths_day_df).set_index(
        confirmed_cases_df["country"]
    )
    # aggregate provinces to one row per country, keeping the alpha-3 code
    date_df = (
        geo_data_df.join(cases_df)
        .groupby("country")
        .agg({"confirmed_cases": "sum", "deaths": "sum", "alpha-3_code": "first"})
    )
    # keep only countries that have reported at least one case by this date
    date_df = date_df[date_df["confirmed_cases"] > 0].reset_index()
    daily_summary_by_date[date] = date_df
# second pass: reload the raw time series, this time KEEPING province rows;
# missing day values are backfilled along the columns
renamed_columns_map = {
    "Country/Region": "country",
    "Province/State": "location",
    "Lat": "latitude",
    "Long": "longitude",
}
confirmed_cases_df = (
    pd.read_csv(confirmed_url)
    .rename(columns=renamed_columns_map)
    .rename(columns=reformat_dates)
    .fillna(method="bfill", axis=1)
)
deaths_df = (
    pd.read_csv(deaths_url)
    .rename(columns=renamed_columns_map)
    .rename(columns=reformat_dates)
    .fillna(method="bfill", axis=1)
)
recovered_df = (
    pd.read_csv(recovered_url)
    .rename(columns=renamed_columns_map)
    .rename(columns=reformat_dates)
    .fillna(method="bfill", axis=1)
)
# geo columns for joining the daily slices back together
geo_data_cols = ["country", "location", "latitude", "longitude"]
geo_data_df = confirmed_cases_df[geo_data_cols]
# Rewrite date to European style
dates_list = confirmed_cases_df.filter(
    regex=r"(\d{2}/\d{2}/\d{4})", axis=1
).columns.to_list()
# We'll use this data dict to connect to the symbolic page display
cases_by_date = {}
# Fill cases_by_date with current data
for date in dates_list:
    confirmed_cases_day_df = confirmed_cases_df[["country", "location", date]].copy()
    confirmed_cases_day_df.rename(
        columns={"country": "country", date: "confirmed_cases"}, inplace=True
    )
    confirmed_cases_day_df["confirmed_cases"] = pd.to_numeric(
        confirmed_cases_day_df["confirmed_cases"]
    )
    recovered_day_df = recovered_df[["country", "location", date]].copy()
    recovered_day_df.rename(columns={date: "recovered"}, inplace=True)
    recovered_day_df["recovered"] = pd.to_numeric(recovered_day_df["recovered"])
    deaths_day_df = deaths_df[["country", "location", date]].copy()
    deaths_day_df.rename(columns={"country": "country", date: "deaths"}, inplace=True)
    deaths_day_df["deaths"] = pd.to_numeric(deaths_day_df["deaths"])
    # left-join the three daily slices onto the geo frame
    cases_df = geo_data_df.merge(
        confirmed_cases_day_df, how="left", on=["country", "location"]
    )
    cases_df = cases_df.merge(recovered_day_df, how="left", on=["country", "location"])
    cases_df = cases_df.merge(deaths_day_df, how="left", on=["country", "location"])
    # Fix: DataFrame.replace returns a NEW frame (it is not in-place by
    # default); the previous bare `cases_df.replace(np.nan, 0)` discarded its
    # result, so NaNs from the left joins leaked into cases_by_date.
    # Note: blanket NaN->0 can still mask join problems (see original TODO).
    cases_df = cases_df.replace(np.nan, 0)
    cases_by_date[date] = cases_df
<file_sep>/noxfile.py
# noxfile.py
import nox
locations = "src", "noxfile.py"
@nox.session(python=["3.7"])
def lint(session):
    """Run flake8 over the configured locations (or explicit posargs)."""
    targets = session.posargs or locations
    session.install("flake8")
    session.run("flake8", *targets)
@nox.session(python="3.7")
def black(session):
    """Format the configured locations (or explicit posargs) with black."""
    targets = session.posargs or locations
    session.install("black")
    session.run("black", *targets)
<file_sep>/src/geomlab/symbolicstacking.py
import numpy as np
import math
import random
import copy
# Shared RGB palette, once as PIL "rgb(...)" strings and once as [r, g, b]
# lists. NOTE(review): the original German labels do not all match the actual
# RGB values (e.g. color1, labelled "blue", is a light red tone) — verify.
color1PIL = "rgb(254, 201, 201)"  # labelled blue ("blau"); looks light red
color2PIL = "rgb(249, 102, 94)"  # red ("rot")
color3PIL = "rgb(100, 100, 100)"  # gray ("grau")
color4PIL = "rgb(200, 239, 245)"  # labelled green ("grün"); looks light blue
color1 = [254, 201, 201]  # labelled blue ("blau"); looks light red
color2 = [249, 102, 94]  # red ("rot")
color3 = [100, 100, 100]  # gray ("grau")
color4 = [200, 239, 245]  # labelled green ("grün"); looks light blue
def calculatePointOnCircle(c, angle):
    """Return the (x, y) point on circle c = (x, y, r) at `angle` radians."""
    return (
        np.cos(angle) * c[2] + c[0],
        np.sin(angle) * c[2] + c[1],
    )
#############################################################################
############ calculating the occluded circumference of a circle #############
################## given the circles which lie above it #####################
#############################################################################
# calculates the intersection of two circles (if it exists!!) p,q=(x,y,r)
def calcIntersectionPoints(p, q):
    """Return the two intersection points of circles p and q as numpy arrays.

    Callers must guarantee the circles intersect; otherwise the square root
    below receives a negative argument.
    """
    dx = q[0] - p[0]
    dy = q[1] - p[1]
    d = np.sqrt(dx * dx + dy * dy)
    ex = dx / d
    ey = dy / d
    # distance from p's centre to the chord, measured along the centre line
    x = (p[2] * p[2] - q[2] * q[2] + d * d) / (2 * d)
    # half the chord length
    y = np.sqrt(p[2] * p[2] - x * x)
    first = np.array([p[0] + x * ex - y * ey, p[1] + x * ey + y * ex])
    second = np.array([p[0] + x * ex + y * ey, p[1] + x * ey - y * ex])
    return first, second
# calculates the intersection of two circles and returns their relative state
# (one inside the other, intersecting, or far apart)
def calculateCircleIntersection(p, q):
    """Classify the relation of circles p and q (each [x, y, r]).

    Returns (point1, point2, status):
      0 -> proper intersection (points returned),
      1 -> p inside q, 2 -> q inside p, 3 -> disjoint, 4 -> same centre.
    """
    dist = np.sqrt((p[0] - q[0]) * (p[0] - q[0]) + (p[1] - q[1]) * (p[1] - q[1]))
    if dist > (p[2] + q[2]):
        return None, None, 3
    if dist + q[2] < p[2]:
        return None, None, 2
    if dist == 0:
        # concentric (and, given the check above, q is not strictly inside p)
        return None, None, 4
    if dist + p[2] < q[2]:
        return None, None, 1
    first, second = calcIntersectionPoints(p, q)
    return first, second, 0
# calculates the angle of point q as seen from point p, in [0, 2*pi)
def calculateRelativeAngle(p, q):
    """Polar angle of q relative to p, normalised to [0, 2*pi)."""
    theta = np.arctan2(q[1] - p[1], q[0] - p[0])
    if theta < 0:
        theta = theta + 2 * np.pi
    return theta
# given two circles, calculate the angular interval of p's boundary hidden by q
def calculateSingleCoverInterval(p, q):
    """Angular interval of circle p's boundary (p = [x, y, r]) covered by q.

    Returns (start, end, covered). `covered` is False when q does not touch
    p's boundary (disjoint, or q strictly inside p).

    Fix: the branch taken when the midpoint angle lies numerically between
    the two intersection angles returned only two values (`return a1, a2`);
    every caller unpacks three values, so that branch would have crashed with
    "not enough values to unpack". It now returns the covered flag as well.
    """
    p1, p2, check = calculateCircleIntersection(p, q)
    if check == 0:
        # proper intersection: order the two crossing angles so that the
        # interval contains the direction towards q's centre
        a1 = calculateRelativeAngle(p, p1)
        a2 = calculateRelativeAngle(p, p2)
        aMiddle = calculateRelativeAngle(p, q)
        if (a1 < aMiddle and aMiddle < a2) or (aMiddle < a2 and a2 < a1):
            return a1, a2, True
        else:
            return a2, a1, True
    if check == 1:
        # p lies entirely inside q -> fully covered
        return 0, 2 * np.pi, True
    if check == 4:
        # concentric and not strictly inside p -> fully covered
        return 0, 2 * np.pi, True
    if check == 2 or check == 3:
        # q inside p, or the circles are disjoint -> boundary untouched
        return None, None, False
# test if a circle is completly covered by the intervals I
def testCompletlyCovered(I):
    # I: sorted list of [start, end] angle intervals (radians).
    # Sweeps once around the circle from the first interval's start; the
    # boundary is fully covered iff the union of intervals spans a full turn.
    # NOTE(review): mutates I in place (wrapping end angles get 2*pi added).
    a0 = 2 * np.pi + I[0][0]  # target: first start plus one full revolution
    x = I[0][1]  # frontier of the contiguously covered arc so far
    i = 0
    while x < a0:
        if I[i][1] < I[i][0]:
            # interval wraps past 2*pi -> unwrap its end
            I[i][1] = I[i][1] + 2 * np.pi
        if I[i][0] < x:
            # interval overlaps the covered prefix -> extend the frontier
            # NOTE(review): strict '<' treats an interval starting exactly at
            # the frontier as a gap — confirm this is intended.
            x = np.max([x, I[i][1]])
        else:
            # gap between the frontier and the next interval
            return False
        i = i + 1
        if i == len(I):
            if x >= a0:
                return True
            else:
                return False
    return True
# if the interval is not completely covered there exist a point relative to which
# all intervalls lie in [0,2pi]
def findStartingPoint(I):
    # I: flat, sorted event list of [angle, flag] where flag 0 = interval
    # start and flag 1 = interval end. Returns an index into I from which a
    # circular sweep can begin without any interval wrapping past the start.
    c = 0  # running nesting depth of open intervals
    minimum = float('inf')
    minindex = 0
    for i in range(0, len(I)):
        if I[i][1] == 0:
            # a start event opens an interval
            c = c + 1
        else:
            # an end event closes one
            c = c - 1
        if c == minimum:
            # tie on minimal depth: prefer the latest position
            minindex = i
        else:
            if c < minimum:
                minimum = c
                minindex = i
    if minimum == 0:
        return 0
    if minindex == len(I) - 1:
        # minimum sits at the very end -> wrap around to the beginning
        return 0
    else:
        return minindex + 1
# calculates the maximal non intersecting intervals covered by the Interval
# given the shift that all intervals lie in [0,2pi]
def calculateMaxIntervalsWithShift(I, shift):
    # I: flat event list [angle, flag] (0 = start, 1 = end), sorted by angle.
    # shift: index offset (from findStartingPoint) so the circular sweep
    # begins at a depth-0 position.
    # Returns a flat list [s1, e1, s2, e2, ...] of merged disjoint intervals.
    n = len(I)
    c = 0  # current nesting depth of open intervals
    resultArray = []
    for i in range(0, n):
        # walk the events circularly starting at the shift point
        ind = (shift + i) % n
        if c == 0:
            # depth 0 -> this event opens a new merged interval
            resultArray.append(I[ind][0])
            c = c + 1
        else:
            if I[ind][1] == 0:
                c = c + 1
            else:
                c = c - 1
            if c == 0:
                # depth back to 0 -> the merged interval closes here
                resultArray.append(I[ind][0])
    return resultArray
# given disjoint intervals on a circle, calculate the covered circumference
def calcCirc(Arr):
    """Total angular length of disjoint intervals given as a flat list
    [s1, e1, s2, e2, ...]; spans that wrap past 2*pi are unwrapped."""
    total = 0
    for start, end in zip(Arr[0::2], Arr[1::2]):
        span = end - start
        if span < 0:
            span = span + 2 * np.pi
        total = total + span
    return total
# given a circle and all circles N which lie above it calculates the covered circumference
def calculateCoveredCircumference(c, N):
    # c: [x, y, r]; N: circles drawn above c. Returns the total angular
    # measure (radians, in [0, 2*pi]) of c's boundary hidden by N.
    # NOTE(review): relies on calculateSingleCoverInterval always returning a
    # 3-tuple.
    CoverIntervals1D = []  # flat event list: [angle, 0=start / 1=end]
    CoverIntervals2D = []  # [start, end] pairs
    for n in N:
        #generate all covering intervalls (may not be disjoint)
        a, b, bo = calculateSingleCoverInterval(c, n)
        if bo is True:
            CoverIntervals2D.append([a, b])
            CoverIntervals1D.append([a, 0])
            CoverIntervals1D.append([b, 1])
    CoverIntervals1D.sort()
    CoverIntervals2D.sort()
    if len(CoverIntervals1D) == 0:
        # nothing above c touches its boundary
        return 0
    if not testCompletlyCovered(CoverIntervals2D):
        #calculate the disjoint intervalls if not fully covered
        shift = findStartingPoint(CoverIntervals1D)
        CoverArray = calculateMaxIntervalsWithShift(CoverIntervals1D, shift)
        return calcCirc(CoverArray)
    else:
        return 2 * np.pi
#############################################################################
################ Different utilities and costs for circles ##################
#############################################################################
def calculateAbsoluteBoundaryUtility(circle, Neighbours):
    """Visible boundary of `circle` under `Neighbours`, weighted by radius
    (i.e. visible arc length)."""
    free_angle = 2 * np.pi - calculateCoveredCircumference(circle, Neighbours)
    return free_angle * circle[2]
def calculateRelativeBoundaryUtility(circle, Neighbours):
    """Visible boundary of `circle` under `Neighbours` in radians (radius-independent)."""
    free_angle = 2 * np.pi - calculateCoveredCircumference(circle, Neighbours)
    return free_angle
#############################################################################
################Pie Charts####################
#############################################################################
# given a circle and some angles for the deviding lines of the pies and the
# circles that lie above the circle calculates all disjoint Intervals in which
# the first deviding line can be positioned
def caculateFeasibleIntervall(c, piePiecesC, N):
    # c: [x, y, r]; piePiecesC: angular offsets of the dividing lines
    # relative to the first one; N: circles stacked above c.
    # Returns a list of [start, end] intervals for the first dividing line,
    # or None when no feasible placement exists.
    #calculate the cover intervals of the boundary
    CoverIntervals1D = []
    CoverIntervals2D = []
    for n in N:
        a, b, bo = calculateSingleCoverInterval(c, n)
        if bo is True:
            CoverIntervals2D.append([a, b])
            CoverIntervals1D.append([a, 0])
            CoverIntervals1D.append([b, 1])
    CoverIntervals1D.sort()
    CoverIntervals2D.sort()
    if len(CoverIntervals1D) == 0:
        # boundary fully visible -> any angle works
        return [[0, 2 * np.pi]]
    if not testCompletlyCovered(CoverIntervals2D):
        #if not completely covered we generate all the of the intervals discussed in the lab notes (Lemma1)
        shift = findStartingPoint(CoverIntervals1D)
        CoverArray = calculateMaxIntervalsWithShift(CoverIntervals1D, shift)
        coverArray2D = []
        for i in range(0, int((len(CoverArray) / 2))):
            coverArray2D.append([CoverArray[2 * i], CoverArray[2 * i + 1]])
        distances = []
        for j in range(0, len(piePiecesC)):
            distances.append(piePiecesC[j])
        # shift every covered interval back by each dividing-line offset: a
        # position is infeasible for the first line if ANY line lands in a
        # covered interval
        for d in distances:
            for intervall in coverArray2D:
                a = intervall[0] - d  # -0.1
                if a < 0:
                    a = a + 2 * np.pi
                b = intervall[1] - d  # +0.1
                if b < 0:
                    b = b + 2 * np.pi
                CoverIntervals2D.append([a, b])
                CoverIntervals1D.append([a, 0])
                CoverIntervals1D.append([b, 1])
        CoverIntervals1D.sort()
        CoverIntervals2D.sort()
        #calculate the visible intervals from the covered ones
        if not testCompletlyCovered(CoverIntervals2D):
            shift = findStartingPoint(CoverIntervals1D)
            CoverArray = calculateMaxIntervalsWithShift(CoverIntervals1D, shift)
            result = []
            # complement of the covered intervals, wrapping around 2*pi
            result.append([CoverArray[len(CoverArray) - 1], CoverArray[0]])
            for i in range(0, int((len(CoverArray) / 2) - 1)):
                result.append([CoverArray[2 * i + 1], CoverArray[2 * i + 2]])
            return result
        else:
            return None
    else:
        return None
# calculates the angle of the first dividing line given the feasible intervals
# output: (angle, length of the chosen visible interval)
def calculateAngle(intervals):
    """Pick the midpoint of the longest feasible interval.

    Returns (angle, length), or (None, None) when `intervals` is None.
    """
    if intervals is None:
        return None, None
    best_len = -1
    best_angle = 0
    for interval in intervals:
        start = interval[0]
        end = interval[1]
        if start > end:
            # interval wraps past 2*pi -> unwrap the end locally
            end = end + 2 * np.pi
        span = np.absolute(end - start)
        if span > best_len:
            best_len = span
            best_angle = start + span / 2
    return best_angle, best_len
# given a circle, its dividing lines, and the circles that lie above it,
# calculate the position and value of the first dividing line
def caculateOneAnglePie(c, piePiecesC, N):
    """Best angle for pie c's first dividing line, plus its visibility value.

    Returns (None, None) when no feasible placement exists.
    """
    feasible = caculateFeasibleIntervall(c, piePiecesC, N)
    if feasible is None:
        return None, None
    angle, value = calculateAngle(feasible)
    if angle is None:
        return None, None
    return angle, value
# calculates for an arrangement of Pies the pie which should be the lowest one
# in the stacking returns the index of the circle in the list and the angle
# of the first devidingline
def calculateLowestPie(circles, piePieces):
    """Choose the pie chart to place lowest in the stacking.

    Returns (index, angle): the index of the chosen pie in `circles` and the
    angle for its first dividing line. If no pie has a feasible placement,
    dividing lines are dropped one by one (heuristic) and the search retried;
    the final fallback is (0, 0).

    Fixes: the empty-list guard used to index locPiePieces[0] before checking
    that locPiePieces itself is non-empty (IndexError when called with no
    pies); the None comparison now uses `is not None`.
    """
    locPiePieces = copy.deepcopy(piePieces)  # mutated by the fallback heuristic
    hasFound = False
    resultIndex = 0
    resultMax = -1
    resultAngle = 0
    while hasFound is False:
        # find the pie with the best radius-weighted feasible angle
        for i in range(0, len(circles)):
            tmpC = circles[i]
            tmpPieces = locPiePieces[i]
            tmpN = circles[:i] + circles[i + 1:]
            angle, value = caculateOneAnglePie(tmpC, tmpPieces, tmpN)
            if angle is not None:
                hasFound = True
                if value * tmpC[2] > resultMax:
                    resultAngle = angle
                    resultMax = value * tmpC[2]
                    resultIndex = i
        if hasFound is True:
            return resultIndex, resultAngle
        # guard order matters: check the outer list before indexing into it
        if len(locPiePieces) == 0:
            break
        if len(locPiePieces[0]) == 0:
            break
        # heuristic: ignore the last dividing line of every pie and retry
        for p in locPiePieces:
            p.pop(len(p) - 1)
    return 0, 0
#############################################################################
##########################Stacking algorithms################################
#############################################################################
# calculates best pie stacking
# input circles: [[x,y,r]...] piePieces [[p1,p2,...]...] 0 is always a deviding line
# every circle has to have at least 1 more deviding line!
# output:
#   resultOrder   new stacking order (bottom first)
#   resultPieces  the pieces in the same order as the circles
#   resultAngles  for every pie the angle of the 0 dividing line
def algorithmPieChartsStacking(circles, piePieces):
    """Greedy bottom-up stacking of pie charts via calculateLowestPie."""
    remainingCircles = circles.copy()
    remainingPieces = piePieces.copy()
    resultOrder = []
    resultPieces = []
    resultAngles = []
    while len(remainingCircles) > 0:
        lowestIdx, lowestAngle = calculateLowestPie(remainingCircles, remainingPieces)
        resultAngles.append(lowestAngle)
        resultOrder.append(remainingCircles.pop(lowestIdx))
        resultPieces.append(remainingPieces.pop(lowestIdx))
    return resultOrder, resultPieces, resultAngles
# calculates the length of the largest visible CONTINUOUS interval
def calculateLargestContinousCirc(circle, neighbours):
    """Angular length of the longest single visible arc of `circle` below
    `neighbours`; 0 when fully covered."""
    intervals = caculateVisibleIntervall(circle, neighbours)
    if intervals is None:
        return 0
    best = -1
    for interval in intervals:
        if interval[0] > interval[1]:
            # wrapping interval -> unwrap its end (mutates like the original)
            interval[1] = interval[1] + 2 * np.pi
        span = interval[1] - interval[0]
        if span > best:
            best = span
    return best
# pick the circle maximising (largest visible CONTINUOUS arc) * radius
def calculateLowestHawaiian(Circles):
    """Return (index, value) of the best circle to place at the bottom."""
    best_value = -1
    best_index = -1
    for i in range(0, len(Circles)):
        others = Circles[:i] + Circles[i + 1:]
        arc = calculateLargestContinousCirc(Circles[i], others)
        if arc * Circles[i][2] > best_value:
            best_index = i
            best_value = arc * Circles[i][2]
    return best_index, best_value
# given some circles of the form (x,y,r1,r2,...) where r1>r2>...
# returns a algorithmHawaiianStacking
# form: for each circle with subcircles there are now multiple circles
# output has form [[x1,y1,r1],[x2,y2,r2],....,[x1',y1',r1'],[x2',y2'.r2']...]
def algorithmHawaiianStacking(circles):
    # Greedy Hawaiian stacking: order glyphs bottom-up by largest visible
    # continuous arc, then shift each glyph's subcircles towards the anchor
    # point in the middle of its longest visible arc.
    local = circles.copy()
    stacking = []
    stackingAllCircles = []
    #calculate stacking
    for i in range(0, len(circles)):
        index, value = calculateLowestHawaiian(local)
        tmp = local.pop(index)
        stacking.append(tmp)
    #calculates the new postions
    for i in range(0, len(stacking)):
        N = stacking[i + 1:]  # circles stacked above glyph i
        visbleInt = caculateVisibleIntervall(stacking[i], N)
        maximum = -1
        angle = 0
        if visbleInt is None:
            # fully covered -> default anchor direction
            angle = 0
        else:
            #calculation of the anchor point
            for interval in visbleInt:
                if interval[1] < interval[0]:
                    interval[1] = interval[1] + 2 * np.pi
                tmp = np.absolute(interval[1] - interval[0])
                if tmp > maximum:
                    maximum = tmp
                    angle = interval[0] + (interval[1] - interval[0]) / 2
        # anchor: point on the outer circle at the chosen angle
        # NOTE(review): centres/radii are truncated to int here — confirm.
        onCircleX, onCircleY = calculatePointOnCircle(
            [int(stacking[i][0]), int(stacking[i][1]), int(stacking[i][2])], angle
        )
        # unit direction from the anchor towards the glyph centre
        deltaX = stacking[i][0] - onCircleX
        deltaY = stacking[i][1] - onCircleY
        deltaX = deltaX / stacking[i][2]
        deltaY = deltaY / stacking[i][2]
        #calculates the new centers of the subcircles
        for j in range(2, len(stacking[i])):
            offSet = 0
            x0 = onCircleX + deltaX * (stacking[i][j] - offSet)
            y0 = onCircleY + deltaY * (stacking[i][j] - offSet)
            r0 = stacking[i][j]
            stackingAllCircles.append([x0, y0, r0])
    return stackingAllCircles
# [for hawaiian] calculates the visible parts of a circle and the circles N
#which lie above it
def caculateVisibleIntervall(c, N):
    # Returns the list of visible [start, end] angle intervals of circle c's
    # boundary below the circles in N; [[0, 0]] when nothing covers c, and
    # None when c is completely covered.
    CoverIntervals1D = []
    CoverIntervals2D = []
    for n in N:
        a, b, bo = calculateSingleCoverInterval(c, n)
        if bo is True:
            CoverIntervals2D.append([a, b])
            CoverIntervals1D.append([a, 0])
            CoverIntervals1D.append([b, 1])
    CoverIntervals1D.sort()
    CoverIntervals2D.sort()
    if len(CoverIntervals1D) == 0:
        # nothing covers c; sentinel interval [0, 0] marks "all visible"
        return [[0, 0]]
    if not testCompletlyCovered(CoverIntervals2D):
        shift = findStartingPoint(CoverIntervals1D)
        CoverArray = calculateMaxIntervalsWithShift(CoverIntervals1D, shift)
        # complement of the merged covered intervals, wrapping around 2*pi
        visibleArray2D = []
        visibleArray2D.append([CoverArray[len(CoverArray) - 1], CoverArray[0]])
        for i in range(0, int((len(CoverArray) / 2)) - 1):
            visibleArray2D.append([CoverArray[2 * i + 1], CoverArray[2 * i + 2]])
        return visibleArray2D
# calculates the lowest circle (for circles without subcircles)
# maximising the minimum of the visible boundary
def calculateLowestCircleMaxMin(Circles, mode):
    """Return (index, utility) of the best circle to place at the bottom.

    mode "absolute" weights visibility by radius; anything else uses the
    relative (angular) visibility.
    """
    best_utility = -1
    best_index = -1
    for i in range(0, len(Circles)):
        others = Circles[:i] + Circles[i + 1:]
        if mode == "absolute":
            utility = calculateAbsoluteBoundaryUtility(Circles[i], others)
        else:
            utility = calculateRelativeBoundaryUtility(Circles[i], others)
        if utility > best_utility:
            best_index = i
            best_utility = utility
    return best_index, best_utility
# calculates the lowest circle (for circles with subcircles)
## for maximizing the minimum of the visible area of the minimal subcircle
# mode:"absolute" or "relative"
def calculateLowestCircleMaxMinMinK(realCircles, mode):
    # Works on a deep copy because the fallback heuristic pops radii.
    Circles = copy.deepcopy(realCircles)
    maximum = -1
    index = -1
    while maximum <= 0:
        for i in range(0, len(Circles)):
            tmp = Circles[:i] + Circles[i + 1:]
            tmp = np.array(tmp)
            if not len(tmp) == 0:
                # neighbours only need their outer circle [x, y, r1]
                tmp = tmp[:, :3]
            tmpMin = float("inf")
            # worst visibility over all of glyph i's subcircles
            for k in range(0, len(Circles[0]) - 2):
                tmpCircle = [Circles[i][0], Circles[i][1], Circles[i][2 + k]]
                if mode == "absolute":
                    tmpValue = calculateAbsoluteBoundaryUtility(tmpCircle, tmp)
                else:
                    tmpValue = calculateRelativeBoundaryUtility(tmpCircle, tmp)
                if tmpValue < tmpMin:
                    tmpMin = tmpValue
            if tmpMin > maximum:
                index = i
                maximum = tmpMin
        if maximum > 0:
            return index, maximum
        #heuristic just pops the innermost circles
        # NOTE(review): if the utilities never become positive this loop pops
        # radii until the lists run dry and would then error or loop — and if
        # the while condition is ever exited the function implicitly returns
        # None; confirm the intended behaviour for degenerate inputs.
        if maximum <= 0:
            for i in range(0, len(Circles)):
                Circles[i].pop(len(Circles[i]) - 1)
# calculates the lowest circle (for circles with subcircles)
# for maximizing the sum of the visible areas of the minimal subcircle
# mode:"absolute" or "relative"
def calculateLowestCircleMaxMinSumK(Circles, mode):
    # Returns (index, utility) of the glyph whose SUM of subcircle
    # visibilities is largest when that glyph is placed lowest.
    maximum = -1
    index = -1
    for i in range(0, len(Circles)):
        tmpSum = 0
        tmp = Circles[:i] + Circles[i + 1:]
        tmp = np.array(tmp)
        if not len(tmp) == 0:
            # neighbours only need their outer circle [x, y, r1]
            tmp = tmp[:, :3]
        # accumulate the utility over all of glyph i's subcircles
        for k in range(0, len(Circles[0]) - 2):
            tmpCircle = [Circles[i][0], Circles[i][1], Circles[i][2 + k]]
            if mode == "absolute":
                tmpValue = calculateAbsoluteBoundaryUtility(tmpCircle, tmp)
                tmpSum = tmpSum + tmpValue
            if mode == "relative":
                tmpValue = calculateRelativeBoundaryUtility(tmpCircle, tmp)
                tmpSum = tmpSum + tmpValue
        if tmpSum > maximum:
            index = i
            maximum = tmpSum
    return index, maximum
# input: circles nested-List [[x,y,r1,r2,r3....][x',y',r1',....],...] r1>r2>...
# output: nested-List in bottom-to-top stacking order plus summed objective
# maximizes minimum of minimal subcircles
def algorithmNestedDisksStackingMinMin(circles, mode):
    """Greedy stacking maximising the worst-case subcircle visibility."""
    working = copy.deepcopy(circles)
    solution = []
    objective = 0
    for _ in range(len(circles)):
        idx, value = calculateLowestCircleMaxMinMinK(working, mode)
        solution.append(working.pop(idx))
        objective += value
    return solution, objective
# input: circles nested-List [[x,y,r1,r2,r3....][x',y',r1',....],...] r1>r2>...
# output: nested-List in bottom-to-top stacking order plus summed objective
# maximizes minimum of sum of the subcircles
def algorithmNestedDisksStackingMinSum(circles, mode):
    """Greedy stacking maximising the summed subcircle visibility."""
    working = circles.copy()
    solution = []
    objective = 0
    for _ in range(len(circles)):
        idx, value = calculateLowestCircleMaxMinSumK(working, mode)
        solution.append(working.pop(idx))
        objective += value
    return solution, objective
########################################################################
##################### Algorithms for comparison ########################
########################################################################
# input: circles nested-List [[x,y,r1,r2,r3....][x',y',r1',....],...] r1>r2>...
def algorithmNestedDisksPainter(circles):
    """Painter's order: sort circles by outer radius (index 2) descending,
    so the largest circle is drawn first. Returns a new list."""
    return sorted(circles, key=lambda glyph: glyph[2], reverse=True)
def algorithmNestedDisksLeftToRight(circles):
    """Sort circles by the value at index 1 ascending (left-to-right on most
    screens). Returns a new list."""
    return sorted(circles, key=lambda glyph: glyph[1])
def algorithmNestedDisksRightToLeft(circles):
    """Sort circles by the value at index 1 descending (right-to-left on most
    screens). Returns a new list."""
    return sorted(circles, key=lambda glyph: glyph[1], reverse=True)
def algorithmNestedDisksRandom(circles):
    """A monkey shakes a box filled with circles and returns it (random
    permutation of a shallow copy; the input list is left untouched)."""
    shuffled = list(circles)
    random.shuffle(shuffled)
    return shuffled
def algorithmHawaiianLeftToRight(circles):
    """Sort glyphs by the value at index 1 ascending, anchor each glyph's
    subcircles at the tangent point (x, y - r1) of its outer circle, and
    return all circles flattened to [x0, y0, r0]."""
    ordering = sorted(circles, key=lambda glyph: glyph[1])
    flattened = []
    for glyph in ordering:
        # anchor on the outer boundary; unit direction towards the centre
        anchor_x = glyph[0]
        anchor_y = glyph[1] - glyph[2]
        unit_x = (glyph[0] - anchor_x) / glyph[2]
        unit_y = (glyph[1] - anchor_y) / glyph[2]
        # every subcircle stays internally tangent at the anchor point
        for radius in glyph[2:]:
            flattened.append([
                anchor_x + unit_x * radius,
                anchor_y + unit_y * radius,
                radius,
            ])
    return flattened
def algorithmHawaiianRightToLeft(circles):
    """Sort glyphs by the value at index 1 descending, anchor each glyph's
    subcircles at the tangent point (x, y + r1) of its outer circle, and
    return all circles flattened to [x0, y0, r0]."""
    ordering = sorted(circles, key=lambda glyph: glyph[1], reverse=True)
    flattened = []
    for glyph in ordering:
        # anchor on the outer boundary; unit direction towards the centre
        anchor_x = glyph[0]
        anchor_y = glyph[1] + glyph[2]
        unit_x = (glyph[0] - anchor_x) / glyph[2]
        unit_y = (glyph[1] - anchor_y) / glyph[2]
        # every subcircle stays internally tangent at the anchor point
        for radius in glyph[2:]:
            flattened.append([
                anchor_x + unit_x * radius,
                anchor_y + unit_y * radius,
                radius,
            ])
    return flattened
def algorithmHawaiianRandom(circles):
    """A monkey shakes a box filled with circles and returns them all in pieces a [x0, y0, z0]."""
    # Random stacking order; subcircle anchoring still follows the longest
    # visible arc of each glyph (like algorithmHawaiianPainter).
    local = circles.copy()
    stackingAllCircles = []
    stacking = local
    random.shuffle(stacking)
    for i in range(0, len(stacking)):
        N = stacking[i + 1:]  # glyphs stacked above glyph i
        visbleInt = caculateVisibleIntervall(stacking[i], N)
        maximum = -1
        angle = 0
        if visbleInt is None:
            # fully covered: nudge the anchor off-centre arbitrarily
            onCircleX = stacking[i][0] + 2
            onCircleY = stacking[i][1] + 2
        else:
            # anchor at the midpoint of the longest visible arc
            for interval in visbleInt:
                if interval[1] < interval[0]:
                    interval[1] = interval[1] + 2 * np.pi
                tmp = np.absolute(interval[1] - interval[0])
                if tmp > maximum:
                    maximum = tmp
                    angle = interval[0] + (interval[1] - interval[0]) / 2
            # NOTE(review): centres/radii are truncated to int here — confirm.
            onCircleX, onCircleY = calculatePointOnCircle(
                [int(stacking[i][0]), int(stacking[i][1]), int(stacking[i][2])], angle
            )
        # unit direction from the anchor towards the glyph centre
        deltaX = stacking[i][0] - onCircleX
        deltaY = stacking[i][1] - onCircleY
        deltaX = deltaX / stacking[i][2]
        deltaY = deltaY / stacking[i][2]
        for j in range(2, len(stacking[i])):
            x0 = onCircleX + deltaX * (stacking[i][j])
            y0 = onCircleY + deltaY * (stacking[i][j])
            r0 = stacking[i][j]
            stackingAllCircles.append([x0, y0, r0])
    return stackingAllCircles
def algorithmHawaiianPainter(circles):
    """Shifts subcircles on longest outer visible perimeter and returns all circles."""
    # Painter's stacking (largest outer radius drawn first); each glyph's
    # subcircles are anchored at the midpoint of its longest visible arc.
    local = circles.copy()
    stackingAllCircles = []
    stacking = local
    stacking.sort(key=lambda x: x[2], reverse=True)
    #moving the centers with respect to the anchorpoint
    for i in range(0, len(stacking)):
        N = stacking[i + 1:]  # glyphs stacked above glyph i
        visbleInt = caculateVisibleIntervall(stacking[i], N)
        maximum = -1
        angle = 0
        if visbleInt is None:
            # fully covered: nudge the anchor off-centre arbitrarily
            onCircleX = stacking[i][0] + 2
            onCircleY = stacking[i][1] + 2
        else:
            # anchor at the midpoint of the longest visible arc
            for interval in visbleInt:
                if interval[1] < interval[0]:
                    interval[1] = interval[1] + 2 * np.pi
                tmp = np.absolute(interval[1] - interval[0])
                if tmp > maximum:
                    maximum = tmp
                    angle = interval[0] + (interval[1] - interval[0]) / 2
            # NOTE(review): centres/radii are truncated to int here — confirm.
            onCircleX, onCircleY = calculatePointOnCircle(
                [int(stacking[i][0]), int(stacking[i][1]), int(stacking[i][2])], angle
            )
        # unit direction from the anchor towards the glyph centre
        deltaX = stacking[i][0] - onCircleX
        deltaY = stacking[i][1] - onCircleY
        deltaX = deltaX / stacking[i][2]
        deltaY = deltaY / stacking[i][2]
        for j in range(2, len(stacking[i])):
            x0 = onCircleX + deltaX * (stacking[i][j])
            y0 = onCircleY + deltaY * (stacking[i][j])
            r0 = stacking[i][j]
            stackingAllCircles.append([x0, y0, r0])
    return stackingAllCircles
def algorithmPieChartsPainter(pies, piepieces):
    """Performas a painter on outer circle-radius at index 2 and moves the inner pieces according to heuristic."""
    n = len(piepieces[0])  # dividing lines per pie
    localPies = []
    localPiePieces = []
    localAngles = []
    # glue [x, y, r] and the piece angles into one row so they sort together
    local = np.concatenate((pies, piepieces), axis=1)
    local = sorted(local, key=lambda x: x[2], reverse=True)
    for l in local:
        localPies.append([l[0], l[1], l[2]])
        tmp = []
        for i in range(1, n + 1):
            tmp.append(l[2 + i])
        localPiePieces.append(tmp)
    for i in range(0, len(localPies)):
        angle, value = caculateOneAnglePie(
            localPies[i], localPiePieces[i], localPies[i + 1:]
        )
        if angle is None:
            # no feasible placement: drop dividing lines one by one and
            # retry; angle 0 as the last resort
            x = localPiePieces[i].copy()
            while angle is None:
                if len(x) == 0:
                    angle = 0
                    break
                x.pop(len(x) - 1)
                angle, value = caculateOneAnglePie(localPies[i], x, localPies[i + 1:])
        localAngles.append(angle)
    return localPies, localPiePieces, localAngles
def algorithmPieChartsPainterRandom(pies, piepieces):
    """Performas a painter on outer circle-radius at index 2 and rotates the pie pieces randomly."""
    # NOTE(review): relies on randomAngles(...) defined elsewhere in this
    # module (not visible here) — confirm it returns one angle per pie.
    n = len(piepieces[0])  # dividing lines per pie
    localPies = []
    localPiePieces = []
    # glue [x, y, r] and the piece angles into one row so they sort together
    local = np.concatenate((pies, piepieces), axis=1)
    local = sorted(local, key=lambda x: x[2], reverse=True)
    for l in local:
        localPies.append([l[0], l[1], l[2]])
        tmp = []
        for i in range(1, n + 1):
            tmp.append(l[2 + i])
        localPiePieces.append(tmp)
    localAngles = randomAngles(len(pies))
    return localPies, localPiePieces, localAngles
def algorithmPieChartsRandom(pies, piepieces):
    """Shuffles outer discs and moves all pie pieces with respect to our algorithm.

    Returns (localPies, localPiePieces, localAngles) in the shuffled order.

    Fix: removed a stray debug `print(local)` that dumped the shuffled rows
    to stdout on every call.
    """
    n = len(piepieces[0])  # dividing lines per pie
    localPies = []
    localPiePieces = []
    localAngles = []
    # glue [x, y, r] and the piece angles into one row so they shuffle together
    localNp = np.concatenate((pies, piepieces), axis=1)
    local = list(localNp)
    random.shuffle(local)
    for l in local:
        localPies.append([l[0], l[1], l[2]])
        tmp = []
        for i in range(1, n + 1):
            tmp.append(l[2 + i])
        localPiePieces.append(tmp)
    for i in range(0, len(localPies)):
        angle, value = caculateOneAnglePie(
            localPies[i], localPiePieces[i], localPies[i + 1:]
        )
        if angle is None:
            # no feasible placement: drop dividing lines one by one and
            # retry; angle 0 as the last resort
            x = localPiePieces[i].copy()
            while angle is None:
                if len(x) == 0:
                    angle = 0
                    break
                x.pop(len(x) - 1)
                angle, value = caculateOneAnglePie(localPies[i], x, localPies[i + 1:])
        localAngles.append(angle)
    return localPies, localPiePieces, localAngles
def algorithmPieChartsLeftToRight(pies, piepieces):
    """Outer circles are sorted in ascending order and the pieces according to heuristics."""
    n = len(piepieces[0])  # dividing lines per pie
    localPies = []
    localPiePieces = []
    localAngles = []
    # glue [x, y, r] and the piece angles into one row so they sort together
    local = np.concatenate((pies, piepieces), axis=1)
    local = sorted(local, key=lambda x: x[1], reverse=False)
    for l in local:
        localPies.append([l[0], l[1], l[2]])
        tmp = []
        for i in range(1, n + 1):
            tmp.append(l[2 + i])
        localPiePieces.append(tmp)
    for i in range(0, len(localPies)):
        angle, value = caculateOneAnglePie(
            localPies[i], localPiePieces[i], localPies[i + 1:]
        )
        if angle is None:
            # no feasible placement: drop dividing lines one by one and
            # retry; angle 0 as the last resort
            x = localPiePieces[i].copy()
            while angle is None:
                if len(x) == 0:
                    angle = 0
                    break
                x.pop(len(x) - 1)
                angle, value = caculateOneAnglePie(localPies[i], x, localPies[i + 1:])
        localAngles.append(angle)
    return localPies, localPiePieces, localAngles
def algorithmPieChartsRightToLeft(pies, piepieces):
    """Outer circles are sorted in descending order and the pieces according to heuristics.

    Fix: removed a dead duplicate computation of `local` at the top of the
    function (the concatenate + sort was performed twice and the first
    result was immediately overwritten).
    """
    n = len(piepieces[0])  # dividing lines per pie
    localPies = []
    localPiePieces = []
    localAngles = []
    # glue [x, y, r] and the piece angles into one row so they sort together
    local = np.concatenate((pies, piepieces), axis=1)
    local = sorted(local, key=lambda x: x[1], reverse=True)
    for l in local:
        localPies.append([l[0], l[1], l[2]])
        tmp = []
        for i in range(1, n + 1):
            tmp.append(l[2 + i])
        localPiePieces.append(tmp)
    for i in range(0, len(localPies)):
        angle, value = caculateOneAnglePie(
            localPies[i], localPiePieces[i], localPies[i + 1:]
        )
        if angle is None:
            # no feasible placement: drop dividing lines one by one and
            # retry; angle 0 as the last resort
            x = localPiePieces[i].copy()
            while angle is None:
                if len(x) == 0:
                    angle = 0
                    break
                x.pop(len(x) - 1)
                angle, value = caculateOneAnglePie(localPies[i], x, localPies[i + 1:])
        localAngles.append(angle)
    return localPies, localPiePieces, localAngles
################################################################################
############################### comparisons ####################################
def formatChangeNestedDisks(circles):
    """Flatten nested-disk glyphs [[x, y, r1, r2, ...], ...] into flat
    [[x, y, ri], ...] circles; also return the nesting depth."""
    nestings = len(circles[0]) - 2
    flat = []
    for glyph in circles:
        for radius in glyph[2:]:
            flat.append([glyph[0], glyph[1], radius])
    return flat, nestings
def circumferenceValuesNestedDisks(circles, numberOfNestings):
    """Return relative visibility and number of covered circles."""
    # circles: flat list where each glyph contributes `numberOfNestings`
    # consecutive [x, y, r] entries, outermost first (stacking order:
    # earlier glyphs lie below later ones).
    # Returns (resultArray, resultCovered): per glyph the list of visible
    # angular lengths of its subcircles, and the count of fully covered ones.
    j = 0  # position of the current circle within its glyph
    resultArray = []
    resultCovered = 0
    coverCircles = []
    tmp = []
    # only each glyph's OUTERMOST circle can occlude other glyphs
    for i in range(0, int(len(circles) / numberOfNestings)):
        coverCircles.append(circles[i * numberOfNestings])
    for i in range(0, len(circles)):
        # occluders: outer circles of all glyphs stacked above this one
        tmpvis = caculateVisibleIntervall(
            circles[i], coverCircles[(math.floor(i / numberOfNestings) + 1):]
        )
        tmpValue = 0
        if tmpvis is None:
            # completely covered
            resultCovered = resultCovered + 1
            tmp.append(0)
        else:
            # print(tmpvis)
            for k in tmpvis:
                if k[1] <= k[0]:
                    k[1] = k[1] + 2 * np.pi
                tmpValue = tmpValue + (k[1] - k[0])
            tmp.append(tmpValue)
        if j == numberOfNestings - 1:
            # glyph complete -> flush its visibility list
            resultArray.append(tmp)
            j = -1
            tmp = []
        j = j + 1
    return resultArray, resultCovered
def utilitysNestedDisks(circles):
    """Calculate utilities for nested disk case.

    Thin wrapper: flattens the nested-disk glyphs and forwards to
    utilitysHawaiian, passing its result tuple through unchanged:
    (covered, minRelativeNonZero, minAbsoluteNonZero, minAvgOnSingleGlyph,
    percentageRelative, percentageAbsolute).

    Fix: the local unpack names were mislabeled (they claimed the order
    minAvg/percentageRelative/... while utilitysHawaiian actually returns
    covered/minRelativeNonZero/... first). The tuple was passed through
    unchanged either way, so behaviour is identical — only the misleading
    names are corrected.
    """
    flatCircles, numberOfNestings = formatChangeNestedDisks(circles)
    (
        covered,
        minRelativeNonZero,
        minAbsoluteNonZero,
        minAvgOnSingleGlyph,
        percentageRelative,
        percentageAbsolute,
    ) = utilitysHawaiian(flatCircles, numberOfNestings)
    return (
        covered,
        minRelativeNonZero,
        minAbsoluteNonZero,
        minAvgOnSingleGlyph,
        percentageRelative,
        percentageAbsolute,
    )
def utilitysHawaiian(circles, numberOfNestings):
    """Calculate utilities for hawaiian disk case.

    `circles` holds [x, y, r] entries grouped in runs of `numberOfNestings`.
    "Relative" visibility is the visible angular extent; "absolute"
    visibility is that extent weighted by the circle's radius (arc length).

    Returns (in this order):
        covered, round(minRelativeNonZero), round(minAbsoluteNonZero),
        round(minAvgOnSingleGlyph), round(percentageRelative),
        round(percentageAbsolute).

    NOTE(review): divides by `sumOfCirc` and by len(absoluteVis) — empty
    input or all-zero radii would raise ZeroDivisionError; confirm callers
    never pass such data.
    """
    percentageRelative = 0
    percentageAbsolute = 0
    minRelativeNonZero = float('inf')
    minAbsoluteNonZero = float('inf')
    minAbsoluteAvg = float('inf')
    sumOfCirc = 0
    relativeVis, covered = circumferenceValuesNestedDisks(circles, numberOfNestings)
    # Weight each visible angle by its circle's radius -> visible arc length.
    absoluteVis = copy.deepcopy(relativeVis)
    for i in range(0, len(absoluteVis)):
        for j in range(0, len(absoluteVis[0])):
            absoluteVis[i][j] = absoluteVis[i][j] * circles[i * numberOfNestings + j][2]
            # Total circumference of all circles (normaliser for abs%).
            sumOfCirc = 2 * np.pi * circles[i * numberOfNestings + j][2] + sumOfCirc
    for i in range(0, len(absoluteVis)):
        tmpForAvg = 0
        for j in range(0, len(absoluteVis[0])):
            tmpForAvg = tmpForAvg + absoluteVis[i][j]
            percentageRelative = relativeVis[i][j] + percentageRelative
            percentageAbsolute = absoluteVis[i][j] + percentageAbsolute
            # Track the smallest non-zero visibilities over all circles.
            if (not (absoluteVis[i][j] == 0)) and relativeVis[i][
                j
            ] < minRelativeNonZero:
                minRelativeNonZero = relativeVis[i][j]
            if (not (absoluteVis[i][j] == 0)) and absoluteVis[i][
                j
            ] < minAbsoluteNonZero:
                minAbsoluteNonZero = absoluteVis[i][j]
        # Smallest summed absolute visibility over a single glyph.
        if tmpForAvg < minAbsoluteAvg:
            minAbsoluteAvg = tmpForAvg
    percentageRelative = percentageRelative / (
        2 * np.pi * len(absoluteVis) * len(absoluteVis[0])
    )
    percentageAbsolute = percentageAbsolute / sumOfCirc
    minAvgOnSingleGlyph = minAbsoluteAvg
    return (
        covered,
        round(minRelativeNonZero, 3),
        round(minAbsoluteNonZero, 3),
        round(minAvgOnSingleGlyph, 3),
        round(percentageRelative, 3),
        round(percentageAbsolute, 3),
    )
def utilitysPieCharts(circles, piePieces, angles):
    """Calculate utilities for pie chart disk case.

    Returns [sumOccluded, minimum, minimumNonZero, smallestAvg,
    absoluteSmallestAvg]: number of occluded dividing lines, minimal
    (zero if anything is occluded) and minimal non-zero visible arc
    distance, plus the averages of the relative and radius-weighted
    ("absolute") arc distances, all rounded to 3 decimals.

    Fixes: guards the averages against division by zero when there are no
    dividing lines at all, and drops a leftover debug print.
    """
    largestDist, smallestDist, occludedCounter = calculateAllPieDistances(
        circles, piePieces, angles
    )
    # Smallest radius-weighted ("absolute") arc distance over all glyphs.
    absoluteSmallestOverall = float('inf')
    for weighted in largestDist:
        if len(weighted) > 0 and min(weighted) < absoluteSmallestOverall:
            absoluteSmallestOverall = min(weighted)
    # Smallest relative arc distance over all glyphs.
    smallestOverall = float('inf')
    for relative in smallestDist:
        if len(relative) > 0 and min(relative) < smallestOverall:
            smallestOverall = min(relative)
    # Averages over all dividing lines (k / j count the lines seen).
    absoluteSmallestAvg = 0
    smallestAvg = 0
    k = 0
    j = 0
    for weighted in largestDist:
        for value in weighted:
            absoluteSmallestAvg = absoluteSmallestAvg + value
            k = k + 1
    for relative in smallestDist:
        for value in relative:
            smallestAvg = smallestAvg + value
            j = j + 1
    # Guard: no dividing lines at all would previously raise ZeroDivisionError.
    absoluteSmallestAvg = absoluteSmallestAvg / k if k > 0 else 0
    smallestAvg = smallestAvg / j if j > 0 else 0
    sumOccluded = sum(occludedCounter)
    minimumNonZero = smallestOverall
    # Any occluded dividing line forces the overall minimum to zero.
    minimum = 0 if sumOccluded > 0 else smallestOverall
    return [
        round(sumOccluded, 3),
        round(minimum, 3),
        round(minimumNonZero, 3),
        round(smallestAvg, 3),
        round(absoluteSmallestAvg, 3),
    ]
def calculateAllPieDistances(circles, piePieces, angles):
    """Calculate largest, smallest arc and number occluded lines per circle.

    For every circle the dividing-line angles (base angle plus pie-piece
    offsets) are tested against the circle's visible angular intervals
    w.r.t. the circles stacked above it.  For each line the distance to the
    nearest interval boundary is recorded, both relative (angle) and
    radius-weighted (arc length).

    Returns:
        (largestDist, smallestDist, occludedCounter): per circle a list of
        radius-weighted distances, a list of relative distances, and the
        count of dividing lines that are not inside any visible interval.
    """
    largestDist = []
    smallestDist = []
    occludedCounter = []
    for i in range(0, len(circles)):
        adjustedAngles = []
        c = circles[i]
        # Visible angular intervals of circle i under the circles above it;
        # None means the circle is completely hidden.
        visibleInt = caculateVisibleIntervall(c, circles[(i + 1):])
        if visibleInt is None:
            # Placeholder assignment with no effect; the None case is
            # handled again inside the per-angle loop below.
            x = 2
        else:
            # Normalise wrap-around intervals so interval[0] < interval[1].
            for Int in visibleInt:
                if Int[0] >= Int[1]:
                    Int[1] = Int[1] + np.pi * 2
        # Dividing lines: the rotation itself plus each pie-piece offset.
        adjustedAngles.append(angles[i])
        for p in piePieces[i]:
            adjustedAngles.append(p + angles[i])
        tmpL = []
        tmpS = []
        tmpCounter = 0
        for angle in adjustedAngles:
            isVisible = False
            if visibleInt is None:
                # Fully hidden circle: every line counts as occluded.
                tmpS.append(0)
                tmpL.append(0)
                tmpCounter = tmpCounter + 1
                continue
            for interval in visibleInt:
                # Test the angle directly and shifted by +/- 2*pi so that
                # wrap-around intervals are matched as well.
                if (
                    (interval[0] <= angle and interval[1] > angle)
                    or (
                        interval[0] <= -2 * np.pi + angle < interval[1]
                    )
                    or (
                        interval[0] <= 2 * np.pi + angle < interval[1]
                    )
                ):
                    # Distances from the (possibly shifted) angle to both
                    # interval boundaries.
                    if (
                        interval[0] <= -2 * np.pi + angle < interval[1]
                    ):
                        x = np.absolute(-2 * np.pi + angle - interval[0])
                        y = np.absolute(-2 * np.pi + angle - interval[1])
                    else:
                        if (
                            interval[0] <= 2 * np.pi + angle < interval[1]
                        ):
                            x = np.absolute(2 * np.pi + angle - interval[0])
                            y = np.absolute(2 * np.pi + angle - interval[1])
                        else:
                            x = np.absolute(angle - interval[0])
                            y = np.absolute(angle - interval[1])
                    isVisible = True
                    # Keep the distance to the nearer boundary, relative
                    # (tmpS) and scaled by the circle radius (tmpL).
                    if x <= y:
                        tmpS.append(x)
                        tmpL.append(x * circles[i][2])
                    else:
                        tmpS.append(y)
                        tmpL.append(y * circles[i][2])
            if isVisible is False:
                # The line lies in no visible interval -> occluded.
                tmpS.append(0)
                tmpL.append(0)
                tmpCounter = tmpCounter + 1
        largestDist.append(tmpL)
        smallestDist.append(tmpS)
        occludedCounter.append(tmpCounter)
    return largestDist, smallestDist, occludedCounter
################################squares#######################################
# generates the heuristic Piecharts for the squares
def preparePies(squares):
    """Build a proxy pie chart (circle + dividing angles) for every square.

    Assumes the square record layout used throughout this module: s[0]..s[3]
    corners, s[4]/s[5] the mosaic split points, s[6] the centre, s[2] the
    corner anchoring the zero dividing line — TODO confirm against the
    square generation code.

    Returns:
        (circles, piePieces, baseAngles): per square an [x, y, r] proxy
        circle, the sorted positive pie-piece angles relative to the base
        angle, and the base angle itself.
    """
    circles = []
    piePieces = []
    baseAngles = []
    for s in squares:
        radius = 0
        tmpPiece = []
        center = [s[6][0], s[6][1]]
        baseLine = [s[2][0], s[2][1]]  # base piePiece dividing line at 0
        baseAngle = calculateRelativeAngle(center, baseLine)  # angle in the square
        # init the three (four) dividing lines which must be visible
        tmpAngle = calculateRelativeAngle(center, s[4]) - baseAngle
        tmpPiece.append(tmpAngle)
        tmpAngle = calculateRelativeAngle(center, s[5]) - baseAngle
        tmpPiece.append(tmpAngle)
        tmpAngle = calculateRelativeAngle(center, s[0]) - baseAngle
        tmpPiece.append(tmpAngle)
        # Radius of the proxy circle: mean distance to the two split points.
        radius = distance(center[0], center[1], s[4][0], s[4][1]) + distance(
            center[0], center[1], s[5][0], s[5][1]
        )
        radius = radius / 2
        # we want positive angles only
        tmpPiece = [a + 2 * np.pi if a <= 0 else a for a in tmpPiece]
        baseAngles.append(baseAngle)
        circles.append([center[0], center[1], radius])
        piePieces.append(tmpPiece)
    # Second normalisation pass; note the sort is (redundantly) re-run for
    # every element of each piece list.
    for i in range(0, len(piePieces)):
        for j in range(0, len(piePieces[i])):
            if piePieces[i][j] < 0:
                piePieces[i][j] = piePieces[i][j] + 2 * np.pi
            piePieces[i].sort()
    return circles, piePieces, baseAngles
# rotates Squares such that the heuristic is maximized
def rotateTheSquares(squares, angles):
    """Rotate each square in place about its centre by the matching angle.

    Point entries appear to be stored as [row, col] (i.e. [y, x]) — the
    coordinates are swapped before and after calling rotated_about; TODO
    confirm.  Entries whose first element is a string (category labels) are
    left untouched.  Returns the mutated `squares` list.
    """
    for i in range(0, len(squares)):
        # Centre with coordinates swapped to (x, y) order for rotated_about.
        square_center = (squares[i][6][1], squares[i][6][0])
        for j in range(0, len(squares[i])):
            if not isinstance(squares[i][j][0], str):
                angle = angles[i]
                y = squares[i][j][0]
                x = squares[i][j][1]
                x1, x0 = rotated_about(x, y, square_center[0], square_center[1], angle)
                squares[i][j][0] = x0
                squares[i][j][1] = x1
    return squares
#maximizes heuristic for a given stacking
def heuristicRotationForStacking(squares):
    """Rotate every square (stacking order fixed) to maximise the heuristic.

    For each square the best rotation of its proxy pie w.r.t. the circles
    stacked above it is computed via caculateOneAnglePie; squares for which
    no angle is found keep a zero rotation.  Rotates `squares` in place and
    returns them.

    Fix: uses the idiomatic ``angle is None`` instead of ``angle == None``
    and drops the unused pre-loop ``angle = 0`` initialiser.
    """
    localCircles, localPiePieces, baseAngles = preparePies(squares)
    resultAngles = []
    for i in range(0, len(squares)):
        angle, value = caculateOneAnglePie(
            localCircles[i], localPiePieces[i], localCircles[i + 1:]
        )
        if angle is None:
            # No feasible angle: keep the square's original orientation.
            angle = 0
        resultAngles.append(-baseAngles[i] + np.pi / 2 - angle)
    # Rotate the squares such that the heuristic is maximised.
    squares = rotateTheSquares(squares, resultAngles)
    return squares
def algorithmSquaresStacking(squares):
    """Compute a stacking order and rotations for the squares (our stacking).

    Repeatedly selects, via calculateLowestPie, the square whose best
    achievable pie visibility is lowest, appends it to the stacking and
    records the corresponding rotation.

    Bug fix: `baseAngles` is now popped in sync with the other per-square
    lists.  Previously it was indexed with positions of the *shrinking*
    lists while staying full-length, so after the first removal the wrong
    base angle could be applied to a square.

    Returns:
        (resultOrder, resultOrderForPies, resultPiecesForPies,
        resultAnglesForPies): rotated squares plus the matching proxy
        circles, pie pieces and pie angles, all in stacking order.
    """
    localCircles, localPiePieces, baseAngles = preparePies(squares)
    localSquares = copy.deepcopy(squares)
    resultOrder = []
    resultAngles = []
    resultAnglesForPies = []
    resultOrderForPies = []
    resultPiecesForPies = []
    while len(localCircles) > 0:
        # Calculate the next glyph to place.
        ind, angle = calculateLowestPie(localCircles, localPiePieces)
        # Remove it from every per-square list, keeping them aligned.
        tmpCircle = localCircles.pop(ind)
        tmpPiece = localPiePieces.pop(ind)
        tmpSquare = localSquares.pop(ind)
        tmpBaseAngle = baseAngles.pop(ind)
        # Record it in the result lists.
        resultAngles.append(-tmpBaseAngle + np.pi / 2 - angle)
        resultOrder.append(tmpSquare)
        resultAnglesForPies.append(angle)
        resultOrderForPies.append(tmpCircle)
        resultPiecesForPies.append(tmpPiece)
    # Rotate the squares such that the heuristic is maximised.
    resultOrder = rotateTheSquares(resultOrder, resultAngles)
    return resultOrder, resultOrderForPies, resultPiecesForPies, resultAnglesForPies
#############################################################################
############ Square Comparison Algorithms ###################################
#############################################################################
# optimal rotations but ordered by painter's algorithm
def algorithmHeuristicPainterSquareStacking(squares):
    """Painter's order (largest squares at the bottom) + heuristic rotations."""
    # Sort in place: biggest side length first, i.e. drawn lowest.
    squares.sort(key=sideLength, reverse=True)
    return heuristicRotationForStacking(squares)
# random rotations and ordered by painter's algorithm
def algorithmRandomPainterSquareStacking(squares):
    """Painter's order (largest squares at the bottom) + random rotations."""
    squares.sort(key=sideLength, reverse=True)
    rotations = randomAngles(len(squares))
    return rotateTheSquares(squares, rotations)
# optimal rotations but ordered randomly
def algorithmHeuristicRandomSquareStacking(squares):
    """Random stacking order + heuristic rotations."""
    # Shuffle in place, then rotate for best visibility.
    random.shuffle(squares)
    return heuristicRotationForStacking(squares)
# random rotations and ordered randomly
def algorithmCompletelyRandomSquareStacking(squares):
    """Random stacking order + random rotations (baseline)."""
    random.shuffle(squares)
    rotations = randomAngles(len(squares))
    return rotateTheSquares(squares, rotations)
# generate random angles
def randomAngles(length):
    """Draw `length` independent uniform angles from [0, 2*pi)."""
    return [random.random() * 2 * np.pi for _ in range(length)]
# euclidian distance
def distance(ax, ay, bx, by):
    """Euclidean distance between the points (ax, ay) and (bx, by)."""
    dy = by - ay
    dx = bx - ax
    return math.sqrt(dy ** 2 + dx ** 2)
# rotates point `A` about point `B` by `angle` radians counterclockwise.
def rotated_about(ax, ay, bx, by, angle):
    """Rotate point (ax, ay) about point (bx, by) by `angle` radians (CCW).

    NOTE(review): the minus signs below additionally mirror the result
    through the centre (equivalent to an extra rotation by pi) compared to
    the textbook formula; the rest of the pipeline appears to rely on this
    convention, so it is preserved — confirm before changing.
    """
    # Polar form of A relative to B (same formula as distance()).
    r = math.sqrt((ay - by) ** 2 + (ax - bx) ** 2)
    total = angle + math.atan2(ay - by, ax - bx)
    return (bx - r * math.cos(total), by - r * math.sin(total))
################################################################################
############################# square functions #################################
def utilitysSquares(squares):
    """Utility values for a square stacking.

    Squares later in the list are drawn on top of earlier ones.

    Returns:
        [occludedCounter, minGreaterZero, avg]: number of occluded anchor
        points, the smallest strictly positive distance to occlusion, and
        the average distance over all squares (rounded to 3 decimals).

    NOTE(review): `minimum` is computed but never returned, and an empty
    `squares` list would raise ZeroDivisionError — confirm callers always
    pass at least one square.
    """
    minimum = float("inf")
    minGreaterZero = float("inf")
    avg = 0
    for i, currentSquare in enumerate(squares):
        squaresAbove = squares[i + 1:]
        value = distanceToOcclusion(currentSquare, squaresAbove)
        avg = avg + value
        minimum = min(value, minimum)
        # Only strictly positive distances feed the non-zero minimum.
        if value > 0:
            minGreaterZero = min(value, minGreaterZero)
    occludedCounter = numberOfOccludedPointsIn(squares)
    avg = avg / len(squares)
    return [round(occludedCounter), round(minGreaterZero, 3), round(avg, 3)]
def numberOfOccludedPointsIn(squares):
    """Count occluded anchor points over all squares in stacking order."""
    total = 0
    for idx in range(len(squares)):
        current = squares[idx]
        # Squares after `current` in the list are drawn on top of it.
        above = squares[idx + 1:]
        total += numberOfOccludedPointsOf(current, occludedIntervalsPerSide(current, above))
    return total
def distanceToOcclusion(square, squares):
    """Shortest outline distance from `square`'s anchor points to any part
    of its boundary occluded by `squares`."""
    return minDistanceToOcclusion(square, occludedIntervalsPerSide(square, squares))
def numberOfOccludedPointsOf(square, occlusionsPerSide):
    """Count the square's anchor points hidden by the given per-side intervals."""
    # Each anchor point is [side_index, t] on the square's outline.
    return sum(
        1
        for side_index, t in importantSquarePoints(square)
        if isOccluded(t, occlusionsPerSide[side_index])
    )
def occludedIntervalsPerSide(square, squares):
    """Merged occlusion intervals for each of `square`'s four sides.

    Squares whose bounding box cannot touch `square` are filtered out first.
    """
    nearby = removeDistantSquares(square, squares)
    per_side = [occludedIntervalsForSide(square, side, nearby) for side in range(4)]
    return mergeAllIntervals(per_side)
def removeDistantSquares(square, squares):
    """Keep only the squares whose bounding box overlaps `square`'s."""
    return [other for other in squares if not haveDisjointBoundingBoxes(square, other)]
def occludedIntervalsForSide(square, i, squares):
    """Occlusion intervals on side i of `square` caused by each of `squares`.

    Stops early once a square covers the whole side ([0, 1]).
    """
    side = [square[i], square[(i + 1) % 4]]
    collected = []
    for other in squares:
        occluded = occludedIntervalsForSquare(side, other)
        if occluded is None:
            continue
        collected.append(occluded)
        if occluded == [0, 1]:
            # The side is fully hidden; later squares cannot add anything.
            break
    return collected
def occludedIntervalsForSquare(side, square):
    """Parameter interval [a, b] of `side` hidden by `square`, or None.

    The side is parameterised from 0 (first endpoint) to 1 (second).
    """
    # Fully contained side -> fully occluded.
    if sideIsContainedInSquare(side, square):
        return [0, 1]
    hits = determineIntersections(side, square)
    if len(hits) == 1:
        t = hits[0]
        if liesWithin(side[0], square):
            return [0, t]
        if liesWithin(side[1], square):
            return [t, 1]
        # Tangential touch: degenerate interval.
        return [t, t]
    if len(hits) == 2:
        return [min(hits), max(hits)]
    return None
def sideIsContainedInSquare(side, square):
    """True iff both endpoints of `side` lie inside `square`."""
    return all(liesWithin(endpoint, square) for endpoint in side)
def liesWithin(p, square):
    """Point-in-polygon test via ray casting.

    Casts a ray from p in +x direction; p is inside iff the ray crosses an
    odd number of the square's sides (each side half-open in s: 0 <= s < 1).
    """
    ray_end = [p[0] + 1, p[1]]
    crossings = 0
    for k in range(4):
        t, s = solveLinearEquation(p, ray_end, square[k], square[(k + 1) % 4])
        if t >= 0 and 0 <= s < 1:
            crossings += 1
    return crossings % 2 == 1
def determineIntersections(side, square):
    """Parameters t in [0, 1] where `side` crosses the square's boundary.

    `side` is a pair of points; t parameterises the side and s the square
    edge in solveLinearEquation.

    Bug fix: the original accepted edge parameters with ``s <= 0 and s < 1``
    (i.e. only s <= 0, which almost never marks a real crossing); a hit lies
    on the edge for ``0 <= s < 1``, matching the half-open convention used
    by liesWithin().
    """
    intersections = []
    for index in range(4):
        t, s = solveLinearEquation(
            side[0], side[1], square[index], square[(index + 1) % 4]
        )
        if 0 <= t <= 1 and 0 <= s < 1:
            intersections.append(t)
    return intersections
def solveLinearEquation(A, B, C, D):
    """Solve A + t*(B - A) = C + s*(D - C) for [t, s].

    Returns [-1, -1] when the segments are parallel or degenerate
    (determinant zero), i.e. no unique solution exists.
    """
    ux, uy = B[0] - A[0], B[1] - A[1]
    vx, vy = D[0] - C[0], D[1] - C[1]
    det = ux * vy - vx * uy
    if det == 0:
        return [-1, -1]
    wx, wy = C[0] - A[0], C[1] - A[1]
    t = (wx * vy - wy * vx) / det
    s = (wx * uy - wy * ux) / det
    return [t, s]
def mergeAllIntervals(intervalArray):
    """Merge the interval list of every side independently."""
    merged = []
    for side_intervals in intervalArray:
        merged.append(mergeIntervals(side_intervals))
    return merged
def mergeIntervals(intervals):
    """Merge overlapping or touching [a, b] intervals.

    Note: sorts `intervals` in place and may mutate its member lists while
    extending the current merge candidate.
    """
    if len(intervals) == 0:
        return []
    intervals.sort(key=lambda interval: interval[0])
    merged = []
    current = intervals[0]
    for candidate in intervals:
        if candidate[0] <= current[1]:
            # Overlap (or touch): extend the current interval in place.
            current[1] = max(current[1], candidate[1])
        else:
            merged.append(current)
            current = candidate
    merged.append(current)
    return merged
def leftIntervalBoundary(interval):
    """Sort key: the lower boundary of an [a, b] interval."""
    return interval[0]
def minDistanceToOcclusion(square, intervals):
    """Smallest outline distance from any anchor point to an occlusion.

    The distance is capped at 2 side lengths and returned in world units
    (scaled by the square's side length).
    """
    per_point = [
        pointDistanceToOcclusion(anchor, intervals)
        for anchor in importantSquarePoints(square)
    ]
    smallest = min(per_point, default=float("inf"))
    return min(smallest, 2) * sideLength(square)
def sideLength(square):
    """Euclidean length of the square's first edge (corner 0 to corner 1)."""
    dx = square[0][0] - square[1][0]
    dy = square[0][1] - square[1][1]
    return math.sqrt(dx * dx + dy * dy)
def importantSquarePoints(square):
    """The four anchor points of a square in outline coordinates.

    Each entry is [side_index, t]: the two mosaic split points (square[4]
    and square[5]) located on the outline, plus two fixed corners at
    parameters [0, 0] and [2, 0].
    """
    return [
        importantSquarePoint(square[4], square),
        importantSquarePoint(square[5], square),
        [0, 0],
        [2, 0],
    ]
def importantSquarePoint(point, square):
    """Locate `point` on the square's outline as [side_index, t], else None."""
    for side_index in range(4):
        start = square[side_index]
        end = square[(side_index + 1) % 4]
        t = calculateStepParam(start, end, point)
        if t is not None:
            return [side_index, t]
    return None
def calculateStepParam(a, b, c):
    """Parameter t with c ~= a + t * (b - a), or None if c is off the line.

    Collinearity is tested with a small cross-product tolerance (0.02);
    a degenerate segment (a == b) yields t = 0.
    """
    vx, vy = b[0] - a[0], b[1] - a[1]
    wx, wy = c[0] - a[0], c[1] - a[1]
    if vx == 0 and vy == 0:
        # Degenerate segment: every c maps to parameter 0.
        return 0
    if abs(vx * wy - vy * wx) > 0.02:
        # c is not (approximately) collinear with the segment a-b.
        return None
    return wx / vx if vx != 0 else wy / vy
def pointDistanceToOcclusion(point, intervals):
    """Outline distance from an anchor point to the nearest occluded stretch.

    Returns 0 if the point itself is occluded and the cap value 2 if the
    whole outline is free of occlusions.
    """
    side_index, param = point
    if isOccluded(param, intervals[side_index]):
        return 0
    if sum(len(per_side) for per_side in intervals) == 0:
        # Nothing occluded anywhere on the outline.
        return 2
    lo, hi = visibleRegion(param, wrapIntervals(side_index, intervals))
    return min(param - lo, hi - param)
def wrapIntervals(index, intervals):
    """All four sides' occlusions in one sorted coordinate system.

    The system is centred on side `index`; neighbouring sides contribute
    shifted copies so that wrap-around distances can be measured.
    """
    collected = []
    for offset in range(-4, 5):
        collected += shiftIntervals(offset, intervals[(offset + index) % 4])
    collected.sort(key=lambda interval: interval[0])
    return collected
def shiftIntervals(i, intervals):
    """Copies of the [a, b] intervals translated by -i."""
    return [[a - i, b - i] for a, b in intervals]
def isOccluded(t, intervals):
    """True iff parameter t lies inside any closed interval [a, b]."""
    return any(a <= t <= b for a, b in intervals)
def visibleRegion(t, intervals):
    """Visible stretch [a, b] around t, given sorted occlusion intervals.

    Falls back to the wrapped outline limits -4 and 4 when no occlusion
    exists on the respective side of t.
    """
    below = [iv for iv in intervals if iv[1] < t]
    above = [iv for iv in intervals if iv[0] > t]
    left = below[-1][1] if below else -4
    right = above[0][0] if above else 4
    return [left, right]
def haveDisjointBoundingBoxes(sq1, sq2):
    """True iff the axis-aligned bounding boxes of the two squares' corner
    quadruples do not overlap."""
    return areDisjoint(boundingBox(sq1[0:4]), boundingBox(sq2[0:4]))
def boundingBox(points):
    """Axis-aligned bounding box [left, right, bottom, top] of the points."""
    xs = [p[0] for p in points]
    ys = [p[1] for p in points]
    return [min(xs), max(xs), min(ys), max(ys)]
def areDisjoint(bba, bbb):
    """True iff two [left, right, bottom, top] boxes do not intersect."""
    separated_x = bba[1] < bbb[0] or bbb[1] < bba[0]
    separated_y = bba[3] < bbb[2] or bbb[3] < bba[2]
    return separated_x or separated_y
<file_sep>/.flake8
# .flake8
[flake8]
select = C,E,F,W
max-complexity = 10
<file_sep>/README.md
# GeomLabCode - 2020
Code for the geomlab.
Paper and Slides to this piece of software: https://github.com/rscircus/GeomLabTex

# Installation
To get this running you need to have:
- poetry - a python package manager
- pyenv - manages multiple python versions without messing with the host python version 💚
- Some kind of Python 3.7 (e.g., `$ pyenv install 3.7.5`)
installed.
Then simply run:
```bash
$ poetry install
```
let it install all deps and then
```bash
$ poetry shell
```
To test if everything got installed correctly you can do the following:
```shell
$ python
Python 3.7.5 (default, May 23 2020, 15:44:44)
[GCC 9.3.0] on linux
Type "help", "copyright", "credits" or "license" for more information.
>>> import geomlab
>>> geomlab.__version__
'0.1.0'
```
Poetry will have created a virtual environment for you, with everything
working smoothly and without interfering with the host Python installation.
To run the GUI after installing all dependencies use
```shell
$ poetry run geomlab
```
If you guys need help with integration, feel free to drop anything into
slack. As always, I'm most likely to respond on `WEEKDAY not in
(Wed,Thu,Fri)`, YouRS.
## Logging
You can specify the visible log-level via appending `--log={INFO, DEBUG, CRITICAL}` in the cmd line.
<file_sep>/src/geomlab/__init__.py
# src/geomlab/__init__.py
__version__ = "0.1.0"
<file_sep>/pyproject.toml
[tool.poetry]
name = "geomlab"
version = "0.1.0"
description = ""
authors = ["<NAME> <<EMAIL>>", "<NAME>", "<NAME>"]
[tool.poetry.dependencies]
python = "^3.7"
kivy = "^1.11.1"
matplotlib = "^3.2.2"
pillow = "^7.2.0"
opencv-python = "^4.4"  # NOTE: original version constraint was corrupted ("192.168.3.11")
pandas = "^1.1.1"
black = "^20.8b1"
[tool.poetry.scripts]
geomlab = "geomlab.main:main"
[tool.poetry.dev-dependencies]
pytest = "^5.4.3"
nox = "^2020.5.24"
[build-system]
requires = ["poetry>=0.12"]
build-backend = "poetry.masonry.api"
<file_sep>/src/geomlab/main.py
import matplotlib
import random
import logging
import time
import datetime
import numpy as np
import math
import copy
import csv
matplotlib.use("TkAgg")
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg, NavigationToolbar2Tk
from matplotlib.figure import Figure
import tkinter as tk
import tkinter.ttk as ttk
import tkinter.font as tkfont
# Import covid loader and most recent covid data
from . import covidloader as cl
from . import symbolicstacking as st
# Basic settings
logging.basicConfig(filename="log.txt", level=logging.DEBUG)
# Expand TK's oval:
def _create_circle(self, x, y, r, **kwargs):
    """Draw a circle with centre (x, y) and radius r via Canvas.create_oval.

    Extra keyword arguments are forwarded to create_oval unchanged; the
    oval's item id is returned.
    """
    return self.create_oval(x - r, y - r, x + r, y + r, **kwargs)
# Monkeypatch: make the helper available as Canvas.create_circle.
tk.Canvas.create_circle = _create_circle
# Expand TK's oval to support our pies:
def _create_circle_arc(self, x, y, r, **kwargs):
    """Draw a circular arc with centre (x, y) and radius r via create_arc.

    Accepts `start`/`end` angles for convenience: they are translated into
    Tk's native `start`/`extent` pair before delegating.
    """
    if "start" in kwargs and "end" in kwargs:
        kwargs["extent"] = kwargs["end"] - kwargs["start"]
        del kwargs["end"]
    return self.create_arc(x - r, y - r, x + r, y + r, **kwargs)
# Monkeypatch: make the helper available as Canvas.create_circle_arc.
tk.Canvas.create_circle_arc = _create_circle_arc
def on_combo_configure(event):
    """Widen the TCombobox style so its longest dropdown value fits."""
    combo = event.widget
    longest_value = max(combo.cget("values"), key=len)
    combo_font = tkfont.nametofont(str(combo.cget("font")))
    # Extra width needed beyond the widget's current width (never negative).
    extra = max(0, combo_font.measure(longest_value.strip() + "0") - combo.winfo_width())
    ttk.Style().configure("TCombobox", postoffset=(0, 0, extra, 0))
# Config object
class Config:
    """Holds the project configuration (geomDataGeneration defaults).

    The values are defaults and are meant to be adapted by the user.
    """
    def __init__(self):
        # Default values for geomDataGeneration.
        self.maximalSize = 40
        self.scalingFactor = 500
        self.lowerBoundCases = 10000
# Main Window
class GeomLabApp(tk.Tk):
    """Extends tk.Tk to GeomLabApp with all necessary frames. Manages the window."""
    def __init__(self, *args, **kwargs):
        """Build the main window: menu bar, page container and all pages."""
        tk.Tk.__init__(self, *args, **kwargs)
        # Configure self
        self.geometry("1810x2000")
        self.title("Symbolic Maps")
        # Menu: one entry per page plus Quit.
        menubar = tk.Menu(self)
        file_menu = tk.Menu(menubar, tearoff=0)
        file_menu.add_command(
            label="Symbolic Maps", command=lambda: self.show_frame(SymbolMapsPage)
        )
        file_menu.add_command(
            label="Settings Page",
            command=lambda: self.show_frame(SettingsPage),
        )
        file_menu.add_command(
            label="Painting Program",
            command=lambda: self.show_frame(PaintingProgramPage),
        )
        file_menu.add_command(
            label="Matplotlib", command=lambda: self.show_frame(MatplotlibPage)
        )
        file_menu.add_command(label="About", command=lambda: self.show_frame(AboutPage))
        file_menu.add_command(label="Quit", command=lambda: self.destroy())
        menubar.add_cascade(label="File", menu=file_menu)
        self.config(menu=menubar)
        # Configure content: a single grid cell that all pages share.
        container = tk.Frame(self)
        container.pack(side="top", fill="both", expand=True)
        container.grid_rowconfigure(0, weight=1)
        container.grid_columnconfigure(0, weight=1)
        # Create config
        self.symbolic_config = Config()
        # Create "pages": each page is instantiated once and stacked in the
        # same cell; show_frame() raises the selected one.
        self.frames = {}
        # Create
        for page in (
            SymbolMapsPage,
            SettingsPage,
            PaintingProgramPage,
            MatplotlibPage,
            AboutPage,
        ):
            frame = page(container, self)
            frame.grid(row=0, column=0, sticky="nswe")
            self.frames[page] = frame
        # Add a second frame of type SymbolicMapsPage
        scnd_container = tk.Frame(self)
        scnd_container.pack(side="top", fill="both", expand=True)
        scnd_frame = SymbolMapsPage(scnd_container, self)
        scnd_frame.grid(row=0, column=1, sticky="nswe")
        # Display page in first frame
        self.show_frame(SymbolMapsPage)
    def show_frame(self, container):
        """Show a specific frame in the window."""
        frame = self.frames[container]
        frame.tkraise()
# Frames
class SymbolMapsPage(tk.Frame):
"""Frmae for the visualization of the symbol maps using COVID-19 data."""
    def __init__(self, parent, controller):
        """Set up widgets, display state, input data and the CSV utility log.

        Side effects: creates a timestamped CSV file with a header row in
        the working directory and immediately runs apply_algorithm() once.
        """
        super().__init__(parent)
        # One CSV log per page instance, named by creation timestamp.
        self.csv_filename = (
            "./" + datetime.datetime.now().strftime("%Y%m%d-%H%M%S") + "_utilities.csv"
        )
        self.parent = parent
        self.controller = controller
        self.create_widgets()
        # Display objects
        self.pie_sets = {}
        self.pie_piece_sets = {}
        self.data_sets = {}
        self.square_sets = {}
        self.circles = []
        self.pies = []
        self.piePieces = []
        self.squares = []
        self.circles_for_drawing = []  # for nested disks different structure
        self.squares_for_drawing = []
        self.numberOfFeatures = 0  # numberOffeatures eg, rec,dead,rest equal 3
        self.angles = []
        # Timer bookkeeping (see the timer_* methods; 123456 is the
        # "not started" sentinel value).
        self.timer_running = False
        self.counter = 123456
        self.timer_start_timestamp = datetime.datetime.now()
        # Prepare inputs
        self.initialize_data()
        self.prepare_data()
        # Write explanation line into csv
        with open(self.csv_filename, mode="a") as utility_file:
            utility_writer = csv.writer(
                utility_file, delimiter=",", quotechar='"', quoting=csv.QUOTE_MINIMAL
            )
            utility_writer.writerow(
                [
                    "Timestamp",
                    "Unixtime (secs)",
                    "Algorithm",
                    "covered",
                    "minVis(r)/minDist",
                    "minVis(a)/minDistAvg",
                    "min1Gl/maxDistAvg",
                    "avgRel",
                    "abs%",
                ]
            )
        # Execute symbolic algo
        self.apply_algorithm()
def flush_everything(self):
self.pie_sets = {}
self.pie_piece_sets = {}
self.data_sets = {}
self.square_sets = {}
self.circles = []
self.pies = []
self.piePieces = []
self.squares = []
self.circles_for_drawing = []
self.squares_for_drawing = []
self.numberOfFeatures = 0
self.angles = []
# TODO: Shift into own object
# TODO: Timer is defunct - probably needs an own thread for display updates
# TODO: Using wallclock timestamps for now
    def timer_update_label(self):
        """Periodically refresh the timer label while the timer runs.

        NOTE(review): marked defunct in the TODOs above — apply_algorithm
        uses wall-clock timestamps instead (see timer_start/timer_stop).
        """
        def count():
            # Re-schedules itself every second via Tk's after() as long as
            # timer_running stays True.
            if self.timer_running:
                if self.counter == 123456:
                    # Sentinel value: the timer was never started.
                    timestr = "Starting..."
                else:
                    timestamp = datetime.date.fromtimestamp(self.counter)
                    timestr = timestamp.strftime("%H:%M:%S")
                self.timerlabel["text"] = timestr
                self.timerlabel.after(1000, count)
                self.counter += 1
        # timer is running
        count()
    def timer_start(self):
        """Mark the timer label as running and record the wall-clock start time."""
        self.timer_running_label["bg"] = "red"
        self.timer_running_label["text"] = "Timer running"
        self.timer_running_label.update_idletasks()
        self.timer_start_timestamp = datetime.datetime.now()
        # self.timer_update_label()
    def timer_stop(self):
        """Stop the timer display and show the elapsed wall time in milliseconds."""
        self.timer_running_label["bg"] = "green"
        self.timer_running_label["text"] = "Timer not running"
        self.timer_running_label.update_idletasks()
        # Elapsed wall-clock time since timer_start(), formatted as ms.
        self.timerlabel["text"] = (
            "Runtime (wall): "
            + str(
                int(
                    (
                        datetime.datetime.now() - self.timer_start_timestamp
                    ).total_seconds()
                    * 1000
                )
            )
            + " milliseconds"
        )
        # self.timer_running = False
        # self.counter = 123456
def apply_algorithm(self):
"""Update Canvas upon algo change."""
# Set current dataset
self.circles = self.data_sets[self.data.current()]
self.piePieces = self.pie_piece_sets[self.data.current()]
self.pies = self.pie_sets[self.data.current()]
self.angles = [0] * len(self.pies)
self.squares = self.square_sets[self.data.current()]
print("Current data set:")
print(self.data.current())
print(f"Number of circles: {len(self.circles)}")
print(f"Number of pies: {len(self.pies)}")
print(f"Number of squares: {len(self.squares)}")
algo = self.algorithm.current()
"""
"centered disks | random", # 0
"centered disks | LeftToRight", # 1
"centered disks | RightToLeft", # 2
"centered disks | Painter", # 3
"centered disks | MinMin-Stacking (abs)", # 4
"centered disks | MinMin-Stacking (rel)", # 5
"centered disks | MinSum-Stacking (abs)", # 6
"centered disks | MinSum-Stacking (rel)", # 7
"hawaiian disks | random", # 8
"hawaiian disks | LeftToRight", # 9
"hawaiian disks | RightToLeft", # 10
"hawaiian disks | Painter", # 11
"hawaiian disks | our Stacking", # 12
"pie charts | random", # 13
"pie charts | LeftToRight", # 14
"pie charts | RightToLeft", # 15
"pie charts | Painter", # 16
"pie charts | our Stacking", # 17
"squares | Painter+heuristic" #18
"squares | Painter+random rotations" #19
"squares | random Stacking+heuristic rotations" #20
"squares | random Stacking+heuristic rotations" #21
"squares | our Stacking" #22
"""
# Timer start
self.timer_start()
# TODO: Assuming objective values are positive
objective_value = -1
self.objective_list.delete(1)
if algo == 0:
self.circles = st.algorithmNestedDisksRandom(self.circles)
(
self.circles_for_drawing,
self.numberOfFeatures,
) = st.formatChangeNestedDisks(self.circles)
self.objective_list.insert(1, st.utilitysNestedDisks(self.circles))
elif algo == 1:
self.circles = st.algorithmNestedDisksLeftToRight(self.circles)
(
self.circles_for_drawing,
self.numberOfFeatures,
) = st.formatChangeNestedDisks(self.circles)
self.objective_list.insert(1, st.utilitysNestedDisks(self.circles))
elif algo == 2:
self.circles = st.algorithmNestedDisksRightToLeft(self.circles)
(
self.circles_for_drawing,
self.numberOfFeatures,
) = st.formatChangeNestedDisks(self.circles)
self.objective_list.insert(1, st.utilitysNestedDisks(self.circles))
elif algo == 3:
self.circles = st.algorithmNestedDisksPainter(self.circles)
(
self.circles_for_drawing,
self.numberOfFeatures,
) = st.formatChangeNestedDisks(self.circles)
self.objective_list.insert(1, st.utilitysNestedDisks(self.circles))
elif algo == 4:
self.circles, objective_value = st.algorithmNestedDisksStackingMinMin(
self.circles, "absolute"
)
(
self.circles_for_drawing,
self.numberOfFeatures,
) = st.formatChangeNestedDisks(self.circles)
self.objective_list.insert(1, st.utilitysNestedDisks(self.circles))
elif algo == 5:
self.circles, objective_value = st.algorithmNestedDisksStackingMinMin(
self.circles, "relative"
)
(
self.circles_for_drawing,
self.numberOfFeatures,
) = st.formatChangeNestedDisks(self.circles)
self.objective_list.insert(1, st.utilitysNestedDisks(self.circles))
elif algo == 6:
self.circles, objective_value = st.algorithmNestedDisksStackingMinSum(
self.circles, "absolute"
)
(
self.circles_for_drawing,
self.numberOfFeatures,
) = st.formatChangeNestedDisks(self.circles)
self.objective_list.insert(1, st.utilitysNestedDisks(self.circles))
elif algo == 7:
self.circles, objective_value = st.algorithmNestedDisksStackingMinSum(
self.circles, "relative"
)
(
self.circles_for_drawing,
self.numberOfFeatures,
) = st.formatChangeNestedDisks(self.circles)
self.objective_list.insert(1, st.utilitysNestedDisks(self.circles))
elif algo == 8:
self.circles_for_drawing = st.algorithmHawaiianRandom(self.circles)
self.numberOfFeatures = len(self.circles[0]) - 2
self.objective_list.insert(
1, st.utilitysHawaiian(self.circles_for_drawing, 3)
)
elif algo == 9:
self.circles_for_drawing = st.algorithmHawaiianLeftToRight(self.circles)
self.numberOfFeatures = len(self.circles[0]) - 2
self.objective_list.insert(
1, st.utilitysHawaiian(self.circles_for_drawing, 3)
)
elif algo == 10:
self.circles_for_drawing = st.algorithmHawaiianRightToLeft(self.circles)
self.numberOfFeatures = len(self.circles[0]) - 2
self.objective_list.insert(
1, st.utilitysHawaiian(self.circles_for_drawing, 3)
)
elif algo == 11:
self.circles_for_drawing = st.algorithmHawaiianPainter(self.circles)
self.numberOfFeatures = len(self.circles[0]) - 2
self.objective_list.insert(
1, st.utilitysHawaiian(self.circles_for_drawing, 3)
)
elif algo == 12:
self.circles_for_drawing = st.algorithmHawaiianStacking(self.circles)
self.numberOfFeatures = len(self.circles[0]) - 2
self.objective_list.insert(
1, st.utilitysHawaiian(self.circles_for_drawing, 3)
)
elif algo == 13:
self.pies, self.piePieces, self.angles = st.algorithmPieChartsRandom(
self.pies, self.piePieces
)
self.objective_list.insert(
1, st.utilitysPieCharts(self.pies, self.piePieces, self.angles)
)
elif algo == 14:
self.pies, self.piePieces, self.angles = st.algorithmPieChartsPainterRandom(
self.pies, self.piePieces
)
self.objective_list.insert(
1, st.utilitysPieCharts(self.pies, self.piePieces, self.angles)
)
elif algo == 15:
self.pies, self.piePieces, self.angles = st.algorithmPieChartsRightToLeft(
self.pies, self.piePieces
)
self.objective_list.insert(
1, st.utilitysPieCharts(self.pies, self.piePieces, self.angles)
)
elif algo == 16:
self.pies, self.piePieces, self.angles = st.algorithmPieChartsPainter(
self.pies, self.piePieces
)
self.objective_list.insert(
1, st.utilitysPieCharts(self.pies, self.piePieces, self.angles)
)
elif algo == 17:
self.pies, self.piePieces, self.angles = st.algorithmPieChartsStacking(
self.pies, self.piePieces
)
self.objective_list.insert(
1, st.utilitysPieCharts(self.pies, self.piePieces, self.angles)
)
elif algo == 18:
self.squares_for_drawing = st.algorithmHeuristicPainterSquareStacking(
copy.deepcopy(self.squares)
)
self.objective_list.insert(
1, st.utilitysSquares(self.squares_for_drawing)
)
print("square utilitys: ", st.utilitysSquares(self.squares_for_drawing))
elif algo == 19:
self.squares_for_drawing = st.algorithmRandomPainterSquareStacking(
copy.deepcopy(self.squares)
)
self.objective_list.insert(
1, st.utilitysSquares(self.squares_for_drawing)
)
print("square utilitys: ", st.utilitysSquares(self.squares_for_drawing))
elif algo == 20:
self.squares_for_drawing = st.algorithmHeuristicRandomSquareStacking(
copy.deepcopy(self.squares)
)
self.objective_list.insert(
1, st.utilitysSquares(self.squares_for_drawing)
)
print("square utilitys: ", st.utilitysSquares(self.squares_for_drawing))
elif algo == 21:
self.squares_for_drawing = st.algorithmCompletelyRandomSquareStacking(
copy.deepcopy(self.squares)
)
self.objective_list.insert(
1, st.utilitysSquares(self.squares_for_drawing)
)
print("square utilitys: ", st.utilitysSquares(self.squares_for_drawing))
elif algo == 22:
self.squares_for_drawing, _, _, _ = st.algorithmSquaresStacking(
copy.deepcopy(self.squares)
)
self.objective_list.insert(
1, st.utilitysSquares(self.squares_for_drawing)
)
print("square utilitys: ", st.utilitysSquares(self.squares_for_drawing))
else:
logging.critical("Algorithm not present. You shouldn't see me.")
# Write results in to csv
#
# As the results are written into the 2nd line of objective_list, they
# are picked up there with an error check.
if self.objective_list.size() == 2:
print(f"Appending to {self.csv_filename}")
with open(self.csv_filename, mode="a") as utility_file:
utility_writer = csv.writer(
utility_file,
delimiter=",",
quotechar='"',
quoting=csv.QUOTE_MINIMAL,
)
obj = self.objective_list.get(1, 1)
obj_list = list(obj[0])
full_data = [
datetime.datetime.now().strftime("%Y%m%d-%H%M%S"),
time.mktime(datetime.datetime.now().timetuple()),
self.algorithm.get(),
]
full_data.extend(obj_list)
utility_writer.writerow(full_data)
else:
logging.critical("Some utility function is still missing!")
# Timer end
self.timer_stop()
# Objective update
# TODO: Leaving that as this has to adapt after the objective_list intro
if objective_value != -1:
self.objective_running_label["text"] = "Num. objective"
self.objectivelabel["text"] = str(objective_value)
self.objective_running_label["bg"] = "green"
else:
self.objective_running_label["bg"] = "red"
self.objective_running_label["text"] = "Objective"
self.objectivelabel["text"] = "N/A"
# Utilities
# self.objective_list.insert(1, "sth")
# Draw
if algo in range(0, 13):
self.draw_subcircle_stacking()
if algo in range(13, 18):
self.draw_pie_stacking()
if algo in range(18, 23):
self.drawSquareSolution()
def draw_circles(self):
for c in self.circles:
# x, y ,r
self.canvas.create_circle(c[0], c[1], c[2], fill="#bbb", outline="#000")
def from_rgb(self, rgb):
"""translates an rgb tuple of int to a tkinter friendly color code."""
return "#%02x%02x%02x" % rgb
def drawSquareSolution(self):
for i in range(0, len(self.squares_for_drawing)):
self.drawSquare(self.squares_for_drawing[i])
    def drawSquare(self, square):
        """Draw one mosaic square as three coloured quadrilaterals on the canvas.

        `square` is the list built by createOneSquare: indices 0-3 are the
        corner points, 4 and 5 are split points carrying a category tag at
        index 2 ("dead"/"rec"/"rest"), 6 is the centre and 7 is the tag of
        the remaining third piece.
        NOTE(review): points appear to be stored as (y, x); the vertex
        tuples below swap components back to (x, y) -- confirm against
        createOneSquare.
        """
        # Category fill colours: grey for "dead", green-ish for "rec",
        # red-ish for everything else (see the tag checks below).
        color1PIL = "#FF9994"
        color2PIL = "#94FF99"
        color3PIL = "#A0A0A0"
        # tmp: fourth vertex completing the parallelogram spanned by
        # corner 0, split point 4 and corner 3.
        tmp = [0, 0]
        tmp[1] = square[4][0] + (square[3][0] - square[0][0])
        tmp[0] = square[4][1] + (square[3][1] - square[0][1])
        # vertex tuples for the three mosaic parts of the square
        square1_vertices = (
            (square[0][1], square[0][0]),
            (square[4][1], square[4][0]),
            (tmp[0], tmp[1]),
            (square[3][1], square[3][0]),
        )
        # tmp2: fourth vertex of the second part, parallelogram of
        # corner 1, split point 5 and split point 4.
        tmp2 = [0, 0]
        tmp2[1] = square[4][0] + (square[5][0] - square[1][0])
        tmp2[0] = square[4][1] + (square[5][1] - square[1][1])
        square2_vertices = (
            (square[4][1], square[4][0]),
            (square[1][1], square[1][0]),
            (square[5][1], square[5][0]),
            (tmp2[0], tmp2[1]),
        )
        # Third part uses the two helper vertices plus corner 2.
        square3_vertices = (
            (tmp2[0], tmp2[1]),
            (square[5][1], square[5][0]),
            (square[2][1], square[2][0]),
            (tmp[0], tmp[1]),
        )
        # Pick the colour for each part from its category tag.
        color1 = ""
        color2 = ""
        color3 = ""
        if square[4][2] == "dead":
            color1 = color3PIL
        else:
            if square[4][2] == "rec":
                color1 = color2PIL
            else:
                color1 = color1PIL
        if square[5][2] == "dead":
            color2 = color3PIL
        else:
            if square[5][2] == "rec":
                color2 = color2PIL
            else:
                color2 = color1PIL
        if square[7] == "dead":
            color3 = color3PIL
        else:
            if square[7] == "rec":
                color3 = color2PIL
            else:
                color3 = color1PIL
        # Paint the three parts with a black outline.
        self.canvas.create_polygon(
            square1_vertices, outline="#000", fill=color1, width=2
        )
        self.canvas.create_polygon(
            square2_vertices, outline="#000", fill=color2, width=2
        )
        self.canvas.create_polygon(
            square3_vertices, outline="#000", fill=color3, width=2
        )
    def draw_subcircle_stacking_3Features(self):
        """Draw the pre-computed nested-circle stacking for exactly three
        features, cycling a fixed palette (red-ish, green-ish, grey) so each
        run of three consecutive circles gets one colour each.

        Circles in self.circles_for_drawing are stored as (y, x, r).
        """
        counter = 1
        for c in self.circles_for_drawing:
            y = c[0]
            x = c[1]
            r = c[2]
            if counter == 1:
                color = "#FF9994"
            if counter == 2:
                color = "#94FF99"
            if counter == 3:
                color = "#A0A0A0"
            # Advance and wrap the 1..3 colour counter.
            counter = counter + 1
            if counter == 4:
                counter = 1
            self.canvas.create_circle(x, y, r, fill=color, outline="#000")
    def draw_subcircle_stacking_arbitraryFeatures(self):
        """Draw the nested-circle stacking for an arbitrary feature count,
        shading each feature with a distinct grey value (lighter = first).

        NOTE(review): counterMax is numberOfFeatures - 1, so a feature count
        of 1 would divide by zero below -- confirm callers guarantee >= 2
        (draw_subcircle_stacking routes the 3-feature case elsewhere).
        """
        counter = 0
        counterMax = self.numberOfFeatures - 1
        for c in self.circles_for_drawing:
            y = c[0]
            x = c[1]
            r = c[2]
            # Colours are grey values ramping from 200 (counter 0)
            # down to 50 (counter == counterMax).
            colorValue = int(200 - counter * (150 / counterMax))
            colorRGB = (colorValue, colorValue, colorValue)
            colorHEX = self.from_rgb(colorRGB)
            self.canvas.create_circle(x, y, r, fill=colorHEX, outline="#000")
            if counter == counterMax:
                counter = 0
            else:
                counter = counter + 1
def draw_subcircle_stacking(self):
counterMax = self.numberOfFeatures
if counterMax == 3:
self.draw_subcircle_stacking_3Features()
else:
self.draw_subcircle_stacking_arbitraryFeatures()
    def draw_pie_stacking_3Features(self):
        """Draw each pie chart as three coloured arc sectors.

        Pies are stored as (y, x, r); self.piePieces[i] holds two angles
        (radians) marking the sector boundaries relative to the pie's
        rotation self.angles[i].  tkinter arcs take degrees; the -90 offset
        rotates the zero line from 3 o'clock to 12 o'clock.
        """
        for i in range(0, len(self.pies)):
            # first arc: from the imaginary zero line to the first line
            angle = self.angles[i]
            y = self.pies[i][0]
            x = self.pies[i][1]
            r = self.pies[i][2]
            # (duplicate assignment kept as in the original)
            angle = self.angles[i]
            s = angle * 180 / np.pi
            e = (angle + self.piePieces[i][0]) * 180 / np.pi
            ext = e - s
            if ext < 0:
                ext = ext + 360
            self.canvas.create_arc(
                x - r,
                y - r,
                x + r,
                y + r,
                fill="#A0A0A0",
                outline="black",
                start=s - 90,
                extent=ext,
            )
            # middle piece: between the two dividing lines
            s = e
            e = (angle + self.piePieces[i][1]) * 180 / np.pi
            ext = e - s
            if ext < 0:
                ext = ext + 360
            self.canvas.create_arc(
                x - r,
                y - r,
                x + r,
                y + r,
                fill="#94FF99",
                outline="black",
                start=s - 90,
                extent=ext,
            )
            # last piece: from the last line back to the imaginary zero line
            s = e
            e = angle * 180 / np.pi
            e = e + 360
            ext = e - s
            self.canvas.create_arc(
                x - r,
                y - r,
                x + r,
                y + r,
                fill="#FF9994",
                outline="black",
                start=s - 90,
                extent=ext,
            )
    def draw_pies_stacking_arbitraryFeatures(self):
        """Draw each pie chart with an arbitrary number of sectors, shading
        the sectors with a grey ramp (first sector lightest, last darkest).

        Pies are stored as (y, x, r); self.piePieces[i] holds the sector
        boundary angles (radians) relative to rotation self.angles[i].
        """
        for i in range(0, len(self.pies)):
            # geometry of the circle
            angle = self.angles[i]
            y = self.pies[i][0]
            x = self.pies[i][1]
            r = self.pies[i][2]
            # (duplicate assignment kept as in the original)
            angle = self.angles[i]
            # initial piece: from the rotated zero line to the first boundary
            s = angle * 180 / np.pi
            e = (angle + self.piePieces[i][0]) * 180 / np.pi
            ext = e - s
            if ext < 0:
                ext = ext + 360
            colorValue = int(200 - 0 * (150 / len(self.piePieces)))
            colorHEX = self.from_rgb((colorValue, colorValue, colorValue))
            self.canvas.create_arc(
                x - r,
                y - r,
                x + r,
                y + r,
                fill=colorHEX,
                outline="black",
                start=s - 90,
                extent=ext,
            )
            # middle pieces: between consecutive boundary angles
            for j in range(1, len(self.piePieces[i])):
                s = (angle + self.piePieces[i][j - 1]) * 180 / np.pi
                e = (angle + self.piePieces[i][j]) * 180 / np.pi
                ext = e - s
                if ext < 0:
                    ext = ext + 360
                colorValue = int(200 - j * (150 / (len(self.piePieces[i]))))
                colorHEX = self.from_rgb((colorValue, colorValue, colorValue))
                self.canvas.create_arc(
                    x - r,
                    y - r,
                    x + r,
                    y + r,
                    fill=colorHEX,
                    outline="black",
                    start=s - 90,
                    extent=ext,
                )
            # last piece: from the final boundary back to the zero line
            s = (angle + self.piePieces[i][len(self.piePieces[i]) - 1]) * 180 / np.pi
            e = (angle * 180 / np.pi) + 360
            ext = e - s
            if ext < 0:
                ext = ext + 360
            # Fixed darkest grey for the closing sector.
            colorValue = int(50)
            colorHEX = self.from_rgb((colorValue, colorValue, colorValue))
            self.canvas.create_arc(
                x - r,
                y - r,
                x + r,
                y + r,
                fill=colorHEX,
                outline="black",
                start=s - 90,
                extent=ext,
            )
def draw_pie_stacking(self):
if len(self.piePieces[0]) == 2:
self.draw_pie_stacking_3Features()
else:
self.draw_pies_stacking_arbitraryFeatures()
    def data_algo_change(self, event):
        """Combobox callback: clear the canvas, redraw the background map
        image and re-run the currently selected algorithm.

        `event` is the tkinter event object; it is not inspected (the page
        also calls this with None after settings changes).
        """
        print("Change algorithm.")
        self.canvas.delete("all")
        self.canvas.create_image(0, 0, image=self.world_image, anchor="nw")
        self.apply_algorithm()
        # self.draw_circles()
    def create_widgets(self):
        """Build the page's widgets: timer and objective status labels, the
        scrollable objective list, the drawing canvas, and the dataset and
        algorithm comboboxes (both bound to data_algo_change)."""
        # Top widgets
        self.frame = tk.Frame(self, self.parent)
        self.frame.grid(column=0, row=0, sticky="w")
        # Add algo timer
        self.timerlabel = tk.Label(self.frame, text="Timer...", fg="red")
        self.timerlabel.grid(column=2, row=1, sticky=tk.W + tk.E)
        self.timer_running_label = tk.Label(
            self.frame, text="Timer not running", bg="red", fg="white"
        )
        self.timer_running_label.grid(column=2, row=0, sticky=tk.W + tk.E)
        # Add cost/objective value display
        self.objectivelabel = tk.Label(self.frame, text="Objective...", fg="red")
        self.objectivelabel.grid(column=3, row=1, sticky=tk.W + tk.E)
        self.objective_running_label = tk.Label(
            self.frame, text="No objective", bg="red", fg="white"
        )
        self.objective_running_label.grid(column=3, row=0, sticky=tk.W + tk.E)
        # Simply display all objectives there are
        self.objective_list = tk.Listbox(self.frame)
        self.olist_scrollbar = tk.Scrollbar(self.frame)
        self.objective_list.config(
            yscrollcommand=self.olist_scrollbar.set,
            relief=tk.SUNKEN,
            border=2,
            height=3,
            width=115,
        )
        self.olist_scrollbar.config(command=self.objective_list.yview)
        self.olist_scrollbar.grid(row=0, column=5, sticky="ns", rowspan=3)
        self.objective_list.grid(row=0, column=4, sticky="w", rowspan=3)
        # Header row describing the objective columns, then a placeholder
        # that is replaced once an algorithm has run.
        self.objective_list.insert(
            tk.END, "covered | minVis(r)/minDist | minVis(a)/minDistAvg | min1Gl/maxDistAvg | avgRel | abs%"
        )
        self.objective_list.insert(tk.END, "--- no run, yet ---")
        # Add canvas
        self.canvas = tk.Canvas(self, bg="white", width=1800, height=900)
        self.canvas.grid(column=0, row=1, sticky="nsew")
        # Input data
        self.datalabel = tk.Label(self.frame, text="Choose input data: ")
        self.datalabel.grid(column=0, row=0)
        self.data = ttk.Combobox(self.frame, width=50)
        # Append all available covid data; the last 11 entries of the date
        # list are overwritten with labels for the random datasets.
        for i in range(0,11):
            cl.dates_list[len(cl.dates_list)-i-1]="random {}".format(10-i)
        self.data["values"] = tuple(cl.dates_list)
        self.data.current(193)  # 193 is a good dataset
        print(self.data.current())
        self.data.grid(column=1, row=0)
        self.data.bind("<<ComboboxSelected>>", self.data_algo_change)
        self.data.bind("<<Configure>>", on_combo_configure)
        # Algorithm selector; the index of each entry is the `algo` code
        # dispatched in apply_algorithm.
        self.algolabel = tk.Label(self.frame, text="Choose algorithm :")
        self.algolabel.grid(column=0, row=1)
        self.algorithm = ttk.Combobox(self.frame, width=50)
        self.algorithm["values"] = (
            "centered disks | random",  # 0
            "centered disks | LeftToRight",  # 1
            "centered disks | RightToLeft",  # 2
            "centered disks | Painter",  # 3
            "centered disks | MinMin-Stacking (abs)",  # 4
            "centered disks | MinMin-Stacking (rel)",  # 5
            "centered disks | MinSum-Stacking (abs)",  # 6
            "centered disks | MinSum-Stacking (rel)",  # 7
            "hawaiian disks | random",  # 8
            "hawaiian disks | LeftToRight",  # 9
            "hawaiian disks | RightToLeft",  # 10
            "hawaiian disks | Painter",  # 11
            "hawaiian disks | our Stacking",  # 12
            "pie charts | random",  # 13
            "pie charts | PainterRandom",  # 14
            "pie charts | RightToLeft",  # 15
            "pie charts | Painter",  # 16
            "pie charts | our Stacking",  # 17
            "squares | Painter+heuristic",  # 18
            "squares | Painter+random rotations",  # 19
            "squares | random Stacking+heuristic rotations",  # 20
            "squares | random Stacking+random rotations",  # 21
            "squares | our Stacking",  # 22
        )
        self.algorithm.current(0)
        self.algorithm.grid(column=1, row=1)
        self.algorithm.bind("<<ComboboxSelected>>", self.data_algo_change)
        self.algorithm.bind("<<Configure>>", on_combo_configure)
    # TODO: split this into {initialize, flush}
    def initialize_data(self):
        """Reset the per-symbol containers and (re)load the background map
        image, recording its dimensions as the drawing area size."""
        self._maps = {}
        self.circles = []
        self.pie_piece_sets = {}
        self.pies = []
        # Geometry by background: the map image defines the pixel space all
        # symbols are placed into.
        self.world_image = tk.PhotoImage(file=r"assets/test4.png")
        self.canvas.create_image(0, 0, image=self.world_image, anchor="nw")
        self.screen_height = self.world_image.height()
        self.screen_width = self.world_image.width()
        logging.info(self.screen_height)
        logging.info(self.screen_width)
    # prepares the data for the proportional symbols
    def prepare_data(self):
        """Convert the downloaded case data (and generated random data) into
        the geometric symbol sets used by the algorithms: nested circles,
        pie charts and mosaic squares, keyed by dataset index in
        self.data_sets / self.pie_sets / self.pie_piece_sets /
        self.square_sets."""
        # calculates pixel coordinates from latitude and longitude
        def latLongToPoint(lat, long, h, w):
            """Return (x,y) for lat, long inside a box."""
            lat = -lat + 90
            long = long + 180  # shift longitude into [0, 360]
            y = lat / 180
            x = long / 360
            x = int(x * w)
            y = int(y * h)
            return x, y
        # flattens the dataframe into rows [0, 0, lat, lon, conf+1, dead+1, rec+1],
        # keeping only rows where all three counts are positive
        def changeStructureFromPanda(df):
            myData = []
            for lat, lon, conf, dead, rec in zip(
                df["latitude"],
                df["longitude"],
                df["confirmed_cases"],
                df["deaths"],
                df["recovered"],
            ):
                if conf > 0 and dead > 0 and rec > 0:
                    tmp = [0, 0, lat, lon, conf + 1, dead + 1, 1 + rec]
                    myData.append(tmp)
            return myData
        # for a given data point generate a mosaic square; the three
        # symmetric branches below order the categories by size so the
        # largest category gets the first sub-square
        def createOneSquare(size, case, heightOfImage, widthOfImage):
            square = []
            x, y = latLongToPoint(case[2], case[3], heightOfImage, widthOfImage)
            # corners and center of the square (stored as [y, x])
            center = [y, x]
            x1 = [y + size, x - size]
            x2 = [y + size, x + size]
            x3 = [y - size, x + size]
            # special points and their represented "type"
            x4 = [y - size, x - size]
            x5 = [0, 0, " "]
            x6 = [0, 0, " "]
            last = [" "]
            # data
            allCases = case[4]
            dead = case[5]
            rec = case[6]
            rest = case[4] - dead - rec
            # checks which small square corresponds to which "type"
            if dead >= rec and dead >= rest:
                perc = dead / allCases
                x5[0] = x1[0] + (x2[0] - x1[0]) * perc
                x5[1] = x1[1] + (x2[1] - x1[1]) * perc
                x5[2] = "dead"
                if rec > rest:
                    perc = rec / (rec + rest)
                    x6[0] = x2[0] + (x3[0] - x2[0]) * perc
                    x6[1] = x2[1] + (x3[1] - x2[1]) * perc
                    x6[2] = "rec"
                    last = "rest"
                else:
                    perc = rest / (rec + rest)
                    x6[0] = x2[0] + (x3[0] - x2[0]) * perc
                    x6[1] = x2[1] + (x3[1] - x2[1]) * perc
                    x6[2] = "rest"
                    last = "rec"
                square.append(x1)
                square.append(x2)
                square.append(x3)
                square.append(x4)
                square.append(x5)
                square.append(x6)
                square.append(center)
                square.append(last)
                return square
            if rec >= dead and rec >= rest:
                perc = rec / allCases
                x5[0] = x1[0] + (x2[0] - x1[0]) * perc
                x5[1] = x1[1] + (x2[1] - x1[1]) * perc
                x5[2] = "rec"
                if rest > dead:
                    perc = rest / (rest + dead)
                    x6[0] = x2[0] + (x3[0] - x2[0]) * perc
                    x6[1] = x2[1] + (x3[1] - x2[1]) * perc
                    x6[2] = "rest"
                    last = "dead"
                else:
                    perc = dead / (rest + dead)
                    x6[0] = x2[0] + (x3[0] - x2[0]) * perc
                    x6[1] = x2[1] + (x3[1] - x2[1]) * perc
                    x6[2] = "dead"
                    last = "rest"
                square.append(x1)
                square.append(x2)
                square.append(x3)
                square.append(x4)
                square.append(x5)
                square.append(x6)
                square.append(center)
                square.append(last)
                return square
            if rest >= dead and rest >= rec:
                perc = rest / allCases
                x5[0] = x1[0] + (x2[0] - x1[0]) * perc
                x5[1] = x1[1] + (x2[1] - x1[1]) * perc
                x5[2] = "rest"
                if rec > dead:
                    perc = rec / (rec + dead)
                    x6[0] = x2[0] + (x3[0] - x2[0]) * perc
                    x6[1] = x2[1] + (x3[1] - x2[1]) * perc
                    x6[2] = "rec"
                    last = "dead"
                else:
                    perc = dead / (rec + dead)
                    x6[0] = x2[0] + (x3[0] - x2[0]) * perc
                    x6[1] = x2[1] + (x3[1] - x2[1]) * perc
                    x6[2] = "dead"
                    last = "rec"
                square.append(x1)
                square.append(x2)
                square.append(x3)
                square.append(x4)
                square.append(x5)
                square.append(x6)
                square.append(center)
                square.append(last)
                return square
        # generates some random data which has some nice properties
        # (symbols clustered in the middle third of the map)
        def generateRandomData(numberOfCircles,maxRadius):
            circles=[]
            pies=[]
            piePieces=[]
            squares=[]
            for i in range(0,numberOfCircles):
                randomRadius=np.random.randint(maxRadius/3, maxRadius)
                x=np.random.randint(0.1*self.screen_height,0.9*self.screen_height)
                y=np.random.randint(0.33* self.screen_width,0.66* self.screen_width)
                # appending everything: nested circle radii r, 2r/3, 4r/9
                tmp=[x,y,randomRadius,2*randomRadius/3,4*randomRadius/9]
                circles.append(tmp)
                pies.append([int(x), int(y), int(randomRadius)])
                piePieces.append([2, 4])
                tmpSquare = createOneSquare(
                    randomRadius,
                    [0,0,np.random.randint(-45,45),np.random.randint(-45,45),randomRadius,randomRadius/4,2*randomRadius/4],
                    self.screen_height, self.screen_width
                )
                squares.append(tmpSquare)
            print("generateRandomData:")
            print(f"Number of pies: {len(pies)}")
            print(f"Number of circles: {len(circles)}")
            return circles,pies,piePieces,squares
        # Builds circles/pies/squares for one dataset and stores them under
        # `index` in the per-symbol dictionaries.
        # NOTE(review): the first two loops read/mutate the enclosing
        # `my_data` (closure) while the main loop reads the parameter
        # `myData` -- callers pass the same list for both, but the mix
        # should be unified.
        def generateGeomData(myData, index):
            # transport current values from config singleton
            maximalSize = self.controller.symbolic_config.maximalSize
            scalingFactor = self.controller.symbolic_config.scalingFactor
            lowerBoundCases = self.controller.symbolic_config.lowerBoundCases
            for case in list(my_data):
                if case[4] < lowerBoundCases:
                    my_data.remove(case)
            valueList = []
            for case in list(my_data):
                valueList.append(case[4])
            valueList = sorted(valueList, reverse=True)
            # sometimes use the 4th biggest confirmed value for scaling because of USA INDIA BRASIL
            if len(valueList) == 1:
                factor = valueList[0]
            if len(valueList) == 0:
                factor = 1
            if len(valueList) <= 50 and len(valueList) > 1:
                factor = valueList[1]
            if len(valueList) > 50:
                factor = valueList[3]
            multiplicativeconstant = maximalSize / np.log(1 + scalingFactor)
            circles = []
            pies = []
            piePieces = []
            squares = []
            # generating circles,pies and squares
            for case in myData:
                lat = case[2]
                long = case[3]
                x, y = latLongToPoint(lat, long, self.screen_height, self.screen_width)
                # making sure data makes sense
                if case[4] < case[6]:
                    continue
                if case[4] == 0:
                    conf = 1
                else:
                    conf = case[4]
                if case[5] == 0 or math.isnan(case[5]):
                    dead = 1
                else:
                    dead = case[5]
                if case[6] == 0 or math.isnan(case[6]):
                    rec = 1
                else:
                    rec = case[6]
                # nestedCircles: logarithmic scaling so huge counts do not
                # dominate the map
                confAdjusted = multiplicativeconstant * np.log(
                    1 + scalingFactor * conf / factor
                )
                deadAdjusted = multiplicativeconstant * np.log(
                    1 + scalingFactor / 2 * dead / factor
                )
                recAdjusted = multiplicativeconstant * np.log(
                    1 + scalingFactor / 2 * (rec + dead) / factor
                )
                r = confAdjusted
                rprime2 = deadAdjusted
                rprime1 = recAdjusted
                if rprime2 < 1 or rprime1 < 1 or r < 1:
                    r = r + 1
                    rprime2 = rprime2 + 1
                    rprime1 = rprime1 + 1
                circles.append([int(y), int(x), int(r), int(rprime1), int(rprime2)])
                # pies: boundary angles proportional to deaths and recoveries
                pies.append([int(y), int(x), int(r)])
                p1 = (case[5] / case[4]) * 2 * np.pi
                p2 = (((case[6] / case[4])) * 2 * np.pi) + p1
                piePieces.append([p1, p2])
                # squares
                tmpSquare = createOneSquare(
                    r, case, self.screen_height, self.screen_width
                )
                squares.append(tmpSquare)
            if len(circles) == 0:
                print(
                    f"Data quality issues: Circles array is empty for dataset no. {index} ..."
                )
                return
            print("generateGeomData:")
            print(f"Number of pies: {len(pies)}")
            print(f"Number of circles: {len(circles)}")
            self.data_sets[index] = circles
            self.pie_piece_sets[index] = piePieces
            self.pie_sets[index] = pies
            self.square_sets[index] = squares
        # structure: loc,loc,lat,long,conf,dead,recovered
        # Prepare npy or create circles
        # NOTE(review): this outer range(3) loop re-processes every
        # downloaded dataset three times, appending new indices each pass --
        # verify whether that duplication is intentional.
        for i in range(3):
            # flush previous set of circles
            my_data = []
            cur_data_set_idx = len(self.data_sets)
            # append downloaded datasets
            for _, df in cl.cases_by_date.items():
                # generate geomData
                my_data = changeStructureFromPanda(df)
                generateGeomData(my_data, cur_data_set_idx)
                cur_data_set_idx = len(self.data_sets)
        # Eleven random datasets, matching the eleven "random N" entries
        # appended to the data combobox in create_widgets.
        for k in range(0,11):
            # generate random data
            circles,pies,piePieces,squares = generateRandomData(60, 50)
            self.data_sets[cur_data_set_idx]= circles
            self.pie_piece_sets[cur_data_set_idx] = piePieces
            self.pie_sets[cur_data_set_idx] = pies
            self.square_sets[cur_data_set_idx] = squares
            cur_data_set_idx=cur_data_set_idx+1
class PaintingProgramPage(tk.Frame):
    """A frame to demonstrate the custom TK functions.

    Shows the monkey-patched canvas helpers (create_circle,
    create_circle_arc) and implements a minimal freehand drawing tool via
    mouse-drag events.
    """
    def __init__(self, parent, controller):
        super().__init__(parent)
        self.parent = parent
        self.controller = controller
        # Last drag position; None means no stroke is in progress.
        self.old_x = None
        self.old_y = None
        self.create_widgets()
        # Single circle
        self.canvas.create_circle(150, 40, 20, fill="#bbb", outline="")
        # Arcs
        self.canvas.create_circle(100, 120, 50, fill="blue", outline="#DDD", width=4)
        self.canvas.create_circle_arc(
            100, 120, 48, fill="green", outline="", start=45, end=140
        )
        self.canvas.create_circle_arc(
            100, 120, 48, fill="green", outline="", start=275, end=305
        )
        self.canvas.create_circle_arc(
            100,
            120,
            45,
            style="arc",
            outline="white",
            width=6,
            start=270 - 25,
            end=270 + 25,
        )
        # Example for rectangle
        # args: (x1, y1, x2, y2, **kwargs)
        self.canvas.create_rectangle(350, 200, 400, 250, fill="red")
    def create_widgets(self):
        """Create the drawing canvas, bind the paint/reset mouse handlers and
        add the navigation button back to the symbolic maps page."""
        self.canvas = tk.Canvas(self, bg="white")
        self.canvas.pack(side="top", fill="both", expand=True)
        self.canvas.bind("<B1-Motion>", self.paint)
        self.canvas.bind("<ButtonRelease-1>", self.reset)
        self.symbolic_button = tk.Button(
            self,
            text="Go to Symbolic Maps Page",
            command=lambda: self.controller.show_frame(SymbolMapsPage),
        )
        self.symbolic_button.pack(side="bottom")
    def paint(self, event):
        """Mouse-drag handler: draw a smooth line segment from the previous
        drag position to the current one."""
        if self.old_x and self.old_y:
            self.canvas.create_line(
                self.old_x,
                self.old_y,
                event.x,
                event.y,
                width=2,
                fill="black",
                capstyle=tk.ROUND,
                smooth=tk.TRUE,
                splinesteps=36,
            )
        self.old_x = event.x
        self.old_y = event.y
    def reset(self, event):
        """Mouse-release handler: end the current stroke."""
        self.old_x, self.old_y = None, None
class MatplotlibPage(tk.Frame):
    """Test the matplotlib instance here in case it is needed.

    Embeds a matplotlib figure (plus its navigation toolbar) inside a
    tkinter frame using FigureCanvasTkAgg.
    """
    def __init__(self, parent, controller):
        super().__init__(parent)
        self.parent = parent
        self.controller = controller
        self.create_widgets()
    def create_widgets(self):
        """Build a label, a single-subplot figure with a demo line plot, the
        Tk canvas wrapper and the standard matplotlib toolbar."""
        self.label = tk.Label(self, text="Matplotlib... plotting a simple data series")
        self.label.pack(side="top")
        self.fig = Figure(figsize=(5, 5), dpi=100)
        self.subfig = self.fig.add_subplot(111)
        self.subfig.plot([1, 2, 3, 4, 5, 6], [1, 3, 2, 5, 3, 6])
        self.canvas = FigureCanvasTkAgg(self.fig, self)
        self.canvas.draw()
        self.toolbar = NavigationToolbar2Tk(self.canvas, self)
        self.toolbar.update()
        # self.canvas._tkcanvas.pack(side="top", fill="both", expand=True)
class SettingsPage(tk.Frame):
    """A frame to configure the whole project using a singleton.

    Exposes entries for the three scaling parameters held on
    controller.symbolic_config and writes them back (then rebuilds the
    symbol data) when the save button is pressed.
    """
    def __init__(self, parent, controller):
        super().__init__(parent)
        self.parent = parent
        self.controller = controller
        self.create_widgets()
    def create_widgets(self):
        """Create the three labelled entry fields, pre-filled from the config
        singleton, and the save-and-navigate button."""
        # Title
        self.title_label = tk.Label(
            self,
            text="Settings of the current algorithms",
        )
        self.title_label.grid(column=0, row=0, sticky=tk.W + tk.E)
        # Settings
        self.frame = tk.Frame(self)
        self.frame.grid(column=0, row=1, sticky=tk.W + tk.E)
        self.maximalSize_label = tk.Label(self.frame, text="Maximal size:")
        self.maximalSize_label.grid(column=0, row=1, sticky=tk.W)
        self.maximalSize_entry = tk.Entry(self.frame, show=None)
        self.maximalSize_entry.grid(column=1, row=1, sticky=tk.W)
        self.maximalSize_entry.delete(0, tk.END)
        self.maximalSize_entry.insert(
            0, str(self.controller.symbolic_config.maximalSize)
        )
        self.scalingFactor_label = tk.Label(self.frame, text="Scaling factor:")
        self.scalingFactor_label.grid(column=0, row=2, sticky=tk.W)
        self.scalingFactor_entry = tk.Entry(self.frame, show=None)
        self.scalingFactor_entry.grid(column=1, row=2, sticky=tk.W)
        self.scalingFactor_entry.delete(0, tk.END)
        self.scalingFactor_entry.insert(
            0, str(self.controller.symbolic_config.scalingFactor)
        )
        self.lowerBoundCases_label = tk.Label(self.frame, text="Lower bound cases:")
        self.lowerBoundCases_label.grid(column=0, row=3, sticky=tk.W)
        self.lowerBoundCases_entry = tk.Entry(self.frame, show=None)
        self.lowerBoundCases_entry.grid(column=1, row=3, sticky=tk.W)
        self.lowerBoundCases_entry.delete(0, tk.END)
        self.lowerBoundCases_entry.insert(
            0, str(self.controller.symbolic_config.lowerBoundCases)
        )
        self.separator = tk.Label(self.frame, text="")
        self.separator.grid(column=0, row=4, sticky=tk.W)
        self.symbolic_button = tk.Button(
            self,
            text="Save & show first Symbolic Maps page",
            command=lambda: self.save_and_to_symbolic_maps(),
        )
        self.symbolic_button.grid(column=0, row=2, sticky=tk.W + tk.E)
    def save_and_to_symbolic_maps(self):
        """Persist the entry values into the config singleton, rebuild the
        symbol map data from scratch and switch to the symbol maps page.

        Raises ValueError if an entry does not contain an integer.
        """
        self.controller.symbolic_config.maximalSize = int(self.maximalSize_entry.get())
        self.controller.symbolic_config.scalingFactor = int(
            self.scalingFactor_entry.get()
        )
        self.controller.symbolic_config.lowerBoundCases = int(
            self.lowerBoundCases_entry.get()
        )
        self.controller.frames[SymbolMapsPage].flush_everything()
        self.controller.frames[SymbolMapsPage].initialize_data()
        self.controller.frames[SymbolMapsPage].prepare_data()
        self.controller.show_frame(SymbolMapsPage)
        self.controller.frames[SymbolMapsPage].data_algo_change(None)
class AboutPage(tk.Frame):
    """Placeholder "about" frame with a label and a navigation button back to
    the symbol maps page."""
    def __init__(self, parent, controller):
        super().__init__(parent)
        self.parent = parent
        self.controller = controller
        self.create_widgets()
    def create_widgets(self):
        """Build the info label and the navigation button."""
        self.label = tk.Label(
            self,
            text="Some info about the algos, complexity, us, maybe link to paper...",
        )
        self.label.pack(side="top")
        self.symbolic_button = tk.Button(
            self,
            text="Go to Symbolic Maps Page",
            command=lambda: self.controller.show_frame(SymbolMapsPage),
        )
        self.symbolic_button.pack(side="bottom")
def main():
    """Entry point of the geomlab: build the Tk application and run its event loop."""
    application = GeomLabApp()
    application.mainloop()
<file_sep>/_arch/StackingOrderAlgorithms/StackingorderAlgorithms.py
# -*- coding: utf-8 -*-
"""
Created on Sat May 9 13:25:34 2020
@author: Philip
"""
from PIL import Image
import numpy as np
import time
import cv2
import math
import random
from PIL import Image, ImageDraw
# Colour palette, once as CSS-style strings for PIL's ImageDraw and once as
# [R, G, B] lists for the numpy/OpenCV code below.
# NOTE(review): the original German labels (grau/rot/blau/gruen = grey/red/
# blue/green) do not all match the actual RGB values (e.g. color1 is a light
# pink, color4 a light cyan) -- translated verbatim; verify against the
# renderers before relying on the names.
color3PIL = "rgb(100, 100, 100)"  # grey
color2PIL = "rgb(249, 102, 94)"  # red
color1PIL = "rgb(254, 201, 201)"  # blue (value looks light pink)
color4PIL = "rgb(200, 239, 245)"  # green (value looks light cyan)
color3 = [100, 100, 100]  # grey
color2 = [249, 102, 94]  # red
color1 = [254, 201, 201]  # blue (value looks light pink)
color4 = [200, 239, 245]  # green (value looks light cyan)
def latLongToPoint(lat, long, h, w):
    """Project (latitude, longitude) in degrees onto pixel coordinates of a
    w x h equirectangular map image; returns (x, y) as ints measured from
    the top-left corner."""
    # Latitude [-90, 90] (north positive) -> fraction of height from the top;
    # longitude [-180, 180] -> fraction of width from the left.
    y_fraction = (-lat + 90) / 180
    x_fraction = (long + 180) / 360
    return int(x_fraction * w), int(y_fraction * h)
def calculatePointOnCircle(c, angle):
    """Return the (x, y) point at `angle` radians on circle c = (x, y, r)."""
    center_x, center_y, radius = c[0], c[1], c[2]
    return np.cos(angle) * radius + center_x, np.sin(angle) * radius + center_y
#############################################################################
###############calculating the occluded circumference of a circle############
###################given the circles which lie above it######################
#############################################################################
# Computes the two intersection points of circles p, q = (x, y, r).
# The caller must guarantee that the circles actually intersect.
def calcIntersectionPoints(p, q):
    """Return the two intersection points of circles p and q as numpy arrays.

    Assumes the circles intersect; otherwise the square root yields NaN.
    """
    dx = q[0] - p[0]
    dy = q[1] - p[1]
    dist = np.sqrt(dx * dx + dy * dy)
    # Unit vector pointing from p's centre towards q's centre.
    ux = dx / dist
    uy = dy / dist
    # Distance from p's centre to the chord, and half the chord length.
    along = (p[2] * p[2] - q[2] * q[2] + dist * dist) / (2 * dist)
    ortho = np.sqrt(p[2] * p[2] - along * along)
    first = np.array([0.0, 0.0])
    second = np.array([0.0, 0.0])
    first[0] = p[0] + along * ux - ortho * uy
    first[1] = p[1] + along * uy + ortho * ux
    second[0] = p[0] + along * ux + ortho * uy
    second[1] = p[1] + along * uy - ortho * ux
    return first, second
# Classifies the relative position of two circles p, q = (x, y, r) and, when
# they properly intersect, also returns the two intersection points.
# Status codes: 0 = proper intersection, 1 = p strictly inside q,
# 2 = q strictly inside p, 3 = disjoint, 4 = concentric centres.
def calculateCircleIntersection(p, q):
    dx = p[0] - q[0]
    dy = p[1] - q[1]
    centre_dist = np.sqrt(dx * dx + dy * dy)
    if centre_dist > (p[2] + q[2]):
        return None, None, 3
    if centre_dist + q[2] < p[2]:
        return None, None, 2
    if centre_dist == 0:
        return None, None, 4
    if centre_dist + p[2] < q[2]:
        return None, None, 1
    first, second = calcIntersectionPoints(p, q)
    return first, second, 0
# Angle of the direction from point/centre p towards q, in [0, 2*pi).
def calculateRelativeAngle(p, q):
    """Return the polar angle of q as seen from p, normalised to [0, 2*pi)."""
    raw = np.arctan2(q[1] - p[1], q[0] - p[0])
    if raw < 0:
        return 2 * np.pi + raw
    return raw
# Given two circles p and q, computes the arc of p's boundary that is covered
# by q, expressed as a pair of angles on p.
def calculateSingleCoverInterval(p, q):
    """Return (start, end, covered) for the arc of circle p hidden by q.

    `start`/`end` are angles in [0, 2*pi) on p's boundary; `covered` is True
    when q occludes part (or all) of p's boundary, False when the circles
    are disjoint or q lies strictly inside p.
    """
    p1, p2, check = calculateCircleIntersection(p, q)
    if check == 0:
        a1 = calculateRelativeAngle(p, p1)
        a2 = calculateRelativeAngle(p, p2)
        aMiddle = calculateRelativeAngle(p, q)
        if (a1 < aMiddle and aMiddle < a2) or (aMiddle < a2 and a2 < a1):
            # BUG FIX: this branch previously returned only (a1, a2); every
            # caller unpacks three values, so reaching it raised ValueError.
            return a1, a2, True
        else:
            return a2, a1, True
    if check == 1:
        # p lies entirely inside q: the whole boundary is covered.
        return 0, 2 * np.pi, True
    if check == 4:
        # Concentric circles are treated as fully covered.
        return 0, 2 * np.pi, True
    if check == 2 or check == 3:
        # q inside p, or the circles are disjoint: nothing is covered.
        return None, None, False
# test if a circle is completely covered by the intervals I
def testCompletlyCovered(I):
    """Return True when the [start, end] intervals in `I` (sorted by start
    angle) jointly cover the full circle.

    `I` must be non-empty.  NOTE: mutates `I` in place -- end angles of
    intervals that wrap past 2*pi get 2*pi added.
    """
    # Sweep target: one full turn starting at the first interval's start.
    a0 = 2 * np.pi + I[0][0]
    # x is the angle covered so far (starts at the first interval's end).
    x = I[0][1]
    i = 0
    while x < a0:
        # Unwrap intervals whose end lies before their start.
        if I[i][1] < I[i][0]:
            I[i][1] = I[i][1] + 2 * np.pi
        if I[i][0] < x:
            # Interval overlaps the covered range: extend the sweep front.
            x = np.max([x, I[i][1]])
        else:
            # Gap between the covered range and the next interval.
            return False
        i = i + 1
        if i == len(I):
            # All intervals consumed; covered iff we reached a full turn.
            if x >= a0:
                return True
            else:
                return False
    return True
# If the circle is not completely covered there exists a starting event such
# that, sweeping from it, no covering interval wraps across the start.
def findStartingPoint(I):
    """Return the index in the sorted endpoint list `I` where a sweep may
    safely start (no interval straddles the start position).

    `I` is a list of [angle, flag] pairs sorted by angle, flag 0 marking an
    interval start and flag 1 an interval end.
    """
    depth = 0
    # Track the minimum nesting depth seen; the sweep can start just after
    # the last position where the depth is minimal.
    # (Renamed from `min`, which shadowed the builtin of the same name.)
    min_depth = 0
    min_index = 0
    for i in range(0, len(I)):
        if I[i][1] == 0:
            depth = depth + 1
        else:
            depth = depth - 1
        if depth == min_depth:
            min_index = i
        else:
            if depth < min_depth:
                min_depth = depth
                min_index = i
    if min_depth == 0:
        return 0
    if min_index == len(I) - 1:
        return 0
    else:
        return min_index + 1
# Merges the sorted endpoint events in I into maximal disjoint covered
# intervals, scanning circularly from position `shift` so that no covering
# interval wraps across the scan start.
# Returns a flat list [start0, end0, start1, end1, ...] of angles.
def calculateMaxIntervalsWithShift(I, shift):
    total = len(I)
    depth = 0
    boundaries = []
    for step in range(total):
        event = I[(shift + step) % total]
        if depth == 0:
            # At depth zero this event opens a new merged interval.
            boundaries.append(event[0])
            depth += 1
            continue
        # flag 0 = interval start, flag 1 = interval end.
        if event[1] == 0:
            depth += 1
        else:
            depth -= 1
        if depth == 0:
            # Back at depth zero: the merged interval closes here.
            boundaries.append(event[0])
    return boundaries
# Sums the angular lengths of disjoint intervals on a circle, given as a flat
# list [start0, end0, start1, end1, ...]; intervals may wrap past 2*pi.
def calcCirc(Arr):
    covered = 0
    for start, end in zip(Arr[0::2], Arr[1::2]):
        span = end - start
        if span < 0:
            # Interval wraps around the 0 / 2*pi seam.
            span = span + 2 * np.pi
        covered = covered + span
    return covered
# Given circle c and the list N of circles stacked above it, returns how much
# of c's circumference (in radians) is hidden by the circles in N.
def calculateCoveredCircumference(c, N):
    events = []     # flat [angle, flag] endpoints; flag 0 = start, 1 = end
    intervals = []  # [start, end] pairs for the full-cover test
    for other in N:
        start, end, covered = calculateSingleCoverInterval(c, other)
        if covered:
            intervals.append([start, end])
            events.append([start, 0])
            events.append([end, 1])
    events.sort()
    intervals.sort()
    if len(events) == 0:
        return 0
    if testCompletlyCovered(intervals):
        # Entire boundary occluded.
        return 2 * np.pi
    sweep_start = findStartingPoint(events)
    merged = calculateMaxIntervalsWithShift(events, sweep_start)
    return calcCirc(merged)
#############################################################################
################Different utilities and costs for circles####################
#############################################################################
def calculateAbsoluteBoundaryUtility(circle, Neighbours):
    """Visible boundary length of `circle` (visible arc angle times radius)
    given the circles in `Neighbours` stacked above it."""
    visible_angle = 2 * np.pi - calculateCoveredCircumference(circle, Neighbours)
    return visible_angle * circle[2]
def calculateRelativeBoundaryUtility(circle, Neighbours):
    """Visible fraction of `circle`'s boundary, expressed as an arc angle in
    radians, given the circles in `Neighbours` stacked above it."""
    return 2 * np.pi - calculateCoveredCircumference(circle, Neighbours)
#############################################################################
################Pie Charts####################
#############################################################################
# given a circle, the angles of the dividing lines of its pie, and the
# circles that lie above it, calculates all disjoint intervals in which the
# first dividing line can be positioned so that every dividing line stays
# visible
def caculateFeasibleIntervall(c, piePiecesC, N):
    """Return the feasible [start, end] intervals for the first dividing
    line of pie `c` (offsets of the other lines in `piePiecesC`), given the
    occluding circles `N`; None when no feasible position exists."""
    CoverIntervals1D = []
    CoverIntervals2D = []
    # Collect the arcs of c's boundary covered by the circles above it.
    for n in N:
        a, b, bo = calculateSingleCoverInterval(c, n)
        if bo == True:
            CoverIntervals2D.append([a, b])
            CoverIntervals1D.append([a, 0])
            CoverIntervals1D.append([b, 1])
    CoverIntervals1D.sort()
    CoverIntervals2D.sort()
    if len(CoverIntervals1D) == 0:
        # Nothing occluded: the first line can go anywhere.
        return [[0, 2 * np.pi]]
    if not testCompletlyCovered(CoverIntervals2D):
        shift = findStartingPoint(CoverIntervals1D)
        CoverArray = calculateMaxIntervalsWithShift(CoverIntervals1D, shift)
        coverArray2D = []
        for i in range(0, int((len(CoverArray) / 2))):
            coverArray2D.append([CoverArray[2 * i], CoverArray[2 * i + 1]])
        # For every dividing-line offset, shift each covered interval back by
        # that offset: positioning the first line there would hide that line.
        distances = []
        for j in range(0, len(piePiecesC)):
            distances.append(piePiecesC[j])
        for d in distances:
            for intervall in coverArray2D:
                a = intervall[0] - d  # -0.1
                if a < 0:
                    a = a + 2 * np.pi
                b = intervall[1] - d  # +0.1
                if b < 0:
                    b = b + 2 * np.pi
                CoverIntervals2D.append([a, b])
                CoverIntervals1D.append([a, 0])
                CoverIntervals1D.append([b, 1])
        CoverIntervals1D.sort()
        CoverIntervals2D.sort()
        # Re-run the sweep over the union of original and shifted intervals;
        # the gaps between merged covered intervals are the feasible regions.
        if not testCompletlyCovered(CoverIntervals2D):
            shift = findStartingPoint(CoverIntervals1D)
            CoverArray = calculateMaxIntervalsWithShift(CoverIntervals1D, shift)
            result = []
            result.append([CoverArray[len(CoverArray) - 1], CoverArray[0]])
            for i in range(0, int((len(CoverArray) / 2) - 1)):
                result.append([CoverArray[2 * i + 1], CoverArray[2 * i + 2]])
            return result
        else:
            return None
    else:
        return None
# Picks the position for the first dividing line from the feasible intervals.
# Output: (angle, length of the longest feasible interval).
def calculateAngle(intervals):
    """Return (angle, length), placing the first dividing line at the middle
    of the longest feasible interval; (None, None) when `intervals` is None.

    An empty list yields (0, -1), matching the original sentinel values.
    """
    if intervals is None:  # idiom fix: was `== None`
        return None, None
    best_length = -1
    best_angle = 0
    for interval in intervals:
        a = interval[0]
        b = interval[1]
        if a > b:
            # Interval wraps past 2*pi; unwrap the end for the length.
            b = b + 2 * np.pi
        length = np.absolute(b - a)
        if length > best_length:
            best_length = length
            best_angle = a + length / 2
    return best_angle, best_length
# Given a pie chart circle c, its dividing-line offsets piePiecesC and the
# circles N stacked above it, returns the angle for the first dividing line
# together with its visibility value, or (None, None) when infeasible.
def caculateOneAnglePie(c, piePiecesC, N):
    feasible = caculateFeasibleIntervall(c, piePiecesC, N)
    if feasible is None:
        return None, None
    angle, value = calculateAngle(feasible)
    if angle is None:
        return None, None
    return angle, value
# calculates, for an arrangement of pies, the pie which should be the lowest
# one in the stacking; returns the index of that circle in the list and the
# angle for its first dividing line
def calculateLowestPie(circles, piePieces):
    """Pick the pie to place lowest: the one whose dividing lines can stay
    visible with maximal weighted value.  Returns (index, angle); falls back
    to (0, 0) when no pie is feasible even after dropping dividing lines.

    NOTE(review): only the first two entries of each piePieces element are
    copied below, so any additional dividing lines are ignored here.
    """
    locPiePieces = []
    for p in piePieces:
        locPiePieces.append([p[0], p[1]])
    hasFound = False
    resultIndex = 0
    resultMax = -1
    resultAngle = 0
    while hasFound == False:
        # Try every pie as the lowest one; keep the feasible pie whose
        # visibility value (scaled by radius) is largest.
        for i in range(0, len(circles)):
            tmpC = circles[i]
            tmpPieces = locPiePieces[i]
            tmpN = circles[:i] + circles[i + 1 :]
            angle, value = caculateOneAnglePie(tmpC, tmpPieces, tmpN)
            if not angle == None:
                hasFound = True
                if value * tmpC[2] > resultMax:
                    resultAngle = angle
                    resultMax = value * tmpC[2]
                    resultIndex = i
        if hasFound == True:
            return resultIndex, resultAngle
        # No pie feasible: relax the problem by dropping the last dividing
        # line of every pie and try again; give up when none are left.
        if len(locPiePieces[0]) == 0:
            break
        if len(locPiePieces) == 0:
            break
        for p in locPiePieces:
            p.pop(len(p) - 1)
    return 0, 0
#############################################################################
##########################Stacking algorithms################################
#############################################################################
# Computes a bottom-to-top stacking for pie charts.
# Input circles: [[x,y,r], ...]; piePieces: [[p1,p2,...], ...] where 0 is
# always a dividing line and every circle has at least one more.
# Output: the reordered circles, their pie pieces in the same order, and the
# chosen angle of the 0 dividing line for every pie.
def pieStacking(circles, piePieces):
    remaining_circles = circles.copy()
    remaining_pieces = piePieces.copy()
    order = []
    pieces_in_order = []
    angles = []
    # Repeatedly extract the pie best suited to be the lowest of the
    # remaining ones.
    while len(remaining_circles) > 0:
        index, angle = calculateLowestPie(remaining_circles, remaining_pieces)
        order.append(remaining_circles.pop(index))
        pieces_in_order.append(remaining_pieces.pop(index))
        angles.append(angle)
    return order, pieces_in_order, angles
# given some circles of the form (x,y,r1,r2,...) where r1>r2>...
# returns a hawaiianStacking
# form: for each circle with subcircles there are now multiple circles
# output has form [[x1,y1,r1],[x2,y2,r2],....,[x1',y1',r1'],[x2',y2'.r2']...]
def hawaiianStacking(circles):
    """Compute a 'hawaiian' stacking for circles with subcircles.

    Phase 1 orders the outer circles bottom-up via the max-min absolute
    boundary utility.  Phase 2 finds, for every stacked circle, the middle of
    its largest visible boundary interval and slides all of its subcircles
    towards that boundary point so each ring stays (partly) visible.

    Args:
        circles: list of [x, y, r1, r2, ...] with r1 > r2 > ...

    Returns:
        Flat list of [x, y, r] circles (one entry per radius of every input
        circle) in drawing order.
    """
    local = circles.copy()
    stacking = []
    stackingAllCircles = []
    # Phase 1: greedy bottom-up ordering of the outer circles.
    for i in range(0, len(circles)):
        index, value = calculateLowestCircleMaxMin(local, "absolute")
        tmp = local.pop(index)
        stacking.append(tmp)
    # Phase 2: place the subcircles of each stacked circle.
    for i in range(0, len(stacking)):
        # Only circles placed later (higher in the stack) can occlude circle i.
        N = stacking[i + 1 :]
        visbleInt = caculateVisibleIntervall(stacking[i], N)
        # NOTE(review): caculateVisibleIntervall implicitly returns None when
        # the circle is completely covered; this loop would then raise —
        # confirm inputs guarantee some visibility.
        maximum = -1
        angle = 0
        # Find the widest visible angular interval and aim for its middle.
        for interval in visbleInt:
            if interval[1] < interval[0]:
                # Interval wraps past 2*pi; unwrap before measuring it.
                interval[1] = interval[1] + 2 * np.pi
            tmp = np.absolute(interval[1] - interval[0])
            if tmp > maximum:
                maximum = tmp
                angle = interval[0] + (interval[1] - interval[0]) / 2
        onCircleX, onCircleY = calculatePointOnCircle(
            [int(stacking[i][0]), int(stacking[i][1]), int(stacking[i][2])], angle
        )
        # Direction from the boundary point back towards the centre, scaled
        # by the outer radius to roughly unit length.
        deltaX = stacking[i][0] - onCircleX
        deltaY = stacking[i][1] - onCircleY
        deltaX = deltaX / stacking[i][2]  # (np.sqrt(deltaX*deltaX +deltaY*deltaY))
        deltaY = deltaY / stacking[i][2]  # (np.sqrt(deltaX*deltaX +deltaY*deltaY))
        # Each subcircle is shifted so it touches the chosen boundary point.
        for j in range(2, len(stacking[i])):
            offSet = 0
            x0 = onCircleX + deltaX * (stacking[i][j] - offSet)
            y0 = onCircleY + deltaY * (stacking[i][j] - offSet)
            r0 = stacking[i][j]
            stackingAllCircles.append([x0, y0, r0])
    return stackingAllCircles
# [for hawaiian] calculates the visible parts of a circle und the circles N
def caculateVisibleIntervall(c, N):
    """Return the angular intervals of circle `c` not covered by circles `N`.

    Each covering neighbour contributes one [start, end] angle interval; the
    complement of the merged cover is the visible part.

    Returns:
        List of [start, end] angle pairs; [[0, 0]] when nothing covers `c`.
        NOTE(review): implicitly returns None when `c` is completely covered
        (the `if not testCompletlyCovered` branch is skipped) — confirm that
        callers can cope with that.
    """
    CoverIntervals1D = []
    CoverIntervals2D = []
    for n in N:
        a, b, bo = calculateSingleCoverInterval(c, n)
        if bo == True:
            # Keep both a [start, end] form and a flat, tagged form
            # (0 = interval opens, 1 = interval closes) for the sweep below.
            CoverIntervals2D.append([a, b])
            CoverIntervals1D.append([a, 0])
            CoverIntervals1D.append([b, 1])
    CoverIntervals1D.sort()
    CoverIntervals2D.sort()
    if len(CoverIntervals1D) == 0:
        return [[0, 0]]
    if not testCompletlyCovered(CoverIntervals2D):
        shift = findStartingPoint(CoverIntervals1D)
        CoverArray = calculateMaxIntervalsWithShift(CoverIntervals1D, shift)
        # Gaps between consecutive merged cover intervals are visible; the
        # first visible interval wraps from the last cover end back to the
        # first cover start.
        visibleArray2D = []
        visibleArray2D.append([CoverArray[len(CoverArray) - 1], CoverArray[0]])
        for i in range(0, int((len(CoverArray) / 2)) - 1):
            visibleArray2D.append([CoverArray[2 * i + 1], CoverArray[2 * i + 2]])
        return visibleArray2D
# calculates the lowest circle (for circles without subcircles)
# for the cost Max the Min of visible area
def calculateLowestCircleMaxMin(Circles, mode):
    """Pick the circle (without subcircles) to stack lowest.

    Maximises the boundary utility of the candidate against all remaining
    circles.

    Args:
        Circles: list of [x, y, r] circles.
        mode: "absolute" selects the absolute boundary utility, anything
            else the relative one.

    Returns:
        (index, value): index of the chosen circle and its utility.
    """
    bestValue = -1
    for candidate in range(len(Circles)):
        others = Circles[:candidate] + Circles[candidate + 1 :]
        if mode == "absolute":
            utility = calculateAbsoluteBoundaryUtility(Circles[candidate], others)
        else:
            utility = calculateRelativeBoundaryUtility(Circles[candidate], others)
        if utility > bestValue:
            bestIndex = candidate
            bestValue = utility
    return bestIndex, bestValue
# calculates the lowest circle (for circles with subcircles)
# for the cost: Max the Min of the minimal subcircle of visible area
# mode:"absolute" or "relative"
def calculateLowestCircleMaxMinMinK(Circles, mode):
    """Pick the circle to stack lowest among circles WITH subcircles:
    maximise the minimum boundary utility over all of a circle's subcircles.

    In "absolute" mode, when every candidate's minimum is 0, fall back to
    the candidate with the best minimum *non-zero* subcircle utility.

    Args:
        Circles: list of [x, y, r1, r2, ...] with r1 > r2 > ...
        mode: "absolute" or "relative".

    Returns:
        (index, value): index of the chosen circle and its score.
    """
    maximum = -1
    maximumNonZero = -1
    for i in range(0, len(Circles)):
        tmp = Circles[:i] + Circles[i + 1 :]
        tmp = np.array(tmp)
        if not len(tmp) == 0:
            # Only x, y and the outer radius of the neighbours matter.
            tmp = tmp[:, :3]
        tmpMin = 100000000000
        tmpMinNonZero = 1000000000
        for k in range(0, len(Circles[0]) - 2):
            tmpCircle = [Circles[i][0], Circles[i][1], Circles[i][2 + k]]
            if mode == "absolute":
                tmpValue = calculateAbsoluteBoundaryUtility(tmpCircle, tmp)
            else:
                tmpValue = calculateRelativeBoundaryUtility(tmpCircle, tmp)
            if tmpValue < tmpMin:
                tmpMin = tmpValue
            # BUG FIX: the original compared against tmpMin here, a condition
            # that can never hold right after the update above, so
            # tmpMinNonZero always kept its sentinel and the non-zero
            # fallback below effectively picked index 0.
            if tmpValue < tmpMinNonZero and tmpValue > 0:
                tmpMinNonZero = tmpValue
        if tmpMinNonZero > maximumNonZero:
            indexNonZero = i
            maximumNonZero = tmpMinNonZero
        if tmpMin > maximum:
            index = i
            maximum = tmpMin
    # NOTE(review): a circle whose subcircle utilities are all zero keeps the
    # sentinel tmpMinNonZero and therefore wins the fallback — confirm that
    # fully hidden circles are meant to be stacked lowest.
    if maximum == 0 and maximumNonZero > 0 and mode == "absolute":
        return indexNonZero, maximumNonZero
    return index, maximum
# calculates the lowest circle (for circles with subcircles)
# for the cost: Max the Min of the sum of the subcircle of visible area
# mode:"absolute" or "relative"
def calculateLowestCircleMaxMinSumK(Circles, mode):
    """Pick the circle to stack lowest among circles WITH subcircles:
    maximise the sum of boundary utilities over a circle's subcircles.

    Args:
        Circles: list of [x, y, r1, r2, ...] with r1 > r2 > ...
        mode: "absolute", "relative", or "weighted" (absolute utilities
            down-weighted quadratically towards the outer rings).

    Returns:
        (index, value): index of the chosen circle and its summed score.
    """
    bestScore = -1
    for candidate in range(len(Circles)):
        others = np.array(Circles[:candidate] + Circles[candidate + 1 :])
        if len(others) != 0:
            # Only x, y and the outer radius of the neighbours matter.
            others = others[:, :3]
        score = 0
        for ring in range(len(Circles[0]) - 2):
            subCircle = [
                Circles[candidate][0],
                Circles[candidate][1],
                Circles[candidate][2 + ring],
            ]
            if mode == "absolute":
                score = score + calculateAbsoluteBoundaryUtility(subCircle, others)
            elif mode == "relative":
                score = score + calculateRelativeBoundaryUtility(subCircle, others)
            elif mode == "weighted":
                weight = 1 / (((len(Circles[0]) - 1) - ring) ** 2)
                score = score + weight * calculateAbsoluteBoundaryUtility(
                    subCircle, others
                )
        if score > bestScore:
            bestIndex = candidate
            bestScore = score
    return bestIndex, bestScore
# input: circles nested-List [[x,y,r1,r2,r3....][x',y',r1',....],...] r1>r2>...
# output: nested-List [[x,y,r1,r2,r3....][x',y',r1',....],...] r1>r2>...
# maximizes minimum of minimal subcircles
def maxMinMinKStacking(circles, mode):
    """Order circles-with-subcircles bottom-up, maximising the minimum of
    each circle's minimal subcircle utility.

    Args:
        circles: nested list [[x, y, r1, r2, ...], ...] with r1 > r2 > ...
        mode: "absolute" or "relative" (forwarded to the utility).

    Returns:
        The circles reordered into stacking order (input is not mutated).
    """
    remaining = circles.copy()
    ordering = []
    while remaining:
        chosen, _score = calculateLowestCircleMaxMinMinK(remaining, mode)
        ordering.append(remaining.pop(chosen))
    return ordering
# input: circles nested-List [[x,y,r1,r2,r3....][x',y',r1',....],...] r1>r2>...
# output: nested-List [[x,y,r1,r2,r3....][x',y',r1',....],...] r1>r2>...
# maximizes minimum of sum of the subcircles
def maxMinSumKStacking(circles, mode):
    """Order circles-with-subcircles bottom-up, maximising the minimum of the
    summed subcircle utilities.

    Args:
        circles: nested list [[x, y, r1, r2, ...], ...] with r1 > r2 > ...
        mode: "absolute", "relative", or "weighted" (forwarded to the utility).

    Returns:
        The circles reordered into stacking order (input is not mutated).
    """
    remaining = circles.copy()
    ordering = []
    while remaining:
        chosen, _score = calculateLowestCircleMaxMinSumK(remaining, mode)
        ordering.append(remaining.pop(chosen))
    return ordering
# painter only defined for circles without subcircles
def painterAlgorithm(circle):
    """Painter's-algorithm stacking for circles WITHOUT subcircles: largest
    radius first (bottom of the stack).

    Args:
        circle: list of [x, y, r] entries.

    Returns:
        A new list sorted by radius, descending; the input is not mutated.
    """
    # BUG FIX: the original body referenced the global name `circles` instead
    # of the `circle` parameter, raising NameError (or silently using stale
    # module state) for any other input.
    ordered = circle.copy()
    ordered.sort(key=lambda entry: entry[2], reverse=True)
    return ordered
################################################################################
###############################draw functions ###############################
def drawPieSolution(circles, cPieces, angles, image):
    """Render every pie of a computed stacking onto `image` via drawPie.

    The three list arguments are parallel: circle i is drawn with piece
    list i at angle i.
    """
    for idx in range(len(circles)):
        drawPie(circles[idx], cPieces[idx], angles[idx], image)
# only for 4 colors
def drawPieSolution2(circles, cPieces, angles, image):
    """Render every pie of a stacking onto a PIL image via drawPie2
    (4-colour variant).  The three list arguments are parallel.
    """
    for idx in range(len(circles)):
        drawPie2(circles[idx], cPieces[idx], angles[idx], image)
def drawSolution(stacking, image):
    """Draw a stacking of circles-with-subcircles onto `image` with OpenCV.

    Each ring is filled with a grey level that darkens towards the innermost
    radius; the outermost circle additionally gets a black outline.
    """
    numberOfRadi = len(stacking[0]) - 2
    for i in range(0, len(stacking)):
        tmp = stacking[i]
        for k in range(0, numberOfRadi):
            # Darker grey for inner (smaller) radii.
            colorValue = 200 - (150 * ((k + 1) / numberOfRadi))
            # NOTE(review): stacking entries appear to store (row, col) and
            # are swapped here into cv2's (x, y) point order — confirm.
            x = int(tmp[1])
            y = int(tmp[0])
            r = int(tmp[2 + k])
            cv2.circle(
                image,
                (x, y),
                r,
                [colorValue, colorValue, colorValue],
                thickness=-1,
                lineType=8,
                shift=0,
            )
            # Slightly darker rim around every ring.
            cv2.circle(
                image,
                (x, y),
                r,
                [colorValue - 19, colorValue - 19, colorValue - 19],
                thickness=2,
                lineType=8,
                shift=0,
            )
        # Black outline for the outermost circle.
        cv2.circle(
            image,
            (int(tmp[1]), int(tmp[0])),
            int(tmp[2]),
            [0, 0, 0],
            thickness=2,
            lineType=8,
            shift=0,
        )
def drawSolutionWeighted(stacking, image):
    """Like drawSolution but with a slightly different grey ramp (starts at
    the outermost ring) and thinner outlines.
    """
    numberOfRadi = len(stacking[0]) - 2
    for i in range(0, len(stacking)):
        tmp = stacking[i]
        for k in range(0, numberOfRadi):
            # Grey level: k (not k + 1) so the outermost ring is lightest.
            colorValue = 200 - (150 * (k / numberOfRadi))
            # NOTE(review): (tmp[1], tmp[0]) swap into cv2 (x, y) — confirm.
            x = int(tmp[1])
            y = int(tmp[0])
            r = int(tmp[2 + k])
            cv2.circle(
                image,
                (x, y),
                r,
                [colorValue, colorValue, colorValue],
                thickness=-1,
                lineType=8,
                shift=0,
            )
            # Thin, slightly darker rim around every ring.
            cv2.circle(
                image,
                (x, y),
                r,
                [colorValue - 10, colorValue - 10, colorValue - 10],
                thickness=1,
                lineType=8,
                shift=0,
            )
        # Black outline for the outermost circle.
        cv2.circle(
            image,
            (int(tmp[1]), int(tmp[0])),
            int(tmp[2]),
            [0, 0, 0],
            thickness=1,
            lineType=8,
            shift=0,
        )
# only for 4 colors
def drawing4H(stacking, image):
    """Draw a hawaiian stacking, cycling fills through the module-level
    colours color1..color3 (one colour per ring of a group of three).

    NOTE(review): despite the "4 colors" comment, the `j == 3` / color4 / bo
    branches are dead code — `j == 2` resets j to -1 before `j == 3` can
    ever be reached, so only three colours cycle.  The chain was likely
    meant to be if/elif; confirm intent before changing.
    """
    bo = False
    j = 0
    for i in range(0, len(stacking)):
        tmp = stacking[i]
        # (row, col) swapped into cv2's (x, y) point order.
        x = int(tmp[1])
        y = int(tmp[0])
        r = int(tmp[2])
        if j == 0:
            color = color1
        if j == 1:
            color = color2
        if j == 2:
            color = color3
            j = -1
        if j == 3:
            color = color4
            j = -1
            bo = True
        j = j + 1
        cv2.circle(image, (x, y), r, color, thickness=-1, lineType=8, shift=0)
        if j == 1:
            # j just wrapped back to 1, i.e. this was the first (outermost)
            # circle of a group: give it a dark outline.
            cv2.circle(
                image,
                (x, y),
                r,
                [22 - 19, 22 - 19, 22 - 19],
                thickness=2,
                lineType=8,
                shift=0,
            )
            bo = False
# only for 4 colors
def drawing4Normal(stacking, image):
    """Draw circles with three concentric colour rings (color1..color3 from
    outermost to innermost radius) and a black outline, in stacking order.
    """
    for i in range(0, len(stacking)):
        tmp = stacking[i]
        # (row, col) swapped into cv2's (x, y) point order.
        x = int(tmp[1])
        y = int(tmp[0])
        r = int(tmp[2])
        # NOTE(review): tmp[2]/tmp[3]/tmp[4] are passed to cv2.circle without
        # the int() conversion applied to r — fine only if already integers.
        cv2.circle(image, (x, y), tmp[2], color1, thickness=-1, lineType=8, shift=0)
        cv2.circle(image, (x, y), tmp[3], color2, thickness=-1, lineType=8, shift=0)
        cv2.circle(image, (x, y), tmp[4], color3, thickness=-1, lineType=8, shift=0)
        cv2.circle(image, (x, y), r, [0, 0, 0], thickness=2, lineType=8, shift=0)
# only for 4 colors
def drawing4Pie(circles, cPieces, angles, image):
    """Render every pie of a stacking onto a PIL image via drawPie2 and
    return the image.  The three list arguments are parallel.
    """
    for idx in range(len(circles)):
        drawPie2(circles[idx], cPieces[idx], angles[idx], image)
    return image
def drawPie(circle, pieces, angle, image):
    """Draw a debug pie (grey disc, red 0-line, green dividing lines) onto
    `image` with OpenCV.

    Args:
        circle: [x, y, r] of the pie.
        pieces: dividing-line angle offsets relative to `angle`.
        angle: orientation of the 0-dividing line (radians).
        image: numpy image to draw on.
    """
    c = circle
    cPieces = pieces
    # (x, y) are swapped into cv2's column/row point order below.
    x = int(c[0])
    y = int(c[1])
    r = int(c[2])
    colorValue = 100
    # BUG FIX: the original drew on the module-level global `data1`,
    # silently ignoring the `image` parameter.
    cv2.circle(
        image,
        (y, x),
        r,
        [colorValue, colorValue, colorValue],
        thickness=-1,
        lineType=8,
        shift=0,
    )
    cv2.circle(
        image,
        (y, x),
        r,
        [colorValue - 19, colorValue - 19, colorValue - 19],
        thickness=2,
        lineType=8,
        shift=0,
    )
    # The 0-dividing line in red.
    x, y = calculatePointOnCircle(c, angle)
    cv2.line(image, (int(y), int(x)), (int(c[1]), int(c[0])), (0, 0, 255), thickness=2)
    # Remaining dividing lines in green.
    for p in cPieces:
        x, y = calculatePointOnCircle(c, angle + p)
        cv2.line(
            image, (int(y), int(x)), (int(c[1]), int(c[0])), (0, 255, 0), thickness=2
        )
# only for 4 colors
def drawPie2(circle, pieces, angle, image):
    """Draw one three-slice pie chart onto a PIL image.

    `angle` is the orientation of the 0-dividing line (math radians);
    `pieces` must hold exactly two further dividing-line offsets
    (pieces[0] and pieces[1] are the only entries read).  Angles are
    converted to PIL's clockwise degree convention with a +90 degree shift.
    """
    img1 = ImageDraw.Draw(image)
    x = circle[0]
    y = circle[1]
    r = circle[2]
    # Bounding box of a slightly larger disc used as the black border;
    # (x, y) are swapped into PIL's (left, top, right, bottom) box order.
    shape = [y - r - 4, x - r - 4, y + r + 4, x + r + 4]
    anglesClock = [0, 0, 0, 0, 0]  # NOTE(review): slot 4 is never used
    anglesClock[0] = -angle * 360 / (2 * np.pi) + 90
    anglesClock[3] = 360 - angle * 360 / (2 * np.pi) + 90
    anglesClock[1] = 360 - (pieces[1] + angle) * 360 / (2 * np.pi) + 90
    anglesClock[2] = 360 - (pieces[0] + angle) * 360 / (2 * np.pi) + 90
    # Black backing disc acts as the pie's outline.
    img1.ellipse(shape, fill=(0, 0, 0))
    shape = [y - r, x - r, y + r, x + r]
    img1.pieslice(
        shape, start=anglesClock[0], end=anglesClock[1], fill=color1PIL, outline="black"
    )
    img1.pieslice(
        shape, start=anglesClock[1], end=anglesClock[2], fill=color2PIL, outline="black"
    )
    img1.pieslice(
        shape, start=anglesClock[2], end=anglesClock[3], fill=color3PIL, outline="black"
    )
###############################################################################
###################################Main########################################
###############################################################################
# Init some Images
start_time = time.time()
# Canvas dimensions (overwritten below once test4.png is loaded).
w, h = 1280, 720
# Blank near-white drawing canvas.
data1 = np.zeros((h, w, 3), dtype=np.uint8)
data1[:, :, :] = 254
data1[500, 10, :] = 125  # NOTE(review): single grey pixel — debug leftover?
# One-row strip that the per-algorithm panels get vconcat'ed onto later.
dataAll = np.zeros((1, w, 3), dtype=np.uint8)
dataAll[:, :, :] = 254
# NOTE(review): absolute Windows path — not portable; the same file is also
# re-read below via the relative path "test4.png".
imgTest = Image.open(r"C:\Users\Mayer\Desktop\SS2020\LAB\Programme\test4.png")
circles = []
pies = []
piePieces = []
################################initialize Data###############################
##############################################################################
# structure: loc,loc,lat,long,conf,dead,recovered
myData1 = np.load("testdata.npy", allow_pickle=True)
data1 = cv2.imread("test4.png")
h = len(data1)
w = len(data1[0])
circles = []
pies = []
piePieces = []
# Copy the numpy rows into plain nested lists so they can be edited/removed.
myData = []
for case in myData1:
    tmp = []
    for slot in case:
        tmp.append(slot)
    myData.append(tmp)
# Drop small outbreaks (fewer than 5000 confirmed cases); iterate over a
# snapshot because entries are removed during iteration.
for case in list(myData):
    if case[4] < 5000:
        myData.remove(case)
# Track the largest confirmed count (maximum), remember the runner-up
# (maximumsecond), and derive the log scale factor maximum2.
maximum = 1
for case in myData:
    if case[4] < 1:
        tmp = 1
    else:
        tmp = case[4]
    if tmp > maximum:
        maximumsecond = maximum
        maximum = tmp
        maximum2 = np.log(4 + case[4] * 100 / maximumsecond)
# NOTE(review): maximumsecond/maximum2 stay unbound if no case exceeds 1 —
# only safe because the >= 5000 filter above guarantees large counts.
# Convert each case into a ringed circle (confirmed/recovered/dead) plus a
# two-line pie description.
for case in myData:
    lat = case[2]
    long = case[3]
    x, y = latLongToPoint(lat, long, h, w)
    # +5 offsets keep the counts strictly positive for the divisions below.
    case[4] = case[4] + 5
    case[5] = case[5] + 5
    case[6] = case[6] + 5
    if case[4] < case[6]:
        continue
    if case[4] == 0:
        conf = 1
    else:
        conf = np.log(4 + case[4] * 100 / maximumsecond)
    if case[5] == 0 or math.isnan(case[5]):
        dead = 1
    else:
        dead = case[5]
    if case[6] == 0 or math.isnan(case[6]):
        rec = 1
    else:
        rec = case[6]
    # Outer radius from the log-scaled confirmed count; inner radii chosen so
    # ring AREAS are proportional to the dead/recovered shares.
    conf = 125 * conf / maximum2
    dead = np.sqrt(conf * conf * (dead / case[4]))
    rec = np.sqrt(conf * conf * (rec / case[4]) + dead * dead)
    r = conf
    rprime2 = dead
    rprime1 = rec
    rprime0 = 1
    circles.append([int(y), int(x), int(r), int(rprime1), int(rprime2)])
    pies.append([int(y), int(x), int(r)])
    # a0..a are computed but never read below — NOTE(review): dead code?
    a0 = rprime0 * rprime0
    a1 = rprime1 * rprime1
    a2 = rprime2 * rprime2
    a = r * r
    # Pie dividing lines: dead share, then dead + recovered share (radians).
    p1 = (case[5] / case[4]) * 2 * np.pi
    p2 = (((case[6] / case[4])) * 2 * np.pi) + p1
    piePieces.append([p1, p2])
###############################################################################
########################doing some Algorithms and Drawing######################
print("started drawing")
# Panel 1: MaxMinSumK (absolute) stacking rendered with concentric rings.
drawing4Normal(maxMinSumKStacking(circles, "absolute"), data1)
cv2.putText(
    data1,
    "MaxMinSumK_absolute:",
    (40, 40),
    cv2.FONT_HERSHEY_SIMPLEX,
    1,
    (255, 0, 0, 255),
    3,
)
# Black separator strip along the bottom three rows of the panel.
data1[h - 1, :, :] = 0
data1[h - 2, :, :] = 0
data1[h - 3, :, :] = 0
dataAll = cv2.vconcat([data1])
img22 = Image.fromarray(data1, "RGB")  # NOTE(review): img22 is never used
data1 = cv2.imread("test4.png")
# Panel 2: hawaiian stacking.
drawing4H(hawaiianStacking(circles), data1)
cv2.putText(
    data1, "Hawaiian:", (40, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 0, 0, 255), 3
)
data1[h - 1, :, :] = 0
data1[h - 2, :, :] = 0
data1[h - 3, :, :] = 0
dataAll = cv2.vconcat([dataAll, data1])
data1 = cv2.imread("test4.png")
# Panel 3: pie-chart stacking, drawn with PIL and round-tripped through a
# temp file to get it back as a cv2 array.
circlesToDraw, piecesToDraw, anglesToDraw = pieStacking(pies, piePieces)
drawPieSolution2(circlesToDraw, piecesToDraw, anglesToDraw, imgTest)
imgTest.save("PieChartsTmp.png")
np_im3 = cv2.imread("PieChartsTmp.png")
np_im3 = cv2.cvtColor(np_im3, cv2.COLOR_BGR2RGB)
cv2.putText(
    np_im3, "PieCharts:", (40, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 0, 0, 255), 3
)
np_im3[h - 1, :, :] = 0
np_im3[h - 2, :, :] = 0
np_im3[h - 3, :, :] = 0
# Stack all panels vertically, then write and show the composite.
dataAll = cv2.vconcat([dataAll, np_im3])
img4 = Image.fromarray(dataAll, "RGB")
img4.save("All.png")
img4.show()
| f2f888c37ed531c03cbed7218cd89b04b946c606 | [
"Markdown",
"TOML",
"Python",
"INI"
] | 9 | Python | rscircus/GeomLabCode | e3890e72443c642775e6b2cb0188d5d0672765ec | 1bf117b51aff5fc255c7e754e22aecfd0a9713dd |
refs/heads/master | <file_sep>#! /bin/bash
# Target host running `iperf3 -s` (first CLI argument).
ip=$1
# Upload direction (client -> server); grep keeps only the SUM lines.
# NOTE(review): single-stream runs print no SUM line, so the first two
# tests produce no output — confirm intent.
echo "1 client 1 way"
iperf3 -c $ip -t 30 | grep SUM
echo "1 client 1 way 128k"
iperf3 -c $ip -t 30 -w 128k | grep SUM
echo "30 client 1 way"
iperf3 -c $ip -t 30 -P 30 | grep SUM
echo "30 client 1 way 128k"
iperf3 -c $ip -t 30 -P 30 -w 128K | grep SUM
# -R
# Reverse direction (server -> client) via iperf3 -R.
# NOTE(review): the echo labels below repeat the forward-direction text.
echo "1 client 1 way"
iperf3 -c $ip -t 30 -R | grep SUM
echo "1 client 1 way 128k"
iperf3 -c $ip -t 30 -w 128k -R | grep SUM
echo "30 client 1 way"
iperf3 -c $ip -t 30 -P 30 -R | grep SUM
echo "30 client 1 way 128k"
iperf3 -c $ip -t 30 -P 30 -w 128K -R | grep SUM
| 19585d18c454dcacd72745869c6c805f39398959 | [
"Shell"
] | 1 | Shell | Aiden128/firefighter | d4f15529390367b41b779a322ba20d2c52dcf772 | 21050f9145beb263c5491d4a5742c3be4ef0e827 |
refs/heads/master | <file_sep>this is used for Java script JS file.
| 519cead4aa6cce3140df47354f2ae5059e82f176 | [
"JavaScript"
] | 1 | JavaScript | yesubabup/TestRepo1 | 7e78363bb7919f4552744c458d50fce89b2e7f8d | 687b43e3f840b0a105e1046d4f78535ac9146fd7 |
refs/heads/master | <file_sep>import { Component, OnInit } from '@angular/core';
import { Location } from '@angular/common';
import { ChatService } from '../../services/chat.service';
import { IMessage } from '../../interfaces/message.interface';
import { AngularFireAuth } from 'angularfire2/auth';
// Chat screen: streams the most recent messages and lets the signed-in
// user post new ones or sign out.
@Component({
  selector: 'app-chat',
  templateUrl: './chat.component.html'
})
export class ChatComponent implements OnInit {
  // Messages currently displayed (service emits newest-first; reversed below).
  messages: IMessage[];
  // Two-way bound content of the message input box.
  message: string;
  // Minimal profile of the signed-in user: { name, uid }.
  user: any = {};
  constructor(private chatService: ChatService, private afAuth: AngularFireAuth) { }
  ngOnInit() {
    // Reverse so the newest message ends up last (rendered at the bottom).
    this.chatService.allMessages().subscribe((messages: IMessage[]) => {
      this.messages = messages.reverse();
    });
    // Capture display name and uid once authentication resolves.
    this.afAuth.authState.subscribe(user => {
      this.user.name = user.displayName;
      this.user.uid = user.uid;
    });
  }
  // Sends the current input unless it is empty/whitespace, then clears it.
  sendMessage(): void {
    if (this.message.trim().length === 0) {
      return;
    }
    this.chatService.addMessage(this.message, this.user);
    this.message = '';
  }
  // Signs out and reloads the page.
  // NOTE(review): `location` here is the browser global, not the Angular
  // Location service (which is imported but unused in this file).
  logout(): void {
    this.afAuth.auth.signOut();
    location.reload();
  }
}
<file_sep>import { Injectable } from '@angular/core';
import { Observable } from 'rxjs/Observable';
import { AngularFirestore, AngularFirestoreCollection } from 'angularfire2/firestore';
import { IMessage } from '../interfaces/message.interface';
/**
 * Firestore-backed chat service: streams recent messages and persists new
 * ones into the `chats` collection.
 */
@Injectable()
export class ChatService {
  private itemsCollection: AngularFirestoreCollection<IMessage>;

  constructor(private afs: AngularFirestore) { }

  /** Streams the five most recent messages, newest first. */
  allMessages(): Observable<IMessage[]> {
    this.itemsCollection = this.afs.collection<IMessage>('chats', ref => {
      return ref.orderBy('date', 'desc').limit(5);
    });
    return this.itemsCollection.valueChanges();
  }

  /** Persists a message stamped with the current time and sender identity. */
  addMessage(message: string, user: any) {
    const payload: IMessage = {
      name: user.name,
      message,
      date: Date.now(),
      uid: user.uid
    };
    return this.itemsCollection.add(payload);
  }
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { AngularFireAuth } from 'angularfire2/auth';
import * as firebase from 'firebase/app';
/** Login screen: tracks auth state and triggers OAuth popup sign-in. */
@Component({
  selector: 'app-login',
  templateUrl: './login.component.html'
})
export class LoginComponent implements OnInit {
  // True once Firebase reports an authenticated user.
  loggedIn: boolean = false;

  constructor(private afAuth: AngularFireAuth) { }

  ngOnInit(): void {
    this.afAuth.authState.subscribe(user => {
      if (user) {
        this.loggedIn = true;
      }
    });
  }

  /** Opens the OAuth popup for the requested provider ('google' | 'twitter'). */
  login(platform: string) {
    switch (platform) {
      case 'google':
        this.afAuth.auth.signInWithPopup(new firebase.auth.GoogleAuthProvider());
        break;
      case 'twitter':
        this.afAuth.auth.signInWithPopup(new firebase.auth.TwitterAuthProvider());
        break;
    }
  }
}
| c6e1572ccc5fd204006cd3543a8f112b6665de5c | [
"TypeScript"
] | 3 | TypeScript | cristobaldominguez95/firechat | d51838341d3025bc78bd6db70678e46d8e2924d8 | ce764a102ca25da33373a67be05c37f1478923cc |
refs/heads/master | <repo_name>ivandevp/awesome-rank-api<file_sep>/models/campus.js
'use strict';
module.exports = function(sequelize, DataTypes) {
var Campus = sequelize.define('Campus', {
name: DataTypes.STRING
}, {
classMethods: {
associate: function(models) {
// associations can be defined here
}
}
});
return Campus;
};<file_sep>/seeders/20161204161058-answer-seed.js
'use strict';
var models = require('../models');
// Lookup helper over a list of question rows ({ id, description, ... }).
var qaHelper = function (questions) {
  this.questions = questions;
};

// Returns the id of the LAST question whose description equals `name`,
// or null when no question matches.
qaHelper.prototype.getQuestionIdByName = function (name) {
  var match = null;
  for (var i = 0; i < this.questions.length; i++) {
    if (this.questions[i].description === name) {
      match = this.questions[i].id;
    }
  }
  return match;
};
module.exports = {
up: function (queryInterface, Sequelize) {
return models.Question.all()
.then(function (questions) {
var qa = new qaHelper(questions);
return queryInterface.bulkInsert('answers', [{
order: 1,
description: 'Nunca tiene un estado emocional regulado. Sus sentimientos predominantes son tristeza, ira, frustración.',
questionId: qa.getQuestionIdByName('Emotional status'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 2,
description: 'Muy pocas veces tiene un estado emocional regulado. Sus sentimientos predominantes tristeza, ira, frustración.',
questionId: qa.getQuestionIdByName('Emotional status'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 3,
description: 'A veces presenta un estado emocional regulado. Oscila entre un estado de optimismo y calma, a un estado de tristeza, ira, frustración.',
questionId: qa.getQuestionIdByName('Emotional status'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 4,
description: 'La mayoría de las veces presenta un estado emocional regulado en el que predomina una sensación de bienestar, optimismo y calma; tolerando la frustración y el estrés.',
questionId: qa.getQuestionIdByName('Emotional status'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 5,
description: 'La mayoría de las veces presenta un estado emocional regulado en el que predomina una sensación de bienestar, optimismo y calma; tolerando la frustración y el estrés. Además, colabora con mantener este estado en sus compañeras.',
questionId: qa.getQuestionIdByName('Emotional status'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 1,
description: 'Nunca recibe un reto con entusiasmo, sino que más bien se frustra y siente que la situación rebasa sus recursos.',
questionId: qa.getQuestionIdByName('Challenge taker'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 2,
description: 'Muy pocas veces recibe un reto con entusiasmo, sino que más bien se frustra y siente que la situación rebasa sus recursos.',
questionId: qa.getQuestionIdByName('Challenge taker'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 3,
description: 'A veces recibe un reto con entusiasmo, pero otras veces se frustra de modo anticipado.',
questionId: qa.getQuestionIdByName('Challenge taker'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 4,
description: 'La mayoría de las veces recibe un reto con entusiasmo, confiando en que tiene los recursos para superarlo con éxito.',
questionId: qa.getQuestionIdByName('Challenge taker'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 5,
description: 'Siempre recibe un reto con entusiasmo, confiando en que tiene los recursos para superarlo con éxito.',
questionId: qa.getQuestionIdByName('Challenge taker'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 1,
description: 'No es capaz de adaptarse a nuevas situaciones: cambios repentinos y nuevos equipos. No tolera la incertidumbre. Ante una falla no busca una solución.Presenta serias dificultades para adaptarse a situaciones nuevas: cambios repentinos y nuevos equipos. Le resulta complicado tolerar la incertidumbre y cuando falla se demora en proponer una solución.',
questionId: qa.getQuestionIdByName('Adaptability'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 2,
description: 'Presenta serias dificultades para adaptarse a situaciones nuevas: cambios repentinos y nuevos equipos. Le resulta complicado tolerar la incertidumbre y cuando falla se demora en proponer una solución.',
questionId: qa.getQuestionIdByName('Adaptability'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 3,
description: 'A veces puede adaptarse a situaciones nuevas, pero no es un comportamiento constante.',
questionId: qa.getQuestionIdByName('Adaptability'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 4,
description: 'La mayoría de las veces puede adaptarse a situaciones nuevas: afrontando con optimismo cambios repentinos, nuevos equipos. Puede tolerar la incertidumbre y a veces propone soluciones creativas. Ante una falla, cambia de estrategia rápidamente.',
questionId: qa.getQuestionIdByName('Adaptability'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 5,
description: 'Siempre se adapta a situaciones nuevas: afronta con optimismo cambios repentinos y nuevos equipos. Lidia con la incertidumbre proponiendo soluciones creativas. Ante una falla, cambia de estrategia rápidamente.',
questionId: qa.getQuestionIdByName('Adaptability'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 1,
description: 'Se le dificulta exponer sus ideas con claridad, lo que denota un vago entendimiento del proceso que ha realizado. Ante preguntas no sabe cómo responder. Su lenguaje no verbal denota inseguridad y no ayuda a transmitir su mensaje.',
questionId: qa.getQuestionIdByName('Thinking out loud'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 2,
description: 'Hace esfuerzos por exponer sus ideas con claridad, pero presenta serias dificultades para lograrlo. Ante preguntas, contesta de manera vaga y no logra explicar lo que se le consultó. Su lenguaje no verbal denota inseguridad y no ayuda a transmitir su mensaje.',
questionId: qa.getQuestionIdByName('Thinking out loud'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 3,
description: 'Logra exponer sus ideas con claridad con preparación previa, pero ante preguntas le cuesta responder de modo claro y no logra contestar lo que se le consultó. Su lenguaje no verbal denota poca seguridad.',
questionId: qa.getQuestionIdByName('Thinking out loud'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 4,
description: 'La mayoría de las veces logra expresar sus ideas con claridad. Ante preguntas, la mayoría de las veces responde con una explicación clara y en pocas ocasiones es necesario repreguntarle para conseguir la respuesta que se buscaba. Su lenguaje no verbal denota seguridad y ayuda a transmitir su mensaje.',
questionId: qa.getQuestionIdByName('Thinking out loud'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 5,
description: 'Siempre expone sus ideas con claridad, logrando que los demás entiendan lo que quiere comunicar. Su lenguaje no verbal denota seguridad y ayuda a transmitir su mensaje (adecuado tono de voz y movimientos corporales expresivos). Ante preguntas, responde adecuadamente de modo que no es necesario repreguntarle.',
questionId: qa.getQuestionIdByName('Thinking out loud'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 1,
description: 'No escucha con atención cuando otra alumna está hablando. Sea porque conversa con otra compañera o porque está distraída con otro elemento. No tiene buena actitud para recibir feedbacks.',
questionId: qa.getQuestionIdByName('Listening'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 2,
description: 'Muy pocas veces escucha con atención cuando otra alumna está hablando. En varias ocasiones interrumpe sea porque conversa con otra compañera o porque está distraída con otro elemento. No acepta fácilmente las propuestas de otros, sino que se opone sin dar otras alternativas. No tiene buena actitud para recibir feedbacks.',
questionId: qa.getQuestionIdByName('Listening'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 3,
description: 'En algunas ocasiones presta atención, pero en otras no escucha a sus compañeras. A veces valora las propuestas de otros. En ocasiones tiene buena actitud para recibir feedback.',
questionId: qa.getQuestionIdByName('Listening'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 4,
description: 'La mayoría de las veces escucha con atención cuando otra alumna está hablando. Valora las propuestas de sus compañeras, aunque no esta de acuerdo. Tiene una buena actitud para recibir feedback.',
questionId: qa.getQuestionIdByName('Listening'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 5,
description: 'Siempre escucha con atención las ideas de sus compañeras. Valora la opinión de las demás y construye nuevas ideas rescatando los puntos de vista de otras personas, además de los suyos. Tiene una buena actitud para recibir feedback.',
questionId: qa.getQuestionIdByName('Listening'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 1,
description: 'Nunca ofrece ideas para realizar el trabajo, ni propone sugerencias para su mejora. Sus intervenciones no son pertinentes ni contribuyen.',
questionId: qa.getQuestionIdByName('Participation'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 2,
description: 'Muy pocas veces ofrece ideas para realizar el trabajo, pero nunca propone sugerencias para su mejora. Sus intervenciones no son pertinentes ni contribuyen.',
questionId: qa.getQuestionIdByName('Participation'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 3,
description: 'Algunas veces ofrece ideas para realizar el trabajo, pero pocas veces para mejorarlo. En ocasiones sus intervenciones no son pertinentes ni contribuyen.',
questionId: qa.getQuestionIdByName('Participation'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 4,
description: 'Casi siempre ofrece ideas para realizar el trabajo y para su mejora. La mayoría de las veces participa con ideas que aportan al desarrollo y discusión en el squad o en el salón. Sus intervenciones son pertinentes y contribuyen.',
questionId: qa.getQuestionIdByName('Participation'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 5,
description: 'Siempre ofrece ideas para realizar el trabajo y propone sugerencias para su mejora. Participa con ideas que aportan al desarrollo y discusión en el squad o en el salón. Sus intervenciones son pertinentes y contribuyen.',
questionId: qa.getQuestionIdByName('Participation'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 1,
description: 'No comunica sus errores, sino que intenta ocultarlos.',
questionId: qa.getQuestionIdByName('Error communication'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 2,
description: 'Muy pocas veces comunica sus errores, y cuando lo hace no tiene una propuesta de cómo mejorarlos.',
questionId: qa.getQuestionIdByName('Error communication'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 3,
description: 'Algunas veces comunica sus errores, pero su propuesta para mejorarlos carece de planificación.',
questionId: qa.getQuestionIdByName('Error communication'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 4,
description: 'La mayoría de las veces comunica sus errores y cuando lo hace tiene una propuesta de cómo mejorarlos.',
questionId: qa.getQuestionIdByName('Error communication'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 5,
description: 'Siempre puede comunicar sus dificultades y obstáculos en busca de ayuda. Diseña (sola o con ayuda) un plan para mejorar y no volver a cometer los mismos errores. Asimismo, comunica las complicaciones del grupo, asumiendo parte de la responsabilidad.',
questionId: qa.getQuestionIdByName('Error communication'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 1,
description: 'No muestra respeto por una figura de autoridad, se muestra desafiante y no logra comunicarse de manera adecuada.',
questionId: qa.getQuestionIdByName('Communication & Respect'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 2,
description: 'Muy pocas veces se comunica de manera respetuosa hacia la figura de autoridad, se exaspera al no estar de acuerdo y no logra dar a conocer su punto de vista.',
questionId: qa.getQuestionIdByName('Communication & Respect'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 3,
description: 'Algunas veces logra comunicarse de manera respetuosa hacia la figura de autoridad, pudiendo tener dificultades para comunicar puntos de vista divergentes.',
questionId: qa.getQuestionIdByName('Communication & Respect'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 4,
description: 'La mayoría de las veces se comunica de manera respetuosa hacia la figura de autoridad, incluso cuando tiene un punto de vista diferente.',
questionId: qa.getQuestionIdByName('Communication & Respect'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 5,
description: 'Siempre se comunica a figuras de autoridad en el squad de manera respetuosa.',
questionId: qa.getQuestionIdByName('Communication & Respect'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 1,
description: 'Nunca logra sentirse cómoda ante figurad de autoridad, mostrando un disconfor constante.',
questionId: qa.getQuestionIdByName('Relationship with authority'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 2,
description: 'Muy pocas veces se siente cómoda ante figuras de autoridad. En varias ocasiones guarda silencio, mostrándo dificultades para relacionarse con esta figura y evitando el contacto con ella.',
questionId: qa.getQuestionIdByName('Relationship with authority'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 3,
description: 'Algunas veces logra sentirse cómoda ante figuras de autoridad, pero en otras actúa con nerviosismo y cautela.',
questionId: qa.getQuestionIdByName('Relationship with authority'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 4,
description: 'La mayoría de la veces se siente cómoda ante figuras de autoridad, logrando relacionarse con ellas.',
questionId: qa.getQuestionIdByName('Relationship with authority'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 5,
description: 'Siempre se siente cómoda ante figuras de autoridad, mostrando capacidad de relacionarse con ellas de manera simétrica, logrando una buena comunicación y relación.',
questionId: qa.getQuestionIdByName('Relationship with authority'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 1,
description: 'Nunca logra mantener una actitud positiva en el trabajo.',
questionId: qa.getQuestionIdByName('Possitive attitude'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 2,
description: 'Muy pocas veces logra mantener una actitud positiva en el trabajo, lo que afecta no sólo a ella misma, sino también a su squad.',
questionId: qa.getQuestionIdByName('Possitive attitude'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 3,
description: 'Algunas veces logra una actitud positiva en el trabajo, pero ante distintas circunstancias es posible que se desanime afectándo su desempeño.',
questionId: qa.getQuestionIdByName('Possitive attitude'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 4,
description: 'La mayoría de las veces muestra una actitud positiva ante el trabajo y los nuevos desafíos, centrándose en los aspectos positivos de éste.',
questionId: qa.getQuestionIdByName('Possitive attitude'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 5,
description: 'Siempre muestra una actitud positiva ante el trabajo y los nuevos desafíos que éste trae. No se desanima ante las dificultades, sino que mantiene un buen ánimo, fomentando un buen ambiente en su squad.',
questionId: qa.getQuestionIdByName('Possitive attitude'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 1,
description: 'Nunca demuestra pasión por su trabajo, se encuentra irritable y parece que esto no tiene un sentido para ella.',
questionId: qa.getQuestionIdByName('Passion for work'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 2,
description: 'Muy pocas veces logra transmitir pasión por su trabajo, se ve frustrada constantemente y no logra postergar alguna necesidad por alcanzar un bien mayor.',
questionId: qa.getQuestionIdByName('Passion for work'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 3,
description: 'Algunas veces transmite pasión por su trabajo, pero en circunstancias difíciles parece desmotivarse y olvidarse del objetivo mayor.',
questionId: qa.getQuestionIdByName('Passion for work'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 4,
description: 'La mayoría de las veces demuestra pasión por su trabajo, es decir, disfruta de lo que hace y transmite amor hacia su oficio.',
questionId: qa.getQuestionIdByName('Passion for work'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 5,
description: 'Siempre demuestra pasión por su trabajo, es decir, disfruta lo que hace y contagia a quienes se encuentran cerca de ella. Además, si es necesario postergar ciertas necesidades propias logra visualizar que es por un bien mayor.',
questionId: qa.getQuestionIdByName('Passion for work'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 1,
description: 'No hay evidencias de su trabajo en el repositorio.',
questionId: qa.getQuestionIdByName('Version Control System'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 2,
description: 'Tuvo problemas con la creación/conexión al repositorio, hay evidencias de su trabajo en el repositorio pero la organización no fue la adecuada.',
questionId: qa.getQuestionIdByName('Version Control System'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 3,
description: 'Manejo del repositorio de manera correcta pero con faltas notables en la organización de branches y/o redacción de commits.',
questionId: qa.getQuestionIdByName('Version Control System'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 4,
description: 'Manejo del repositorio de manera correcta, se sugiere hacer mejoras en la redacción de los commit y/u organiación de los branches.',
questionId: qa.getQuestionIdByName('Version Control System'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 5,
description: 'Manejo del repositorio de manera correcta, con correcta nomenclatura y uso de branches, buena redacción de commits y definición del .gitignore.',
questionId: qa.getQuestionIdByName('Version Control System'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 1,
description: 'Nunca utiliza lenguaje técnico',
questionId: qa.getQuestionIdByName('Techie language'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 2,
description: 'Muy pocas veces se refiere a las herramientas con un lenguaje técnico y cuando utiliza ese lenguaje es de manera excepcional o utilizándolo de manera vaga o poco clara.',
questionId: qa.getQuestionIdByName('Techie language'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 3,
description: 'Logra utilizar un lenguaje técnico en algunas circunstancias, pero a veces no logra especificar las herramientas utilizadas, refiriéndose a ellas de manera genérica (cosa, eso, esto, etc).',
questionId: qa.getQuestionIdByName('Techie language'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 4,
description: 'La mayoría de las veces utiliza un lenguaje técnico para referirse a las herramientas que está utilizando.',
questionId: qa.getQuestionIdByName('Techie language'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 5,
description: 'Siempre utiliza lenguaje técnico para referirse a distintas herramientas, logrando especificar de manera correcta aquellos elementos que está utilizando.',
questionId: qa.getQuestionIdByName('Techie language'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 1,
description: 'Nunca logra aplicar los conocimientos de programación adquirido, teniendo dificultades con conocimientos básicos.',
questionId: qa.getQuestionIdByName('Programming fundamentals'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 2,
description: 'Muy pocas veces logra resolver los problemas con los conocimientos de programación adquiridos, y cuando lo hace es de manera excepcional.',
questionId: qa.getQuestionIdByName('Programming fundamentals'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 3,
description: 'Logra aplicar los conocimientos de programación adquiridos, pero en otras instancias no encuentra solución a problemas que debiese manejar.',
questionId: qa.getQuestionIdByName('Programming fundamentals'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 4,
description: 'Logra aplicar los conocimientos de programación adquiridos, sólo alcanza el mínimo requerido.',
questionId: qa.getQuestionIdByName('Programming fundamentals'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 5,
description: 'Siempre logra aplicar los conocimientos de programación ya adquiridos e incluso buscó nuevas soluciones que van más allá de lo exigido.',
questionId: qa.getQuestionIdByName('Programming fundamentals'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 1,
description: 'Nunca logra utilizar los elementos de la metodología ágil, teniendo dificultades en seguir la organización del squad en base a esta metodología.',
questionId: qa.getQuestionIdByName('Agile methodologies'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 2,
description: 'Muy pocas veces logra utilizar los elementos de la metodología ágil, incluso parece no entender cómo funciona esta metodología.',
questionId: qa.getQuestionIdByName('Agile methodologies'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 3,
description: 'Logra utilizar los elementos de la metodología ágil en el algunas circunstancias, pero en otras toma decisiones propios olvidándose del squad.',
questionId: qa.getQuestionIdByName('Agile methodologies'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 4,
description: 'La mayoría de las veces logra utilizar los elementos de la metodología ágil en el squad, sin embargo no siempre utiliza esta metodología en su documentación. ',
questionId: qa.getQuestionIdByName('Agile methodologies'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 5,
description: 'Siempre busca la manera de utilizar los distintos elementos de la metodología ágil, no sólo en el equipo,que también en su documentación. Además, es capaz de incentivar la organización del squad en torno a la utilización de esta metodología.',
questionId: qa.getQuestionIdByName('Agile methodologies'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 1,
description: 'Nunca logra buscar recursos para resolver problemas, fuera de los dados en la sala de clase.',
questionId: qa.getQuestionIdByName('Being resourceful'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 2,
description: 'Muy pocas veces logra buscar recursos para resolver problemas, perdiendo mucho tiempo en la búsqueda de documentación, no sabiendo discriminar la información.',
questionId: qa.getQuestionIdByName('Being resourceful'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 3,
description: 'Logra buscar recursos para resolver problemas.',
questionId: qa.getQuestionIdByName('Being resourceful'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 4,
description: 'La mayoría de las veces logra buscar recursos para resolver los problemas, utilizando distintos fuentes de documentación. Además, no utiliza exceso de tiempo recopilando información innecesaria.',
questionId: qa.getQuestionIdByName('Being resourceful'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 5,
description: 'Siempre logra buscar distintos recursos para resolver problemas, utilizando distintas fuentes de documentación para encontrar la solución. Además, administra de manera eficiente su tiempo, enfocando su búsqueda en sitios específicos que puedan entregarle información.',
questionId: qa.getQuestionIdByName('Being resourceful'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 1,
description: 'El producto desarrollado carece de diseño y funcionalidad, siendo no utilizable.',
questionId: qa.getQuestionIdByName('Look & feel / Usability / UX'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 2,
description: 'Sólo algunas partes del producto se encuentran desarrolladas y diseñadas, existiendo carencias que no lo hacen utilizable.',
questionId: qa.getQuestionIdByName('Look & feel / Usability / UX'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 3,
description: 'El producto desarrollado tiene un buen diseño, resulta adecuado y atractivo para el usuario. Sin embargo, el producto no tiene la funcionalidad esperada, teniendo errores tanto en su interacción con el usuario como en su proceso lógico, siendo un producto no utilizable.',
questionId: qa.getQuestionIdByName('Look & feel / Usability / UX'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 4,
description: 'El producto desarrollado es utilizable, pero hay algunas partes de éste que se podrían mejorar tanto a nivel de su interacción con el usuario como del proceso lógico del producto. Además, se podrían mejorar aspectos del diseño del producto.',
questionId: qa.getQuestionIdByName('Look & feel / Usability / UX'),
createdAt: new Date(),
updatedAt: new Date()
}, {
order: 5,
description: 'El producto desarrollado por la alumna es funcional a nivel de usuario. Lo anterior implica que la interacción con el producto desarrollado es de fácil utilización para el beneficiario. Además se espera que el proceso lógico del producto, es decir, como debería ser utilizado por el usuario sea simple y fácil de emplear. Por último, el diseño del producto debe ser adecuado a la necesidad y atractivo para el usuario.',
questionId: qa.getQuestionIdByName('Look & feel / Usability / UX'),
createdAt: new Date(),
updatedAt: new Date()
}], {});
});
},
down: function (queryInterface, Sequelize) {
// Rollback: delete every seeded answer row (null where-clause = all rows).
return queryInterface.bulkDelete('answers', null, {});
}
};
<file_sep>/migrations/20170113105829-rename-campus-to-campus-id-on-user.js
'use strict';
module.exports = {
up: function (queryInterface, Sequelize) {
return queryInterface.renameColumn('developers', 'campus', 'campusId');
},
down: function (queryInterface, Sequelize) {
return queryInterface.renameColumn('developers', 'campusId', 'campus');
}
};
<file_sep>/models/developer.js
'use strict';
module.exports = function(sequelize, DataTypes) {
var Developer = sequelize.define('Developer', {
name: DataTypes.STRING,
lastname: DataTypes.STRING,
age: DataTypes.INTEGER,
campusId: DataTypes.STRING,
photoUrl: DataTypes.STRING,
title: DataTypes.STRING,
captainLink: DataTypes.STRING,
ranking: DataTypes.INTEGER
}, {
classMethods: {
associate: function(models) {
// associations can be defined here
models.Squad.hasMany(Developer, { constraints: true });
}
}
});
return Developer;
};
<file_sep>/seeders/20161203024140-user-seed.js
'use strict';
var crypto = require('crypto'),
algorithm = 'aes-256-ctr',
password = '<PASSWORD>';
// Encrypts a plaintext password to a hex string so seeds never store it in clear.
// NOTE(review): crypto.createCipher is deprecated (removed in Node 17+); it must
// stay in sync with server.js, which uses the same algorithm/password pair.
function encrypt(text){
  var c = crypto.createCipher(algorithm, password);
  var out = c.update(text, 'utf8', 'hex');
  out += c.final('hex');
  return out;
}
// Seeder: login accounts for the companies/partners that rate developers.
// Passwords are stored via encrypt() above; '<PASSWORD>' values are scrubbed
// placeholders -- real secrets must be restored before running this seed.
module.exports = {
up: function (queryInterface, Sequelize) {
return queryInterface.bulkInsert('users', [{
name: 'Lab',
lastname: 'Internacional',
username: 'awesome',
company: 'Laboratoria',
password: encrypt('<PASSWORD>'),
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Banco',
lastname: 'Credito',
username: 'bcp',
company: 'BCP',
password: encrypt('bcp'),
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Empresa',
lastname: 'GMD',
username: 'gmd',
company: 'GMD',
password: encrypt('gmd'),
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Empresa',
lastname: 'Tekton Labs',
username: 'tekton',
company: 'Tekton',
password: encrypt('<PASSWORD>'),
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Ministerio',
lastname: 'Produccion',
username: 'minprod',
company: 'MinProduccion',
password: encrypt('<PASSWORD>'),
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Empresa',
lastname: 'Thoughtworks',
username: 'thoughtworks',
company: 'Thoughtworks',
password: encrypt('<PASSWORD>'),
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Banco',
lastname: 'Scotiabank',
username: 'scotiabank',
company: 'Scotiabank',
password: encrypt('<PASSWORD>'),
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Empresa',
lastname: 'Urbaner',
username: 'urbaner',
company: 'Urbaner',
password: encrypt('<PASSWORD>'),
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Empresa',
lastname: 'Lyft',
username: 'lyft',
company: 'Lyft',
password: encrypt('<PASSWORD>'),
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Empresa',
lastname: 'Globant',
username: 'globant',
company: 'Globant',
password: encrypt('<PASSWORD>'),
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Empresa',
lastname: 'Cignium',
username: 'cignium',
company: 'Cignium',
password: encrypt('<PASSWORD>'),
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Empresa',
lastname: 'Everis',
username: 'everis',
company: 'Everis',
password: encrypt('<PASSWORD>'),
createdAt: new Date(),
updatedAt: new Date()
}, {
// NOTE(review): duplicate of the Globant account seeded above (same
// username/company) -- confirm whether this row is intentional.
name: 'Empresa',
lastname: 'Globant',
username: 'globant',
company: 'Globant',
password: encrypt('<PASSWORD>'),
createdAt: new Date(),
updatedAt: new Date()
}], {});
},
down: function (queryInterface, Sequelize) {
// Rollback: delete every seeded user row (null where-clause = all rows).
return queryInterface.bulkDelete('users', null, {});
}
};
<file_sep>/queries/ranking.sql
-- Campus ranking: one row per developer with their squad, average technical
-- score (tech/uxd questions), average human-skills score (hse questions) and
-- the combined total, ordered best first.
-- NOTE: server.js splits this file on blank lines and executes chunk [0] with
-- three positional `?` replacements (all the same campus id), so this query
-- must stay free of blank lines.
select distinct d.name, d.lastname, d.age, d.campusId, d.photoUrl,
d.title, s.name as 'squad', d.ranking, IFNULL(tp.avgtech, 0) as 'avgtech',
IFNULL(tss.avghse, 0) as 'avghse', IFNULL(tp.avgtech, 0) + IFNULL(tss.avghse, 0) as 'avg'
from developers d
inner join squads s on d.squadId = s.id
-- Average of strictly-positive technical/UX ratings per developer.
left join (
select d.id, AVG(r.points) as 'avgtech'
from developers d
inner join rankings r on r.developerId = d.id
inner join questions q on r.questionId = q.id
where q.type in ('tech','uxd') and r.points > 0
and d.campusId = ?
group by 1
) as tp on tp.id = d.id
-- Average of strictly-positive human-skills ratings per developer.
left join (
select d.id, AVG(r.points) as 'avghse'
from developers d
inner join rankings r on r.developerId = d.id
inner join questions q on r.questionId = q.id
where q.type REGEXP 'hse' and r.points > 0
and d.campusId = ?
group by 1
) as tss on tss.id = d.id
where d.campusId = ?
group by d.name, d.lastname, d.age, d.campusId, d.photoUrl,
d.title, s.name, d.ranking, IFNULL(tp.avgtech, 0), IFNULL(tss.avghse, 0),
IFNULL(tp.avgtech, 0) + IFNULL(tss.avghse, 0)
order by avg desc, d.ranking, d.name;
<file_sep>/models/user.js
'use strict';
// User model: a company login account that submits developer ratings.
module.exports = function(sequelize, DataTypes) {
var User = sequelize.define('User', {
name: DataTypes.STRING,
lastname: DataTypes.STRING,
username: DataTypes.STRING,
// Stored encrypted by the shared aes-256-ctr helper (see server.js / seeders).
password: DataTypes.STRING,
company: DataTypes.STRING
}, {
classMethods: {
associate: function(models) {
// A campus owns many users (adds a campusId foreign key on users).
models.Campus.hasMany(User, { constraints: true });
}
}
});
return User;
};<file_sep>/seeders/20161203181502-developer-seed.js
'use strict';
// Seeder: initial developer roster for the ranking app.
// NOTE(review): rows use the original `campus` column; migration
// 20170113105829 later renames it to `campusId`.
module.exports = {
up: function (queryInterface, Sequelize) {
return queryInterface.bulkInsert('developers', [{
name: 'Daniela',
lastname: 'Mora',
age: 26,
campus: 'Santiago de Chile',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/dana.jpg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Karen',
lastname: 'Orozco',
age: 27,
campus: 'Ciudad de México',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/karen.jpg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Diana',
lastname: 'Navarro',
age: 19,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/diana.jpg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Paola',
lastname: 'Or<NAME>',
age: 25,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/paola.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Maryori',
lastname: 'Quiroz',
age: 19,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/maryori.jpg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Susana',
lastname: 'Opazo',
age: 25,
campus: 'Santiago de Chile',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/su.jpg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Alexandra',
lastname: 'Neira',
age: 30,
campus: 'Santiago de Chile',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/alexandra.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Elisa',
lastname: 'Martinez',
age: 25,
campus: 'Ciudad de México',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/elisa.jpg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Maciel',
lastname: 'Porraz',
age: 27,
campus: 'Ciudad de México',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/maciel.jpg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Teresa',
lastname: 'Lara',
age: 24,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/teresa.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Sheila',
lastname: 'Acuña',
age: 25,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/sheila.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Evelyn',
lastname: 'Farfan',
age: null,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/evelyn.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Sandy',
lastname: 'Ciquero',
age: 26,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/sandy.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Cilene',
lastname: 'De la Cruz',
age: 24,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/cilene.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Karol',
lastname: 'Orrillo',
age: 25,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/karol.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Janet',
lastname: 'Quispe',
age: 24,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/janet.jpg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Yeiny',
lastname: 'Gonzalez',
age: 29,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/yeiny.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Christy',
lastname: 'Castro',
age: 25,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/christy.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Sthefany',
lastname: 'Floriano',
age: 21,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/sthefany.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Michelle',
lastname: 'Seguil',
age: 18,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/michelle.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Liz',
lastname: 'Ruelas',
age: 22,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/liz.jpg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Alexandra',
lastname: 'Fernandez',
age: 18,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/alexandra_f.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Sara',
lastname: 'Castillo',
age: 29,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/sara.jpg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Jazmine',
lastname: 'Angoma',
age: 21,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/jazmine.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Sammia',
lastname: 'Zubizarreta',
age: 18,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/sammia.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Milagros',
lastname: 'Quispe',
age: 25,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/milagros.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: '<NAME>',
lastname: 'Jácobo',
age: 23,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/fatima.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Andrea',
lastname: 'Maucaylli',
age: 25,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/andrea_m.jpg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Andrea',
lastname: 'Cabrera',
age: 20,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/andrea_c.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Jennifer',
lastname: 'Jara',
age: 24,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/jennifer.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Sofia',
lastname: 'Caballero',
age: 29,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/sofia.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Anabel',
lastname: 'Espinal',
age: 18,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/anabel.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Paola',
lastname: 'Quispe',
age: 22,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/paola_q.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Lezmit',
lastname: 'Galarza',
age: 25,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/lezmit.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Melissa',
lastname: 'Berrocal',
age: 26,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/melissa.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Lilibet',
lastname: 'Hualtibamba',
age: 27,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/lili.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Sheyla',
lastname: 'Breña',
age: 20,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/sheyla.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}, {
name: 'Carol',
lastname: 'Juarez',
age: 22,
campus: 'Lima',
photoUrl: 'https://awesome-rank.herokuapp.com/img/developers/carol.jpeg',
createdAt: new Date(),
updatedAt: new Date()
}], {});
},
down: function (queryInterface, Sequelize) {
// Rollback: delete every seeded developer row (null where-clause = all rows).
return queryInterface.bulkDelete('developers', null, {});
}
};
<file_sep>/server.js
// --- Application bootstrap -------------------------------------------------
var express = require('express');
var app = express();
var bodyParser = require('body-parser');
var morgan = require('morgan');
var jwt = require('jsonwebtoken'); // NOTE(review): required but never used below -- confirm before removing
var fs = require('fs');
// Shared symmetric-cipher settings; must match the seeders so stored
// passwords compare equal. '<PASSWORD>' is a scrubbed placeholder.
var crypto = require('crypto'),
  algorithm = 'aes-256-ctr',
  password = '<PASSWORD>';
var rankQuery = '';
var models = require('./models');
// Load the ranking SQL once at startup. Reading synchronously fixes the race
// in the previous async fs.readFile: a /ranking request arriving before the
// callback ran saw rankQuery === '' (and a read error left `data` undefined,
// so data.split would have thrown anyway).
try {
  rankQuery = fs.readFileSync('./queries/ranking.sql', 'utf8').split("\n\n");
} catch (err) {
  console.log(err);
}
app.set('port', process.env.PORT || 8080);
app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());
app.use(morgan('dev'));
// Permissive CORS so the separately-hosted front end can call the API.
app.use(function(req, res, next) {
  res.header("Access-Control-Allow-Origin", "*");
  res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
  next();
});
// Router that carries every /api endpoint (mounted at the bottom of the file).
var apiRoutes = express.Router();
// Symmetric password helpers; both directions must use the same
// algorithm/password pair as the seeders or stored credentials stop matching.
// NOTE(review): crypto.createCipher/createDecipher are deprecated and removed
// in Node 17+ -- migrate to createCipheriv with a real key/IV when re-seeding.
var encrypt = function (text){
  var c = crypto.createCipher(algorithm, password);
  var out = c.update(text, 'utf8', 'hex');
  out += c.final('hex');
  return out;
};
var decrypt = function (text){
  var d = crypto.createDecipher(algorithm, password);
  var out = d.update(text, 'hex', 'utf8');
  out += d.final('utf8');
  return out;
};
// Smoke-test endpoint: confirms the server is up and points at the API base.
app.get('/', function(req, res) {
res.send('Hello! The API is at http://localhost:' + app.get('port') + '/api');
});
// POST /api/login -- authenticate a company user by username + password.
// The candidate password is encrypted with the same deterministic cipher used
// at seed time and compared against the stored value (no per-user salt).
apiRoutes.post('/login', function(req, res) {
  var auth = {
    username: req.body.username,
    // Fixed: this line held an invalid scrubbed token; the credential comes
    // from the request body.
    password: req.body.password
  };
  models.User.find({
    where: {
      username: auth.username,
      password: encrypt(auth.password)
    }
  }).then(function(user) {
    if (!user) {
      console.log('No user with the username ' + auth.username + ' has been found.');
      res.json({ success: false });
    } else {
      console.log('All attributes of user:', user.get());
      res.json({ success: true, user: user });
    }
  });
});
// GET /api/developers?campusId=&userId= -- squads (with their developers) for
// one campus, scoped to the squads owned by the requesting user.
apiRoutes.get('/developers', function(req, res) {
  var byCampus = { campusId: req.query.campusId };
  var byOwner = { userId: req.query.userId };
  models.Squad.findAll({
    attributes: ['id', 'name'],
    include: [{
      model: models.Developer,
      attributes: ['id', 'name', 'lastname', 'photoUrl', 'title', 'captainLink'],
      where: byCampus
    }],
    where: byOwner,
    order: [
      [ 'name', 'ASC' ],
      [ models.Developer, 'name', 'ASC' ],
      [ models.Developer, 'lastname', 'ASC' ]
    ]
  }).then(function (squadList) {
    res.json({ squads: squadList });
  });
});
// GET /api/questions -- every active question together with its answer scale.
apiRoutes.get('/questions', function(req, res) {
models.Question.all({
where: {
active: true
},
include: [{
model: models.Answer,
}]
}).then(function (questions) {
res.json({ questions: questions});
});
});
// POST /api/ratings -- bulk-insert an array of rating rows.
// The body field `ratings` arrives as a JSON-encoded string.
apiRoutes.post('/ratings', function(req, res) {
  // Malformed JSON throws synchronously and is turned into a 500 by express.
  var ratings = JSON.parse(req.body.ratings);
  models.Ranking.bulkCreate(ratings)
    .then(function (created) {
      res.send({ success: true, rank: created });
    })
    .catch(function (err) {
      // Previously a DB failure was an unhandled rejection and the client
      // request hung; report it instead.
      console.log(err);
      res.status(500).send({ success: false });
    });
});
// GET /api/ranking?campusId= -- run the raw ranking SQL for one campus.
apiRoutes.get('/ranking', function (req, res) {
  var campusId = req.query.campusId;
  // rankQuery[0] is the first blank-line-separated chunk of
  // queries/ranking.sql; the campus id is bound three times (two subqueries
  // plus the outer filter).
  models.sequelize.query(rankQuery[0], {
    replacements: [ campusId, campusId, campusId ]
  }).then(function (ranking) {
    res.send({ success: true, ranking: ranking });
  }).catch(function (err) {
    console.log(err);
    // Previously the error was only logged and the client request hung.
    res.status(500).send({ success: false });
  });
});
// Mount the API router under /api and start the HTTP server.
app.use('/api', apiRoutes);
app.listen(app.get('port'), function () {
console.log('Server started on port ' + app.get('port'));
})
<file_sep>/migrations/20170113022437-add-campus-id-to-user.js
'use strict';
module.exports = {
up: function (queryInterface, Sequelize) {
/*
Add altering commands here.
Return a promise to correctly handle asynchronicity.
Example:
return queryInterface.createTable('users', { id: Sequelize.INTEGER });
*/
var options = { raw: true };
return queryInterface.sequelize.query('SET FOREIGN_KEY_CHECKS = 0', options)
.then(function () {
queryInterface.addColumn('users', 'campusId', {
type: Sequelize.INTEGER,
allowNull: Sequelize,
defaultValue: 0,
references: {
model: 'campuses',
key: 'id'
}
});
});
},
down: function (queryInterface, Sequelize) {
/*
Add reverting commands here.
Return a promise to correctly handle asynchronicity.
Example:
return queryInterface.dropTable('users');
*/
return queryInterface.removeColumn('users', 'campusId');
}
};
<file_sep>/models/ranking.js
'use strict';
// Ranking model: a single score a user gave a developer on one question.
module.exports = function(sequelize, DataTypes) {
var Ranking = sequelize.define('Ranking', {
points: DataTypes.INTEGER
}, {
classMethods: {
associate: function(models) {
// Each ranking belongs to the rater (User), the rated Developer and the Question.
models.User.hasMany(Ranking, { constraints: true });
models.Developer.hasMany(Ranking, { constraints: true });
models.Question.hasMany(Ranking, { constraints: true });
}
}
});
return Ranking;
};<file_sep>/migrations/20170731181259-add-active-to-questions.js
'use strict';
module.exports = {
up: function (queryInterface, Sequelize) {
return queryInterface.addColumn(
'questions',
'active',
{
type: Sequelize.BOOLEAN,
defaultValue: true
}
);
},
down: function (queryInterface, Sequelize) {
return queryInterface.removeColumn('questions', 'active');
}
};
| d6a09f2e95e986e28faff57573220bc7f693776d | [
"JavaScript",
"SQL"
] | 12 | JavaScript | ivandevp/awesome-rank-api | dacbd66c8f45f68d99d7ffb34172dd835fa0eabf | 8427e19153ebdc2901869db29d22edc9abe83a7c |
refs/heads/main | <repo_name>kallefrombosnia/goldsrc-file-server<file_sep>/README.md
# Counter Strike - Fast download server :open_file_folder:
CS 1.6 fast download server napisan u node.js uz pomoc express.js frameworka koji olaksava koristenje ruta i postavljanje rate limitera.
Kako se koristi?
Uploadati sve fajlove na server te zatim editovati `config.js` file (sve imate objasnjeno u fajlu).
Nakon toga pokrenuti npm skriptu koja starta server.
```sh
$ npm install && npm run production
```
Nakon toga ispisat ce vam u konzolu da je webserver pokrenut. Proces je potrebno ostaviti u screen da se ne ugasi!
Note: skripta ocekuje da postoji zadano ime staticnog folderu u direktoriju, ako mijenjate ime mora se manuelno napraviti/ rename direktorij.
### Funkcije
Glavna ruta za download se satoji iz `/:userid/cstrike/:path(*)` gdje je:
* :userid - {string} id korisnika koji ima fdl server (id nije limitovan, sve zavisi kako tretirate staticni folder)
* cstrike - {string} default value bez kojeg fdl ne radi jer cs klijent ocekuje taj value u url da bi skinuo fajl
* :path(*) {string} folderi sa svim resursima koji postoje u staticnom folderu
### API
Webserver nudi API koji:
| METHOD | ENDPOINT | DESCRIPTION | NOTE |
|--------|----------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------|
| GET | `/api/:userid` | Userid je string od kojeg dobijate informacije. Informacije koje dobijate u obliku objekta su: ` {userid: string, info: [{id: userid, download_count: int, errors: []]} ` | json response |
| GET | `api/filesize` | Pozivom na ovaj endpoint dobijate velicinu databaze u MB. Ako predje 200mb reset bi bio dobar jer databaza baguje. | Ovo ne resetuje ukupni broj skidanja. |
| GET | `api/reset` | Resetuje databazu, tacnije sav `users` info. | <file_sep>/src/functions/logger.js
const low = require('lowdb')
const FileSync = require('lowdb/adapters/FileSync');
const adapter = new FileSync(`${process.cwd()}/db.json`)
const db = low(adapter);
const incrementAllDownloads = () => {
return db.update('all_downloads', number => number + 1).write()
}
const incrementUserDownloads = (userid) => {
return db.get('users').find({id: userid}).update('download_count', number => number + 1).write();
}
const writeErrorMessage = (userid, message) => {
// get error array
const value = db.get('users').find({id: userid}).value();
if(value){
const { errors } = value;
if(errors.length > 0){
const lastItem = [errors.length - 1]
errors.push({time: Date.now(), error_message: message, error_code: 404});
if(errors[lastItem].error_message !== message){
return db.get('users').find({id: userid}).assign({errors}).write();
}
return
}else{
errors.push({time: Date.now(), error_message: message, error_code: 404});
return db.get('users').find({id: userid}).assign({errors}).write();
}
}
}
const addNewUserDb = (userid) => {
console.log(checkUserExistsDb(userid))
return !checkUserExistsDb(userid) ? db.get('users').push({ id: userid, errors: [], download_count: 0}).write() : ''
}
const checkUserExistsDb = (userid) =>{
return db.get('users').find({id: userid}).value() ? true : false;
}
module.exports = {
incrementAllDownloads,
writeErrorMessage,
incrementUserDownloads,
addNewUserDb
}
<file_sep>/src/functions/file.js
const fs = require('fs');
const path = require('path');
const Logger = require('../functions/logger');
/**
* Check if user exists
* @constructor
* @param {string} userid - ID of the user.
* @returns {Boolean}
*/
const checkUserExists = (userid) => {
if(fs.existsSync(path.join(process.cwd(), global.config.static_folder_name, userid))) {
// Check if users exists in db
Logger.addNewUserDb(userid)
return true
}else{
return false
}
}
/**
* Check if file exists
* @constructor
* @param {string} userid - userid.
* @param {string} filepath - pathname.
* @returns {Boolean}
*/
const checkFileExists = (userid, filepath) => {
if(fs.existsSync(path.join(process.cwd(), global.config.static_folder_name, userid, filepath))) {
return true
}else{
return false
}
}
/**
* Check if file extension is for download
* @constructor
* @param {string} filepath - pathname.
* @returns {Boolean}
*/
const checkExtension = (filepath) =>{
// Get extension name
const ext = filepath.match(/\.[0-9a-z]+$/i);
// Check if exists match
if(ext){
return global.config.whitelist_ext.includes(ext[0]) ? true : false;
}
return false;
}
module.exports = {
checkUserExists,
checkFileExists,
checkExtension
}<file_sep>/config.js
module.exports = {
port: 80,
static_folder_name: 'static', // default
time_resolution: 60000, // time in miliseconds default 1 minute = 60000 sec
max_requests_in_time_resolution: 1000,// how much attempts in time_resolution can be accessed
error_message: 'Sorry, rate limit is reached.',
whitelist_ext: [
'.bsp',
'.mdl',
'.res',
'.wad',
'.spr',
'.mp3',
'.html',
'.tga'
]
} | 16c811577a570593b225c410cf2d28848acde3db | [
"Markdown",
"JavaScript"
] | 4 | Markdown | kallefrombosnia/goldsrc-file-server | 0bb6d281cf1a3b9e68de590ea3ca483ea8501154 | 748dfc1cc9427c9966c2886308aeb9684eee4416 |
refs/heads/master | <repo_name>gwklok/OnlyVPN<file_sep>/OnlyVPN
#!/bin/bash
# This script oringally by:
# automatically change configuration of Mac OS X based on location
# author: <NAME> <<EMAIL>>
# url: http://tech.inhelsinki.nl/locationchanger/
# version: 0.4
# Abused by <EMAIL>
# redirect all IO to /dev/null (comment this out if you want to debug)
exec 1>/dev/null 2>/dev/null
# get a little breather before we get data for things to settle down
sleep 2
# get various system information
SSID=`/System/Library/PrivateFrameworks/Apple80211.framework/Versions/A/Resources/airport -I\
| grep ' SSID:' | cut -d ':' -f 2 | tr -d ' '`
LOCATION=
# locations (use to be used Location name here)
ATHOME=Home
# detect HOME
ATHOME_SSID=YOUR SSID HERE
if [ -z $LOCATION ]; then
case $SSID in
$ATHOME_SSID ) LOCATION="$ATHOME";;
esac
REASON=SSID
fi
if [ -z $LOCATION ]; then
# still didn't get a location, so do automatic
LOCATION="automatic"
REASON=Fallback
fi
case $LOCATION in
$ATHOME )
# do some stuff here if you want to
#scselect $ATHOME
/Library/LaunchAgents/OnlyVPN/Firewall.athome
;;
automatic )
# do some stuff here if you want to
#scselect Automatic
/Library/LaunchAgents/OnlyVPN/Firewall.onlyvpn
;;
esac
# do some stuff here that needs to happen after every network change
echo `date` "Location: $LOCATION " >> /var/log/OnlyVPN.log
exit 0
<file_sep>/Firewall.onlyvpn
#!/bin/sh
IPFW=/sbin/ipfw
${IPFW} -f flush
#loopback device
${IPFW} -f add 01000 allow ip from any to any via lo\*
#openvpn
${IPFW} -f add 01001 allow ip from any to any dst-port 1194 out
${IPFW} -f add 01002 allow ip from any to any src-port 1194 in
# dhcp
${IPFW} -f add 01003 allow udp from any 67 to any dst-port 68 in
${IPFW} -f add 01004 allow udp from any 68 to any dst-port 67 out
# dns, if you are super concerned about privacy this isnt a good
# rule because a lot of apps will attempt to make connections
# on interface up so they will perform DNS lookups revealing
# where you connect to (I assume you also tunnel DNS)
# you could hardcore the ipaddress of your openvpn server
${IPFW} -f add 01005 allow ip from any to any dst-port 53 out
${IPFW} -f add 01006 allow ip from any to any src-port 53 in
# let everything on the tun used by the vpn in
${IPFW} -f add 10001 allow ip from any to any via tun0
${IPFW} -f add 10001 allow ip from any to any via tap0
#block everything
${IPFW} -f add deny in
${IPFW} -f add deny out
<file_sep>/Firewall.athome
#!/bin/sh
IPFW=/sbin/ipfw
$IPFW -f flush
${IPFW} -f add 65535 allow ip from any to any
| e920da549e5e6cd93f6e6db7c3b3133eedb81722 | [
"Shell"
] | 3 | Shell | gwklok/OnlyVPN | d95a5aeba0dfa5b107b8530e87eaf3cc4e3e5363 | e6981344106f2aaa224d07bffa0e3e2ec8ec4441 |
refs/heads/master | <file_sep>import { get, _delete, post, patch, put } from '@/lin/plugins/axios'
class order {
/**
* 获取订单列表
*/
async getOrder(page, count, input, searchDate) {
if (searchDate.length > 1) {
const res = await get('v1/order', { page, count, start: searchDate[0], end: searchDate[1],handleError: true})
return res
}
if (input !== '') {
let order = input.split('')
if (((order[0] >= 'A' && order[0] <= 'Z') || (order[0] >= 'a' && order[0] <= 'z'))) {
const res = await get('v1/order', { page, count, order_no: input, handleError: true })
return res
} else {
const res = await get('v1/order', { page, count, name: input, handleError: true })
return res
}
} else {
const res = await get('v1/order', { page, count,handleError: true })
return res
}
}
/**
* 发货
*/
async shipment(id,obj) {
const res = await post(`v1/order/shipment/${id}`,obj,{handleError:true})
return res
}
/**
* 获取物流
*/
async Logistics(order_no){
const res = await get(`v1/logistics/${order_no}`,{handleError:true})
return res
}
}
export default new order()
<file_sep>import { get, _delete, post, patch, put } from '@/lin/plugins/axios'
class Product {
/**
* 获取分类信息
*/
async getCategory() {
const res = await get('v1/category',{handleError: true})
return res
}
/**
* 添加分类
*/
async addCategory(obj) {
const res = await post('v1/category',obj,{handleError:true})
return res
}
/**
* 删除分类
* @param {*} ids
*/
async delCategoryByIds(ids) {
// { ids } 等价于 { ids : ids },对象的key和value命名相同时的一种简写
const res = await _delete('v1/category', { ids }, { handleError: true })
return res
}
/**
* 修改分类
* @param {*} page
* @param {*} count
* @param {*} product_name
*/
async editCategory(id,obj){
const res = await put(`v1/category/${id}`,obj,{handleError:true})
return res
}
/**
* 获取商品列表
*/
async getProducts(page,count,product_name='') {
const res = await get('v1/product/paginate',{page,count,product_name,handleError:true})
return res
}
/**
* 删除商品
* @param {*} ids
*/
async delProductById(ids){
const res = await _delete('v1/product', { ids }, { handleError: true })
return res
}
/**
* 修改商品状态
*/
async modifyStatus(ids){
const res = await patch(`v1/product/${ids}`,{handleError: true})
return res
}
/**
* 添加商品
* @param {*} obj
*/
async addProduct(obj){
const res = await post('v1/product',obj,{handleError:true})
return res
}
/**
* 修改商品主题数据
* @param {*} obj
*/
async editProduct(obj){
const res = await put('v1/product',obj,{handleError:true})
return res
}
/**
* 新增商品介绍图片
* @param {*} obj
*/
async addProductImg(obj){
console.log(obj)
const res = await post('v1/product/image',{image:obj},{handleError:true})
return res
}
/**
* 修改商品介绍图片
* @param {*} obj
*/
async editProductImg(obj){
console.log({image:obj})
const res = await put('v1/product/image',{image:obj},{handleError:true})
return res
}
/**
* 删除商品介绍图片
* @param {*} obj
*/
async delProductImg(obj){
console.log(obj)
const res = await _delete('v1/product/image',{ids:obj},{handleError:true})
console.log(res)
return res
}
}
export default new Product()<file_sep>import { get, _delete, post, patch, put } from '@/lin/plugins/axios'
class Theme {
/**
* 获取主题信息
*/
async getThemes() {
const res = await get('v1/theme',{handleError: true})
return res
}
/**
* 删除主题
* @param {*} ids
*/
async delThemeByIds(ids) {
// { ids } 等价于 { ids : ids },对象的key和value命名相同时的一种简写
const res = await _delete('v1/theme', { ids,handleError: true} )
return res
}
/**
* 获取可用商品信息
*/
async getProducts(){
const res = await get('v1/product',{handleError: true})
return res
}
/**
* 添加主题
* @param {*} obj
*/
async addTheme(obj){
const res = await post('v1/theme',obj,{handleError: true})
return res
}
/**
* 添加关联商品
* @param {*} obj
*/
async addRelProduct(id,ids){
const res = await post(`v1/theme/product/${id}`,ids,{handleError: true})
return res
}
/**
* 更新主题主体信息
*/
async editTheme(id,ids){
const res = await patch(`v1/theme/${id}`,ids,{handleError: true})
return res
}
/**
* 获取主体信息
* @param {]} id
* @param {*} ids
*/
async getThemeById(id){
const res = await get(`v1/theme/${id}`,{handleError: true})
return res
}
async addRelProduct(id,obj){
const res = await post(`v1/theme/product/${id}`,obj,{handleError: true})
return res
}
async delRelProduct(id,obj){
const res = await _delete(`v1/theme/product/${id}`,obj,{handleError: true})
return res
}
}
export default new Theme()
<file_sep>const Record = {
route: null,
name: null,
title: '物流管理',
type: 'folder', // 类型: folder, tab, view
icon: 'iconfont icon-wuliu',
filePath: 'views/logistics/', // 文件路径
order: null,
inNav: true,
children: [
{
title: '发货列表',
type: 'view',
name: 'Record',
route: '/logistics/record',
filePath: 'views/logistics/Record.vue',
inNav: true,
permission: ['发货记录列表'],
},
],
}
export default Record
<file_sep>let nDate = new Date();
let year = nDate.getFullYear()
let month = nDate.getMonth()
let day = nDate.getDate()
function nowDate(){
return year
+ "-" + (month + 1) +
"-" +day;
}
function lastMonthDate(){
return year
+ "-" + month +
"-" + day;
}
function weekDate(){
if(day-7 < 0){
return year
+ "-" + month +
"-" + (Math.abs(day-7+30));
}else{
return year
+ "-" + month +
"-" + (day-7);
}
}
export {
nowDate,
lastMonthDate,
weekDate
}<file_sep>const bannerRouter = {
route: null,
name: null,
title: 'banner管理',
type: 'folder', // 类型: folder, tab, view
icon: 'iconfont icon-banner',
filePath: 'views/operate/', // 文件路径
order: null,
inNav: true,
children: [
{
title: '轮播图列表',
type: 'view',
name: 'bannerList',
route: '/banner/list',
filePath: 'views/operate/banner/List.vue',
inNav: true,
icon: 'iconfont icon-huiyuanguanli',
permission: ['轮播图列表'],
},
{
title: '新增轮播图',
type: 'view',
inNav: true,
route: '/banner/add',
icon: 'iconfont icon-add',
name: 'bannerAdd',
filePath: 'views/operate/banner/Add.vue',
permission: ['新增轮播图'],
},
],
}
export default bannerRouter
<file_sep>/**
* 处理catch返回值
* Object.values(e.data.msg).join(';'),
*/
export default function(obj){
if(typeof obj === 'string'){
return obj
}
//console.log(Object.values(obj).length)
if(Object.values(obj).length>=1){
return Object.values(obj)[0]
}else{
return Object.values(obj)
}
}<file_sep>const productRouter = {
route: null,
name: null,
title: '商品管理',
type: 'folder', // 类型: folder, tab, view
icon: 'iconfont icon-shop',
filePath: 'views/product/', // 文件路径
order: null,
inNav: true,
children: [
{
title: '商品分类',
type: 'view',
name: 'categoryList',
route: '/category/list',
filePath: 'views/product/category/CategoryList.vue',
inNav: true,
permission: ['分类列表'],
},
{
title: '商品库',
type: 'view',
name: 'productsList',
route: '/product/list',
filePath: 'views/product/products/ProductsList.vue',
inNav: true,
permission: ['商品列表'],
},
{
title: '新增商品',
//type: 'view',
name: 'productsAdd',
route: '/product/add',
filePath: 'views/product/products/ProductsAdd.vue',
inNav: true,
permission: ['新增商品'],
},
],
}
export default productRouter
<file_sep>const comp = [{
name: '顺丰',
number: 'sf',
name1: '申通',
number1: 'sto'
}, {
name: '圆通',
number: 'yt',
name1: '韵达',
number1: 'yd'
},
{
name: '天天',
number: 'tt',
name1: 'EMS',
number1: 'ems'
}, {
name: '中通',
number: 'zto',
name1: '汇通',
number1: 'ht'
}, {
name: '全峰',
number: 'qf',
name1: '德邦',
number1: 'db'
},
{
name: '国通',
number: 'gt',
name1: '如风达',
number1: 'rfd'
}, {
name: '京东',
number: 'jd',
name1: '宅急送',
number1: 'zjs'
},
{
name: '邮政快递',
number: 'youzheng',
name1: '百世',
number1: 'bsky'
}
]
export default comp<file_sep>const themeRouter = {
route: null,
name: null,
title: '主题管理',
type: 'folder', // 类型: folder, tab, view
icon: 'iconfont icon-theme',
filePath: 'views/operate/', // 文件路径
order: null,
inNav: true,
children: [
{
title: '主题列表',
type: 'view',
name: 'record',
inNav: true,
route: '/theme/list',
filePath: 'views/operate/theme/ThemeList.vue',
inNav: true,
permission: ['主题列表'],
},
{
title: '新增主题',
type: 'view',
inNav: true,
route: '/theme/add',
icon: 'iconfont icon-add',
name: 'themeAdd',
filePath: 'views/operate/theme/ThemeAdd.vue',
permission: ['新增主题'],
},
],
}
export default themeRouter
| 7e45d9197d9d54ccefe2b520b5233cdbebc97723 | [
"JavaScript"
] | 10 | JavaScript | linhexs/minishop-cms | ca2eabf3a56479229c1ae6fe0f588b7cc791a9e9 | 4022864fb7a44c0f6d11116610ada9490380a2d0 |
refs/heads/master | <file_sep>cmake_minimum_required(VERSION 3.2)
project(flockingbird
VERSION 0.0.1
LANGUAGES C CXX
DESCRIPTION "Implementations of flocking simulation algorithms.")
if(NOT CMAKE_BUILD_TYPE)
set(CMAKE_BUILD_TYPE Release)
endif()
set(CMAKE_CXX_FLAGS "-Wall -Wextra")
set(CMAKE_CXX_FLAGS_DEBUG "-g")
set(CMAKE_CXX_FLAGS_RELEASE "-O3")
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_CXX_STANDARD_REQUIRED True)
set(CMAKE_INSTALL_PREFIX ${PROJECT_SOURCE_DIR})
set(FLOCKINGBIRD_INSTALL_INCLUDE_DIR ${PROJECT_SOURCE_DIR}/include)
set(FLOCKINGBIRD_INSTALL_BUILD_DIR ${PROJECT_SOURCE_DIR}/build)
# TODO if this is really needed, can we recursively inclued all headers?
set(FLOCKINGBIRD_HEADERS_DIR ${PROJECT_SOURCE_DIR}/src)
# TODO clarify, need this for the tests to work.
# even though i created a interface library?
include_directories(${FLOCKINGBIRD_HEADERS_DIR})
add_subdirectory(src)
# Tests
enable_testing()
add_subdirectory(tests)
# Add googletest (refer to https://github.com/google/googletest/blob/master/googletest/README.md)
<file_sep>#pragma once
#include "flock_simulation/flock.hpp"
#include "flock_simulation/simulation.hpp"
cmake_minimum_required(VERSION 3.2)
project(flockingbird_tests)

# Fetch googletest at configure time.
include(FetchContent)
FetchContent_Declare(
  googletest
  # Specify the commit you depend on and update it regularly.
  URL https://github.com/google/googletest/archive/609281088cfefc76f9d0ce82e1ff6c30cc3591e5.zip
)
# For Windows: Prevent overriding the parent project's compiler/linker settings
set(gtest_force_shared_crt ON CACHE BOOL "" FORCE)
FetchContent_MakeAvailable(googletest)

# TODO: Can we recursively add all tests?
set(SOURCE_FILES
        src/nearest_neighbors/nanoflann_exploratory_tests.cpp
        src/nearest_neighbors/visible_proximity_tests.cpp
        src/flock_simulation/flock_tests.cpp
        src/flock_simulation/rules_tests.cpp
        src/utility/vector_operations_tests.cpp
        src/flock_simulation/simulation_test.cpp)

add_executable(flockingbird_tests ${SOURCE_FILES})
# Fix: CMake arguments are whitespace-separated, so the previous
# "add_test(flockingbird_tests, flockingbird_tests)" registered a test whose
# name carried a literal trailing comma. Use the explicit NAME/COMMAND form.
add_test(NAME flockingbird_tests COMMAND flockingbird_tests)
target_link_libraries(flockingbird_tests flockingbird gtest_main gmock_main)
install(TARGETS flockingbird_tests DESTINATION ${FLOCKINGBIRD_INSTALL_BUILD_DIR})
<file_sep>#include "flock_simulation/rules.hpp"
#include "gtest/gtest.h"
#include <vector>
using namespace flockingbird;
// Shared fixture for the steering-rule tests: one boid at the origin plus
// three neighbouring boids it can "see".
class RulesTest : public ::testing::Test {
public:
protected:
    RulesTest()
        : boidToUpdate(Boid(Vector2D(0, 0), Vector2D(1.0, 1.0))) {
        // Neighbourhood available to tests that use the shared fixture.
        proximity.push_back(Boid(Vector2D(1.0, 2), Vector2D(1.0, 1.0)));
        proximity.push_back(Boid(Vector2D(2, 3), Vector2D(2, 1.0)));
        proximity.push_back(Boid(Vector2D(3, 4), Vector2D(1.0, 3.5)));
    };
    std::vector<Boid> proximity;    // visible neighbours
    Boid boidToUpdate;              // boid the rule under test would steer
    virtual void TearDown(){};
};
// Separation rule, smallest possible setup: one neighbour, unit parameters.
TEST_F(RulesTest, SeparationSimplestTest) {
    // Arrange: a single neighbour diagonally below-left of the boid.
    FlockSimulationParameters parameters;
    parameters.speedLimit = 1;
    parameters.forceLimit = 500;
    parameters.avoidanceRadius = 25;
    parameters.separationWeight = 1;
    Boid boidToUpdate = Boid(Vector2D(2.0, 2.0), Vector2D(1.0, 1.0));
    Boid boid2 = Boid(Vector2D(1.0, 1.0), Vector2D(1.0, 1.0));
    std::vector<Boid> proximity { boid2 };
    SeparationRule rule;
    // Difference between the two boids is (1, 1).
    // It is normalized and divided by the distance (sqrt 2),
    // then averaged (no effect with a single neighbour).
    Vector2D expectedSteer = Vector2D(1, 1).normalized() * (1.0/sqrt(2));
    // After that the result is normalized again,
    // multiplied by max speed (no effect, speedLimit == 1),
    // the current velocity (1, 1) is subtracted,
    // and the result is clamped to max force (no effect here).
    Vector2D expectedResult = expectedSteer.normalized() - Vector2D(1, 1);
    // Act
    Vector2D actualResult = rule(boidToUpdate, proximity, parameters);
    // Assert
    EXPECT_NEAR(expectedResult.x, actualResult.x, 1E-5);
    EXPECT_NEAR(expectedResult.y, actualResult.y, 1E-5);
}
// Two neighbours placed symmetrically on either side of the boid: their
// separation pushes must cancel to (0, 0).
TEST_F(RulesTest, SeparationCancellingForcesTest) {
    // Arrange: neighbours at (1,1) and (3,3) mirror each other around (2,2).
    FlockSimulationParameters parameters;
    parameters.speedLimit = 3;
    parameters.forceLimit = 500;
    parameters.avoidanceRadius = 25;
    parameters.separationWeight = 1;
    SeparationRule rule;
    Boid centerBoid = Boid(Vector2D(2.0, 2.0), Vector2D(2.5, 1.5));
    std::vector<Boid> proximity{ Boid(Vector2D(1.0, 1.0), Vector2D(1.0, 1.0)),
                                 Boid(Vector2D(3.0, 3.0), Vector2D(1.0, 1.0)) };
    // Act
    Vector2D steering = rule(centerBoid, proximity, parameters);
    // Assert: the opposing forces cancel exactly.
    EXPECT_NEAR(0, steering.x, 1E-5);
    EXPECT_NEAR(0, steering.y, 1E-5);
}
// Separation with two neighbours on the same side, a non-unit weight and a
// tight force limit; the expectation replays the rule's pipeline by hand.
TEST_F(RulesTest, SeparationComplexTest) {
    // Arrange
    FlockSimulationParameters parameters;
    parameters.speedLimit = 3;
    parameters.forceLimit = 0.1;
    parameters.avoidanceRadius = 25;
    parameters.separationWeight = 2;
    Boid boidToUpdate = Boid(Vector2D(2.0, 2.0), Vector2D(2.5, 1.5));
    Boid boid2 = Boid(Vector2D(1.5, 1.5), Vector2D(1.0, 1.0));
    Boid boid3 = Boid(Vector2D(0.5, 0.5), Vector2D(1.0, 1.0));
    std::vector<Boid> proximity { boid2, boid3 };
    SeparationRule rule;
    // Per-neighbour repulsion: the normalized offset away from the
    // neighbour, scaled down by the distance to it, then averaged.
    float dist1 = Vector2D(2.0, 2.0).distanceTo(Vector2D(1.5, 1.5));
    float dist2 = Vector2D(2.0, 2.0).distanceTo(Vector2D(0.5, 0.5));
    Vector2D diff1 = Vector2D(0.5, 0.5).normalized() / dist1;
    Vector2D diff2 = Vector2D(1.5, 1.5).normalized() / dist2;
    Vector2D expectedSteer = (diff1 + diff2) / 2;
    // Rescale to max speed, subtract the current velocity, clamp to the
    // force limit, then apply the separation weight.
    Vector2D expectedResult = ((expectedSteer.normalized() * 3) - Vector2D(2.5, 1.5)).limit(0.1) * 2;
    // Act
    Vector2D actualResult = rule(boidToUpdate, proximity, parameters);
    // Assert
    EXPECT_NEAR(expectedResult.x, actualResult.x, 1E-5);
    EXPECT_NEAR(expectedResult.y, actualResult.y, 1E-5);
}
// Alignment with a single neighbour: steer toward the neighbour's heading.
// (Renamed from the misspelled "AligmentSimpleTest"; the copy-pasted
// "unit parameters" arrange-comment was also stale and has been replaced.)
TEST_F(RulesTest, AlignmentSimpleTest) {
    // Arrange: one neighbour flying with velocity (1, 1).
    FlockSimulationParameters parameters;
    parameters.speedLimit = 3;
    parameters.forceLimit = 0.1;
    parameters.alignmentWeight = 1;
    Boid boidToUpdate = Boid(Vector2D(1.0, 1.0), Vector2D(3.0, 3.0));
    Boid boid2 = Boid(Vector2D(0.0, 0.0), Vector2D(1.0, 1.0));
    std::vector<Boid> proximity { boid2 };
    AlignmentRule rule;
    // Average neighbour velocity, rescaled to max speed, minus own velocity,
    // clamped to the force limit.
    Vector2D expectedResult = ((Vector2D(1.0, 1.0).normalized() * 3) - Vector2D(3.0, 3.0)).limit(0.1);
    // Act
    Vector2D actualResult = rule(boidToUpdate, proximity, parameters);
    // Assert
    EXPECT_NEAR(expectedResult.x, actualResult.x, 1E-5);
    EXPECT_NEAR(expectedResult.y, actualResult.y, 1E-5);
}
// Alignment with two neighbours and a non-unit weight; (4.5, 3.1) below is
// the component-wise sum of the neighbour velocities (1.0+3.5, 2.1+1.0).
TEST_F(RulesTest, AlignComplexTest) {
    FlockSimulationParameters parameters;
    parameters.speedLimit = 3;
    parameters.forceLimit = 0.1;
    parameters.alignmentWeight = 2;
    Boid boidToUpdate = Boid(Vector2D(2.0, 2.0), Vector2D(2.5, 1.5));
    Boid boid2 = Boid(Vector2D(1.5, 1.5), Vector2D(1.0, 2.1));
    Boid boid3 = Boid(Vector2D(0.5, 0.5), Vector2D(3.5, 1.0));
    std::vector<Boid> proximity{ boid2, boid3 };
    AlignmentRule rule;
    // Mean neighbour velocity -> rescale to max speed -> subtract own
    // velocity -> clamp to force limit -> apply alignment weight.
    Vector2D expectedResult
        = (((Vector2D(4.5, 3.1) / 2).normalized() * 3) - Vector2D(2.5, 1.5)).limit(0.1) * 2;
    // Act
    Vector2D actualResult = rule(boidToUpdate, proximity, parameters);
    // Assert
    EXPECT_NEAR(expectedResult.x, actualResult.x, 1E-5);
    EXPECT_NEAR(expectedResult.y, actualResult.y, 1E-5);
}
// Cohesion with a single neighbour: steer toward the neighbour's position.
TEST_F(RulesTest, CohesionSimpleTest) {
    // Arrange: the only neighbour sits at the origin.
    FlockSimulationParameters parameters;
    parameters.speedLimit = 3;
    parameters.forceLimit = 0.1;
    parameters.cohesionWeight = 1;
    CohesionRule rule;
    Boid observer = Boid(Vector2D(1.0, 1.0), Vector2D(3.0, 3.0));
    std::vector<Boid> proximity{ Boid(Vector2D(0.0, 0.0), Vector2D(1.0, 1.0)) };
    // The averaged neighbour position (0, 0) is offset (-1, -1) from the
    // observer; rescale to max speed, subtract velocity, clamp to max force.
    Vector2D expectedResult
        = ((Vector2D(-1, -1).normalized() * 3) - Vector2D(3, 3)).limit(0.1);
    // Act
    Vector2D actualResult = rule(observer, proximity, parameters);
    // Assert
    EXPECT_NEAR(expectedResult.x, actualResult.x, 1E-5);
    EXPECT_NEAR(expectedResult.y, actualResult.y, 1E-5);
}
// Cohesion with two neighbours and a non-unit weight.
TEST_F(RulesTest, CohesionComplexTest) {
    // Arrange
    FlockSimulationParameters parameters;
    parameters.speedLimit = 3;
    parameters.forceLimit = 0.1;
    parameters.cohesionWeight = 2;
    Boid boidToUpdate = Boid(Vector2D(1.0, 1.0), Vector2D(3.0, 3.0));
    Boid boid2 = Boid(Vector2D(2.0, 1.0), Vector2D(1.0, 1.0));
    Boid boid3 = Boid(Vector2D(3.0, 3.5), Vector2D(1.0, 1.0));
    std::vector<Boid> proximity{ boid2, boid3 };
    CohesionRule rule;
    // Mean neighbour position is (2.5, 2.25); the steering offset from the
    // boid at (1, 1) is therefore (1.5, 1.25).
    Vector2D expectedResult = (((Vector2D(1.5, 1.25)).normalized() * 3) - Vector2D(3, 3)).limit(0.1) * 2;
    // Act
    Vector2D actualResult = rule(boidToUpdate, proximity, parameters);
    // Assert
    EXPECT_NEAR(expectedResult.x, actualResult.x, 1E-5);
    EXPECT_NEAR(expectedResult.y, actualResult.y, 1E-5);
}
<file_sep>#pragma GCC diagnostic ignored "-Wunused-parameter"
#include "flockingbird.hpp"
#include <cairo.h>
#include <gtk/gtk.h>
#include <iostream>
#include <math.h>
#include <ostream>
using namespace flockingbird;
// Forward declaration; defined further down in this file.
static void do_drawing(cairo_t*, GtkWidget*);
// Window size in pixels; also passed to the simulation as its world bounds.
const int SCREEN_WIDTH = 1024;
const int SCREEN_HEIGHT = 600;
// Redraw period in milliseconds. 1000.0 / 30.0 targets ~30 fps but the int
// conversion truncates it to 33 ms.
const int REFRESH_INTERVAL_REDRAW = 1000.0 / 30.0;
// Simulation tuning knobs, forwarded into FlockSimulationParameters below.
double speedLimit = 5;
double forceLimit = 0.03;
double positionIncrementScalingFactor = 1;
double avoidanceRadius = 25;
double visionRange = 100;
double separationWeight = 1.5;
double alignmentWeight = 1.0;
double cohesionWeight = 1.0;
static FlockSimulationParameters flockSimulationParameters(speedLimit,
                                                           forceLimit,
                                                           positionIncrementScalingFactor,
                                                           avoidanceRadius,
                                                           visionRange,
                                                           separationWeight,
                                                           alignmentWeight,
                                                           cohesionWeight,
                                                           SCREEN_WIDTH,
                                                           SCREEN_HEIGHT);
// Global mutable state shared between the GTK callbacks below: a flock of
// 1000 boids and the simulation that advances it.
static Flock flock(1000, SCREEN_WIDTH, SCREEN_HEIGHT);
static FlockSimulation flockSimulation(flockSimulationParameters, flock, defaultRules);
// GTK "draw" signal handler: delegates all rendering to do_drawing().
// Returning FALSE lets any further handlers for the signal run as well.
static gboolean on_draw_event(GtkWidget *widget, cairo_t *cr, gpointer user_data) {
    do_drawing(cr, widget);
    return FALSE;
}
// Floored-modulo wrap of `val` into the half-open interval [0, max).
// Unlike a plain fmod, the result stays non-negative for negative inputs.
inline double wrap(double val, double max) {
    const double wholeCycles = floor(val / max);
    return val - max * wholeCycles;
}
// Render the current flock: clear the surface to black, then draw every
// boid as a small white triangle rotated to face its velocity.
static void do_drawing(cairo_t *cr, GtkWidget *widget)
{
  // Clear the background; save/restore keeps the source colour local.
  cairo_save(cr);
  cairo_set_source_rgb(cr, 0, 0, 0);
  cairo_paint(cr);
  cairo_restore(cr);
  cairo_set_source_rgb(cr, 1, 1, 1);
  for (auto it = flock.boids.begin(); it != flock.boids.end(); it ++) {
      double x = it->position.x;
      double y = it->position.y;
      // Draw triangular boid;
      Vector2D directionVector((*it).velocity.normalized());
      // atan2 gives the heading angle; subtract pi/2 because the triangle
      // below is modelled with its apex pointing along +y.
      double theta = atan2(directionVector.y, directionVector.x) - M_PI/2;
      cairo_set_line_width(cr, 1);
      cairo_save(cr);
      cairo_translate(cr, x, y);
      cairo_rotate(cr, theta);
      cairo_move_to(cr, -2.5, 0);
      cairo_line_to(cr, 2.5, 0);
      cairo_line_to(cr, 0, 10);
      cairo_line_to(cr, -2.5, 0);
      cairo_stroke(cr);
      cairo_restore(cr);
  }
  // NOTE(review): cairo_stroke above already consumed each boid's path, so
  // this fill looks like a no-op -- confirm before removing.
  cairo_fill(cr);
}
static void sendRedrawSignals(GtkWidget *widget) {
gtk_widget_queue_draw(widget);
flockSimulation.step();
}
// Entry point: build a single GTK window containing a drawing area, wire up
// the draw/destroy signals plus a periodic simulation timer, then hand
// control to the GTK main loop.
int main (int argc, char *argv[])
{
  gtk_init(&argc, &argv);

  GtkWidget *window = gtk_window_new(GTK_WINDOW_TOPLEVEL);
  GtkWidget *darea  = gtk_drawing_area_new();
  gtk_container_add(GTK_CONTAINER(window), darea);

  // Repaints go to the drawing area; closing the window quits the loop.
  g_signal_connect(G_OBJECT(darea), "draw", G_CALLBACK(on_draw_event), NULL);
  g_signal_connect(G_OBJECT(window), "destroy", G_CALLBACK(gtk_main_quit), NULL);

  // Drive the animation: every tick queues a redraw and steps the flock.
  g_timeout_add(REFRESH_INTERVAL_REDRAW, (GSourceFunc) sendRedrawSignals, window);

  gtk_window_set_position(GTK_WINDOW(window), GTK_WIN_POS_CENTER);
  gtk_window_set_default_size(GTK_WINDOW(window), SCREEN_WIDTH, SCREEN_HEIGHT);
  gtk_window_set_title(GTK_WINDOW(window), "Flock simulation");
  gtk_widget_show_all(window);

  gtk_main();
  return 0;
}
<file_sep>#include "utility/vector_operations.hpp"
#include "gtest/gtest.h"
#include <vector>
class VectorOperationsTest : public ::testing::Test {
protected:
virtual void SetUp(){};
virtual void TearDown(){};
};
TEST_F(VectorOperationsTest, VectorEquality) {
EXPECT_EQ(Vector2D(1, 2), Vector2D(1, 2));
EXPECT_FALSE(Vector2D(1.00001, 2) == Vector2D(1, 2));
}
TEST_F(VectorOperationsTest, VectorSum) {
Vector2D result = Vector2D(1, 2) + Vector2D(3, 4);
EXPECT_EQ(result, Vector2D(4, 6));
}
TEST_F(VectorOperationsTest, VectorDiff) {
Vector2D result = Vector2D(1, 2) - Vector2D(3, 4);
EXPECT_EQ(result, Vector2D(-2, -2));
}
TEST_F(VectorOperationsTest, VectorMultiplyScalar) {
Vector2D result = Vector2D(3, 4) * 0.5;
EXPECT_EQ(result, Vector2D(1.5, 2));
}
TEST_F(VectorOperationsTest, Magnitude) {
float magnitude = Vector2D(1, 3).magnitude();
EXPECT_NEAR(magnitude, sqrt(10), 1E-5);
magnitude = Vector2D(1, 50).magnitude();
EXPECT_NEAR(magnitude, sqrt(2501), 1E-5);
}
TEST_F(VectorOperationsTest, Normalize) {
Vector2D normalized = Vector2D(1, 3).normalized();
EXPECT_NEAR(normalized.x, 1.0 / sqrt(10), 1E-2);
EXPECT_NEAR(normalized.y, 3.0 / sqrt(10), 1E-2);
EXPECT_NEAR(normalized.magnitude(), 1, 1E-2);
}
TEST_F(VectorOperationsTest, NormalizeNegativeNumbers) {
Vector2D normalized = Vector2D(-1, -1).normalized();
EXPECT_NEAR(normalized.x, -1.0 / sqrt(2), 1E-3);
EXPECT_NEAR(normalized.y, -1.0 / sqrt(2), 1E-3);
EXPECT_NEAR(normalized.magnitude(), 1, 1E-3);
}
TEST_F(VectorOperationsTest, TestDistance) {
Vector2D a = Vector2D(3, 2);
Vector2D b = Vector2D(9, 7);
EXPECT_NEAR(a.distanceTo(b), sqrt(61),1E-5);
}
TEST_F(VectorOperationsTest, TestLimitMagnitude) {
Vector2D a = Vector2D(3, 2);
Vector2D limited = a.limit(300);
EXPECT_EQ(a, limited);
limited = a.limit(0.1);
EXPECT_NEAR(limited.magnitude(), 0.1, 1E-3);
}
#!/bin/sh
# Configure, build, and install the project, then run the unit tests.
# Run from the repository root; expects ./build to exist.
# Fix: abort on the first failing command instead of running a stale
# (or missing) test binary after a failed configure/build.
set -e
cd build
cmake -DCMAKE_EXPORT_COMPILE_COMMANDS=ON -DCMAKE_BUILD_TYPE=DEBUG ..
make
make install
./flockingbird_tests
cd ..
<file_sep>#pragma once
#include "../utility/vector_operations.hpp"
#include <ostream>
namespace flockingbird {
// A single simulated agent: a position and a velocity in 2D space.
class Boid {
public:
    // `positionIn` is the starting location; `velocity` combines heading
    // and speed in a single vector.
    Boid(Vector2D positionIn, Vector2D velocity)
        : position(positionIn)
        , velocity(velocity) {}
    Vector2D position; // current location
    Vector2D velocity; // current velocity
    friend std::ostream& operator<<(std::ostream& outputStream, const Boid& p);
};
// Stream a compact position/direction dump of the boid.
inline std::ostream& operator<<(std::ostream& outputStream, const Boid& p) {
    return outputStream << "(pos" << p.position << ") (dir: " << p.velocity << ")";
}
}
cmake_minimum_required(VERSION 3.2)

# Header-only flockingbird library target consumed by the demo executable.
set(HEADER_FILES flockingbird)
add_library(flockingbird INTERFACE ${HEADER_FILES})

project(flockingbird_demo
  VERSION 0.0.1
  LANGUAGES C CXX
  DESCRIPTION "Graphical test demo of flockingbird")

set(SOURCE_FILES main.cpp)

# Locate the rendering/GUI dependencies via pkg-config.
# (Command names normalised to lowercase; CMake commands are
# case-insensitive, so behaviour is unchanged.)
include(FindPkgConfig)
pkg_check_modules(CAIRO REQUIRED cairo)
pkg_check_modules(GTK3 REQUIRED gtk+-3.0)

add_executable(flockingbird_demo ${SOURCE_FILES})
target_compile_features(flockingbird_demo PRIVATE cxx_std_17)

target_include_directories(
    flockingbird_demo
    PRIVATE
    ${CAIRO_INCLUDE_DIRS}
    ${GTK3_INCLUDE_DIRS}
)
target_link_directories(
    flockingbird_demo
    PRIVATE
    ${CAIRO_LIBRARY_DIRS}
    ${GTK3_LIBRARY_DIRS}
)
target_link_libraries(
    flockingbird_demo
    flockingbird
    ${CAIRO_LIBRARIES}
    ${GTK3_LIBRARIES}
)

install(TARGETS flockingbird_demo DESTINATION ${FLOCKINGBIRD_INSTALL_BUILD_DIR})
install(TARGETS flockingbird DESTINATION ${FLOCKINGBIRD_INSTALL_BUILD_DIR})
<file_sep>#include "gtest/gtest.h"
#include "flock_simulation/flock.hpp"
using namespace flockingbird;
// Empty fixture: groups the Flock construction/copy tests; no shared state.
class FlockingbirdTest : public ::testing::Test {
protected:
    virtual void SetUp(){};
    virtual void TearDown(){};
};
// A freshly constructed Flock(10, 10, 10) must hold exactly 10 boids whose
// velocity components lie in [-1, 1] and whose positions lie in [0, 10]^2.
TEST_F(FlockingbirdTest, RandomInit) {
    Flock flock(10, 10, 10);
    EXPECT_EQ(flock.boids.size(), 10);
    for (int i = 0; i < 10; i++) {
        Boid boid = flock.boids[i];
        // Velocity components are expected in [-1, 1].
        EXPECT_GE(boid.velocity.x, -1);
        EXPECT_LE(boid.velocity.x, 1);
        EXPECT_GE(boid.velocity.y, -1);
        EXPECT_LE(boid.velocity.y, 1);
        // Positions are expected inside the requested bounds.
        EXPECT_GE(boid.position.x, 0);
        EXPECT_LE(boid.position.x, 10);
        EXPECT_GE(boid.position.y, 0);
        EXPECT_LE(boid.position.y, 10);
    }
}
// The Flock copy constructor must copy every boid's position and velocity;
// repeated over several random flocks to gain confidence.
TEST_F(FlockingbirdTest, CopyConstructors) {
    const int N = 1000;
    const int numTests = 10;
    for (int testRun = 0; testRun < numTests; testRun++) {
        Flock flock(N, 100, 100);
        Flock copy(flock);
        for (int i = 0; i < N; i++) {
            EXPECT_EQ(flock.boids[i].position.x, copy.boids[i].position.x);
            EXPECT_EQ(flock.boids[i].position.y, copy.boids[i].position.y);
            EXPECT_EQ(flock.boids[i].velocity.x, copy.boids[i].velocity.x);
            EXPECT_EQ(flock.boids[i].velocity.y, copy.boids[i].velocity.y);
        }
    }
}
}
<file_sep>#include "nearest_neighbors/nanoflann.hpp"
#include "nearest_neighbors/visible_proximity.hpp"
#include "utility/random_numbers.hpp"
#include "gtest/gtest.h"
#include <cmath>
#include <cstdlib>
#include <vector>
using namespace nanoflann;
using namespace flockingbird;
// Fixture: a four-boid flock with two tight pairs -- (1.01, 2.12)/(1.5, 2.5)
// and (5, 4)/(5.01, 3.99) -- used to probe radius queries.
class VisibleProximityTest : public ::testing::Test {
public:
protected:
    VisibleProximityTest()
        : flock(Flock(0, 10, 10)) {
        // Directions are irrelevant for proximity lookups; reuse one value.
        Vector2D dummyDirection = Vector2D(1.0, 1.0);
        flock.boids.push_back(Boid(Vector2D(1.01, 2.12), dummyDirection));
        flock.boids.push_back(Boid(Vector2D(1.5, 2.5), dummyDirection));
        flock.boids.push_back(Boid(Vector2D(5, 4), dummyDirection));
        flock.boids.push_back(Boid(Vector2D(5.01, 3.99), dummyDirection));
    };
    Flock flock;
    virtual void TearDown(){};
};
// A radius query around boid 0 must return exactly its one close neighbour.
TEST_F(VisibleProximityTest, FindNearestNeighborsInCloseProximity) {
    VisibleProximity visibleProximity(flock);
    const float visionRange = 1;
    std::vector<Boid> visibleBoids = visibleProximity.of(/*boid at index*/ 0, visionRange);
    EXPECT_EQ(visibleBoids.size(), 1);
    Boid firstBoid = visibleBoids.front();
    EXPECT_EQ(firstBoid.position.x, 1.5);
    EXPECT_EQ(firstBoid.position.y, 2.5);
}
// Boundary case: a neighbour exactly on the search radius is included.
// Note that visionRange is compared against the *squared* distance here.
TEST_F(VisibleProximityTest, FindsPointsOnEdgeOfSearchRadius) {
    VisibleProximity visibleProximity(flock);
    const float visionRange = 0.3845 + 1E-5; // ((1.5 - 1.01) ^ 2 + (2.5-2.12) ^ 2 == 0.3845)
    std::vector<Boid> visibleBoids = visibleProximity.of(/*boid at index*/ 0, visionRange);
    EXPECT_EQ(visibleBoids.size(), 1);
    Boid firstBoid = visibleBoids.front();
    EXPECT_EQ(firstBoid.position.x, 1.5);
    EXPECT_EQ(firstBoid.position.y, 2.5);
}
// Just below the squared edge distance, the neighbour must drop out.
TEST_F(VisibleProximityTest, ExcludesPointsOnEdgeOfSearchRadius) {
    VisibleProximity visibleProximity(flock);
    const float visionRange = 0.3844; // ((1.5 - 1.01) ^ 2 + (2.5-2.12) ^ 2 == 0.3845)
    std::vector<Boid> visibleBoids = visibleProximity.of(/*boid at index*/ 0, visionRange);
    EXPECT_EQ(visibleBoids.size(), 0);
}
// The query object must be reusable: lookups for other boids in between
// must not change the answer for boid 0.
TEST_F(VisibleProximityTest, MultipleCallsDontVaryTheResult) {
    VisibleProximity visibleProximity(flock);
    const float visionRange = 0.3844; // ((1.5 - 1.01) ^ 2 + (2.5-2.12) ^ 2 == 0.3845)
    visibleProximity.of(/*boid at index*/ 0, visionRange);
    visibleProximity.of(/*boid at index*/ 2, visionRange);
    visibleProximity.of(/*boid at index*/ 1, visionRange);
    std::vector<Boid> visibleBoids = visibleProximity.of(0, visionRange);
    EXPECT_EQ(visibleBoids.size(), 0);
}
// With a huge radius, every boid except the query boid itself is returned.
TEST_F(VisibleProximityTest, FindTheWholeFlockWithASufficientVisionRange) {
    VisibleProximity visibleProximity(flock);
    const float visionRange = 50;
    std::vector<Boid> visibleBoids = visibleProximity.of(/*boid at index*/ 2, visionRange);
    EXPECT_EQ(visibleBoids.size(), flock.boids.size() - 1);
}
// A very small radius still finds the one boid sitting almost on top of
// boid 2.  (Renamed from the misspelled "FindsNeigborWithVeryNarrowVision".)
TEST_F(VisibleProximityTest, FindsNeighborWithVeryNarrowVision) {
    VisibleProximity visibleProximity(flock);
    const float visionRange = 0.2;
    std::vector<Boid> visibleBoids = visibleProximity.of(/*boid at index*/ 2, visionRange);
    EXPECT_EQ(visibleBoids.size(), 1);
}
/*
 * Randomized property tests: every boid reported by a radius query must
 * actually lie within that radius of the query boid.
 */
// Reference implementation of the squared L2 distance, used to cross-check
// the VisibleProximity results below.
inline float L2_Reference(Vector2D a, Vector2D b) {
    const float deltaX = a.x - b.x;
    const float deltaY = a.y - b.y;
    return pow(deltaX, 2) + pow(deltaY, 2);
}
// Property test: for random flocks and a random query radius, every boid
// returned by VisibleProximity must lie within that (squared) radius of
// the query boid, checked against the L2_Reference implementation above.
TEST_F(VisibleProximityTest, RandomTests) {
    int N = 30; // if test runs in under 1 sec, we can reach this fps
    for (int testRun = 0; testRun < N; testRun++) {
        int numBoids = 500;
        std::vector<Boid> boids;
        for (int boid = 0; boid < numBoids; boid++) {
            boids.push_back(
                Boid(Vector2D(randomInBounds(0, 10), randomInBounds(0, 10)), Vector2D(0, 0)));
        }
        float visionRange = randomInBounds(0, 10);
        Flock flock = Flock();
        flock.boids = boids;
        // Untouched copy of the flock, used as ground truth for positions.
        Flock refFlock(flock);
        VisibleProximity visibleProximity(flock);
        for (int i = 0; i < numBoids; i++) {
            std::vector<Boid> visibleBoids = visibleProximity.of(/*boid at index*/ i, visionRange);
            for (auto boidIt = visibleBoids.begin(); boidIt != visibleBoids.end(); boidIt++) {
                EXPECT_LE(L2_Reference(boidIt->position, refFlock.boids[i].position), visionRange);
            }
        }
    }
}
<file_sep>#include "utility/vector_operations.hpp"
#include "gtest/gtest.h"
#include <iostream>
#include "flock_simulation/simulation.hpp"
#include "gmock/gmock.h"
using ::testing::_;
using ::testing::Return;
using namespace flockingbird;
class SimulationTest : public ::testing::Test {
public:
protected:
SimulationTest() {
Boid boid1 = Boid(Vector2D(1.0, 2), Vector2D(1.0, 1.0));
Boid boid2 = Boid(Vector2D(2, 3), Vector2D(2, 1.0));
Boid boid3 = Boid(Vector2D(3, 4), Vector2D(1.0, 3.5));
Boid boid4 = Boid(Vector2D(0, 0), Vector2D(1.0, 1.0));
Boid boid5 = Boid(Vector2D(1024, 1024),
Vector2D(1.0, 1.0)); // out of range boid, should not affect calculations
boids.push_back(boid1);
boids.push_back(boid2);
boids.push_back(boid3);
boids.push_back(boid4);
boids.push_back(boid5);
flock.boids = boids;
};
std::vector<Boid> boids;
Flock flock;
virtual void TearDown(){};
};
class MockRule : public Rule {
public:
MockRule(): callReturnValue(Vector2D(1, 1)) {
}
Vector2D callReturnValue;
MOCK_METHOD(Vector2D, Apply, (Boid, std::vector<Boid>, FlockSimulationParameters));
Vector2D operator()(Boid boidToUpdate, std::vector<Boid> proximity, FlockSimulationParameters configuration) override {
return Apply(boidToUpdate, proximity, configuration);
}
};
TEST_F(SimulationTest, TestStepAppliesRulesToSingleOutlierBoid) {
// Arrange
FlockSimulationParameters testParameters;
MockRule dummyRule;
testParameters.speedLimit = 500;
EXPECT_CALL(dummyRule, Apply(_, _, _))
.WillRepeatedly(Return(Vector2D(1, 1)));
std::vector<Rule*> rules;
rules.push_back(&dummyRule);
FlockSimulation simulation(testParameters, flock, rules);
// Act
simulation.step();
Boid outlierBoid = flock.boids[4];
Vector2D expectedPosition = Vector2D(1026, 1026);
// Assert
EXPECT_EQ(outlierBoid.position, expectedPosition);
EXPECT_EQ(outlierBoid.velocity, Vector2D(2, 2));
}
TEST_F(SimulationTest, TestSteppAppliesRulesForAllNeighbors) {
// Arrange
FlockSimulationParameters testParameters;
testParameters.speedLimit = 500;
MockRule dummyRule;
std::vector<Rule*> rules;
rules.push_back(&dummyRule);
FlockSimulation simulation(testParameters, flock, rules);
EXPECT_CALL(dummyRule, Apply(_, _, _)).Times(5).WillRepeatedly(Return(Vector2D(1, 1)));
// Act
simulation.step();
}
<file_sep>#pragma once
#include <random>
inline float randomInBounds(float fMin, float fMax) {
float f = (float)rand() / static_cast<float>(RAND_MAX);
return fMin + f * (fMax - fMin);
}
<file_sep>#include "boid.hpp"
#include "configuration.hpp"
#include "../utility/vector_operations.hpp"
#include <iostream>
#include <vector>
#pragma once
class Rule {
public:
virtual Vector2D operator()(flockingbird::Boid boidToUpdate,
std::vector<flockingbird::Boid> proximity,
flockingbird::FlockSimulationParameters configuration)
= 0;
};
class SeparationRule: public Rule {
public:
virtual Vector2D operator()(flockingbird::Boid boidToUpdate,
std::vector<flockingbird::Boid> proximity,
flockingbird::FlockSimulationParameters configuration) {
int count = 0;
Vector2D steer(0, 0);
for (flockingbird::Boid boid : proximity) {
float d = boidToUpdate.position.distanceTo(boid.position);
if (d > 0 && d < configuration.avoidanceRadius) {
Vector2D diff = (boidToUpdate.position - boid.position).normalized() / d;
steer = steer + diff;
count += 1;
}
}
if (count > 0) {
steer = steer / count;
}
if (steer.magnitude() > 0) {
steer = steer.normalized();
steer = steer * configuration.speedLimit;
steer = steer - boidToUpdate.velocity;
return steer.limit(configuration.forceLimit) * configuration.separationWeight;
}
return steer;
};
};
class AlignmentRule: public Rule {
public:
virtual Vector2D operator()(flockingbird::Boid boidToUpdate,
std::vector<flockingbird::Boid> proximity,
flockingbird::FlockSimulationParameters configuration) {
Vector2D sum(0, 0);
int count = 0;
for (flockingbird::Boid boid : proximity) {
sum = sum + boid.velocity;
count++;
}
if (count > 0) {
sum = sum / count;
sum = sum.normalized() * configuration.speedLimit;
Vector2D steer = sum - boidToUpdate.velocity;
return steer.limit(configuration.forceLimit) * configuration.alignmentWeight;
}
return sum;
}
};
class CohesionRule: public Rule {
public:
virtual Vector2D operator()(flockingbird::Boid boidToUpdate,
std::vector<flockingbird::Boid> proximity,
flockingbird::FlockSimulationParameters configuration) {
Vector2D sum(0, 0);
int count = 0;
for (flockingbird::Boid boid : proximity) {
sum = sum + boid.position;
count++;
}
// Steer towards average position
if (count > 0) {
Vector2D target = sum / count;
Vector2D desired = target - boidToUpdate.position;
desired = desired.normalized() * configuration.speedLimit;
Vector2D steer = desired - boidToUpdate.velocity;
return steer.limit(configuration.forceLimit) * configuration.cohesionWeight;
}
return sum;
}
};
static SeparationRule separationRule;
static AlignmentRule alignmentRule;
static CohesionRule cohesionRule;
static std::vector<Rule*> defaultRules { &separationRule, &alignmentRule, &cohesionRule };
<file_sep>#pragma once
#include "../utility/random_numbers.hpp"
#include "../utility/vector_operations.hpp"
#include "boid.hpp"
namespace flockingbird {
class Flock {
public:
// constructors
Flock() {
std::vector<flockingbird::Boid> emptyBoids;
boids = emptyBoids;
}
Flock(std::vector<flockingbird::Boid> boids): boids(boids) {}
Flock(int numBoids, int maxX, int maxY) {
std::vector<flockingbird::Boid> result;
for (int i = 0; i < numBoids; i++) {
flockingbird::Boid randomBoid(Vector2D(randomInBounds(0, maxX),
randomInBounds(0, maxY)),
Vector2D(randomInBounds(-1, 1), randomInBounds(-1, 1)));
result.push_back(randomBoid);
}
boids = result;
}
// members
std::vector<flockingbird::Boid> boids;
// nanoflann API
inline size_t kdtree_get_point_count() const { return boids.size(); }
inline float kdtree_get_pt(const size_t idx, const size_t dim) const {
if (dim == 0)
return boids[idx].position.x;
else
return boids[idx].position.y;
}
template <class BBOX>
bool kdtree_get_bbox(BBOX& /* bb */) const {
return false;
}
};
} // namespace flockingbird
<file_sep>#include "rules.hpp"
#include "../nearest_neighbors/visible_proximity.hpp"
#include <vector>
#include "configuration.hpp"
namespace flockingbird {
class FlockSimulation {
private:
flockingbird::FlockSimulationParameters configuration;
std::vector<Rule*> rules;
float wrap(float val, float max) { return val - max * floor(val / max); }
Vector2D wrap(Vector2D position, float maxX, float maxY) {
return Vector2D(wrap(position.x, maxX), wrap(position.y, maxY));
}
public:
flockingbird::Flock& flock;
FlockSimulation(flockingbird::FlockSimulationParameters configurationIn,
flockingbird::Flock& flockIn,
std::vector<Rule*> rules) :
configuration(configurationIn),
rules(rules),
flock(flockIn) {}
void step() {
VisibleProximity visibleProximity(flock);
for (auto it = flock.boids.begin(); it != flock.boids.end(); it++) {
int i = std::distance(flock.boids.begin(), it);
Vector2D velocityUpdate(0, 0);
for (Rule* rule : rules) {
std::vector<flockingbird::Boid> proximity
= visibleProximity.of(i, pow(configuration.visionRange, 2));
velocityUpdate
= velocityUpdate + (*rule)(flock.boids[i], proximity, configuration);
}
Boid * boid = &flock.boids[i];
boid->velocity
= (boid->velocity + velocityUpdate).limit(configuration.speedLimit);
boid->position = boid->position + boid->velocity;
if (configuration.maxX > 0 && configuration.maxY > 0) {
boid->position
= wrap(boid->position, configuration.maxX, configuration.maxY);
}
}
}
};
}
<file_sep>#include "gtest/gtest.h"
#include "nearest_neighbors/nanoflann.hpp"
#include "flock_simulation/flock.hpp"
using namespace nanoflann;
using namespace flockingbird;
const int dim = 2;
const int maxLeaf = 10;
typedef KDTreeSingleIndexAdaptor<L1_Adaptor<double, Flock>,
Flock,
dim
> my_kd_tree_t;
class NanoflannTest: public ::testing::Test {
public:
protected:
NanoflannTest() : flock(Flock()),
kdTree(dim, flock, KDTreeSingleIndexAdaptorParams(maxLeaf)) {
// current direction vector not relevant for proximity tests
std::vector<double> dummyDirection = std::vector<double> {1.0, 1.0};
Boid boid1 = Boid(Vector2D(1, 2), Vector2D(1, 1));
Boid boid2 = Boid(Vector2D(1, 2), Vector2D(1, 1));
Boid boid3 = Boid(Vector2D(5, 4), Vector2D(1, 1));
flock.boids.push_back(boid1);
flock.boids.push_back(boid2);
flock.boids.push_back(boid3);
kdTree.buildIndex();
};
Flock flock;
my_kd_tree_t kdTree;
virtual void TearDown(){
};
};
TEST_F(NanoflannTest, FindsNeighborsNextToQueryPoint) {
nanoflann::SearchParams params;
const double search_radius = static_cast<double>(0.1);
std::vector<std::pair<size_t, double>> ret_matches;
const double query_pt[2] = {1, 2.09};
const size_t nMatches = kdTree.radiusSearch(&query_pt[0], search_radius, ret_matches, params);
EXPECT_EQ(nMatches, 2) << "Found wrong neighbors: " << nMatches;
EXPECT_EQ(ret_matches[0].first, 0);
EXPECT_EQ(ret_matches[1].first, 1);
}
TEST_F(NanoflannTest, ExcludesNeighborsIfNotInRadius) {
nanoflann::SearchParams params;
const double search_radius = static_cast<double>(0.5);
std::vector<std::pair<size_t, double>> ret_matches;
const double query_pt[2] = {5.2, 4.1};
const size_t nMatches = kdTree.radiusSearch(&query_pt[0], search_radius, ret_matches, params);
EXPECT_EQ(nMatches, 1) << "Found wrong neighbors: " << nMatches;
EXPECT_EQ(ret_matches[0].first, 2); // finds only boid 3 (index 2)
}
<file_sep>#pragma once
namespace flockingbird {
struct FlockSimulationParameters {
float speedLimit;
float forceLimit;
float positionIncrementScalingFactor;
float avoidanceRadius;
float visionRange;
float separationWeight;
float alignmentWeight;
float cohesionWeight;
float maxX = -1;
float maxY = -1;
FlockSimulationParameters() {}
FlockSimulationParameters(float speedLimit,
float forceLimit,
float positionIncrementScalingFactor,
float avoidanceRadius,
float visionRange,
float separationWeight,
float alignmentWeight,
float cohesionWeight,
float maxX,
float maxY)
: speedLimit(speedLimit)
, forceLimit(forceLimit)
, positionIncrementScalingFactor(positionIncrementScalingFactor)
, avoidanceRadius(avoidanceRadius)
, visionRange(visionRange)
, separationWeight(separationWeight)
, alignmentWeight(alignmentWeight)
, cohesionWeight(cohesionWeight),
maxX(maxX),
maxY(maxY){}
};
} // namespace flockingbird
<file_sep>#include "../flock_simulation/flock.hpp"
#include "nanoflann.hpp"
#include <iostream>
#include <ostream>
#include <vector>
using namespace nanoflann;
const int dim = 2;
const int maxLeaf = 10;
typedef KDTreeSingleIndexAdaptor<L2_Simple_Adaptor<float, flockingbird::Flock>, flockingbird::Flock, dim> kd_tree_t;
class VisibleProximity {
private:
flockingbird::Flock flock;
kd_tree_t kdTree;
public:
VisibleProximity(flockingbird::Flock flockToQuery)
: flock(flockToQuery)
, kdTree(dim, flock, KDTreeSingleIndexAdaptorParams(maxLeaf)) {
kdTree.buildIndex();
}
std::vector<flockingbird::Boid> of(int index, float visionRange) {
nanoflann::SearchParams params;
params.sorted = false;
std::vector<std::pair<size_t, float>> ret_matches;
Vector2D boidPosition = flock.boids[index].position;
const float query_pt[2] = {boidPosition.x, boidPosition.y};
kdTree.radiusSearch(query_pt, visionRange, ret_matches, params);
// TODO: Maybe block vision in the backwards direction of the bird?
std::vector<flockingbird::Boid> result;
for (auto match: ret_matches) {
float distance = match.second;
if (distance > 0) {
result.push_back(flock.boids[match.first]);
}
}
return result;
}
};
<file_sep>#pragma once
#include <iostream>
#include <ostream>
#include <math.h>
#include <bit>
#include <cstdint>
#include <limits>
class Vector2D {
public:
Vector2D(float xIn, float yIn)
: x(xIn)
, y(yIn) {}
float x, y;
float magnitude() { return sqrt(pow(x, 2) + pow(y, 2)); }
Vector2D normalized() {
float n = pow(x, 2) + pow(y, 2);
float invSq = Q_rsqrt(n);
return Vector2D(invSq * x, invSq * y);
}
float distanceTo(Vector2D other) {
float a = abs(x-other.x);
float b = abs(y-other.y);
return Vector2D(a, b).magnitude();
}
Vector2D limit(float maxForce) {
if (magnitude() > maxForce * maxForce) {
Vector2D norm = normalized();
return Vector2D(norm.x * maxForce, norm.y * maxForce);
}
return *this;
}
friend std::ostream& operator<<(std::ostream& outputStream, const Vector2D& p);
private:
float Q_rsqrt(float number) {
const float x2 = number * 0.5F;
const float threehalfs = 1.5F;
union {
float f;
uint32_t i;
} conv = {.f = number};
conv.i = 0x5f3759df - (conv.i >> 1);
conv.f *= threehalfs - (x2 * conv.f * conv.f);
return conv.f;
}
};
inline std::ostream& operator<<(std::ostream& outputStream, const Vector2D& p) {
outputStream << "[" << p.x << ", " << p.y << "]";
return outputStream;
}
inline bool operator==(Vector2D a, Vector2D b) { return a.x == b.x && a.y == b.y; }
inline Vector2D operator+(Vector2D a, Vector2D b) { return Vector2D(a.x + b.x, a.y + b.y); }
inline Vector2D operator-(Vector2D a, Vector2D b) { return Vector2D(a.x - b.x, a.y - b.y); }
inline Vector2D operator*(Vector2D a, float x) { return Vector2D(a.x * x, a.y * x); }
inline Vector2D operator/(Vector2D a, float x) { return Vector2D(a.x / x, a.y / x); }
<file_sep>### flockingbird
A header-only c++ library for creating 2D flocking animations.
http://www.cs.toronto.edu/~dt/siggraph97-course/cwr87/

The algorithm uses [kD-Trees](https://github.com/jlblancoc/nanoflann) to calculate the neighbors of a boid, making it possible to simulate more than a thousand boids (depending on framerate, compiler optimization level, and processor speed).
Please use the highest optimization level if you compile the library yourself. 500 boids is already difficult with the default optimization, whereas with `-O3` you can easiliy run a 1000 boid simulation.
### setup
(I don't have a windows PC, so just for mac)
```bash
brew install cmake
brew install llvm
brew install clang-format # for working on the project/formatting
```
### Dependencies
For the demo animation:
```bash
brew install cairo
brew install gtk+3
```
NOTE: i had to
```bash
export LDFLAGS="-L/usr/local/Cellar/cairo/1.16.0_5/lib $LDFLAGS"
```
### build
```bash
cd build
cmake ..
make && make install
```
### build and test
```bash
./test.sh
```
### run demo
```bash
./build/flockingbird_demo
```
### emacs integration
make sure that compile_commands is linked to the root
```bash
ln build/compile_commands.json .
```
### Credits/Disclaimer
Resources used for development
- http://www.vergenet.net/~conrad/boids/pseudocode.html
- https://p5js.org/examples/simulate-flocking.html
### Bindings
- [Swift](https://github.com/falcowinkler/flockingbird-swift) (Work in progress)
| 13d7ed402bcf152b922095c2433824ed0188f387 | [
"Markdown",
"CMake",
"C++",
"Shell"
] | 21 | CMake | wangtaoyijiu/flockingbird | eed3e7cead4a37635625d1055fb0a830e6152d1b | 550260a81ddbc2bdc11626d667b8be153017bede |
refs/heads/master | <repo_name>purdyben/Comps-311Design-and-Analysis-of-Algorithms<file_sep>/README.md
# Comps-311Design-and-Analysis-of-Algorithms
Great class
*Hws are all done in latex language tackling algorithm problems and big O understanding.
* Project regrade is a java based interval treap
<file_sep>/Project_1_regrade/Interval.java
/**
* Interval(int low, int high): Constructor with two parameters: the low and high
* endpoints.
* • int getLow(): Returns the low endpoint of the interval.
* • int getHigh(): Returns the high endpoint of the interval
*/
public class Interval {
private int low;
private int high;
public Interval(int low, int high){
this.low = low;
this.high = high;
}
public int getHigh() {
return high;
}
public int getLow() {
return low;
}
}
<file_sep>/Project_1/src/Main.java
import java.util.ArrayList;
import static org.junit.Assert.fail;
public class Main {
static final int count = 14;
// main function of the program
public static void main(String[] args){
//test cases, create nodes as the Figure 3 in Project instruction.
//node n1 to n11 is same with the node in Figure 3.
Interval i1 = new Interval(16,21);
Node n1 = new Node(i1);
n1.setPriority(8);
Interval i2 = new Interval(8,9);
Node n2 = new Node(i2);
n2.setPriority(12);
Interval i3 = new Interval(25,30);
Node n3 = new Node(i3);
n3.setPriority(10);
Interval i4 = new Interval(5,8);
Node n4 = new Node(i4);
n4.setPriority(17);
Interval i5 = new Interval(15,23);
Node n5 = new Node(i5);
n5.setPriority(16);
Interval i6 = new Interval(17,19);
Node n6 = new Node(i6);
n6.setPriority(13);
Interval i7 = new Interval(26,26);
Node n7 = new Node(i7);
n7.setPriority(11);
Interval i8 = new Interval(0,3);
Node n8 = new Node(i8);
n8.setPriority(21);
Interval i9 = new Interval(6,10);
Node n9 = new Node(i9);
n9.setPriority(20);
Interval i10 = new Interval(19,20);
Node n10 = new Node(i10);
n10.setPriority(17);
Interval i11 = new Interval(7,25);
Node n11 = new Node(i11);
n11.setPriority(9);
Interval i12 = new Interval(27,41);
Node n12 = new Node(i12);
n12.setPriority(32);
Interval i13 = new Interval(12,22);
Node n13 = new Node(i13);
n13.setPriority(18);
Interval i14 = new Interval(13,25);
Node n14 = new Node(i14);
n14.setPriority(40);
IntervalTreap T = new IntervalTreap();
// You can insert the 14 nodes in any order, and the result should be same
T.intervalInsert(n9);
print2D((T.getRoot()));
T.intervalInsert(n3);
print2D((T.getRoot()));
T.intervalInsert(n6);
print2D((T.getRoot()));
T.intervalInsert(n1);
print2D((T.getRoot()));
T.intervalInsert(n4);
print2D((T.getRoot()));
T.intervalInsert(n7);
print2D((T.getRoot()));
T.intervalInsert(n8);
print2D((T.getRoot()));
T.intervalInsert(n2);
print2D((T.getRoot()));
T.intervalInsert(n11);
print2D((T.getRoot()));
T.intervalInsert(n5);
print2D((T.getRoot()));
T.intervalInsert(n10);
print2D((T.getRoot()));
T.intervalInsert(n12);
//
T.intervalInsert(n13);
//
T.intervalInsert(n14);
//System.out.println(T.getHeight());
//
// ArrayList<Node> arr = new ArrayList<Node>();
// inOrder(T.getRoot(),arr);
//
// for (Node node : arr) {
// System.out.print(node.priority + ",");
// }
// print2D((T.getRoot()));
//// // Deletion case1
T.intervalDelete(n2);
// arr = new ArrayList<Node>();
// inOrder(T.getRoot(),arr);
//
// for (Node node : arr) {
// System.out.print(node.priority + ",");
// }
// System.out.println(" ");
// print2D((T.getRoot()));
// //print2D((T.getRoot()));
//// //T.inOrder(T.getRoot());
//// System.out.println(T.intervalSearch(i2).toString());
//// System.out.println(T.intervalSearchExactly(i2));
//
//
//// // Deletion case2
T.intervalDelete(n14);
// arr = new ArrayList<Node>();
// inOrder(T.getRoot(),arr);
//
// for (Node node : arr) {
// System.out.print(node.priority + ",");
// }
// System.out.println(" ");
////// //T.inOrder(T.getRoot());
////// print2D((T.getRoot()));
////// // Deletion case3
T.intervalDelete(n1);
//// System.out.println(T.intervalSearchExactly(i1).toString());
// print2D((T.getRoot()));
// arr = new ArrayList<Node>();
}
static void print2DUtil(Node root, int space) {
// Base case
if (root == null) {
return;
}
// Increase distance between levels
space += count;
// Process right child first
print2DUtil(root.right, space);
// Print current node after space
// count
System.out.print("\n");
for (int i = count; i < space; i++)
System.out.print(" ");
System.out.println(root.hight);
// if(root.parent == null){
// System.out.println("(" + root.interv.getLow() + "," + root.interv.getHigh() + ")"+ ":" + root.imax +", " + root.priority +":");
//
// }else{
// System.out.println("(" + root.interv.getLow() + "," + root.interv.getHigh() + ")"+ ":" + root.imax + ", " + root.priority );
//
// }
// System.out.println("(" + root.interv.getLow() + "," + root.interv.getHigh() + ")"+ ":" + root.imax + " - par =" + root.parent.priority + " - " + root.priority + ", height - " + root.hight);
//System.out.println(root.hight + "," + root.priority);
// if(root.parent != null){
// System.out.println(root.parent.priority + ", " + root.imax+ ", " + root.priority);
//
// }else{
// System.out.println("null" + ", "+ root.imax + ", " + root.priority);
//
// }
//System.out.println("(" + root.interv.getLow() + "," + root.interv.getHigh() + ")"+ ":" + root.imax);
// Process left child
print2DUtil(root.left, space);
}
// Wrapper over print2DUtil()
static void print2D(Node root) {
// Pass initial space count as 0
print2DUtil(root, 0);
System.out.println("----------------------------------------------------------------");
}
public static void inOrder(Node node, ArrayList<Node> array){
if(node == null){
return;
}
inOrder(node.getLeft(), array);
array.add(node);
//As you visit each node, check for the heap property.
if (node.getParent()!=null && node.getPriority() < node.getParent().getPriority()) {
fail("failed treap's min-heap property!");
}
inOrder(node.getRight(), array);
}
}
<file_sep>/Project_1/src/README.txt
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
<NAME>
04/27/2020
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
1. This is a very small test case for treap. The test case use the same nodes(same interval and same priority) in Figure 3(D) of Project Instruction.
2. To test your code, run the code file "main.java". Before you run "main.java", you need to do the following things in your code.
(1) Adding a function for setting priority(in Node class).
public void setPriority(int v) {
this.priority = v;
}
(2) Adding a function for inOrder walk(in IntervalTreap class).
public void inOrder(Node i){
if(i != null){
inOrder(i.getLeft());
Node parent = i.getParent();
Node left = i.getLeft();
Node right = i.getRight();
System.out.print("Node: ("+i.getInterv().getLow()+" "+i.getInterv().getHigh()+" "+ i.getIMax()+" "+i.getPriority()+" "+i.getHeight()+") ");
if(parent != null)
System.out.print("Parent: ("+parent.getInterv().getLow()+" "+parent.getInterv().getHigh()+" "+ parent.getIMax()+" "+parent.getPriority()+" "+parent.getHeight()+") ");
else
System.out.print("Parent: (null) ");
if(left != null)
System.out.print(" Left: ("+left.getInterv().getLow()+" "+left.getInterv().getHigh()+" "+ left.getIMax()+" "+left.getPriority()+" "+left.getHeight()+") ");
else
System.out.print("Left: (null) ");
if(right != null)
System.out.println("Right: ("+right.getInterv().getLow()+" "+right.getInterv().getHigh()+" "+ right.getIMax()+" "+right.getPriority()+" "+right.getHeight()+") ");
else
System.out.println("Right: (null)");
inOrder(i.getRight());
}
}
3. Since this test case is very small, it can be traced by hand. Each time you do a insertion, deletion or search, you can know what the result is. The inOrder() will print every attribute of each node. Just check if every attibute is correct.
4. You also can add smoe other nodes or change the attibute of nodes for testing.
5. When I was writing my own code, first I use this small test to check my code. And it finds a lot of bugs. If you have no clue where the bug is, you can start with this small test to see if you can find some bugs.
| 51bc7d71c87c7d977f3bfca48070317214aa84d7 | [
"Markdown",
"Java",
"Text"
] | 4 | Markdown | purdyben/Comps-311Design-and-Analysis-of-Algorithms | bdbba4eb149a7fe3c9d2b20974160f75ec0637f5 | 6fd622d331e616c9507f1ae6c703452fe8d10b95 |
refs/heads/master | <repo_name>crist014/CS441-GameTrader<file_sep>/indexheader.php
<!--
AUTHOR: <NAME>
Public (logged-out) page header: logo, search bar, and the Sign Up / Login
modal dialogs. Included by index.php.
-->
<?php
include 'header.php';
?>
<header>
    <nav>
        <ul>
            <li class="logo"><a href="index.php">Game Trader Logo</a></li>
            <li class="search">
                <form action="search.php" method="POST">
                    <input class="search-text" type="text" name="search" placeholder="Search...">
                    <!-- type="search" is not a valid button type; it only worked
                         because browsers fall back to "submit" for unknown types. -->
                    <button class="button w3-button w3-blue-grey" type="submit" name="submit-search">Search</button>
                </form>
            </li>
            <!-- SIGN UP BOTTON -->
            <li class="signup">
                <div class="w3-container">
                    <button onclick="document.getElementById('id02').style.display='block'" class="button w3-button w3-blue-grey">Sign Up</button>
                    <div id="id02" class="w3-modal">
                        <div class="w3-modal-content w3-card-4 w3-animate-top" style="max-width:600px">
                            <div class="w3-center"><br>
                                <span onclick="document.getElementById('id02').style.display='none'" class="w3-button w3-xlarge w3-hover-red w3-display-topright" title="Close Modal">&times;</span>
                                <form class="w3-container" action="signup.php" method="POST">
                                    <div class="w3-section">
                                        <ul>
                                            <li><b>Username</b></li>
                                            <li><input class="w3-input w3-border w3-margin-bottom" type="text" name="uid" placeholder="Enter Username" required></li>
                                            <li><b>Email</b></li>
                                            <li><input class="w3-input w3-border w3-margin-bottom" type="text" name="mail" placeholder="Enter Email" required></li>
                                            <li><b>Password</b></li>
                                            <li><input class="w3-input w3-border w3-margin-bottom" type="password" name="pwd" placeholder="Enter Password" required></li>
                                            <li><b>Confirm Password</b></li>
                                            <li><input class="w3-input w3-border" type="password" name="pwd-repeat" placeholder="<PASSWORD>" required></li>
                                            <li><button class="w3-button w3-block w3-blue-grey w3-section w3-padding" type="submit" name="signup-submit">Sign Up</button></li>
                                        </ul>
                                    </div>
                                </form>
                                <div class="w3-container w3-border-top w3-padding-16 w3-light-grey">
                                    <button onclick="document.getElementById('id02').style.display='none'" type="button" class="w3-button w3-red">Cancel</button>
                                </div>
                            </div>
                        </div>
                    </div>
                </div>
            </li> <!-- END OF SIGN UP BUTTON -->
            <!-- LOGIN BUTTON -->
            <li class="login">
                <div class="w3-container">
                    <button onclick="document.getElementById('id01').style.display='block'" class="button w3-button w3-blue-grey">Login</button>
                    <div id="id01" class="w3-modal">
                        <div class="w3-modal-content w3-card-4 w3-animate-top" style="max-width:600px">
                            <div class="w3-center"><br>
                                <span onclick="document.getElementById('id01').style.display='none'" class="w3-button w3-xlarge w3-hover-red w3-display-topright" title="Close Modal">&times;</span>
                                <form class="w3-container" action="login.php" method="POST">
                                    <div class="w3-section">
                                        <label><b>Username</b></label>
                                        <input class="w3-input w3-border w3-margin-bottom" type="text" placeholder="Enter Username..." name="mailuid" required>
                                        <label><b>Password</b></label>
                                        <input class="w3-input w3-border" type="password" placeholder="Enter Password..." name="pwd" required>
                                        <button class="w3-button w3-block w3-blue-grey w3-section w3-padding" type="submit">Login</button>
                                    </div>
                                </form>
                                <div class="w3-container w3-border-top w3-padding-16 w3-light-grey">
                                    <button onclick="document.getElementById('id01').style.display='none'" type="button" class="w3-button w3-red">Cancel</button>
                                </div>
                            </div>
                        </div>
                    </div>
                </div>
            </li> <!-- END OF LOGIN BUTTON -->
        </ul>
    </nav>
</header><file_sep>/member.php
<!--
AUTHOR: <NAME>
Resources: https://www.w3schools.com/w3css/4/w3.css
           w3schools.com/w3css/w3css_modal.asp
-->
<?php
// Member landing page. Requires an authenticated session: login.php /
// signup.php set $_SESSION['username'], and addlisting.php / profile.php
// both depend on it, so an unauthenticated visitor has no business here.
// session_start() runs before the includes emit any HTML so the session
// cookie headers can still be sent.
session_start();
if (empty($_SESSION['username'])) {
    header('Location: index.php');
    exit;
}
include 'memberheader.php';
include 'about.php';
?>
<html>
<head>
<title>Member</title>
</head>
</html><file_sep>/memberheader.php
<!--
AUTHOR: <NAME>
Logged-in page header: logo, search bar, Sign out / Profile links, and the
"Post" modal used to create a new listing (handled by addlisting.php).
-->
<?php
include 'header.php';
?>
<header>
    <nav>
        <ul>
            <li class="logo"><a href="member.php">Game Trader Logo</a></li>
            <li class="search">
                <form action="search.php" method="POST">
                    <input class="search-text w3-text-blue-grey" type="text" name="search" placeholder="Search...">
                    <!-- type="search" is not a valid button type; use "submit". -->
                    <button class="button w3-button w3-blue-grey " type="submit" name="submit-search">Search</button>
                </form>
            </li>
            <li class="button signout w3-button w3-blue-grey"><a href="logout.php">Sign out</a></li>
            <li class="button profile w3-button w3-blue-grey"><a href="profile.php">Profile</a></li>
            <!-- POST BUTTON -->
            <li class="post">
                <div class="w3-container">
                    <button onclick="document.getElementById('id01').style.display='block'" class="button w3-button w3-blue-grey">Post</button>
                    <div id="id01" class="w3-modal">
                        <div class="w3-modal-content w3-card-4 w3-animate-top" style="max-width:600px">
                            <div class="w3-center"><br>
                                <span onclick="document.getElementById('id01').style.display='none'" class="w3-button w3-xlarge w3-hover-red w3-display-topright" title="Close Modal">&times;</span>
                                <!-- enctype is required for the file input below; without it the
                                     browser never transmits the file data to addlisting.php. -->
                                <form class="w3-container" action="addlisting.php" method="POST" enctype="multipart/form-data">
                                    <div class="w3-section">
                                        <ul>
                                            <li><b>Name of Item</b></li>
                                            <li><input class="w3-input w3-border w3-margin-bottom" type="text" name="item_name" placeholder="Enter Item" required></li>
                                            <li><b>Item Description</b></li>
                                            <li><input class="w3-input w3-border w3-margin-bottom" type="text" name="item_description" placeholder="Enter Description" required></li>
                                            <li><b>Selling Price</b></li>
                                            <li class="dollar-sign"><input class="w3-input w3-border w3-margin-bottom" type="text" name="item_price" placeholder="0.00" required><i>$</i></li>
                                            <li><b>Item to Trade With</b></li>
                                            <li><input class="w3-input w3-border w3-margin-bottom" type="text" name="item_propose" placeholder="Enter Item" ></li>
                                            <li><input type="file" name="img" id="img"></li>
                                            <!--TODO: ADD TOGGLE SWITCH-->
                                            <li><button class="w3-button w3-block w3-blue-grey w3-section w3-padding" type="submit" name="post-submit">Post Item</button></li>
                                        </ul>
                                    </div>
                                </form>
                                <div class="w3-container w3-border-top w3-padding-16 w3-light-grey">
                                    <button onclick="document.getElementById('id01').style.display='none'" type="button" class="w3-button w3-red">Cancel</button>
                                </div>
                            </div>
                        </div>
                    </div>
                </div>
            </li> <!-- END OF POST BUTTON -->
        </ul>
    </nav>
</header><file_sep>/index.php
<!--
AUTHOR: <NAME>
Resources: https://www.w3schools.com/w3css/4/w3.css
           w3schools.com/w3css/w3css_modal.asp
Public home page: marketing copy (about.php) plus the sign-up / login
header (indexheader.php).
-->
<?php
session_start();
// Visiting the home page always flags the session as logged out.
// NOTE(review): $_SESSION["username"] is NOT cleared here, only the
// "login" flag — presumably logout.php is the real logout path; confirm
// whether a stale username left in the session matters to other pages.
$_SESSION["login"] = false;
include 'indexheader.php';
include 'about.php';
?>
<html>
<head>
<title>Home</title>
</head>
</html><file_sep>/logout.php
<?php
// AUTHOR: <NAME>
// Ends the member session and returns the visitor to the public home page.
// The author credit lives inside the PHP block: the old HTML comment was
// emitted as output BEFORE session_start()/header(), which can break both.
session_start();
// The original wrote `$_SESSION["login"] == FALSE;` — a no-op comparison
// where an assignment was intended. session_unset() + session_destroy()
// clears every session variable, making the assignment unnecessary anyway.
session_unset();
session_destroy();
header('Location: index.php');
exit;
?><file_sep>/addlisting.php
<?php
// AUTHORS: <NAME> | <NAME>
// Creates a new marketplace listing from the "Post" modal in
// memberheader.php. (Credits moved inside PHP: the old HTML comment was
// output sent before the header() redirect below.)
include 'dbh.php';
session_start();

// Form fields; `?? ''` avoids undefined-index notices on a direct hit.
$item     = $_POST['item_name'] ?? '';
$price    = $_POST['item_price'] ?? '';
$proposal = $_POST['item_propose'] ?? '';
// No manual escaping: the value is bound through a prepared statement.
// (The original ran mysqli_real_escape_string AND bound it, which stored
// doubled escape characters in the database.)
$itemDesc = $_POST['item_description'] ?? '';
$seller   = $_SESSION['username'] ?? '';

// A listing needs a name, a description, and at least one of price /
// trade proposal. Otherwise fall through and render nothing, as before.
if (!empty($item) && !empty($itemDesc) && (!empty($price) || !empty($proposal)))
{
    // Count existing listings with the same name. The original bound a
    // single variable against this two-column SELECT (a bind_result
    // mismatch); with store_result() only num_rows is needed.
    // NOTE(review): $rnum is never acted on — matching the original
    // behaviour. Confirm whether duplicate names should be rejected.
    $stmt = $conn->prepare("SELECT item_name, seller FROM Listings WHERE item_name = ?");
    $stmt->bind_param('s', $item);
    $stmt->execute();
    $stmt->store_result();
    $rnum = $stmt->num_rows;
    $stmt->close();

    // item_id is generated by the database, so bind NULL instead of an
    // empty string coerced through the 'i' (integer) type.
    $id = null;
    $stmt = $conn->prepare("INSERT INTO Listings (item_id, item_price, item_name, item_propose, item_desc, seller) VALUES (?,?,?,?,?,?)");
    $stmt->bind_param("idssss", $id, $price, $item, $proposal, $itemDesc, $seller);
    $stmt->execute();
    $stmt->close();
    $conn->close();

    header("Location: member.php");
    exit; // stop the script after the redirect
}
?><file_sep>/header.php
<!--
AUTHOR: <NAME>
Shared document head: opens the database connection (via dbh.php, which
defines $conn) and emits the common stylesheet links. Included at the top
of every page header fragment.
-->
<?php
include 'dbh.php';
?>
<!DOCTYPE html>
<html>
<head>
    <link rel="stylesheet" href="styles.css">
    <link rel="stylesheet" href="https://www.w3schools.com/w3css/4/w3.css">
    <meta charset="utf-8">
</head>
<body>
<body><file_sep>/signup.php
<?php
// AUTHORS: <NAME> | <NAME>
// Registers a new account from the sign-up modal in indexheader.php.
// (Credits moved inside PHP: the old HTML comment was output emitted
// before the header() redirect / session_start() below.)
include 'dbh.php';
session_start();

$username    = $_POST['uid'] ?? '';
$email       = $_POST['mail'] ?? '';
$pass        = $_POST['pwd'] ?? '';
$confirmpass = $_POST['pwd-repeat'] ?? '';

if (empty($username) || empty($email) || empty($pass) || empty($confirmpass)) {
    echo "All fields are required";
    die();
}

// Validate the confirmation BEFORE touching the database. In the original
// this check lived in an `elseif` that could never run when all fields
// were filled in, so mismatched passwords were silently accepted.
if (strcmp($pass, $confirmpass) !== 0) {
    echo "Passwords do not match!";
    die();
}

$hashed_pass = password_hash($pass, PASSWORD_DEFAULT);

// Duplicate check against the email column. The original selected
// `username` while binding the email value (its own comment flagged it:
// "come back here to fix").
$stmt = $conn->prepare("SELECT email FROM users WHERE email = ? LIMIT 1");
$stmt->bind_param('s', $email);
$stmt->execute();
$stmt->store_result();
$rnum = $stmt->num_rows;
$stmt->close();

if ($rnum == 0) {
    $stmt = $conn->prepare("INSERT INTO users (username, password, email) VALUES (?,?,?)");
    $stmt->bind_param("sss", $username, $hashed_pass, $email);
    $stmt->execute();
    $stmt->close();
    $conn->close();
    // Establish the session before landing on member.php: profile.php and
    // addlisting.php both read $_SESSION['username'], which the original
    // never set on sign-up.
    $_SESSION['username'] = $username;
    header("Location: member.php");
    exit;
} else {
    echo "someone already register using this email";
}

$conn->close();
?><file_sep>/dbh.php
<?php
// AUTHOR: <NAME>
// Shared database connection: defines $conn for every page that includes
// this file. The credits now live inside the PHP block — the old HTML
// comment was emitted as output before anything else, which broke
// header() redirects on pages that include dbh.php directly
// (signup.php, addlisting.php).
$host = "localhost";
$dbUsername = "root";
$dbPassword = "";
$dbName = "GTLogins";

// Connect to the database; abort with the mysqli error on failure.
$conn = mysqli_connect($host, $dbUsername, $dbPassword, $dbName);
if (mysqli_connect_error()) {
    die('connect error('. mysqli_connect_errno(). ')'. mysqli_connect_error());
}
?><file_sep>/profile.php
<!--
AUTHORS: <NAME> | <NAME>
-->
<?php
// Profile page: lists every listing owned by the logged-in user.
// FIX: session_start() must run before any output, so it now precedes the
// header include (which emits HTML).
session_start();
include 'memberheader.php';
$seller = $_SESSION['username'];
?>
<!DOCTYPE html>
<html>
<head>
    <title>Profile</title>
</head>
<body>
<?php
// Escape the session-supplied name before echoing it into markup.
echo "<b><center>Welcome ".htmlspecialchars($seller)."! </b></center><br>";
?>
<p>All of Your Listings</p>
<table>
    <tr>
        <th>Item ID</th>
        <th>Price</th>
        <th>Item</th>
    </tr>
<?php
//ATTENTION**** Store item_id in user information
// SECURITY FIX: the seller name was concatenated straight into the SQL
// string; use a prepared statement instead.
$stmt = $conn->prepare("SELECT item_id, item_price, item_name, item_propose FROM Listings WHERE seller = ?");
$stmt->bind_param('s', $seller);
$stmt->execute();
$result = $stmt->get_result();
if($result->num_rows > 0)
{
    while($row = $result->fetch_assoc())
    {
        echo "<tr><td>". htmlspecialchars($row["item_id"]) ."</td><td>". htmlspecialchars($row["item_price"]) ."</td><td>". htmlspecialchars($row["item_name"])."</td></tr>";
    }
    // BUG FIX: a stray echo "</table>" here produced a duplicate closing
    // tag; the static </table> below already closes the table.
}
else
{echo "0 results";}
$stmt->close();
$conn->close();
?>
</table>
<footer>
    <div class="levelup">© 2020 Level Up, Inc.</div>
</footer>
</body>
</html><file_sep>/about.php
<!--
AUTHOR: <NAME>
-->
<?php
  // Pull in the shared header (doctype, stylesheets, DB connection).
  include 'header.php';
?>
<section class="about">
<div>How large is your game collection?</div>
<p>Have you ever wished you could get more than just a measly
trade-in credit at GameStop for something you once thought so valuable? Whether your old
games and systems are collecting dust in a closet, you want more than a $5 credit at GameStop,
or you just want to experiment and try something new; gamers need a platform where they can
get more out of their game collections.</p>
<p>Game Trader provides gamers an opportunity to connect with other gamers in their local
community, and setup trades that benefit both parties. Game Trader’s strength is its flexibility –
gamers may make whatever deal they want. Maybe you have no use for that old Nintendo in the
garage, but another gamer across town may really want it, and may be willing to trade some
newer games that you’ve been itching to buy. Or maybe you have simply finished a game, and
just want a new one. How many other gamers in your community are in the exact same situation,
but the game they finished is the next one you’re interested in? The possibilities are nearly
limitless, and gamers even have the option to trade miscellaneous collectibles as well.</p>
<p>Connecting gamers within their local communities is an important aspect of Game Trader. Most
gamers don’t want to have to deal with the costs and hassles of shipping, let alone receive a
damaged product. Like other services such as LetGo and Craigslist, Game Trader encourages
gamers to see the item in person before the trade is completed, and a rating system is available to
ensure positive experiences.</p>
</section>
<footer class="team">
<div class="levelup">© 2020 Level Up, Inc.</div>
</footer><file_sep>/login.php
<!--
AUTHOR: <NAME> | <NAME>
-->
<?php
// Login endpoint: checks the submitted credentials against the users table
// and, on success, marks the session as authenticated and redirects.
include 'dbh.php';
session_start();
if(isset($_POST['mailuid'])){
    //Setting up variables from the html form
    $uname=$_POST['mailuid'];
    $password=$_POST['pwd'];
    // SECURITY FIX: the username used to be concatenated straight into the
    // SQL string (SQL injection); use a prepared statement instead. A single
    // query also replaces the redundant second password lookup.
    $stmt = $conn->prepare("SELECT username, password FROM users WHERE username = ? LIMIT 1");
    $stmt->bind_param('s', $uname);
    $stmt->execute();
    $result = $stmt->get_result();
    if($result && $result->num_rows==1){
        $row = $result->fetch_assoc();
        //Unhashing, verifying password matches
        if(password_verify($password, $row['password']))
        {echo "You Have Successfully Logged in";
        // FIX: only store the username in the session after the password is
        // verified (it was previously stored before any check).
        $_SESSION['username']=$uname;
        $_SESSION["login"] = true; //Global boolean that sets login to true
        header("Location: member.php"); //redirects here
        //exit();
        }
        else
        {echo "password incorrect!";}
    }
    else{
        // BUG FIX: this branch means the username was not found; the original
        // message wrongly claimed an incorrect password.
        echo "Unknown username";
        exit();
    }
    $stmt->close();
}
?><file_sep>/search.php
<!--
AUTHOR: <NAME>
-->
<?php
// Search page: finds listings whose name, proposed trade or seller matches
// the query and renders each hit with a "More Info" modal.
// FIX: session_start() must run before any output, so the PHP bootstrap now
// precedes the opening <html> tag (it used to follow it).
include 'dbh.php';
session_start();
?>
<html>
<?php
  if(!empty($_SESSION['username']))
  {
    include 'memberheader.php';
  }
  else{
    include 'indexheader.php';
  }
?>
<div>
  <?php
    if(isset($_POST['submit-search'])){
      $search = mysqli_real_escape_string($conn, $_POST['search']);
      $sql = "SELECT * FROM Listings WHERE item_name LIKE '%$search%' OR item_propose LIKE '%$search%' OR
      seller LIKE '%$search%'";
      $result = mysqli_query($conn, $sql);
      $queryResult = mysqli_num_rows($result);
      // XSS FIX: escape user-controlled and DB-sourced text before echoing
      // it into HTML (it was previously emitted raw).
      $safeSearch = htmlspecialchars($search);
      echo "<p class='search-results'>Search for \"".$safeSearch."\" came back with ".$queryResult." results.</p>";
      if($queryResult > 0){
        while($row = mysqli_fetch_assoc($result)){
          $name = htmlspecialchars($row['item_name']);
          $price = htmlspecialchars($row['item_price']);
          $seller = htmlspecialchars($row['seller']);
          $desc = htmlspecialchars($row['item_desc']);
          $id = htmlspecialchars($row['item_id']);
          echo "<div class='userlisting'>
                  <h3>".$name."</h3>
                  <p class='price'>Price: $".$price."</p>
                  <p class='seller'>Seller: ".$seller."</p>
                  <footer class='info'>
                    <div class='w3-container'>
                      <button onclick=\"document.getElementById('id1".$id."').style.display='block'\" class='more-info button w3-button w3-blue-grey'>More Info</button>
                      <div id='id1".$id."' class='w3-modal'>
                        <div class='quick-look w3-modal-content w3-card-4 w3-animate-top' style='max-width:600px'>
                          <div class='w3-center'><br>
                            <span onclick=\"document.getElementById('id1".$id."').style.display='none'\" class='close w3-button w3-xlarge w3-hover-red w3-display-topright' title='Close Modal'>×</span>
                            <div class='w3-section'>
                              <ul>
                                <li><h3>".$name."</h3></li>
                                <li><p class='ql-desc'>Description: ".$desc."</p></li>
                                <li><h4 class='ql-price'>Price: $".$price."</h4></li>
                                <li><p class='ql-seller'>Seller: ".$seller."</p></li>
                              </ul>
                            </div>
                          </div>
                        </div>
                      </div>
                    </div>
                  </footer>
                </div>";
        }
      }else{
        echo "<p>No items found with your search!</p>";
      }
    }
  ?>
</div>
</html><file_sep>/README.md
# CS441-GameTrader
Game Trader is a website where users can trade or sell video games and trading items. This repository contains the website built by <NAME>, <NAME>, <NAME>, and <NAME>.
| 72400e11c32c1dc439a159e3b6bceb04231e6d34 | [
"Markdown",
"PHP"
] | 14 | PHP | crist014/CS441-GameTrader | a0cdb445c72b67d77bcc74cf51cd2cd481de5217 | cd1f9a294109d66184ae6cf2bb4735969180de10 |
refs/heads/master | <file_sep>var tools={
/**
* 通过id名获取元素节点对象
* @param id<string> 传入id名
* @return obj<DOM Object> 返回与id相对应的元素节点对象
*
* */
getEleObjById:function (id){
return document.getElementById(id);
},
/*
*通过标签名获取元素节点对象
* @param TagName<string> 字符串类型的标签名,eg:"div"
* @return HTMLCollection HTML的节点对象的集合,读取方式与数组相同
* -当集合中只有一个节点对象时,也需要使用索引进行读取
* */
getEleObjsByTagName:function (TagName){
return document.getElementsByTagName(TagName);
},
/*
*通过name属性来获取元素节点对象
*
*@param name<string> 字符串类型的name值
*@return HTMLCollection HTML的节点对象的集合,读取方式与数组相同
* -当集合中只有一个节点对象时,也需要使用索引进行读取
*
* */
getEleObjsByName:function (name){
return document.getElementsByName(name);
},
/*获取body(可视化窗口)的宽度和高度
* @return obj {width,height}
*
* */
getBody:function (){
var width=document.documentElement.clientWidth||document.body.clientWidth;
var height=document.documentElement.clientHeight||document.body.clientHeight;
return {"width":width,"height":height};
},
/*
*添加事件监听
* @param obj <DOM Object>
* @param type <string> 事件句柄(不加on)
* @param fn <function> obj的触发函数
* @param isCaption <boolean> 是否在捕获阶段开始触发,默认为false
*
* */
addListener:function (obj,type,fn,isCaption){
if(obj.addEventListener){
obj.addEventListener(type,fn,isCaption);
}else{//else if(window.attahEvent)
//如果此处写的是fn,那么this指代的就是window这个对象,因为attachEvent方法是window的
// obj.attachEvent("on"+type,fn);//兼容IE8及以下
//
obj.attachEvent("on"+type,function (){
fn.call(obj);
});
}
},
/*
*移除事件监听
* @param obj <DOM Object>
* @param type <string> 事件句柄(不加on)
* @param fn <function> obj的触发函数
* @param isCaption <boolean> 是否在捕获阶段开始触发,默认为false
*
* */
removeListener:function (obj,type,fn,isCaption){
if(obj.removeEventListener){
obj.removeEventListener(type,fn,isCaption);
}else{
obj.detachEvent(type,fn);//兼容8及以下浏览器
}
},
/*
*滚轮事件兼容
* @param obj<DOM Object> 添加滚轮事件的对象
* @param fn<function> obj的响应函数,该函数带有一个boolean类型的参数,true表示向下,false表示向上
*
*
* */
scroll:function (obj,fn){
if(obj.onmousewheel!==undefined){
obj.onmousewheel=function (e){
e=e||event;
if(e.wheelDelta<0){
fn(true);//调用函数fn,并给它传入一个实参
}else{
fn(false);
}
};
}else{
obj.addEventListener("DOMMouseScroll",function (e){
e=e||event;
if(e.detail>0){
fn(true);//调用函数fn,并给它传入一个实参
}else{
fn(false);
}
});
}
},
/*
*获取当前正在显示的对象的某个属性的属性值
* @param obj<DOM Object> DOM对象
* @param attr<string> 需要获取属性值的属性
*
* @return value<string> 返回与attr对应的属性值
* */
getStyle:function (obj,attr){
if(window.getComputedStyle){
return getComputedStyle(obj,null)[attr];
}else{
return obj.currentStyle[attr];
}
},
/*
*元素匀速运动到指定位置
* @param obj<DOM Object> 运动的DOM对象
* @param attribute<string> 需要运动的DOM对象的属性
* @param end<number> 运动结束位置
* @param time<number> 运动从开始位置到结束位置所需的时间,单位:毫秒
*
* */
linearMove:function (obj,attr,end,time){
//获取运动开始的位置
var start=parseInt(this.getStyle(obj,attr));
//获取运动的总距离
var distance=end - start;//number类型
//获取一共要运动多少步(每30毫秒为一步)
var steps=parseInt(time / 30);
//获取速度。也就是每一步运动多远的距离
var speed= distance / steps;
clearInterval(obj.timer);
obj.timer=setInterval(function (){
start+=speed;//每调用一次前进的距离
// console.log(start,end);
if(Math.abs(end-start)<=Math.abs(speed)){//用开始与结束的差值去与每30毫秒运动的距离进行判断,如果差值小于了speed,证明马上到终点了
clearInterval(obj.timer);
start=end;
}
obj.style[attr] = start + "px";
// console.log(1);
},30);
},
/*
*设置元素在窗口一直居中显示
* @param obj<DOM Object> 要居中的元素对象
*
* */
showCenter : function (obj){
var _this=this;
//不知道传入的obj的状态是显示还是隐藏,因此先置为显示状态
obj.style.display="block";
//不确定obj是否含有定位属性
obj.style.position="absolute";
function center(){
/* //获取该元素的宽度和高度
var obj_width=obj.offsetWidth;
var obj_height=obj.offsetHeight;
//获取窗口的宽度和高度
var width=_this.getBody().width;
var height=_this.getBody().height;*/
//
obj.style.top=(_this.getBody().height - obj.offsetHeight) / 2 +"px";
obj.style.left=(_this.getBody().width - obj.offsetWidth) / 2 +"px";
}
center();
//窗口每变化一次,就获取一次窗口的值
window.onresize=center;
},
};
| 8e154703f1e21816f25b11a6249b789a698e1522 | [
"JavaScript"
] | 1 | JavaScript | zFei-runner/H5-1902- | 412bcedce9f246e445977471136f34754fbf9777 | 38e3ce6bf85fd46e69ea22244997857fa9ce176b |
refs/heads/master | <repo_name>jackmalta/eloquent<file_sep>/app/Http/Controllers/LivroController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Livro;
class LivroController extends Controller
{
//
public function index()
{
$livros = Livro::all();
return view('editora.livros', compact('livros'));
}
}
| 9d81f8a6cef70eeced9135fbef9a63b2758ad5c5 | [
"PHP"
] | 1 | PHP | jackmalta/eloquent | 549595d60ac1245928a4ca2f98e96ca9d13405e0 | cb6e11bf76188a1a8653e5777af36e4a7e9bef9c |
refs/heads/master | <file_sep>leaflet-event-controls Changelog
=================
## 0.1.1 (2016-03-18)
Valid package.json
## 0.1.0 (2016-03-18)
Project start. Initial commits.<file_sep>'use strict';
describe('L.EventControls', function() {
let L, map, control1, control2;
beforeEach(() => {
L = require('leaflet')
L.EventControls = require('../src/L.EventControls.js');
map = {
on: jasmine.createSpy(),
off: jasmine.createSpy(),
addControl: jasmine.createSpy().and.callFake(function(control) {
control._map = map;
}),
removeControl: jasmine.createSpy(),
invalidateSize: jasmine.createSpy(),
};
control1 = {
options: {}
};
control2 = {
options: {}
};
});
it('Basic test. Adding control with remove/add event', function() {
var ec = L.eventControls();
//Start with _controls set to []
expect(ec._controls.length).toBe(0);
//Should return this
expect(ec.addTo(map)).toBe(ec);
//String based events
ec.addTo(map).addControl(control1, 'add', 'remove');
ec.addTo(map).addControl(control2, 'add_second', 'remove_second');
//Add it
expect(map.on).toHaveBeenCalled();
expect(map.on).toHaveBeenCalledWith('add', ec._controls[0].addLayerOnEvent);
expect(map.on).toHaveBeenCalledWith('remove', ec._controls[0].removeLayerOnEvent);
var event = {
'fake': 'event'
};
//Trigger the on add event
ec._controls[0].addLayerOnEvent(event);
expect(control1.options.$event).toBe(event);
expect(map.addControl).toHaveBeenCalled();
expect(map.addControl).toHaveBeenCalledWith(control1);
expect(map.invalidateSize).toHaveBeenCalled();
//Trigger the on remove event for first ctrl
ec._controls[0].removeLayerOnEvent(event);
expect(map.removeControl).toHaveBeenCalled();
expect(map.removeControl).toHaveBeenCalledWith(control1);
expect(map.invalidateSize).toHaveBeenCalled();
//Remove from map. Tear down
expect(ec.removeFrom(map)).toBe(ec);
expect(ec._map).toBe(undefined);
});
it('Adding control with multiple remove/add events', function() {
var ec = L.eventControls();
//Array based events
ec.addTo(map).addControl(control1, ['add', 'foo'], ['remove', 'bar']);
//Add it
expect(map.on).toHaveBeenCalled();
expect(map.on).toHaveBeenCalledWith('add', ec._controls[0].addLayerOnEvent);
expect(map.on).toHaveBeenCalledWith('foo', ec._controls[0].addLayerOnEvent);
expect(map.on).toHaveBeenCalledWith('remove', ec._controls[0].removeLayerOnEvent);
expect(map.on).toHaveBeenCalledWith('bar', ec._controls[0].removeLayerOnEvent);
// Remove it
expect(ec._controls.length).toBe(1);
ec.removeControl(control1);
expect(map.off).toHaveBeenCalled();
/*
Have to use jasmine.any(Function), as addLayerOnEvent and removeLayerOnEvent
are gone by the time we have a chance to expect
*/
expect(map.off).toHaveBeenCalledWith('add', jasmine.any(Function));
expect(map.off).toHaveBeenCalledWith('foo', jasmine.any(Function));
expect(map.off).toHaveBeenCalledWith('remove', jasmine.any(Function));
expect(map.off).toHaveBeenCalledWith('bar', jasmine.any(Function));
expect(ec._controls.length).toBe(0);
});
});<file_sep># Leaflet Event Controls
Dynamically add and remove controls from leaflet via leaflet events. | 32768bba9e9a54546ac717e3f167a5616fc46bda | [
"Markdown",
"JavaScript"
] | 3 | Markdown | grantHarris/leaflet-event-controls | 9c8ef9d4da056e02363a297a44b567d56ebdde8b | b32106eec7daed01baf249a09adf7c031afd66bd |
refs/heads/master | <file_sep>import pandas as pd
import numpy as np
import cv2
import os
from shutil import copy
from PIL import Image
import cv2
# Load the Kaggle labels file: one row per training image with its whale Id.
mappings = pd.read_csv("train.csv")
# One-hot encode the whale Ids; its columns double as the class list below.
one_hot_dummies = pd.get_dummies(mappings['Id'])
images =list(mappings['Image'])
Id = list(mappings['Id'])
classes = list(one_hot_dummies.columns)
##
##ids = list(mappings['Image'])
##
##print(one_hot_dummies)
def create_dict_mappings(images,Id):
    """Map each image filename to its whale-Id label, index-aligned."""
    # Pair every image with the label at the same position (an Id list that
    # is shorter than images raises IndexError, exactly as before).
    mapping = {}
    for position, image_name in enumerate(images):
        mapping[image_name] = Id[position]
    return mapping
map_dict = create_dict_mappings(images,Id)
def create_train_path():
    """Ensure the ./data and ./data/train directory skeleton exists."""
    # Create parent first, then the nested train folder, skipping any that
    # are already present.
    for target in ('./data', './data/train'):
        if not os.path.exists(target):
            os.makedirs(target)
def create_training_folders(class_list=None):
    """Create one ./data/train/<class> directory per whale class.

    class_list: iterable of class names; defaults to the module-level
    `classes` list loaded from train.csv (backward compatible with the
    original zero-argument call).
    """
    if class_list is None:
        class_list = classes
    for class_i in class_list:
        newpath = './data/train/{}'.format(class_i)
        if not os.path.exists(newpath):
            os.makedirs(newpath)
def add_images_to_training_folders():
    """Copy each training image into ./data/train/<whale_id>/.

    BUG FIX: a stray bare `ad` statement raised NameError the moment this
    function was called; it has been removed, along with the unused
    `src_dir` local.
    NOTE(review): paths are hard-coded to one machine (see README step 2);
    parameterize them if this script is reused.
    """
    dest_dir = 'C:\\Users\\demon\\Desktop\\Whale Dataset\\data\\train\\{}\\'
    os.chdir(r'C:\Users\demon\Desktop\Whale Dataset\train')
    map_dict = create_dict_mappings(images,Id)
    for image in map_dict:
        print("Copying :",image,"Folder :",dest_dir.format(map_dict[image]))
        copy(image,
             dest_dir.format(map_dict[image]))
        print("Moving to approprate folder")
<file_sep>from keras.models import Sequential
from keras.layers import Dense,Conv2D,MaxPooling2D,Dropout,Flatten
def create_model(n_out_classes):
    """Build a small CNN classifier for 256x256 RGB images.

    n_out_classes: number of output classes (softmax units).
    Returns an uncompiled keras Sequential model; the caller compiles it.
    """
    model = Sequential()
    ##Conv2d Layer
    model.add(Conv2D(32,(3,3),activation='relu',input_shape=(256,256,3)))
    ##3x3 pooling
    model.add(MaxPooling2D(pool_size=(3,3)))
    model.add(Conv2D(64,(3,3),activation='relu'))
    model.add(Dropout(0.25))
    model.add(Flatten())
    model.add(Dense(n_out_classes,activation='softmax'))
    return model
<file_sep>from Keras_Model import create_model
import numpy as np
from keras import optimizers
from keras.preprocessing.image import ImageDataGenerator
from keras.callbacks import ModelCheckpoint,LearningRateScheduler, TensorBoard, EarlyStopping
##model = create_model(len(data1[1][1]))
##compile and fit model on dataset
##model.compile()
# Training configuration. NOTE(review): batch_size and epochs are assigned
# twice below (8/1 then 8/2); only the later values take effect - the first
# pair is dead and should probably be deleted.
WIDTH,HEIGHT = 256,256
train_data_dir = './data/train'
batch_size = 8
epochs = 1
N_CLASSES = 4251
nb_train_samples = 4125
nb_validation_samples = 466
batch_size = 8
epochs = 2
# Build and compile the CNN defined in Keras_Model.py.
model = create_model(N_CLASSES)
model.compile(loss = "categorical_crossentropy",
              optimizer = optimizers.SGD(lr=0.001,momentum = 0.9),
              metrics =["accuracy"] )
# Augmented image pipeline reading class-per-folder data from ./data/train.
train_datagen = ImageDataGenerator(rescale = 1./255,
                                   horizontal_flip = True,
                                   fill_mode = "nearest",
                                   zoom_range = 0.3,
                                   width_shift_range = 0.3,
                                   height_shift_range=0.3,
                                   rotation_range=30)
train_generator = train_datagen.flow_from_directory(train_data_dir,
                                                    target_size = (WIDTH, HEIGHT),
                                                    batch_size = batch_size,
                                                    class_mode = "categorical")
# Callbacks: checkpoint best model, stop early on stalled val_acc, log to TB.
# NOTE(review): these monitor val_acc, but no validation data is passed to
# fit_generator below - confirm a validation generator is intended.
checkpoint = ModelCheckpoint("whale_checkpoint.h5",
                             monitor='val_acc',
                             verbose=1, save_best_only=True,
                             save_weights_only=False,
                             mode='auto', period=1)
early = EarlyStopping(monitor='val_acc',
                      min_delta=0,
                      patience=10,
                      verbose=1,
                      mode='auto')
board = TensorBoard(log_dir="logs/logs",
                    write_graph=True,
                    batch_size=batch_size)
model.fit_generator(train_generator,
                    samples_per_epoch = nb_train_samples,
                    epochs = epochs,
                    nb_val_samples = nb_validation_samples,
                    callbacks = [checkpoint, early, board])
# Persist the final trained model.
model.save("Whale_model_1.h5")
<file_sep># Kaggle-Whale-Dataset
Simple Preprocessing with a keras models on the whale dataset on kaggle https://www.kaggle.com/c/whale-categorization-playground
Step 1:Download the Humpback whale dataset and extract to the desired location
Step 2:Edit the location of extracted dataset in the add_images_to training_folders() in src_dir and edit the dest_dir accordingly
Step 3:edit CreateTrainData.py to run create_train_path(),create_training_folders(),add_images_to_training_folders() (in that order)
Step 4:Run CreateTrainDataset.py and wait till the files have been added to the approprate folders
| 78c63f03eac19ece218d5ea38ffed5b84bbe25cd | [
"Markdown",
"Python"
] | 4 | Python | ajprabhu09/Kaggle-Whale-Dataset | c1e349810585b532d3fda6efe447764e362dc76e | 0e417981b114ea003f76049b808844de2e3b5018 |
refs/heads/master | <repo_name>asingh4/SimplexMart<file_sep>/src/app/login/login-auth/login-service.service.ts
import { Injectable } from '@angular/core';
import { Http } from '@angular/http';
import { Observable } from 'rxjs/Observable';
import 'rxjs/add/operator/map';
@Injectable()
export class LoginServiceService {
constructor(private http : Http) {
}
loginService(username,password){
return this.http.post('http://localhost:80/loginCheck/login.php',{"username": username,"password":<PASSWORD>}).map(
res =>{
const data = res.json();
console.log(data);
return data;
}
);
}
}
<file_sep>/src/app/app.routing.ts
import { Routes } from '@angular/router';
import { LoginComponent } from '../app/login/login.component';
import { AppComponent } from './app.component';
// Application route table: successful logins land on AppComponent, failed
// attempts return to the LoginComponent.
export const route : Routes = [
    {path : "homepage",component : AppComponent},
    {path : "failed",component : LoginComponent}
];
import { Component, OnInit } from '@angular/core';
import { LoginServiceService } from './login-auth/login-service.service';
import { Router } from '@angular/router';
@Component({
selector: 'app-login',
templateUrl: './login.component.html',
styleUrls: ['./login.component.css'],
providers : [LoginServiceService]
})
export class LoginComponent implements OnInit {
  // Two-way bound form fields.
  username = '';
  password = '';
  // Raw response from the login service; presumably an array whose first
  // element carries status_code (see check below) - TODO confirm API shape.
  status ;
  // Message surfaced to the template.
  errorMessage = '';
  constructor(private lss : LoginServiceService,private router : Router) { }
  ngOnInit() {
  }
  /** Submits the entered credentials and routes to the homepage on success. */
  invokeLoginProc(){
    this.lss.loginService(this.username,this.password).subscribe(
      res => {
        this.status = res;
        // status_code 2 signals bad credentials per the backend contract.
        if(this.status && this.status[0].status_code == 2){
          this.errorMessage = 'Incorrect Credentials';
        }else{
          this.errorMessage = 'Verified';
          this.router.navigate(['homepage']);
        }
      }
    );
  }
}
| 311222134ab6943beeac9cf60927db4faf983a70 | [
"TypeScript"
] | 3 | TypeScript | asingh4/SimplexMart | 065f1028e84ff0eb51b1c76b5e06397e974ef5c2 | 0bcbdbbe496071d85b0c9dc5888a4daf6db8db48 |
refs/heads/master | <repo_name>Janx1913/Intelligent-Trash-Can---SSP-B378b<file_sep>/data.cpp
#include "data.h"
// Simple (time, value) sample pair; members declared in data.h.
data::data()
{
}
// Returns the stored timestamp.
double data::getTime(){
    return time;
}
// Returns the stored measurement value.
double data::getValue(){
    return value;
}
// NOTE(review): the setters accept int while the members are double (per
// data.h), so fractional inputs cannot be stored - confirm this is intended.
void data::setTime(int timeInput){
    time = timeInput;
}
void data::setValue(int valueInput){
    value = valueInput;
}
<file_sep>/location.h
#ifndef LOCATION_H
#define LOCATION_H
// A geographic point; base class for trashCan (see trashcan.h).
// NOTE(review): "longtitude" is a misspelling of "longitude" baked into the
// public interface; renaming would touch every caller.
class location
{
public:
    location();
    double getLatitude();
    double getLongtitude();
    // Stores both coordinates at once.
    void setLocation(double latitudeInput, double longtitudeInput);
private:
    double latitude;
    double longtitude;
};
#endif // LOCATION_H
<file_sep>/data.h
#ifndef DATA_H
#define DATA_H
// A single (time, value) sample.
// NOTE(review): setters take int while the members are double, so fractional
// inputs are truncated by callers before reaching this class - confirm.
class data
{
public:
    data();
    double getTime();
    double getValue();
    void setTime(int timeInput);
    void setValue(int valueInput);
private:
    double time;
    double value;
};
#endif // DATA_H
<file_sep>/location.cpp
#include "location.h"
#include <qdebug.h>
// Geographic point; members declared in location.h.
location::location()
{
}
// Stores both coordinates at once.
void location::setLocation(double latitudeInput, double longtitudeInput){
    latitude = latitudeInput;
    longtitude = longtitudeInput;
}
// NOTE(review): both getters emit a qDebug() line as a side effect on every
// read - presumably leftover debugging; confirm before shipping.
double location::getLatitude(){
    qDebug() << latitude;
    return latitude;
}
double location::getLongtitude(){
    qDebug() << longtitude;
    return longtitude;
}
<file_sep>/main.cpp
#include "mainwindow.h"
#include <QApplication>
#include <list>
#include "trashcan.h"
#include "location.h"
// Entry point: starts the Qt application, shows the main window, builds a
// few demo locations/trash cans and dumps their coordinates via the
// qDebug() calls inside the getters.
int main(int argc, char *argv[])
{
    //Starting a Qapp, with a MainWindow object, called w
    QApplication a(argc, argv);
    MainWindow w;
    //Set name of window and the size of it
    w.setWindowTitle("serialTest");
    w.setFixedSize(400,250);
    w.show();
    trashCan trash1, trash2;
    location loc1;
    loc1.setLocation(570303, 954513);
    trash1.setLocation(570300, 954500);
    trash2.setLocation(550300, 924500);
    // NOTE(review): storing trashCan objects by value in a list<location>
    // slices off the derived part (the `uses` counter); only the location
    // sub-object is copied in.
    std::list<location> locations; // holds all locations, including trashCans
    std::list<trashCan> trashCans; // holds only trashCans
    locations.push_back(loc1);
    locations.push_back(trash1);
    locations.push_back(trash2);
    trashCans.push_back(trash1);
    // Print every stored coordinate (getters log through qDebug()).
    for (std::list<location>::iterator it = locations.begin(); it != locations.end(); ++it){
        it->getLatitude();
        it->getLongtitude();
    }
    return a.exec();
}
<file_sep>/trashcan.h
#ifndef TRASHCAN_H
#define TRASHCAN_H
#include "location.h"
// A location that additionally tracks how many times it was used.
class trashCan : public location {
public:
    trashCan();
    // Number of recorded uses of this trash can.
    int getUses();
private:
    int uses;
};
#endif // TRASHCAN_H
<file_sep>/trashcan.cpp
#include "trashcan.h"
// BUG FIX: `uses` was never initialized, so getUses() returned an
// indeterminate value; start the counter at zero.
trashCan::trashCan() : uses(0)
{
}
// Returns the number of recorded uses of this trash can.
int trashCan::getUses(){
    return uses;
}
| ed25f374c4adba62f8f6fb0b899c6eef43fdb9ff | [
"C++"
] | 7 | C++ | Janx1913/Intelligent-Trash-Can---SSP-B378b | 9abcade28b6f2beffed68917003ff2d110471e18 | fad7339543632179406718e440e74f3b6a018fc3 |
refs/heads/master | <file_sep>"use strict";
// Droid shop form: reads a quantity, charges the balance and shows the result.
const textInputRef = document.querySelector(".user-input-form__input");
const buttonRef = document.querySelector(".user-input-form__submit-button");
const resultOutputBlock = document.querySelector(".result");
let credits = 23580;
const pricePerDroid = 3000;
let totalPrice;
let message = "Вы не можете купить 0 дроидов повторите попитку снова!";
const userBalanceBlock = document.querySelector(".user-balance-output");
const priceBlock = document.querySelector(".price-output");
// Initial render of balance and unit price.
userBalanceBlock.innerHTML = `${credits}`;
priceBlock.innerHTML = `${pricePerDroid}`;
// BUG FIX: the handler previously took no parameter and relied on the
// implicit global `event` (non-standard; breaks outside Chrome/IE). Accept
// the event object explicitly.
buttonRef.addEventListener("click", function(event) {
  event.preventDefault();
  let quantityPerDroid = textInputRef.value;
  let quantityPerDroidConvert = Number(quantityPerDroid);
  const isNaNinput = Number.isNaN(quantityPerDroidConvert);
  if (quantityPerDroidConvert !== 0 && quantityPerDroid !== null) {
    totalPrice = quantityPerDroidConvert * pricePerDroid;
    if (totalPrice > credits) {
      message = "Недостаточно средств на счету!";
    } else if (isNaNinput === true) {
      message = "Вы ввели некоректное значение!";
    } else {
      credits -= totalPrice;
      message = `Вы купили ${quantityPerDroidConvert} дроидов, на счету осталось ${credits} кредитов.`;
    }
    resultOutputBlock.innerHTML = `${message}`;
    resultOutputBlock.style.display = "block";
  } else {
    // Zero quantity: show the standing "cannot buy 0 droids" message.
    resultOutputBlock.innerHTML = `${message}`;
    resultOutputBlock.style.display = "block";
  }
  userBalanceBlock.innerHTML = `${credits}`;
});
<file_sep>"use strict";
// Ask the user for a country and alert the matching delivery price.
let userCountry = prompt("укажите с какой вы страны?");
const priceDeliveryChina = 100;
const priceDeliveryChile = 250;
const priceDeliveryAustralia = 170;
const priceDeliveryIndia = 80;
const priceDeliveryJamaica = 120;
let message;
if (userCountry !== null) {
  // Lookup table replaces the switch: one ready-made message per country.
  const deliveryMessages = {
    "китай": `Доставка в Китай будет стоить ${priceDeliveryChina} кредитов`,
    "чили": `Доставка в Чили будет стоить ${priceDeliveryChile} кредитов`,
    "австралия": `Доставка в Австралию будет стоить ${priceDeliveryAustralia} кредитов`,
    "индия": `Доставка в Индию будет стоить ${priceDeliveryIndia} кредитов`,
    "ямайка": `Доставка в Ямайку будет стоить ${priceDeliveryJamaica} кредитов`,
  };
  // Unknown countries fall through to the "no delivery" message, exactly
  // like the former default: branch.
  message =
    deliveryMessages[userCountry.toLowerCase()] ||
    `В вашей стране доставка не доступна`;
} else {
  message = "пользователь передумал заказывать доставку";
}
alert(message);
<file_sep>"use strict";
// Prompt-driven droid purchase: asks for a quantity and charges the balance.
let credits = 23580;
const pricePerDroid = 3000;
let totalPrice;
let quantityPerDroid = prompt("сколько дроидов вы хотите купить");
let quantityPerDroidConvert = Number(quantityPerDroid);
let message;
if (quantityPerDroid === null) {
  message = "Отменено пользователем!";
} else if (Number.isNaN(quantityPerDroidConvert)) {
  // BUG FIX: non-numeric input produced NaN, and `credits -= NaN` corrupted
  // the balance to NaN. Reject it up front (same message the DOM version of
  // this script uses).
  message = "Вы ввели некоректное значение!";
} else {
  if (quantityPerDroidConvert !== 0) {
    totalPrice = quantityPerDroidConvert * pricePerDroid;
    if (totalPrice > credits) {
      message = "Недостаточно средств на счету!";
    } else {
      credits -= totalPrice;
      message = `Вы купили ${quantityPerDroidConvert} дроидов, на счету осталось ${credits} кредитов.`;
    }
  } else {
    message = "Вы не можете купить 0 дроидов повторите попитку снова!";
  }
}
alert(message);
| 17a632d34a4c84a07c153610fb6ae9e05c753213 | [
"JavaScript"
] | 3 | JavaScript | ArturStahov/goit-js-hw-01 | bbbb42203a366ecd143dc87a393c2000a6e6dd91 | becc4f28d17c5f25a3b8f519c7892610833866b9 |
refs/heads/master | <file_sep>server.port=8081
spring.datasource.url=jdbc:postgresql://localhost:5432/maxima
spring.datasource.username=postgres
spring.datasource.password=<PASSWORD>
spring.jpa.show-sql=true
spring.jpa.properties.hibernate.format_sql=true
spring.jpa.hibernate.use-new-id-generator-mappings=true
spring.jpa.hibernate.ddl-auto=update
<file_sep>package com.maximatech.app.service;
import com.maximatech.app.entity.Department;
import java.util.List;
public interface DepartmentService {
    /** @return every department in the table. */
    List<Department> findAll();
}
<file_sep>package com.maximatech.app.repository;
import com.maximatech.app.entity.Department;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.stereotype.Repository;
import java.util.List;
@Repository
public interface DepartmentRepository extends JpaRepository<Department, Long> {
    /**
     * Native lookup of departments matching both the given uuid and code;
     * presumably callers test the result for non-emptiness as an existence
     * check - confirm against call sites.
     */
    @Query(value = "SELECT * FROM DEPARTMENT WHERE UUID = ?1 AND CODE = ?2", nativeQuery = true)
    List<Department> existByIdUUIDAndCode(String id, String code);
}
<file_sep># Tecnologias utilizadas
Neste projeto utilizei o Java na versão 11, spring na v2.4.1, e postgres na 13.1-1.
# Ajustando o Lombok
-Utilizei o Lombok para maior produtividade e clareza no código, é necessário configurar a sua ide, se for IntelliJ, será automático.<br>
Para caso use o eclipse vou disponibilizar um guia rápido de instalação:<br>
https://howtodoinjava.com/automation/lombok-eclipse-installation-examples/
# Ajustando a base de dados
-Antes de iniciar o projeto é necessário criar uma base de dados Postgres com o nome de "maxima" na porta 5432.<br>
espera-se:
usuário:postgres
senha:<PASSWORD>
-Mas caso precise alterar algumas dessa informações o mesmo pode ser feito no arquivo <br>
< application.properties > localizado em: < app\app\src\main\resources ><br>
# Observações
*A job está rodando a cada 1 minuto.<br>
*A aplicação está rodando na porta 8081
# No mais é isso, qualquer dúvida fale comigo!
whatsapp: (62) 99530-8186 <br>
email: <EMAIL>
<file_sep>package com.maximatech.app.service.impl;
import com.maximatech.app.entity.Product;
import com.maximatech.app.entity.dto.ProductFilterDTO;
import com.maximatech.app.repository.ProductRepository;
import com.maximatech.app.service.ProductService;
import org.springframework.stereotype.Service;
import javax.transaction.Transactional;
import java.util.List;
@Service
@Transactional
public class ProductServiceImpl implements ProductService {
private ProductRepository productRepository;
public ProductServiceImpl(ProductRepository productRepository) {
this.productRepository = productRepository;
}
@Override
public List<Product> findAll() {
return this.productRepository.findAllEnabled();
}
@Override
public Product getOne(Long id) {
return this.productRepository.getOne(id);
}
@Override
public List<Product> findByFilter(ProductFilterDTO productFilterDTO) {
return this.productRepository.findByFilter(
productFilterDTO.getCode(), productFilterDTO.getDescription(),
productFilterDTO.getDepartmentId()
);
}
@Override
public Product save(Product product) {
return this.productRepository.save(product);
}
@Override
public void delete(Long productId) {
this.productRepository.deleteById(productId);
}
}
<file_sep>package com.maximatech.app.controller;
import com.maximatech.app.entity.Department;
import com.maximatech.app.service.DepartmentService;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
@RestController
@RequestMapping(value = DepartmentController.PATH)
public class DepartmentController {
static final String PATH = "department";
private DepartmentService departmentService;
public DepartmentController(DepartmentService departmentService) {
this.departmentService = departmentService;
}
@CrossOrigin(origins = "http://localhost:4200")
@GetMapping("find-all")
List<Department> findAll() {
return this.departmentService.findAll();
}
}
| c44cd7365b6fecb3e64462c1e7be5d94eee8f781 | [
"Markdown",
"Java",
"INI"
] | 6 | INI | decodeme1/crud-java-angular-app | 387b698fe0326594bff6238ea13575f99252f22b | b0c1e6a88f13593f9fcb256624f75e29d69ef96e |
refs/heads/master | <repo_name>jeancha616/sparta<file_sep>/homework/app.py
from flask import Flask, render_template, jsonify, request
app = Flask(__name__)
from pymongo import MongoClient # import pymongo (install the package first)
client = MongoClient('localhost', 27017) # mongoDB listens on port 27017
db = client.dbsparta # use a database named 'dbsparta'
## Part that serves the HTML
@app.route('/')
def home():
   return render_template('index.html')
## Part that acts as the API
@app.route('/orders', methods=['POST'])
def order():
    #1 Read the orderer's name/quantity/address/phone number sent by the client
    name_receive = request.form['name_give']
    quantity_receive = request.form['quantity_give']
    adress_receive = request.form['adress_give']
    contact_receive = request.form['contact_give']
    #2 Insert the order into the DB
    doc = {
        'name': name_receive,
        'quantity': quantity_receive,
        'adress': adress_receive,
        'contact': contact_receive
    }
    db.order.insert_one(doc)
    #3 Return success status & message
    return jsonify({'result':'success', 'msg': '주문이 완료되었습니다'})
@app.route('/orders', methods=['GET'])
def show_orders():
    #1 Fetch every order from the DB
    # (the _id field is excluded from the projection)
    orders = list(db.order.find({}, {'_id':False}))
    #2 Return success status & the order list
    # the orders are exposed under the 'all_order' key
    return jsonify({'result':'success', 'all_order': orders})
if __name__ == '__main__':
    app.run('0.0.0.0', port=5000, debug=True)
#!/bin/bash
# Stop any running app.py instance. BUG FIX: grep '[a]pp.py' cannot match the
# grep command's own process entry, so the kill no longer targets (or trips
# over) the grep itself.
kill -9 `ps -ef | grep '[a]pp.py' | awk '{print $2}'`
# give the old process time to die before relaunching
sleep 3
# relaunch detached from the terminal
nohup python /home/ubuntu/myproject/app.py &
<file_sep>/youtube_monitor/video.py
import json
import requests
import comment
url = "https://www.googleapis.com/youtube/v3/videos?key=<KEY>&id=gaGsZD42PhM&part=snippet"
data = requests.get(url)
json_data = json.loads(data.text) # json_data 를 파싱
thumbnail = json_data['items'][0]['snippet']['thumbnails']['high']['url']
title = json_data['items'][0]['snippet']['title']
date = json_data['items'][0]['snippet']['publishedAt']
print(thumbnail)
print(title)
print(date)
# url = 'https://www.googleapis.com/youtube/v3/search?key=<KEY>&channelId=UC8HNshpReWjQv1WpwzhPHjA&part=snippet&maxResults=1&order=date'
# data = requests.get(url)
# json_data = json.loads(data.text) # json_data 를 파싱
# # print (json_data)
# for i in json_data['items']:
# if i['id']:
# if 'videoId' in i['id'].keys():
# thumbnail = i['snippet']['thumbnails']['default']['url']
# title = i['snippet']['title']
# date = i['snippet']['publishedAt']
# link = 'https://www.youtube.com/watch?v=' + i['id']['videoId']
# comment = comment.commentExtract(i['id']['videoId'])
# print(thumbnail)
# print(title)
# print(date)
# print(comment)
<file_sep>/etc/videos.py
import json
import requests
import comment
url = 'https://www.googleapis.com/youtube/v3/search?key=<KEY>&channelId=UC8HNshpReWjQv1WpwzhPHjA&part=snippet&maxResults=1&order=date'
data = requests.get(url)
json_data = json.loads(data.text) # json 데이터를 파싱
print (json_data)
for i in json_data['items']:
if i['id']:
if 'videoId' in i['id'].keys():
print(i)
print ('https://www.youtube.com/watch?v=' + i['id']['videoId'][])
# print (comment.commentExtract(i['id']['videoId']))<file_sep>/myproject/prac/app.py
from flask import Flask, render_template, jsonify, request
app = Flask(__name__)
# jsonify: 제이슨으로 만들어준다 request: 변수를 받아온다 (requests 와 request는 다름)
# flask 에서 남들이 미리 개발해놓은 친구들을 가져와서 쓴다
## HTML을 주는 부분
@app.route('/')
def home():
return render_template('index.html')
## API 역할을 하는 부분 #api는 '/test'
@app.route('/test', methods=['POST'])
def test_post():
title_receive = request.form['title_give']
name_receive = request.form['name_give'] #name_give로 보내주었으니, 여기서 name_receive로 받는다
print(title_receive, name_receive)
return jsonify({'result':'success', 'msg': '이 요청은 POST!'})
@app.route('/test', methods=['GET'])
def test_get():
title_receive = request.args.get('title_give')
name_receive = request.args.get['name_give']
print(title_receive, name_receive)
return jsonify({'result':'success', 'msg': '이 요청은 GET!'})
# jsonify를 통해 글자를 json형식으로 만들어 주었답니다
if __name__ == '__main__':
app.run('0.0.0.0',port=5000,debug=True)
<file_sep>/etc/3prac/3prac4.py
from pymongo import MongoClient
client = MongoClient('localhost', 27017)
db = client.dbsparta
## 코딩 할 준비 ##
movie = db.movies.find_one({'title':'매트릭스'})
matrix_star = movie['star']
# print(movie['star'])
matrix_list = list(db.movies.find({'star':matrix_star}))
#star가 matrix_star와 같은 것을 찾아서 리스트에 넣어주고, 그 리스트의 이름을 matrix_list라고 한다
for ml in matrix_list:
db.movies.update_one({'title':ml['title']}, {'$set':{'star':0}})
print(ml['title']) #제목의 키는 'title'이니까,
#키값을 가져올 때, [ ]
<file_sep>/youtube_monitor/url_split.py
url = "https://www.youtube.com/channel/UC8HNshpReWjQv1WpwzhPHjA"
split_url = url.split('/')
print (split_url[-1])<file_sep>/youtube_monitor/video_channel.py
import json
import requests
import comment
url = 'https://www.googleapis.com/youtube/v3/search?key=<KEY>&channelId=UC8HNshpReWjQv1WpwzhPHjA&part=snippet&maxResults=1&order=date'
data = requests.get(url)
json_data = json.loads(data.text) # json_data 를 파싱
# print (json_data)
for i in json_data['items']:
if i['id']:
if 'videoId' in i['id'].keys():
thumbnail = i['snippet']['thumbnails']['default']['url']
title = i['snippet']['title']
date = i['snippet']['publishedAt']
link = 'https://www.youtube.com/watch?v=' + i['id']['videoId']
comment = comment.commentExtract(i['id']['videoId'])
print(thumbnail)
print(title)
print(date)
print(comment)
<file_sep>/youtube_monitor/app.py
from flask import Flask, render_template, jsonify, request
app = Flask(__name__)
from pymongo import MongoClient # pymongo를 임포트 하기(패키지 인스톨 먼저 해야겠죠?)
client = MongoClient('mongodb://cyjid:cyjpw@localhost', 27017) # mongoDB는 27017 포트로 돌아갑니다.
db = client.dbsparta # 'dbsparta'라는 이름의 db를 만듭니다.
import json
import requests
import comment
import time
## HTML을 주는 부분
@app.route('/')
def home():
return render_template('index.html')
## API 역할을 하는 부분
@app.route('/monitor', methods=['GET'])
def listing():
# 1. 모든 모니터정보 찾기 / 이 때 모니터정보가 불려오는 곳은 db.monitors
all_monitors = list(db.monitors.find({},{"_id":0}))
# 2. monitors라는 키값으로 모니터정보를 내려준다
return jsonify({'result':'success', 'monitors' : all_monitors})
@app.route('/monitor', methods=['POST'])
def monitoring():
# 클라이언트로부터 받아온 url에서 동영상의 코드를 뽑아낸다
url = request.form['url_give']
split_url = url.split('?v=')
url_edit = split_url[-1]
api = "https://www.googleapis.com/youtube/v3/videos?key=<KEY>&id={0}&part=snippet".format(url_edit)
data = requests.get(api)
json_data = json.loads(data.text) # json_data 를 파싱
now = time.strftime('%Y-%m-%d %H:%M:%S')
thumbnail = json_data['items'][0]['snippet']['thumbnails']['high']['url']
title = json_data['items'][0]['snippet']['title']
date = json_data['items'][0]['snippet']['publishedAt']
# python 문자열 치환
date = date.replace("T", " ").replace("Z", "")
comment_list = comment.commentExtract(url_edit)
# mongoDB에 데이터 넣기
doc = {
'now' : now,
'url' : url,
'thumbnail' : thumbnail,
'title' : title,
'date' : date,
'comment' : comment_list
}
# monitors라는 이름으로 DB를 저장한다
db.monitors.insert_one(doc)
# 성공하면, 메시지를 띄웁니다
return jsonify({'result' : 'success', 'msg' : 'Monitor Completed'})
@app.route('/monitor/delete', methods=['POST'])
def delete_monitoring():
# 클라이언트로부터 받아온 now_give를 now_receive 변수에 넣습니다
now_receive = request.form['now_give']
print(now_receive)
# monitors 목록에서 delete_one으로 now가 now_receive와 일치하는 항목을 제거합니다
db.monitors.delete_one({'now': now_receive})
# 성공하면, 메시지를 띄웁니다
return jsonify({'result' : 'success', 'msg':'삭제되었습니다'})
if __name__ == '__main__':
app.run('0.0.0.0',port=5000,debug=True)
<file_sep>/etc/3prac/3prac3.py
from pymongo import MongoClient # pymongo를 임포트 하기(패키지 인스톨 먼저 해야겠죠?)
client = MongoClient('localhost', 27017) # mongoDB는 27017 포트로 돌아갑니다.
db = client.dbsparta # 'dbsparta'라는 이름의 db를 만듭니다.
# MongoDB에서 데이터 모두 보기
# dbsparta.users에서 찾아주세요~ / 여러개를 찾아오니까 리스트로 받는다
all_users = list(db.users.find({}, {'_id':False})) #[{내용}, {내용}]
#모든걸 가져오되, '_id'를 제외한다
# 참고) MongoDB에서 특정 조건의 데이터 모두 보기
same_ages = list(db.users.find({'age':21}))
john = db.users.find_one({'name':'john'}, {'_id':False})
print(john)
# 생김새
# db.people.update_many(찾을조건,{ '$set': 어떻게바꿀지 })
# many는 잘 안쓴다. 손목잘라야하니까
# 예시 - 오타가 많으니 이 줄을 복사해서 씁시다!
db.users.update_one({'name':'bobby'},{'$set':{'age':19}})
user = db.users.find_one({'name':'bobby'})
#bobby란 이름을 가진 사람 1명만 찾으면 끝
print(user)<file_sep>/youtube_monitor/test.py
# from datetime import datetime
import time
now = time.strftime('%Y-%m-%d %H:%M:%S')
print(now)<file_sep>/review/Day3/python_prac.py
fruits = ['사과', '배', '감', '귤']
for fruit in fruits:
print(fruit)
fruits = ['사과','배','배','감','수박','귤','딸기','사과','배','수박']
count = 0
for fruit in fruits:
if fruit == '사과':
count += 1
print(count)
#함수를 정의할 때, def
#왜 name은 따옴표 안들어가? 매개변수를 정의한 것이기 때문이다!
def count_fruits(name):
count = 0
for fruit in fruits:
if fruit == name:
count += 1
return count
apple_count = count_fruits('사과')
print(apple_count) #사과의 갯수
subak_count = count_fruits('수박')
print(subak_count) #수박의 갯수
gam_count = count_fruits('감')
print(gam_count) #감의 갯수
people = [{'name': 'bob', 'age': 20},
{'name': 'carry', 'age': 38},
{'name': 'john', 'age': 7}]
# 모든 사람의 이름과 나이를 출력해봅시다.
# 제대로 줄 맞추지 않으면, 원하는 값은 절대 나오지 않는답니다
for person in people:
print(person['name'], person['age'])
# 이번엔, 반복문과 조건문을 응용한 함수를 만들어봅시다.
# 이름을 받으면, age를 리턴해주는 함수
def age_return(myname):
for person in people:
if person['name'] == myname:
return person['age']
return '해당하는 이름이 없습니다'
print(age_return('bob')) | 2f0bd491f8bb0e07d4ab560e0d30891be0fde758 | [
"Python",
"Shell"
] | 12 | Python | jeancha616/sparta | 4e337edad7d38937c43965d4b348de572b073520 | 9abcf90f7856b337815e1fd597c4ca11aa185cd9 |
refs/heads/master | <file_sep>//
// BuilingNode.swift
// Project29
//
// Created by slim on 2017/11/05.
// Copyright © 2017 halloween-jack. All rights reserved.
//
import UIKit
class BuilingNode: SKSpriteNode {
}
| b48178f1f414056fe9251ad28a172a680f8a2599 | [
"Swift"
] | 1 | Swift | halloween-jack/Project-29-Exploding-Monkeys | 97520c06273065f6e8fd2e6d2c207da6463dd401 | 5648aa1997f99a027964ffbd185e86e278f50496 |
refs/heads/master | <repo_name>corux/alexa-countryquiz-skill<file_sep>/lambda/src/states/QuizFinished/handlers/YesIntentHandler.ts
import { HandlerInput } from "ask-sdk-core";
import { Response } from "ask-sdk-model";
import {
BaseIntentHandler,
getLocale,
Intents,
ISessionAttributes,
startQuiz,
States,
} from "../../../utils";
import countries from "../../../utils/countries";
@Intents("AMAZON.YesIntent")
export class YesIntentHandler extends BaseIntentHandler {
public async handle(handlerInput: HandlerInput): Promise<Response> {
const attributes = handlerInput.attributesManager.getSessionAttributes() as ISessionAttributes;
const locale = getLocale(handlerInput);
this.setState(handlerInput, States.QuizInProgress);
const region = countries.getRegionByCode(attributes.region, locale);
return startQuiz(handlerInput, region);
}
}
<file_sep>/lambda/src/states/QuizFinished/handlers/index.ts
export * from "./FallbackHandler";
export * from "./NoIntentHandler";
export * from "./QuizIntentHandler";
export * from "./YesIntentHandler";
export * from "./InfoIntentHandler";
<file_sep>/lambda/src/utils/index.ts
export * from "./attributes";
export * from "./BaseIntentHandler";
export * from "./quiz";
export * from "./countries";
export * from "./request";
export * from "./State";
export * from "./StateHandler";
<file_sep>/lambda/src/states/QuizFinished/index.ts
export * from "./QuizFinishedStateHandler";
<file_sep>/README.md
# alexa country quiz skill
[](https://travis-ci.com/corux/alexa-countryquiz-skill)
<file_sep>/lambda/src/states/SessionStarted/handlers/index.ts
export * from "./FallbackHandler";
export * from "./LaunchRequestHandler";
export * from "./NoIntentHandler";
export * from "./YesIntentHandler";
export * from "./QuizIntentHandler";
<file_sep>/lambda/src/utils/countries.ts
import { CountryData, IContinent, ICountry } from "@corux/country-data";
const cache: { [lang: string]: CountryData } = {};
function getInstance(lang: string) {
const countryData = cache[lang] || (cache[lang] = new CountryData(lang));
return countryData;
}
export default {
getAll: (lang: string): ICountry[] => {
return getInstance(lang).getCountries();
},
getByIso3: (iso: string, lang: string): ICountry => {
return getInstance(lang)
.getCountries()
.find(
(value) =>
value.iso3 && iso && value.iso3.toUpperCase() === iso.toUpperCase()
);
},
getRegionByCode: (code: string, lang: string): IContinent => {
const regions = getInstance(lang)
.getContinents()
.filter((item) => item.code === code);
return regions.length ? regions[0] : undefined;
},
};
<file_sep>/lambda/src/states/QuizFinished/handlers/InfoIntentHandler.ts
import { HandlerInput } from "ask-sdk-core";
import { Response } from "ask-sdk-model";
import { BaseIntentHandler, Intents, ISessionAttributes } from "../../../utils";
@Intents("InfoIntent")
export class InfoIntentHandler extends BaseIntentHandler {
public async handle(handlerInput: HandlerInput): Promise<Response> {
const attributes = handlerInput.attributesManager.getSessionAttributes() as ISessionAttributes;
const reprompt = `Bist du bereit für die nächste Runde?`;
let textVal = "";
if (attributes.round > 0) {
const round = attributes.round === 1 ? "eine" : attributes.round;
textVal = `Du hast bereits ${round} Runde${
attributes.round > 1 ? "n" : ""
} gespielt.`;
}
return handlerInput.responseBuilder
.speak(`${textVal} ${reprompt}`)
.reprompt(reprompt)
.getResponse();
}
}
<file_sep>/lambda/src/interceptors/InitializeSessionInterceptor.ts
import { HandlerInput, ResponseInterceptor } from "ask-sdk-core";
import { IPersistentAttributes, ISessionAttributes } from "../utils";
export class InitializeSessionInterceptor implements ResponseInterceptor {
public async process(handlerInput: HandlerInput): Promise<void> {
if (handlerInput.requestEnvelope.session.new) {
const sessionAttributes = handlerInput.attributesManager.getSessionAttributes() as ISessionAttributes;
sessionAttributes.round = 0;
sessionAttributes.stateData = {};
const attributes = (await handlerInput.attributesManager.getPersistentAttributes()) as IPersistentAttributes;
attributes.lastAccess = new Date().getTime();
handlerInput.attributesManager.savePersistentAttributes();
}
}
}
<file_sep>/lambda/src/states/QuizFinished/handlers/QuizIntentHandler.ts
import { HandlerInput } from "ask-sdk-core";
import { IntentRequest, Response } from "ask-sdk-model";
import {
BaseIntentHandler,
getLocale,
getSlotValue,
Intents,
startQuiz,
} from "../../../utils";
import countries from "../../../utils/countries";
@Intents("QuizIntent")
export class QuizIntentHandler extends BaseIntentHandler {
public async handle(handlerInput: HandlerInput): Promise<Response> {
const region = this.getRegion(handlerInput);
return startQuiz(handlerInput, region);
}
private getRegion(handlerInput: HandlerInput) {
const regionValue = getSlotValue(
(handlerInput.requestEnvelope.request as IntentRequest).intent.slots
.region
);
const locale = getLocale(handlerInput);
const region = countries.getRegionByCode(regionValue, locale);
return region;
}
}
<file_sep>/lambda/src/states/QuizInProgress/QuizInProgressStateHandler.ts
import { RequestHandler } from "ask-sdk-core";
import { State, StateHandler, States } from "../../utils";
import {
AnswerIntentHandler,
FallbackHandler,
InfoIntentHandler,
NoIntentHandler,
QuizIntentHandler,
RepeatIntentHandler,
SkipIntentHandler,
YesIntentHandler,
} from "./handlers";
@State(States.QuizInProgress)
export class QuizInProgressStateHandler extends StateHandler {
public get handlers(): RequestHandler[] {
return [
new AnswerIntentHandler(),
new SkipIntentHandler(),
new RepeatIntentHandler(),
new YesIntentHandler(),
new NoIntentHandler(),
new InfoIntentHandler(),
new QuizIntentHandler(),
new FallbackHandler(),
];
}
}
<file_sep>/lambda/src/states/QuizInProgress/handlers/QuizIntentHandler.ts
import { HandlerInput } from "ask-sdk-core";
import { IntentRequest, Response } from "ask-sdk-model";
import {
BaseIntentHandler,
getLocale,
getSlotValue,
Intents,
} from "../../../utils";
import countries from "../../../utils/countries";
@Intents("QuizIntent")
export class QuizIntentHandler extends BaseIntentHandler {
public async handle(handlerInput: HandlerInput): Promise<Response> {
const data = this.getStateData(handlerInput);
const region = this.getRegion(handlerInput);
data.nextRegion = region ? region.code : "ALL";
const regionText = region ? `mit Ländern aus ${region.name}` : "";
const reprompt = `Möchtest du trotzdem eine neue Runde ${regionText} starten?`;
return handlerInput.responseBuilder
.speak(`Die aktuelle Runde ist noch nicht beendet. ${reprompt}`)
.reprompt(reprompt)
.getResponse();
}
private getRegion(handlerInput: HandlerInput) {
const regionValue = getSlotValue(
(handlerInput.requestEnvelope.request as IntentRequest).intent.slots
.region
);
const locale = getLocale(handlerInput);
const region = countries.getRegionByCode(regionValue, locale);
return region;
}
}
<file_sep>/lambda/src/states/QuizInProgress/handlers/HintIntentHandler.ts
import { HandlerInput } from "ask-sdk-core";
import { Response } from "ask-sdk-model";
import { BaseIntentHandler, getQuestion, Intents } from "../../../utils";
@Intents("HintIntent")
export class HintIntentHandler extends BaseIntentHandler {
public async handle(handlerInput: HandlerInput): Promise<Response> {
return getQuestion(
handlerInput,
false,
"Zu dieser Frage gibt es keinen Hinweis."
);
}
}
<file_sep>/lambda/src/states/SessionStarted/index.ts
export * from "./SessionStartedStateHandler";
<file_sep>/lambda/src/handlers/index.ts
export * from "./AMAZON_StopIntentHandler";
export * from "./AMAZON_HelpIntentHandler";
export * from "./CustomErrorHandler";
export * from "./SessionEndedHandler";
<file_sep>/lambda/__tests__/LaunchRequest.spec.ts
import { VirtualAlexa } from "virtual-alexa";
import { handler } from "../src";
describe("LaunchRequest", () => {
let alexa: VirtualAlexa;
beforeEach(() => {
alexa = VirtualAlexa.Builder()
.handler(handler)
.interactionModelFile("skill-package/interactionModels/custom/de-DE.json")
.create();
alexa.dynamoDB().mock();
});
test("Ask to start quiz", async () => {
const result = await alexa.launch();
expect(result.response.outputSpeech.ssml).toContain(
"Bist du bereit für die erste Runde?"
);
expect(result.response.shouldEndSession).toBe(false);
});
test("Should provide help message only on first launch", async () => {
const result1 = await alexa.launch();
expect(result1.response.outputSpeech.ssml).toContain(
"Willkommen beim Länder Quiz"
);
await alexa.endSession();
const result2 = await alexa.launch();
expect(result2.response.outputSpeech.ssml).toContain(
"Willkommen zurück beim Länder Quiz"
);
});
});
<file_sep>/lambda/src/states/QuizInProgress/handlers/AnswerIntentHandler.ts
import { HandlerInput } from "ask-sdk-core";
import { IntentRequest, Response } from "ask-sdk-model";
import {
BaseIntentHandler,
calculatePoints,
getAnswerText,
getLocale,
getNumberOfQuestions,
getQuestion,
getRandomEntry,
getSlotValue,
Intents,
IPersistentAttributes,
isAnswerCorrect,
ISessionAttributes,
States,
} from "../../../utils";
export async function getResponse(
handlerInput: HandlerInput,
successText: string
): Promise<Response> {
const attributesManager = handlerInput.attributesManager;
const attributes = attributesManager.getSessionAttributes() as ISessionAttributes;
const remainingQuestions = attributes.history.filter((item) => !item.answer)
.length;
const isFinished = remainingQuestions === 0;
if (isFinished) {
const totalAnswers = attributes.history.length;
const correctAnswers = attributes.history.filter(isAnswerCorrect).length;
const persistentAttributes = (await attributesManager.getPersistentAttributes()) as IPersistentAttributes;
persistentAttributes.scores = persistentAttributes.scores || [];
persistentAttributes.scores.push({
correct: correctAnswers,
points: calculatePoints(attributes.history),
region: attributes.region,
time: new Date().getTime(),
total: totalAnswers,
});
attributesManager.savePersistentAttributes();
attributes.state = States.QuizFinished;
attributes.round++;
const reprompt = "Möchtest du nochmal spielen?";
let correctAnswersText = `<say-as interpret-as="number">${correctAnswers}</say-as> von ${totalAnswers}`;
if (correctAnswers === 0) {
correctAnswersText = `keine der ${totalAnswers}`;
}
if (correctAnswers === 1) {
correctAnswersText = `eine von ${totalAnswers}`;
}
if (correctAnswers === totalAnswers) {
correctAnswersText = `alle ${totalAnswers}`;
}
let text = `${successText} Du hast ${correctAnswersText} Fragen richtig beantwortet. `;
text += reprompt;
return handlerInput.responseBuilder
.speak(text)
.reprompt(reprompt)
.getResponse();
}
return getQuestion(
handlerInput,
remainingQuestions === 1 || remainingQuestions === getNumberOfQuestions(),
successText
);
}
@Intents("CountryIntent", "ContinentIntent")
export class AnswerIntentHandler extends BaseIntentHandler {
public async handle(handlerInput: HandlerInput): Promise<Response> {
const attributes = handlerInput.attributesManager.getSessionAttributes() as ISessionAttributes;
const current = attributes.history.filter((item) => !item.answer)[0];
const locale = getLocale(handlerInput);
const slots = (handlerInput.requestEnvelope.request as IntentRequest).intent
.slots;
const slotValue = getSlotValue(
current.type === "continent" ? slots.continent : slots.country
);
if (!slotValue) {
const reprompt = "Bitte versuche es noch einmal.";
return handlerInput.responseBuilder
.speak(`Ich habe dich nicht verstanden. ${reprompt}`)
.reprompt(reprompt)
.getResponse();
}
current.answer = slotValue;
if (isAnswerCorrect(current)) {
const speechcon = getRandomEntry([
"richtig",
"bingo",
"bravo",
"prima",
"stimmt",
"super",
"yay",
"jawohl",
]);
return getResponse(
handlerInput,
`<say-as interpret-as='interjection'>${speechcon}</say-as>!`
);
}
return getResponse(
handlerInput,
`Die richtige Antwort war ${getAnswerText(current, locale)}.`
);
}
}
<file_sep>/lambda/src/utils/attributes.ts
import { IQuestion, States } from ".";
export interface ISessionAttributes {
round: number;
history: IQuestion[];
status: "PLAYING" | "STOPPED";
state: States;
stateData: { [key: string]: any };
region?: string;
nextRegion?: string;
}
export interface IPersistentAttributes {
lastAccess: number;
scores: Array<{
time: number;
total: number;
correct: number;
region: string;
points: number;
}>;
}
<file_sep>/lambda/src/states/QuizInProgress/handlers/SkipIntentHandler.ts
import { HandlerInput } from "ask-sdk-core";
import { Response } from "ask-sdk-model";
import { getResponse } from ".";
import {
BaseIntentHandler,
getAnswerText,
getLocale,
Intents,
ISessionAttributes,
} from "../../../utils";
@Intents("SkipIntent", "AMAZON.NextIntent")
export class SkipIntentHandler extends BaseIntentHandler {
public async handle(handlerInput: HandlerInput): Promise<Response> {
const attributesManager = handlerInput.attributesManager;
const attributes = attributesManager.getSessionAttributes() as ISessionAttributes;
const current = attributes.history.filter((item) => !item.answer)[0];
const locale = getLocale(handlerInput);
current.answer = "-";
return await getResponse(
handlerInput,
`Die Lösung war ${getAnswerText(current, locale)}.`
);
}
}
<file_sep>/lambda/src/states/QuizInProgress/handlers/RepeatIntentHandler.ts
import { HandlerInput } from "ask-sdk-core";
import { Response } from "ask-sdk-model";
import { BaseIntentHandler, getQuestion, Intents } from "../../../utils";
@Intents("AMAZON.RepeatIntent")
export class RepeatIntentHandler extends BaseIntentHandler {
public async handle(handlerInput: HandlerInput): Promise<Response> {
return getQuestion(handlerInput, false);
}
}
<file_sep>/lambda/src/states/index.ts
export * from "./SessionStarted";
export * from "./QuizInProgress";
export * from "./QuizFinished";
<file_sep>/lambda/__tests__/HelpIntent.spec.ts
import { VirtualAlexa } from "virtual-alexa";
import { handler } from "../src";
describe("AMAZON.HelpIntent", () => {
let alexa: VirtualAlexa;
beforeEach(() => {
alexa = VirtualAlexa.Builder()
.handler(handler)
.interactionModelFile("skill-package/interactionModels/custom/de-DE.json")
.create();
alexa.dynamoDB().mock();
});
it("Provide help message", async () => {
await alexa.launch();
const result: any = await alexa.utter("help");
expect(result.response.outputSpeech.ssml).toContain(
"dein Wissen über die Länder der Welt testen"
);
expect(result.response.shouldEndSession).toBe(false);
});
});
<file_sep>/lambda/src/utils/StateHandler.ts
import { HandlerInput, RequestHandler } from "ask-sdk-core";
import { Response } from "ask-sdk-model";
import { States } from ".";
import { StateManagement } from "./StateManagement";
export function State(state: States) {
return <T extends StateHandler>(target: new () => T) => {
target.prototype.state = state;
};
}
export abstract class StateHandler
extends StateManagement
implements RequestHandler {
private readonly state: States;
public abstract get handlers(): RequestHandler[];
public canHandle(handlerInput: HandlerInput): boolean {
return this.getState(handlerInput) === this.state;
}
public async handle(handlerInput: HandlerInput): Promise<Response> {
for (const handler of this.handlers) {
if (handler.canHandle(handlerInput)) {
return handler.handle(handlerInput);
}
}
}
}
<file_sep>/lambda/src/utils/StateManagement.ts
import { HandlerInput } from "ask-sdk-core";
import { ISessionAttributes, States } from ".";
export abstract class StateManagement {
public getState(handlerInput: HandlerInput): States {
const attributes = handlerInput.attributesManager.getSessionAttributes() as ISessionAttributes;
return attributes.state;
}
public setState(handlerInput: HandlerInput, state: States) {
const attributes = handlerInput.attributesManager.getSessionAttributes() as ISessionAttributes;
attributes.state = state;
}
public getStateData(handlerInput: HandlerInput): any {
const attributes = handlerInput.attributesManager.getSessionAttributes() as ISessionAttributes;
const state = attributes.state.toString();
if (!attributes.stateData) {
attributes.stateData = {};
}
if (!attributes.stateData[state]) {
attributes.stateData[state] = {};
}
return attributes.stateData[state];
}
}
<file_sep>/lambda/src/states/QuizInProgress/handlers/InfoIntentHandler.ts
import { HandlerInput } from "ask-sdk-core";
import { Response } from "ask-sdk-model";
import {
BaseIntentHandler,
getQuestion,
Intents,
isAnswerCorrect,
ISessionAttributes,
} from "../../../utils";
@Intents("InfoIntent", "AMAZON.HelpIntent")
export class InfoIntentHandler extends BaseIntentHandler {
public async handle(handlerInput: HandlerInput): Promise<Response> {
const attributes = handlerInput.attributesManager.getSessionAttributes() as ISessionAttributes;
const correctAnswers = attributes.history.filter(isAnswerCorrect).length;
const totalAnswers = attributes.history.length;
let text: string;
if (correctAnswers === totalAnswers) {
text = `Du hast alle ${totalAnswers} Fragen richtig beantwortet.`;
} else if (correctAnswers === 0) {
text = `Du hast noch keine der ${totalAnswers} Fragen richtig beantwortet.`;
} else if (totalAnswers === 0) {
text = "Du hast noch keine Fragen beantwortet";
}
return getQuestion(handlerInput, false, text);
}
}
<file_sep>/lambda/src/states/QuizInProgress/handlers/NoIntentHandler.ts
import { HandlerInput } from "ask-sdk-core";
import { Response } from "ask-sdk-model";
import { BaseIntentHandler, getQuestion, Intents } from "../../../utils";
@Intents("AMAZON.NoIntent")
export class NoIntentHandler extends BaseIntentHandler {
public handle(handlerInput: HandlerInput): Response {
return getQuestion(handlerInput, false);
}
}
<file_sep>/lambda/src/utils/State.ts
export enum States {
SessionStarted,
QuizInProgress,
QuizFinished,
}
<file_sep>/lambda/src/index.ts
import { SkillBuilders } from "ask-sdk-core";
import { DynamoDbPersistenceAdapter } from "ask-sdk-dynamodb-persistence-adapter";
import {
AmazonStopIntentHandler,
CustomErrorHandler,
SessionEndedHandler,
} from "./handlers";
import { LogInterceptor } from "./interceptors";
import { InitializeSessionInterceptor } from "./interceptors/InitializeSessionInterceptor";
import {
QuizFinishedStateHandler,
QuizInProgressStateHandler,
SessionStartedStateHandler,
} from "./states";
const dynamodbAdapter = new DynamoDbPersistenceAdapter({
createTable: true,
tableName: "alexa-countryquiz-skill",
});
export const handler = SkillBuilders.custom()
.addRequestHandlers(
new AmazonStopIntentHandler(),
new SessionEndedHandler(),
new SessionStartedStateHandler(),
new QuizInProgressStateHandler(),
new QuizFinishedStateHandler()
)
.addErrorHandlers(new CustomErrorHandler())
.addRequestInterceptors(new LogInterceptor())
.addResponseInterceptors(
new LogInterceptor(),
new InitializeSessionInterceptor()
)
.withPersistenceAdapter(dynamodbAdapter)
.lambda();
<file_sep>/lambda/src/states/QuizInProgress/index.ts
export * from "./QuizInProgressStateHandler";
<file_sep>/lambda/src/handlers/AMAZON_HelpIntentHandler.ts
import { HandlerInput } from "ask-sdk-core";
import { Response } from "ask-sdk-model";
import {
BaseIntentHandler,
getNumberOfQuestions,
Intents,
ISessionAttributes,
} from "../utils";
@Intents("AMAZON.HelpIntent")
export class AmazonHelpIntentHandler extends BaseIntentHandler {
public async handle(handlerInput: HandlerInput): Promise<Response> {
const attributes = handlerInput.attributesManager.getSessionAttributes() as ISessionAttributes;
const reprompt = `Bist du bereit für die ${
attributes.round === 0 ? "erste" : "nächste"
} Runde?`;
const helpText = `Mit dem Länder Quiz kannst du dein Wissen über die Länder der Welt testen.
Pro Runde stelle ich dir ${getNumberOfQuestions()} Fragen.
${reprompt}`;
return handlerInput.responseBuilder
.speak(helpText)
.reprompt(reprompt)
.getResponse();
}
}
<file_sep>/lambda/src/states/QuizInProgress/handlers/YesIntentHandler.ts
import { HandlerInput } from "ask-sdk-core";
import { Response } from "ask-sdk-model";
import {
BaseIntentHandler,
getLocale,
Intents,
startQuiz,
} from "../../../utils";
import countries from "../../../utils/countries";
@Intents("AMAZON.YesIntent")
export class YesIntentHandler extends BaseIntentHandler {
public canHandle(handlerInput: HandlerInput): boolean {
return (
super.canHandle(handlerInput) &&
this.getStateData(handlerInput).nextRegion
);
}
public async handle(handlerInput: HandlerInput): Promise<Response> {
const locale = getLocale(handlerInput);
const region = countries.getRegionByCode(
this.getStateData(handlerInput).nextRegion,
locale
);
return startQuiz(handlerInput, region);
}
}
<file_sep>/lambda/src/utils/quiz.ts
import { ContinentCode, IContinent, ICountry } from "@corux/country-data";
import { HandlerInput } from "ask-sdk-core";
import { Response } from "ask-sdk-model";
import { ISessionAttributes } from "./attributes";
import countries from "./countries";
import { getLocale } from "./request";
import { States } from "./State";
export interface IQuestion {
iso: string;
type: "continent" | "capital" | "neighbour";
answer: string;
}
export function getNumberOfQuestions(): number {
return 8;
}
export function getRandomEntry<T>(array: T[]): T {
return array[Math.floor(Math.random() * array.length)];
}
export function shuffle<T>(array: T[]): T[] {
let m = array.length;
// While there remain elements to shuffle
while (m) {
// Pick a remaining element
const i = Math.floor(Math.random() * m--);
// And swap it with the current element
const t = array[m];
array[m] = array[i];
array[i] = t;
}
return array;
}
function createContinentQuestions(
allCountries: ICountry[],
num: number
): IQuestion[] {
const knownCountries = allCountries.filter((item) => item.region);
const selected = shuffle(knownCountries).slice(0, num);
const questions = selected.map(
(item) =>
({
iso: item.iso3,
type: "continent",
} as IQuestion)
);
return questions;
}
function createNeighbourQuestions(
allCountries: ICountry[],
num: number
): IQuestion[] {
const knownCountries = shuffle(
allCountries.filter((item) => item.borders.length > 0)
);
const selected: ICountry[] = [];
for (let i = 0; i < knownCountries.length && selected.length < num; i++) {
const current = knownCountries[i];
const neighbourAlreadySelected =
selected
.reduce((a, b) => a.concat(b.borders), [] as string[])
.indexOf(current.iso3) !== -1;
if (!neighbourAlreadySelected) {
selected.push(current);
}
}
return selected.map(
(country) =>
({
iso: country.iso3,
type: "neighbour",
} as IQuestion)
);
}
function createCapitalQuestions(
allCountries: ICountry[],
num: number,
region?: ContinentCode
): IQuestion[] {
const knownCountries = allCountries.filter(
(item) =>
item.continent.code === (region || ContinentCode.EUROPE) &&
item.name !== item.capital &&
item.capital
);
const selected = shuffle(knownCountries).slice(0, num);
return selected.map(
(country) =>
({
iso: country.iso3,
type: "capital",
} as IQuestion)
);
}
function createQuestions(
handlerInput: HandlerInput,
region?: IContinent
): IQuestion[] {
const locale = getLocale(handlerInput);
let all = countries.getAll(locale);
if (region) {
all = all.filter((item) => item.continent.code === region.code);
}
let questions: IQuestion[];
if (region) {
questions = [].concat(
createNeighbourQuestions(all, 7),
createCapitalQuestions(all, 4, region.code)
);
} else {
questions = [].concat(
createContinentQuestions(all, 4),
createNeighbourQuestions(all, 4),
createCapitalQuestions(all, 3)
);
}
return shuffle(questions).slice(0, getNumberOfQuestions());
}
/**
 * Renders the spoken (German) prompt for a question. The strings are
 * user-facing SSML text and must stay as-is.
 */
function questionToText(question: IQuestion, country: ICountry): string {
  if (question.type === "capital") {
    return `Zu welchem Land gehört die Hauptstadt ${country.capital}?`;
  }
  if (question.type === "continent") {
    return `Auf welchem Kontinent liegt ${country.name}?`;
  }
  // Remaining variant of the question-type union: "neighbour".
  return `Nenne ein Nachbarland von ${country.name}.`;
}
/**
 * Checks a recorded answer against the correct value for its question type.
 * Country data is resolved with the "en-US" locale because the stored answer
 * ids are locale independent (iso codes / continent codes).
 */
export function isAnswerCorrect(question: IQuestion): boolean {
  const country = countries.getByIso3(question.iso, "en-US");
  if (question.type === "capital") {
    // For capital questions the slot resolves to the country's iso code.
    return question.answer === question.iso;
  }
  if (question.type === "continent") {
    return question.answer === country.continent.code;
  }
  // "neighbour": any bordering country counts as correct.
  return country.borders.indexOf(question.answer) !== -1;
}
export function calculatePoints(questions: IQuestion[]): number {
const correctAnswers = questions.map(isAnswerCorrect);
const allAnswersCorrect = correctAnswers.filter((item) => !item).length === 0;
const pointsForQuestions = correctAnswers
.map((item) => (item ? 5 : 0))
.reduce((a, b) => a + b, 0);
const bonus = allAnswersCorrect ? 10 : 0;
return pointsForQuestions + bonus;
}
/**
 * Renders the human-readable correct answer for a question in the given
 * locale (used when telling the user what the right answer would have been).
 */
export function getAnswerText(question: IQuestion, locale: string): string {
  const country = countries.getByIso3(question.iso, locale);
  switch (question.type) {
    case "capital":
      return country.name;
    case "continent":
      return countries.getRegionByCode(country.continent.code, locale).name;
    case "neighbour": {
      // Block scope added: a lexical declaration directly inside a `case`
      // leaks across the whole switch (ESLint no-case-declarations).
      const neighbours = country.borders.map(
        (iso) => countries.getByIso3(iso, locale).name
      );
      if (neighbours.length <= 2) {
        return neighbours.join(" oder ");
      }
      // Too many to read out — name two examples.
      return `z.B. ${neighbours.slice(0, 2).join(" oder ")}`;
    }
  }
}
/**
 * Speaks the next unanswered question from the session history.
 * @param includeQuestionPrefix Prepend "Hier ist die erste/nächste/letzte
 *   Frage." based on quiz progress.
 * @param textPrefix Optional text spoken before the question.
 */
export function getQuestion(
  handlerInput: HandlerInput,
  includeQuestionPrefix: boolean,
  textPrefix?: string
): Response {
  const locale = getLocale(handlerInput);
  const attributes = handlerInput.attributesManager.getSessionAttributes() as ISessionAttributes;
  // The next question is the first history entry without a recorded answer.
  const unanswered = attributes.history.filter((item) => !item.answer);
  const current = unanswered[0];
  const country = countries.getByIso3(current.iso, locale);
  const reprompt = questionToText(current, country);
  let text = `${textPrefix || ""} `;
  if (includeQuestionPrefix) {
    const answeredCount = attributes.history.filter((item) => item.answer)
      .length;
    let ordinal: string;
    if (answeredCount === 0) {
      ordinal = "erste";
    } else if (unanswered.length === 1) {
      ordinal = "letzte";
    } else {
      ordinal = "nächste";
    }
    text += `Hier ist die ${ordinal} Frage. `;
  }
  text += reprompt;
  return handlerInput.responseBuilder
    .speak(text)
    .reprompt(reprompt)
    .getResponse();
}
/**
 * Resets the session into quiz mode, generates a fresh question set and
 * speaks the first question. When a region is given the quiz is restricted
 * to that continent.
 */
export function startQuiz(
  handlerInput: HandlerInput,
  region?: IContinent
): Response {
  const attributes = handlerInput.attributesManager.getSessionAttributes() as ISessionAttributes;
  attributes.state = States.QuizInProgress;
  attributes.region = region ? region.code : undefined;
  attributes.history = createQuestions(handlerInput, region);
  let intro = "";
  if (region) {
    intro = `Das Quiz wird mit Ländern aus ${region.name} gestartet.`;
  }
  return getQuestion(handlerInput, true, intro);
}
<file_sep>/lambda/src/states/SessionStarted/SessionStartedStateHandler.ts
import { HandlerInput, RequestHandler } from "ask-sdk-core";
import { AmazonHelpIntentHandler } from "../../handlers";
import { State, StateHandler, States } from "../../utils";
import {
FallbackHandler,
LaunchRequestHandler,
NoIntentHandler,
QuizIntentHandler,
YesIntentHandler,
} from "./handlers";
/**
 * State handler for brand-new sessions: tags the session with the
 * SessionStarted state on first contact so the base StateHandler dispatches
 * to the handlers listed below.
 */
@State(States.SessionStarted)
export class SessionStartedStateHandler extends StateHandler {
  public canHandle(handlerInput: HandlerInput): boolean {
    // A session is "new" on the very first request of a conversation.
    const newSession =
      handlerInput.requestEnvelope.session &&
      handlerInput.requestEnvelope.session.new;
    if (newSession) {
      // Side effect inside canHandle: record the state before delegating to
      // the base class's state-matching check.
      this.setState(handlerInput, States.SessionStarted);
    }
    return super.canHandle(handlerInput);
  }

  public get handlers(): RequestHandler[] {
    // Presumably probed in order by the base StateHandler, with Fallback as
    // the catch-all last entry — confirm against StateHandler's dispatch.
    return [
      new QuizIntentHandler(),
      new LaunchRequestHandler(),
      new YesIntentHandler(),
      new NoIntentHandler(),
      new AmazonHelpIntentHandler(),
      new FallbackHandler(),
    ];
  }
}
<file_sep>/lambda/src/states/SessionStarted/handlers/LaunchRequestHandler.ts
import { HandlerInput } from "ask-sdk-core";
import { Response } from "ask-sdk-model";
import {
BaseIntentHandler,
getNumberOfQuestions,
IPersistentAttributes,
Request,
} from "../../../utils";
/**
 * Handles the skill launch. Greets returning users (seen within the last
 * week) with a short welcome, first-time/stale users with the full intro,
 * then asks whether to start a round.
 */
@Request("LaunchRequest")
export class LaunchRequestHandler extends BaseIntentHandler {
  public canHandle(handlerInput: HandlerInput): boolean {
    // Also accept any request that opens a new session, not just the
    // LaunchRequest type matched by the decorator.
    const session = handlerInput.requestEnvelope.session;
    return super.canHandle(handlerInput) || (session && session.new);
  }

  public async handle(handlerInput: HandlerInput): Promise<Response> {
    let text: string;
    const attributes = (await handlerInput.attributesManager.getPersistentAttributes()) as IPersistentAttributes;
    const oneWeekMs = 7 * 24 * 60 * 60 * 1000;
    // lastAccess is a ms-since-epoch timestamp; absent for first-time users.
    if (
      !attributes.lastAccess ||
      attributes.lastAccess < new Date().getTime() - oneWeekMs
    ) {
      text = `Willkommen beim Länder Quiz!
      Ich stelle dir ${getNumberOfQuestions()} Fragen zu den Ländern der Welt.
      Versuche möglichst viele richtige Antworten zu erzielen.`;
    } else {
      text = "Willkommen zurück beim Länder Quiz!";
    }
    const reprompt = "Bist du bereit für die erste Runde?";
    return handlerInput.responseBuilder
      .speak(`${text} ${reprompt}`)
      .reprompt(reprompt)
      .getResponse();
  }
}
<file_sep>/lambda/src/states/QuizInProgress/handlers/FallbackHandler.ts
import { HandlerInput } from "ask-sdk-core";
import { Response } from "ask-sdk-model";
import { BaseIntentHandler, Fallback } from "../../../utils";
/**
 * Catch-all while a quiz is in progress: apologizes and asks the user to
 * repeat. The German strings are user-facing speech output.
 */
@Fallback()
export class FallbackHandler extends BaseIntentHandler {
  public handle(handlerInput: HandlerInput): Response {
    return handlerInput.responseBuilder
      .speak("Ich habe dich nicht verstanden. Bitte wiederhole den Befehl.")
      .reprompt("Bitte wiederhole den Befehl.")
      .getResponse();
  }
}
<file_sep>/bin/update-schema.ts
import { CountryData } from "@corux/country-data";
import * as program from "commander";
import * as fs from "fs";
import * as path from "path";
import * as process from "process";
import countries from "../lambda/src/utils/countries";
// CLI flags: --file points at the Alexa interaction-model JSON to rewrite,
// --lang selects the language country/continent names are generated in.
program
  .option("--file <path>", "Schema file to update.")
  .option("--lang <code>", "Language code to use")
  .parse(process.argv);
const file = program.file;
const lang = program.lang;
// Load the existing schema so everything except the slot types is preserved.
const schema = JSON.parse(fs.readFileSync(file).toString());
const all = countries.getAll(lang);
// COUNTRY slot type: one value per country, with long/alternative names as
// synonyms so more utterances resolve to the same iso3 id.
const countryOutput: any = {
  name: "COUNTRY",
};
countryOutput.values = all
  .filter((country) => country && country.iso3 && country.name)
  .map((country) => {
    let synonyms = []
      .concat(
        country.longName !== country.name ? [country.longName] : [],
        country.altNames || []
      )
      .filter((n) => !!n);
    // De-duplicate, keeping first occurrence.
    synonyms = synonyms.filter((n, i) => synonyms.indexOf(n) === i);
    return {
      iso3: country.iso3,
      name: country.name,
      // Omit the key entirely when there are no synonyms.
      synonyms: synonyms.length ? synonyms : undefined,
    };
  })
  .map((country) => ({
    id: country.iso3,
    name: {
      value: country.name,
      synonyms: country.synonyms,
    },
  }));
// CONTINENT slot type: one entry per continent code.
const continentOutput: any = {
  name: "CONTINENT",
};
continentOutput.values = new CountryData(lang)
  .getContinents()
  .map((region) => ({
    id: region.code,
    name: {
      value: region.name,
    },
  }));
// Replace all slot types wholesale with the regenerated ones.
schema.interactionModel.languageModel.types = [continentOutput, countryOutput];
// NOTE(review): reading used `file` directly while writing joins it with
// cwd — equivalent for relative paths, but the asymmetry is confusing.
const schemaFile = path.join(process.cwd(), file);
fs.writeFile(schemaFile, JSON.stringify(schema, null, 2), "utf8", (err) => {
  if (err) {
    // NOTE(review): exits silently on failure — consider logging `err` first.
    process.exit(1);
  }
});
| 52c027eea5b0dee65048b4c951c2c404d491f6b6 | [
"Markdown",
"TypeScript"
] | 36 | TypeScript | corux/alexa-countryquiz-skill | a510d420655e4db91393d93b963a39bfd6f74104 | 125a302af309bd639223708d4fd8e42b8a97774d |
refs/heads/master | <file_sep>apply plugin: 'com.android.application'
// Android application module configuration (android gradle plugin, SDK 26).
android {
    compileSdkVersion 26
    buildToolsVersion '26.0.1'
    defaultConfig {
        applicationId "com.jiubai.jiubaijz"
        minSdkVersion 16
        targetSdkVersion 26
        versionCode 2
        versionName '1.0.1'
        testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
        vectorDrawables.useSupportLibrary = true
        signingConfig signingConfigs.debug
    }
    buildTypes {
        // NOTE(review): release and debug both use the debug signing config
        // and both enable minify — release builds are not store-signed.
        release {
            minifyEnabled true
            signingConfig signingConfigs.debug
            proguardFiles 'proguard-rules.pro'
        }
        debug {
            minifyEnabled true
            signingConfig signingConfigs.debug
            proguardFiles 'proguard-rules.pro'
        }
    }
    signingConfigs {
        // Keystore credentials (values redacted in this snapshot).
        // NOTE(review): credentials should come from gradle.properties or the
        // environment, not be committed to source control.
        debug {
            storeFile file("android.keystore")
            storePassword "<PASSWORD>"
            keyAlias "android.keystore"
            keyPassword "<PASSWORD>"
        }
    }
}
// Repositories applied to every project in this build.
allprojects {
    repositories {
        jcenter()
        mavenCentral()
        maven { url "https://jitpack.io" }
    }
}
dependencies {
    compile fileTree(include: ['*.jar'], dir: 'libs')
    androidTestCompile('com.android.support.test.espresso:espresso-core:2.2.2', {
        exclude group: 'com.android.support', module: 'support-annotations'
    })
    compile 'com.android.support:appcompat-v7:25.3.1'
    compile 'com.jakewharton:butterknife:7.0.1'
    compile 'com.umeng.analytics:analytics:latest.integration'
    testCompile 'junit:junit:4.12'
    compile 'com.android.support:design:25.3.1'
    compile 'com.google.zxing:core:3.3.0'
    // Bundled Umeng social-share and Volley jars (no Maven coordinates).
    compile files('libs/SocialSDK_QQZone_3.jar')
    compile files('libs/SocialSDK_umengqq.jar')
    compile files('libs/SocialSDK_umengwx.jar')
    compile files('libs/SocialSDK_WeiXin_2.jar')
    compile files('libs/umeng_social_apiv6.0.0.jar')
    compile files('libs/umeng_social_netv6.0.0.jar')
    compile files('libs/umeng_social_viewv6.0.0.jar')
    compile files('libs/Volley.jar')
}
<file_sep>package com.jiubai.jiubaijz.adapter;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.TextView;
import com.jiubai.jiubaijz.R;
import java.util.ArrayList;
/**
 * Created by <NAME> on 2016/9/23.
 *
 * Adapter for the top-right dropdown menu list: each row shows an icon and a
 * title. `imageIds` and `titles` are parallel lists and must have equal size.
 */
public class MenuListAdapter extends BaseAdapter {
    private Context context;
    private ArrayList<Integer> imageIds; // drawable resource ids, one per row
    private ArrayList<String> titles;    // row labels, parallel to imageIds

    public MenuListAdapter(Context context, ArrayList<Integer> imageIds, ArrayList<String> titles) {
        this.context = context;
        this.imageIds = imageIds;
        this.titles = titles;
    }

    @Override
    public int getCount() {
        return imageIds.size();
    }

    @Override
    public Object getItem(int i) {
        return titles.get(i);
    }

    @Override
    public long getItemId(int i) {
        // NOTE(review): returns the drawable resource id rather than a stable
        // per-row id — fine here only because rows never move.
        return imageIds.get(i);
    }
@Override
public View getView(int i, View view, ViewGroup viewGroup) {
View contentView = LayoutInflater.from(context).inflate(R.layout.menu_item, null);
ImageView imageView = (ImageView) contentView.findViewById(R.id.imageView_item);
TextView textView = (TextView) contentView.findViewById(R.id.textView_item);
imageView.setImageResource(imageIds.get(i));
textView.setText(titles.get(i));
return contentView;
}
}
<file_sep>package com.jiubai.jiubaijz.adapter;
import android.content.Context;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import com.jiubai.jiubaijz.R;
import com.jiubai.jiubaijz.common.UtilBox;
import java.util.ArrayList;
/**
 * Created by <NAME> on 2016/10/2.
 *
 * Pager adapter for the three-page onboarding guide. Pages 0 and 1 show a
 * plain image; the last page additionally shows a "start" button which fires
 * {@link Callback#onStart()}.
 */
public class ViewPagerAdapter extends PagerAdapter {
    private Context mContext;
    private Callback mCallback; // notified when "start" is tapped on the last page
    private View[] views;       // instantiated page views, indexed by position

    public ViewPagerAdapter(Context context, Callback callback) {
        this.mContext = context;
        this.mCallback = callback;
        views = new View[3];
    }

    /**
     * Returns the number of pages (fixed: three guide screens).
     */
    @Override
    public int getCount() {
        return 3;
    }

    /**
     * Tells the pager whether a page view belongs to the given key object
     * (we return the view itself from instantiateItem, so identity works).
     */
    @Override
    public boolean isViewFromObject(View arg0, Object arg1) {
        return (arg0 == arg1);
    }

    @Override
    public Object instantiateItem(ViewGroup container, int position) {
        View view;
        if (position == 2) {
            // Last page: image plus the "start" button.
            view = LayoutInflater.from(mContext).inflate(R.layout.view_guide_last, null);
            ImageView imageView = (ImageView) view.findViewById(R.id.imageView);
            // readBitMap presumably decodes the drawable memory-efficiently —
            // confirm in UtilBox.
            imageView.setImageBitmap(UtilBox.readBitMap(mContext, R.drawable.guide3));
            Button button = (Button) view.findViewById(R.id.button);
            button.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    mCallback.onStart();
                }
            });
        } else {
            view = LayoutInflater.from(mContext).inflate(R.layout.view_guide_normal, null);
            ImageView imageView = (ImageView) view.findViewById(R.id.imageView);
            imageView.setImageBitmap(UtilBox.readBitMap(mContext,
                    position == 0 ? R.drawable.guide1 : R.drawable.guide2));
        }
        views[position] = view;
        container.addView(view, 0);
        return view;
    }

    @Override
    public void destroyItem(ViewGroup container, int position, Object object) {
        container.removeView(views[position]);
    }

    // Implemented by the host activity to react to the "start" button.
    public interface Callback {
        void onStart();
    }
}<file_sep>package com.jiubai.jiubaijz.ui;
import android.app.ProgressDialog;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.EditText;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.jiubai.jiubaijz.R;
import com.jiubai.jiubaijz.common.Config;
import com.jiubai.jiubaijz.common.UtilBox;
import com.jiubai.jiubaijz.net.VolleyUtil;
import com.umeng.analytics.MobclickAgent;
import org.json.JSONException;
import org.json.JSONObject;
import butterknife.Bind;
import butterknife.ButterKnife;
/**
 * Created by <NAME> on 2016/9/26.
 *
 * Feedback screen: the user types feedback text and submits it via the
 * toolbar "done" action; the text is uploaded together with basic device
 * info, with a progress dialog shown during the request.
 */
public class FeedbackActivity extends AppCompatActivity {
    @Bind(R.id.toolbar)
    Toolbar mToolbar;
    @Bind(R.id.editText)
    EditText mEditText; // free-form feedback text
    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_feedback);
        ButterKnife.bind(this);
        initView();
    }
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.menu_feedback, menu);
        return super.onCreateOptionsMenu(menu);
    }
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()){
            case android.R.id.home:
                // Toolbar back arrow behaves like the hardware back button.
                onBackPressed();
                return true;
            case R.id.action_done:
                // Hide the keyboard before validating/submitting.
                UtilBox.toggleSoftInput(mEditText, false);
                if ("".equals(mEditText.getText().toString())) {
                    // "Please enter feedback content" prompt.
                    UtilBox.showSnackbar(this, "请输入反馈内容");
                } else {
                    final ProgressDialog progressDialog = new ProgressDialog(this);
                    progressDialog.setCancelable(false);
                    progressDialog.setCanceledOnTouchOutside(false);
                    progressDialog.setMessage("正在上传反馈内容");
                    progressDialog.show();
                    // Device fingerprint: "<sdk>_<app versionCode>_<model>".
                    String deviceInfo = Build.VERSION.SDK_INT + "_"
                            + UtilBox.getPackageInfo(this).versionCode + "_"
                            + android.os.Build.MODEL;
                    Log.i("info", deviceInfo);
                    String[] keys = {"a", "content", "equipment"};
                    String[] values = {"app_feedback", mEditText.getText().toString(), deviceInfo};
                    VolleyUtil.request("http://ucenter.jiubaiwang.cn/app_api.php", keys, values,
                            new Response.Listener<String>() {
                                @Override
                                public void onResponse(String s) {
                                    Log.i("info", s);
                                    // Keep the dialog visible briefly so it
                                    // doesn't just flash.
                                    new Handler().postDelayed(new Runnable() {
                                        @Override
                                        public void run() {
                                            progressDialog.dismiss();
                                        }
                                    }, 500);
                                    try {
                                        JSONObject jsonObject = new JSONObject(s);
                                        int code = jsonObject.getInt("code");
                                        String info = jsonObject.getString("msg");
                                        UtilBox.showSnackbar(FeedbackActivity.this, info);
                                        if (code == 200) {
                                            // Success: show the message, then
                                            // close the screen after 1.5s.
                                            new Handler().postDelayed(new Runnable() {
                                                @Override
                                                public void run() {
                                                    finish();
                                                }
                                            }, 1500);
                                        }
                                    } catch (JSONException e) {
                                        UtilBox.showSnackbar(FeedbackActivity.this, e.toString());
                                        e.printStackTrace();
                                    }
                                }
                            },
                            new Response.ErrorListener() {
                                @Override
                                public void onErrorResponse(VolleyError volleyError) {
                                    progressDialog.dismiss();
                                    // "Upload failed, please retry" message.
                                    UtilBox.showSnackbar(FeedbackActivity.this, "上传失败,请重试");
                                    // NOTE(review): getMessage() can be null
                                    // for some Volley errors — potential NPE.
                                    Log.i("info", volleyError.getMessage());
                                }
                            });
                }
                break;
        }
        return super.onOptionsItemSelected(item);
    }
    private void initView() {
        initToolbar();
    }
    private void initToolbar() {
        setSupportActionBar(mToolbar);
        // NOTE(review): getSupportActionBar() may return null in theory;
        // works here because setSupportActionBar was just called.
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        mToolbar.setBackgroundColor(Config.barColor);
        mToolbar.setTitleTextColor(Config.titleColor);
    }
    // Umeng analytics session tracking.
    public void onResume() {
        super.onResume();
        MobclickAgent.onResume(this);
    }
    public void onPause() {
        super.onPause();
        MobclickAgent.onPause(this);
    }
}
| ad1aa2ac6de0d06e4ee6ed421de11965eca221df | [
"Java",
"Gradle"
] | 4 | Gradle | leunghowell/LazyAccounting_Material | deba424a5f554ecdd5f0f2a06f464ac94ecca5fc | bcc4a501ff58e53caf07727210208d4c7fb64a24 |
refs/heads/master | <file_sep>const http = require('http')
const express = require('express')
const axios = require('axios')
const bodyParser = require('body-parser')
const PORT = process.env.PORT || 1337
const { google } = require('googleapis')
const API_URL = 'https://dm-meeting-app.firebaseio.com/round4.json'
if (process.env.NODE_ENV !== 'production') {
require('dotenv').load()
}
/* Twitter API */
const Twitter = require('twitter')
const twitterClient = new Twitter({
consumer_key: process.env.TWITTER_CONSUMER_KEY,
consumer_secret: process.env.TWITTER_CONSUMER_SECRET,
access_token_key: process.env.TWITTER_ACCESS_TOKEN_KEY,
access_token_secret: process.env.TWITTER_ACCESS_TOKEN_SECRET
})
/* Twilio Credentials */
const accountSid = process.env.ACCOUNT_SID
const authToken = process.env.AUTH_TOKEN
const client = require('twilio')(accountSid, authToken)
/* Google Firebase API */
let bearerAccessToken
const serviceAccount = require('./serviceAccountKey.json')
const scopes = [
'https://www.googleapis.com/auth/userinfo.email',
'https://www.googleapis.com/auth/firebase.database'
]
const jwtClient = new google.auth.JWT(
serviceAccount.client_email,
null,
serviceAccount.private_key,
scopes
)
// Exchange the service-account JWT for a Firebase access token at startup.
// On success the token is stashed via setAccessToken for later REST calls;
// on failure the app keeps running but DB requests will be unauthorized.
jwtClient.authorize(function(error, tokens) {
  if (error) {
    console.log('Error making request to generate access token:', error)
  } else if (tokens.access_token === null) {
    console.log(
      'Provided service account does not have permission to generate access tokens'
    )
  } else {
    const accessToken = tokens.access_token
    setAccessToken(accessToken)
  }
})
const app = express()
app.use(bodyParser.urlencoded({ extended: false }))
// Stores the Firebase OAuth token in module-level state; written once from
// the jwtClient.authorize callback and read by every Firebase REST request.
function setAccessToken(accessToken) {
  bearerAccessToken = accessToken
}
// Twilio webhook: fires when a text message arrives at the Twilio number.
app.post('/sms', (req, res) => {
  // Only the configured owner's phone may trigger a tweet.
  if (req.body.From === process.env.PHONE_NUMBER) {
    getMessage(req).then(messageObject => {
      postTweet(messageObject)
    })
  }
  // Always acknowledge with empty TwiML. The original only responded to the
  // owner's number, leaving every other request hanging until Twilio's
  // webhook timed out and retried.
  res.set('Content-Type', 'application/xml')
  res.send('<Response/>')
})
// Builds the tweet payload for the incoming SMS: "R<round>|D<day>: <text>"
// plus the hashtag, together with the day counter and today's date for the DB.
async function getMessage(req) {
  /* Set the round that you're on here: */
  const round = 4
  /* Set the hashtag you want to use here: */
  const hashTag = '#100DaysOfCode'

  // Day count continues from the last entry stored in Firebase (0 when the
  // round has no entries yet).
  const daysObject = await getDayCount()
  let previousDay = 0
  if (daysObject !== null) {
    const entryIds = Object.keys(daysObject)
    const lastEntryId = entryIds[entryIds.length - 1]
    previousDay = daysObject[lastEntryId].day
  }
  const currentDay = previousDay + 1
  const text = req.body.Body
  return {
    tweet: `R${round}|D${currentDay}:\n${text} \n${hashTag}`,
    day: currentDay,
    date: getTodaysDate(new Date())
  }
}
// Fetches the stored entries for the current round from Firebase.
// Resolves with the raw data object (or undefined after a logged error).
async function getDayCount() {
  try {
    const response = await axios.get(API_URL, {
      headers: { Authorization: `Bearer ${bearerAccessToken}` }
    })
    return response.data
  } catch (error) {
    console.log(error)
  }
}
// Formats a Date as "M/D/YYYY" (no zero padding), using local time.
function getTodaysDate(today) {
  // getMonth() is zero-based, so January must be shifted to 1.
  const month = today.getMonth() + 1
  return [month, today.getDate(), today.getFullYear()].join('/')
}
// Posts the tweet, then notifies the owner by SMS and archives the entry in
// Firebase. On failure only the SMS notification and a log entry happen.
function postTweet(messageObject) {
  twitterClient
    .post('statuses/update', { status: messageObject.tweet })
    .then(function(tweet) {
      sendText('Tweet posted successfully! 😄')
      postTweetToDB(messageObject)
    })
    .catch(function(error) {
      sendText(`Uh oh...Looks like we got an error. Tweet not posted :(`)
      console.log(`Error: ${JSON.stringify(error)}`)
    })
}
// Archives a posted tweet (tweet text, day, date) in the Firebase round list.
// Fire-and-forget: failures are only logged.
function postTweetToDB(messageObject) {
  const requestConfig = {
    headers: { Authorization: `Bearer ${bearerAccessToken}` }
  }
  axios
    .post(API_URL, messageObject, requestConfig)
    .then(function() {
      console.log('Successfully posted tweet to DB.')
    })
    .catch(function(error) {
      console.log(error)
    })
}
// Sends an SMS status notification back to the owner's phone via Twilio.
function sendText(message) {
  client.messages
    .create({
      to: process.env.PHONE_NUMBER,
      from: process.env.TWILIO_NUMBER,
      body: message
    })
    // Logs the Twilio message SID; send failures are not handled here.
    .then(message => console.log(message.sid))
}
// Boot the HTTP server; PORT falls back to 1337 for local development.
http.createServer(app).listen(PORT, () => {
  console.log(`Express server listening on port ${PORT}. Let's get coding 🎉 !`)
})
<file_sep># 100-days-twilio-twitter
An app to help you keep track of your #100DaysOfCode progress. It keeps track of what day you're on, lets you send a text 📱 with what you worked on that day to post to Twitter🐦 and all you have to worry about is coding 🕺🏻💃🏻 (soon, it will automatically commit to GitHub as well!)
## How does it work? 🤔
Using Twilio's API and Twitter's API, you send a text message to your Twilio phone number. You then configure it to make a POST request, which triggers the `app.js` which is built on Node.js and Express.js which then takes the body of the text message and tweets it.
## Getting Started 😀
These instructions will get you a copy of the project up and running on your local machine for development and testing purposes. See deployment for notes on how to deploy the project on a live system.
### Prerequisites ✅
Here's what you'll need:
```
node v9.8.0
npm 5.6.0
```
* Basic JavaScript/Node.js experience
* Cell phone with texting capabilities
### Installing 💻
In order to get everything set up correctly, follow the steps listed below:
1. Clone or (fork this repo)[https://github.com/jjprevite/100-days-twilio-twitter#fork-destination-box] 🍴
```
git clone https://github.com/jjprevite/100-days-twilio-twitter.git
```
2. Change directories into the root folder ➡️
```
cd 100-days-twilio-twitter
```
3. Install the necessary dependencies 💿
```
npm install
```
4. Update the `API_URL` variable to your project:
* `API_URL` will be your Firebase Database URL. It should look something like 'https://my-app.firebaseio.com/round1.json'
- Note: with Firebase, the database has a JSON-like structure. This is why we've added `round1.json` to the end of the root URL.
5. Create a `.env` file inside the root using either your text editor or from the command line 📁
```
touch .env
```
We'll use this to store our environment variables, specifically API keys. Don't worry, the `.gitignore` file already includes a `.env` file so there's no risk of you accidentally exposing your API keys to the eyes of GitHub 😉
Here's what it should look like:
```
NODE_ENV=dev
TWITTER_CONSUMER_KEY=
TWITTER_CONSUMER_SECRET=
TWITTER_ACCESS_TOKEN_KEY=
TWITTER_ACCESS_TOKEN_SECRET=
ACCOUNT_SID=
AUTH_TOKEN=
PHONE_NUMBER=
TWILIO_NUMBER=
```
6. Get your variables and add them to your `.env` file 📝
* `TWITTER...` these four variables will come from your Twitter "app", which you set up at [https://apps.twitter.com/](https://apps.twitter.com/).
* `ACCOUNT_SID` and `AUTH_TOKEN` will come from your Twilio account.
* `PHONE_NUMBER` is your phone number or the number of the phone you'll be using to send texts
* `TWILIO_NUMBER` you'll need to "buy" a phone number from Twilio. When you sign up, you should receive some credit so you shouldn't need to actually spend money to get a number. Here's a [quick guide](https://support.twilio.com/hc/en-us/articles/223135247-How-to-Search-for-and-Purchase-Twilio-Phone-Numbers-from-Console) on how to purchase a Twilio number.
7. Generate and create a `serviceAccountKey.json` file to hold your Firebase key 🗝
Luckily, the Firebase documentation explains exactly how to do that. Follow [these instructions](https://firebase.google.com/docs/database/rest/auth#generate_an_access_token) to generate a new private key. After you generate it, you should get your `serviceAccountKey.json` file. Add that to the root directory. It will be used to authenticate our REST requests.
8. Take a quick tea 🍵 break - we're almost there 😄
9. Start your app by running this from the command line
```
npm run dev
```
If everything is working correctly, you should see the following printed to your node console:
```
Express server listening on port 1337. Let's get coding 🎉 !
```

10. Expose your port using [ngrok](https://ngrok.com/) so we can connect our Node app with Twilio
* If you're on Mac, I recommend installing `ngrok` globally
```
npm install -g ngrok
```
Then run the following command in a new command line window to expose port 1337:
```
ngrok http 1337
```
If you're successful, you should see something like this:

11. Configure your Twilio number to listen for POST requests.
In order to do this we need to copy the forwarding URL from that command line window where `ngrok` is running. It should look something like this: `http://1962da97.ngrok.io` _It shouldn't matter if you use the `http` or `https` URL_
a. Open up the [Twilio console](https://www.twilio.com/console)
b. Navigate to your [Phone Numbers](https://www.twilio.com/console/phone-numbers/incoming)
c. Click the number you want to use
d. Scroll down to "Messaging"
e. Add your link to "A MESSAGE COMES IN" with `/sms` at the end. So it should look similar to:

🛑 **IMPORTANT** 🛑
> Because we are using the free version of ngrok. Anytime you start/stop `ngrok` you will get a new URL and have to go back in to Twilio's Console and add the new URL. Yes, this can be annoying for testing but if you want to upgrade, ngrok has a [$5/month tier](https://ngrok.com/pricing) where you get 3 reserved domains.
12. Send a text message to your Twilio phone number and check Twitter to see the magic happen 🧙🏼♂️


13. Celebrate success 🕺🏻💃🏻

## Deployment
Deploying this small Node.js app is simple with services like [Heroku](http://www.heroku.com/). There are a few steps, which I will walk you through here:
1. Get Heroku set up locally - here's a [quick tutorial](https://devcenter.heroku.com/articles/getting-started-with-nodejs)
2. Run `heroku create` inside the root directory.
3. This is where it gets a little trickier. You have to be extremely careful here so please **read carefully**. If you don't follow this correctly, you'll risk exposing your API keys.
a. Create a new branch called `production`
```
git checkout -b production
```
b. Inside your `.gitignore` file, remove this line:
```
serviceAccountKey.json
```
We need to have this file in production so we have to remove it from `.gitignore`. But whatever you do, do not push this to GitHub.
c. Stage these files and commit (*note: this does not push them to GitHub*)
```
git add .
git commit -m "remove serviceAccountKey.json from gitignore for Heroku"
```
d. Now push this branch to Heroku (NOT GITHUB)
```
git push heroku production
```
This means we are pushing this branch to production for our app to run. Whenever you make future changes, you will need to pull your `master` branch into `production`. Never run `git push -u origin production` - that will expose your serviceAccountKey.
e. Last step is to add our environment variables. You can do this by navigating to the [Heroku dashboard page](https://dashboard.heroku.com/apps/). Click on your app, navigate to "Settings" and select "Reveal Config vars". Here is where you'll copy your variables from your `.env` file.
f. Since we've changed the variables, we need to restart our app. You can do so from the command line by running `heroku restart` or by clicking "More" next to "Open app" in the dashboard and selecting "Restart all dynos"
g. BOOM! You've deployed your `100-days-twilio-twitter` app! Start texting and coding away! 🤪
## Built With
* [Firebase](https://firebase.google.com/)
* [Twilio](https://www.twilio.com/try-twilio)
* [Twitter](https://twitter.com/)
* [Twitter "App"](https://apps.twitter.com/)
## Contributing
_Please read [CONTRIBUTING.md](https://github.com/jjprevite/100-days-twilio-twitter/blob/master/CONTRIBUTING.md) for details on our code of conduct, and the process for submitting pull requests._
## Authors
* **<NAME>** - [jjprevite](https://github.com/jjprevite)
## License
_This project is licensed under the MIT License - see the [LICENSE](https://github.com/jjprevite/100-days-twilio-twitter/blob/master/LICENSE) file for details_
## Acknowledgments
* The Twilio DevEd Team for creating [TwilioQuest](https://www.twilio.com/quest) to help me get up and running 🤗
* <NAME> @kallaway - the creator of #100DaysOfCode
* Hat tip to <NAME> - [PurpleBooth](https://github.com/PurpleBooth) for the README.md template 🎩
| 1d4591c52995e27ba5bc84e812d7b7267aa5ae06 | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | jjprevite/100-days-twilio-twitter | 91fe4abb0785a4873d602d1c8e26b8e6b14200df | 277506af738c5ea1d9f03fb0c5511064a314a712 |
refs/heads/master | <repo_name>sheehanqmd/Shelfie-simulation-project<file_sep>/server/index.js
const express = require("express");
const massive = require('massive');
const app = express();
require('dotenv').config();
// Connect to Postgres via massive and stash the db instance on the app so
// route handlers can fetch it with req.app.get('db').
// NOTE(review): no .catch — a bad connection string rejects unhandled.
massive(process.env.CONNECTION_STRING).then(db => {
    app.set('db', db);
    console.log('Database Connected :)');
})
// Parse JSON request bodies before the routes run.
app.use(express.json());
// POST /api/selfie — inserts a product (name, image, price) via the
// addProduct.sql query and returns the DB response.
app.post('/api/selfie', (req, res) => {
    const { name, image, price } = req.body;
    const db = req.app.get('db');
    db.addProduct(name, image, price)
        .then(response => {
            res.status(200).json(response);
        })
        .catch(err => {
            // The original had no error handler, so any DB failure left the
            // HTTP request hanging forever. Fail fast with a 500 instead.
            console.error(err);
            res.status(500).json({ error: 'Failed to add product' });
        });
})
app.listen(process.env.SERVER_PORT, () => console.log (`Listening on Port ${process.env.SERVER_PORT}`));<file_sep>/db/addProduct.sql
-- Insert one product row using parameterized placeholders ($1-$3), so values
-- are escaped by the driver. Bug fix: the original VALUES list ended with a
-- trailing comma ("$3,)"), which is a SQL syntax error.
-- NOTE(review): column "descrption" looks misspelled ("description") — confirm
-- it matches the actual shelfie_product table definition before renaming.
INSERT INTO shelfie_product
(product_name, descrption, price)
VALUES ($1, $2, $3);
SELECT * FROM shelfie_product;<file_sep>/src/components/Dashboard.js
import React, {Component} from "react";
import axios from "axios";
import Form from "./Form";
class Dashboard extends Component {
constructor(props){
super(props)
this.state = {
product: [],
loading: true,
error: ""
};
this.updateProduct = this.updateProduct.bind(this);
}
componentDidMount() {
axios.get("./api/product")
.then(respond => {
console.log(response.data)
this.setState({product: Response.data});
})
.catch(error => {
console.log(error);
this.setState({loading: false, error: "An error occurred"});
});
}
updateProduct(newProduct) {
this.setState({ product: newProduct });
}
render() {
const {product} = this.state;
return (
<div className="Dashboard">
{ product.map((product, index) => {
return(
<Product key={index} index={index} product={product} updateProduct={this.updatePro}/>
)}
)}
</div>
);
}
}
export default Dashboard;<file_sep>/src/components/Form.js
import React, {Component} from "react";
import axios from "axios";
class Form extends Component {
constructor(props) {
super(props);
this.state = {
name: "",
image: "",
price: "",
};
handleAddItemChange(e) {
this.setState({ [name]: value});
const {name, image, price} = e.target
handleCancelItemChange(e)
this.setState({ [name]: value});
}
}
render() {
return (
<div className="Form">
<button id="addButton" onClick={e => this.addItem(e)}>Add to inventory</button>
<button id="cancelButton" onClick={e => this.cancelItem(e)}>Cancel to inventory</button>
<input value={this.state.name} name="name" onChange={this.handleChange} placeholder="name" />
<input value={this.state.image} name="image" onChange={this.handleChange} placeholder="image" />
<input value={this.state.price} name="price" onChange={this.handleChange} placeholder="price" />
</div>
);
}
};
export default Form;<file_sep>/server/controller.js
// In-memory seed data served by the handlers below — there is no database
// behind this controller; mutations only live for the process lifetime.
const product =[
    {
        image: "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcR8ebhtiiLpREf9ejXihhqIOBdsLTTrIVoN7IxrK8tjNfszgeuzwA",
        price: "$500",
        description: "Chanel Excellent condition!",
        contact: "(214)555-5555"
    },
    {
        image: "https://images.neimanmarcus.com/ca/5/product_assets/V/3/Z/G/S/NMV3ZGS_mz.jpg",
        price: "$300",
        description:"Ferragamo",
        contact: "(214)555-5555"
    },
    {
        image: "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcSHlUFhQjxC_3YTdm2Dd_ynNMAmqS2oIIyBT2D1J3CiFkTcA0Tr",
        price: "$250",
        description: "<NAME>",
        contact: "(214)555-5555"
    }
];
const getProduct = (req, res) => {
res.json(product);
};
const postProduct = (req, res) => {
product.push(req.body);
res.json(product);
}
module.export = {
getProduct,
postProduct,
deleteProduct,
putProduct
}; | e7e4e0bb9c6e6990c348cf74732c1c27d3d78e57 | [
"JavaScript",
"SQL"
] | 5 | JavaScript | sheehanqmd/Shelfie-simulation-project | 52d7145ee918f0790b038bd9afbac3a77933ec6b | 522ddbc821cbb634c8a8b3dcab11ac8ea9fb7401 |
refs/heads/master | <repo_name>AlNovik/weather-app<file_sep>/utils/WeatherAPI.ts
import Request from '../models/api/Request';
import Geoposition from '../models/Geoposition'
import config from '../config';
class WeatherAPI {
private apiURL: string = config.weatherAPI;
private buildRequest(params: Request) {
var url = this.apiURL.concat('?');
Object.keys(params).forEach(key => url = url.concat(`${key}=${params[key]}&`));
return url;
}
private getPosition() {
return new Promise((resolve: any, reject: any) => {
navigator.geolocation.getCurrentPosition((pos: Geoposition) => {
resolve(pos);
});
})
}
fetch() {
let params = new Request();
params.APPID = config.weatherAPIKey;
params.cnt = config.countCities;
return this.getPosition().then((position: Geoposition) => {
params.lat = position.coords.latitude;
params.lon = position.coords.longitude;
console.log(this.buildRequest(params));
return fetch(this.buildRequest(params));
});
}
}
export default WeatherAPI;<file_sep>/config.ts
export default {
weatherAPI: 'http://api.openweathermap.org/data/2.5/find' as string,
weatherAPIKey: '<KEY>' as string,
countCities: 50 as number
};<file_sep>/app.ts
import City from './models/City';
import WeatherAPI from './utils/WeatherAPI';
import 'materialize-css/dist/css/materialize.min.css';
const city : City = new City("Minsk");
let api = new WeatherAPI();
console.log(city);
api.fetch();
function hello(name: string) {
return 'Hello '+ name;
}
export {hello};<file_sep>/models/Coordinates.ts
class Coordinates {
lon: number;
lat: number;
constructor() {
}
}
export default Coordinates;<file_sep>/models/Snow.ts
class Snow {
'3h': number;
constructor() {
}
}
export default Snow;<file_sep>/models/City.ts
import Wind from './Wind';
import Weather from './Weather';
import Temperature from './Temperature';
import System from './System';
import Snow from './Snow';
import Rain from './Rain';
import Coordinates from './Coordinates';
import Clouds from './Clouds';
class City {
id: number;
name: string;
cod: number;
dt: Date;
base: string;
wind: Wind;
weather: Weather;
main: Temperature;
sys: System;
snow: Snow;
rain: Rain;
coord: Coordinates;
clouds: Clouds;
constructor(name : string) {
this.name = name;
}
}
export default City;<file_sep>/models/System.ts
class System {
id: number;
type: number;
message: number;
country: string;
sunrise: Date;
sunset: Date;
constructor() {
}
}
export default System; | dc72ff490e0ffc99618c55f248a1429ffbd07142 | [
"TypeScript"
] | 7 | TypeScript | AlNovik/weather-app | 89641c964f809f89b30c7766233b7e7d28ade6d1 | f194f50c2c88ec5d6ae69e6d22d75f2ca330b150 |
refs/heads/master | <repo_name>rishav394/Poker-Man<file_sep>/public/js/ui2.js
const standings = document.querySelector('.standings');
const renderStanding = ({ name, balance, wins, losses }, id) => {
let html = `
<div class="standing card-panel red lighten-4 row" data-id="${id}">
<h3 class=" col s9 l9" data-id="${id}">${name}</h3>
<h3 class=" center col s3 l3" data-id="${id}">${balance}</h3>
<p style="display: none;" class="green-text col ">Wins ${wins}</p>
<p style="display: none;" class="red-text col right">Losses ${losses}</p>
</div>`;
standings.innerHTML += html;
};
const removeStanding = (id) => {
document.querySelector(`div[data-id="${id}"]`).remove();
};
const modifyStanding = ({ balance, name, wins, losses }, id) => {
document.querySelector(
`h3[data-id="${id}"][class*="l3"]`,
).innerHTML = balance;
document.querySelector(`h3[data-id="${id}"][class*="l9"]`).innerHTML = name;
document.querySelector(`div[data-id="${id}"] p`).innerHTML = `Wins ${wins}`;
document.querySelectorAll(
`div[data-id="${id}"] p`,
)[1].innerHTML = `Losses ${losses}`;
};
document.addEventListener('DOMContentLoaded', () => {
document
.querySelector('div.standings')
.addEventListener('click', (event) => {
document.querySelectorAll('p').forEach((x) => {
x.style.display = x.style.display === 'none' ? 'block' : 'none';
});
});
});
<file_sep>/README.md
# Poker-Man
A PWA for poker which records everyone's debt to one another and shows up the final standings separately. Basically it is a replacement for the usual pen and paper based calculations and keeping track.
<file_sep>/public/js/ui.js
let currentPot = 1;
let startingPot = 1; // In case of someone leaves change it to 0
let password = '';
const table = document.querySelector('.players.container');
const tableAdder = document.querySelector('.add-table');
const forms = document.querySelectorAll('.side-form');
const currentPotInput = document.getElementById('pot');
const startingPotInput = document.getElementById('starting');
const resetButton = document.getElementById('reset');
const deletePassword = 'ok';
const pokerMasterPassword = 'ok';
setInterval(() => {
password = '';
}, 10000);
document.addEventListener('DOMContentLoaded', function() {
M.Sidenav.init(forms, { edge: 'right' });
startingPotInput.value = startingPot;
currentPotInput.value = currentPot;
resetButton.onclick = resetTable;
currentPotInput.onchange = (event) => {
currentPot = parseInt(event.target.value || currentPot || 1);
currentPot = currentPot === 0 ? 1 : currentPot;
currentPotInput.value = currentPot;
};
startingPotInput.onchange = (event) => {
startingPot = parseInt(event.target.value || startingPot || 0);
startingPotInput.value = startingPot;
};
// On player addition
tableAdder.addEventListener('submit', (event) => {
event.preventDefault();
if (tableAdder.title.value.length > 3) {
const player = {
name: tableAdder.title.value,
balance: 0,
wins: 0,
losses: 0,
position: document.querySelectorAll('.player').length,
};
// Linking DB
addToCollection(player);
tableAdder.title.value = '';
}
});
// On name change
var timer;
var touchduration = 800;
const longTouchEvent = new CustomEvent('longtouch', {
bubbles: true,
});
function touchstart(event) {
timer = setTimeout(
() => event.target.dispatchEvent(longTouchEvent),
touchduration,
);
}
function touchend() {
if (timer) clearTimeout(timer);
}
table.addEventListener('longtouch', (event) => {
if (event.target.tagName === 'STRONG') {
let oldName = event.target.childNodes[0].data;
let id = event.target.children[0].children[0].getAttribute(
'data-id',
);
var newName = prompt('Enter new name', oldName);
if (newName !== oldName && newName) {
dbModifyPlayer(newName, id);
}
}
});
table.addEventListener('touchstart', (event) => {
if (event.target.tagName === 'STRONG') {
touchstart(event);
}
});
table.addEventListener('touchend', (event) => {
if (event.target.tagName === 'STRONG') {
touchend();
}
});
table.addEventListener('click', (event) => {
// On buttons click
if (event.target.tagName === 'BUTTON' || event.target.tagName === 'I') {
let type = event.target.outerText;
let id = event.target.getAttribute('data-id');
// Add to POT
if (type === 'monetization_on') {
let val = document.querySelector(`h3[data-id="${id}"]`);
val.innerHTML = parseInt(val.innerHTML) + currentPot;
}
// Pack
else if (type === 'sentiment_dissatisfied') {
document
.querySelectorAll(`button[data-id="${id}"]`)
.forEach((dom) => dom.classList.add('disabled'));
let btns = document.querySelectorAll(
'button[data-id]:not([class*="disabled"])',
);
if (btns.length === 3) {
let id = btns[0].getAttribute('data-id');
winUI(id);
}
}
// Win
else if (type === 'mood') {
winUI(id);
}
// Remove player
else if (type == 'delete') {
// Linking DB
password =
password === delete<PASSWORD>
? deletePassword
: window.prompt('Enter password');
if (password === deletePassword) deleteFromCollection(id);
}
}
// On name change
});
});
const renderPlayer = (name, id) => {
const player = `
<!-- Player Begin -->
<div data-id="${id}" class="player card-panel red lighten-4">
<strong class="grey-text text-darken-3">${name} <span><i data-id="${id}"
class="material-icons grey-text text-darken-1 right">delete</i></span></strong>
<div class="row">
<!-- Put in money -->
<button data-id="${id}" class="waves-effect waves-light btn-large red lighten-2 col s3">
<i data-id="${id}" class="material-icons">monetization_on</i>
</button>
<!-- Pack -->
<button data-id="${id}" class="waves-effect waves-light btn-large grey darken-1 col s3">
<i data-id="${id}" class="material-icons">sentiment_dissatisfied</i>
</button>
<!-- Win -->
<button data-id="${id}" class="waves-effect waves-light btn-large green lighten-2 col s3">
<i data-id="${id}" class="material-icons">mood</i>
</button>
<!-- Spent -->
<h3 data-id="${id}" class="center blue-text text-darken-5">${startingPot}</h3>
</div>
</div>
<!-- Player end -->
`;
table.innerHTML += player;
};
const modifyPlayer = (newName, id) => {
const target = document.querySelector(`div[data-id="${id}"] strong`);
target.innerHTML =
newName +
'<' +
target.innerHTML
.split('<')
.slice(1)
.join('<');
};
const removePlayer = (id) => {
document.querySelector(`.player[data-id="${id}"]`).remove();
};
const resetTable = (_event) => {
currentPot = startingPot === 0 ? 1 : startingPot;
currentPotInput.value = currentPot;
document
.querySelectorAll('h3')
.forEach((dom) => (dom.innerHTML = startingPot));
document
.querySelectorAll('button')
.forEach((dom) => dom.classList.remove('disabled'));
document
.querySelectorAll('.player')
.forEach((dom) => dom.classList.remove('green'));
};
const winUI = (id) => {
let kvps = [];
let winner = {
id,
};
document.querySelectorAll('.player').forEach((player) => {
let bal = parseInt(player.querySelector('h3').innerHTML);
let tempId = player.getAttribute('data-id');
let name = player.querySelector('strong').innerHTML.split('<')[0];
if (tempId !== id)
kvps.push({
player: name,
id: tempId,
amount: bal,
});
else winner['player'] = name;
});
// Linking DB
win(winner, kvps);
document.querySelector(`.player[data-id="${id}"]`).classList.add('green');
document
.querySelectorAll(`button[data-id]`)
.forEach((dom) => dom.classList.add('disabled'));
};
<file_sep>/public/js/db.js
// enable offline data
db.enablePersistence().catch(function(err) {
if (err.code == 'failed-precondition') {
// probably multible tabs open at once
console.log('persistance failed');
} else if (err.code == 'unimplemented') {
// lack of browser support for the feature
console.log('persistance not available');
}
});
// real-time listener
db.collection('players')
.orderBy('position', 'asc')
.onSnapshot((snapshot) => {
snapshot.docChanges().forEach((change) => {
if (change.type === 'added') {
renderPlayer(change.doc.data().name, change.doc.id);
}
if (change.type === 'removed') {
removePlayer(change.doc.id);
}
if (change.type === 'modified') {
modifyPlayer(change.doc.data().name, change.doc.id);
}
});
});
// Change player name
const dbModifyPlayer = (newName, id) => {
NProgress.start();
db.collection('players')
.doc(id)
.update({
name: newName,
})
.then(() => {
NProgress.done();
console.log('Player name changed');
})
.catch((err) => console.error(err));
};
// Add new player
const addToCollection = (player) => {
NProgress.start();
db.collection('players')
.add(player)
.then(() => {
NProgress.done();
console.log('Document added successfully!');
})
.catch((err) => console.log(err));
};
// Delete player
const deleteFromCollection = (id) => {
NProgress.start();
db.collection('players')
.doc(id)
.delete()
.then(() => {
NProgress.done();
console.log('Document deleted successfully!');
});
};
const win = (winner, kvps) => {
NProgress.start();
var winnings = 0;
// Update Loosers
kvps.map((kvp) => {
winnings += kvp.amount;
let docRef = db.collection('players').doc(kvp.id);
docRef
.get()
.then(function(doc) {
if (doc.exists) {
var tempData = doc.data();
var oldBal = tempData.balance;
var oldLoss = tempData.losses;
docRef
.update({
losses: oldLoss + 1,
balance: oldBal - kvp.amount,
})
.then(function() {
console.log('Deducted from lossers successfully!');
})
.catch(function(error) {
// The document probably doesn't exist.
console.error('Error updating document: ', error);
});
} else {
// doc.data() will be undefined in this case
console.log('No such document!');
}
})
.catch(function(error) {
console.log('Error getting document:', error);
});
});
// Update Winners
var docRef = db.collection('players').doc(winner.id);
docRef
.get()
.then(function(doc) {
if (doc.exists) {
var tempData = doc.data();
var oldBal = tempData.balance;
var oldWins = tempData.wins;
docRef
.update({
wins: oldWins + 1,
balance: oldBal + winnings,
})
.then(function() {
NProgress.done();
console.log('Winner awarded successfully!');
})
.catch(function(error) {
// The document probably doesn't exist.
console.error('Error updating document: ', error);
});
} else {
// doc.data() will be undefined in this case
console.log('No such document!');
}
})
.catch(function(error) {
console.log('Error getting document:', error);
});
// Add match history
let tempkvps = kvps.map((kvp) => {
return {
amount: -kvp.amount,
player: kvp.player,
};
});
db.collection('games')
.add({
game: [...tempkvps, { player: winner.player, amount: winnings }],
created: new Date().toTimeString(),
})
.then(() => console.log('Game history saved'))
.catch((err) => console.error('Error saving game history', err));
};
<file_sep>/public/js/db2.js
// enable offline data
db.enablePersistence().catch(function(err) {
if (err.code == 'failed-precondition') {
// probably multible tabs open at once
console.log('persistance failed');
} else if (err.code == 'unimplemented') {
// lack of browser support for the feature
console.log('persistance not available');
}
});
// real-time listener
db.collection('players')
.orderBy('position', 'asc')
.onSnapshot((snapshot) => {
snapshot.docChanges().forEach((change) => {
if (change.type === 'added') {
renderStanding(change.doc.data(), change.doc.id);
}
if (change.type === 'removed') {
removeStanding(change.doc.id);
}
if (change.type === 'modified') {
modifyStanding(change.doc.data(), change.doc.id);
}
});
});
<file_sep>/public/js/app.js
var toastHTML =
'<span>New Update Available</span><button onClick="reload()" class="btn-flat toast-action">Reload</button>';
function reload() {
newWorker.postMessage({ action: 'skipWaiting' });
}
let newWorker;
if ('serviceWorker' in navigator) {
navigator.serviceWorker
.register('/sw.js')
.then((reg) => {
reg.addEventListener('updatefound', () => {
newWorker = reg.installing;
newWorker.addEventListener('statechange', () => {
switch (newWorker.state) {
case 'installed':
if (navigator.serviceWorker.controller) {
M.toast({
html: toastHTML,
displayLength: 100 * 1000,
classes: 'red rounded center',
});
}
break;
default:
console.log(newWorker.state);
}
});
});
console.log('Service worker registered', reg);
})
.catch((err) => console.log('Service worker not registered', err));
let refreshing;
navigator.serviceWorker.addEventListener('controllerchange', function() {
if (refreshing) return;
window.location.reload();
refreshing = true;
});
}
| 1cf956c63c2e35cf786e9a4c3b750abdf5009062 | [
"JavaScript",
"Markdown"
] | 6 | JavaScript | rishav394/Poker-Man | daaea104e45f00591dcd21ecaee68691243ca755 | 1c27915c334a29b5c71552f3098b8d58de0c19ce |
refs/heads/master | <file_sep>module github.com/cucumber/json-formatter-go/v6
replace github.com/cucumber/messages-go/v14 => ../../messages/go
go 1.13
require (
github.com/cucumber/messages-go/v14 v14.0.1
github.com/gogo/protobuf v1.3.2
github.com/onsi/ginkgo v1.15.0
github.com/onsi/gomega v1.10.5
)
<file_sep>#!/usr/bin/env bash
#
# Runs a command once for each npm workspace
#
# Usage: npm-each ARGS...
set -eu -o pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
for workspace in "${DIR}"/../*/javascript/
do
pushd "${workspace}" >/dev/null
"$@"
popd >/dev/null
done
<file_sep>SHELL := /usr/bin/env bash
# https://stackoverflow.com/questions/2483182/recursive-wildcards-in-gnu-make
rwildcard=$(foreach d,$(wildcard $(1:=/*)),$(call rwildcard,$d,$2) $(filter $(subst *,%,$2),$d))
TYPESCRIPT_SOURCE_FILES = $(sort $(call rwildcard,src test,*.ts *.tsx))
PRIVATE = $(shell node -e "console.log(require('./package.json').private)")
NPM ?= npm
IS_TESTDATA = $(findstring -testdata,${CURDIR})
default: .tested
.PHONY: default
.codegen:
touch $@
.tested: .tested-npm
.tested-npm: $(TYPESCRIPT_SOURCE_FILES)
$(NPM) run test
touch $@
pre-release: update-version update-dependencies clean default
.PHONY: pre-release
update-version:
ifeq ($(IS_TESTDATA),-testdata)
# no-op
else
ifdef NEW_VERSION
$(NPM) --no-git-tag-version --allow-same-version version "$(NEW_VERSION)"
else
@echo -e "\033[0;31mNEW_VERSION is not defined. Can't update version :-(\033[0m"
exit 1
endif
endif
.PHONY: update-version
publish: .codegen
ifeq ($(IS_TESTDATA),-testdata)
# no-op
else
ifneq (true,$(PRIVATE))
$(NPM) publish --access public
else
@echo "Not publishing private npm module"
endif
endif
.PHONY: publish
post-release:
.PHONY: post-release
clean: clean-javascript
.PHONY: clean
clean-javascript:
rm -rf .deps .codegen .tested* node_modules coverage dist acceptance
.PHONY: clean-javascript
| 53b56b126b37a1a5454d4554130cc8e21ab0a537 | [
"Makefile",
"Go Module",
"Shell"
] | 3 | Go Module | HSBAWA83/cucumber | 48aaee2678f199f75764f16727d1a01961629c38 | da5af37f1980f0ce2c11c7e2ee3defc0ca33b829 |
refs/heads/master | <file_sep>import React, { Component } from 'react';
class Event extends Component {
render () {
return (
<div>
<a className="swipebox" href={"images/photos/" + this.props.image} title={this.props.description} rel="gallery-1">
<img
className="lazyOwl"
data-src={"images/photos/" + this.props.image}
src={"images/photos/" + this.props.image}
alt={this.props.description} />
<div className="titleItSmall">
<p>{this.props.description}</p>
</div>
</a>
</div>
);
}
}
export default Event;<file_sep>import React, { Component } from 'react';
import OwlCarousel from 'react-owl-carousel';
import Event from './Event';
class EventsList extends Component {
render () {
var eventsList = this.props.data.map(function(event) {
//console.log(fanArt);
return (
<Event
image={event.image}
description={event.description}
key={event.image}
/>
);
}, this);
return (
<div className="photoList">
<OwlCarousel slideSpeed={300} items={4} itemsTablet={[1125,3]} itemsMobile={[700,1]} stopOnHover={true} lazyLoad={true} autoPlay={true} singleItem={false}>
{eventsList}
</OwlCarousel>
</div>
);
}
}
export default EventsList;<file_sep>var gulp = require('gulp'),
gutil = require('gulp-util'),
less = require('gulp-less'),
cleanCSS = require('gulp-clean-css'),
concatCss = require('gulp-concat-css'),
clean = require('gulp-clean');
gulp.task('cleanIt', function () {
return gulp.src('src/css/compiled/*.css', {read: false})
.pipe(clean());
});
gulp.task('less', ['cleanIt'], function () {
return gulp.src('src/css/*.less')
.pipe(less())
.pipe(cleanCSS({compatibility: 'ie8'}))
.pipe(gulp.dest('src/css/compiled'))
});
gulp.task('bundleCSS', ['less'], function () {
gulp.src('src/css/compiled/*.css')
.pipe(concatCss("bundle.css"))
.pipe(cleanCSS({compatibility: 'ie8'}))
.pipe(gulp.dest('src/css/compiled'));
return gutil.log('Bundled..')
});
gulp.task('default', ['watch','bundleCSS']);
gulp.task('watch', function() {
gulp.watch('src/css/*.less', ['bundleCSS']);
});<file_sep>import React, { Component } from 'react';
import Moment from 'react-moment';
class News extends Component {
//src={"images/photos/" + this.props.image}
render () {
return (
<div>
<img
src="images/newsbg.jpg"
alt={this.props.description} />
<div className="titleItMedium">
<p>{this.props.title}</p>
<p dangerouslySetInnerHTML={{__html: this.props.description}} />
<p><em className=""><Moment unix fromNow>{this.props.time}</Moment></em></p>
</div>
</div>
);
}
}
export default News;<file_sep>import React, { Component } from 'react';
import Moment from 'react-moment';
class Community extends Component {
render () {
return (
<a
href={"https://reddit.com/" + this.props.permalink}
target="_blank"
className="collection-item">
<strong>{this.props.author}</strong> - {this.props.title} <em className="timeReddit"><Moment unix fromNow>{this.props.time}</Moment></em>
<i className="material-icons right-align fullText">send</i>
</a>
);
}
}
export default Community;<file_sep>import React, { Component } from 'react';
import {
BrowserRouter as Router,
Switch,
Route // Link
} from 'react-router-dom'
import Home from './Home';
import Frameworks from './Frameworks';
import Chart from './Charts';
import NoMatch from './NoMatch';
class App extends Component {
render() {
return (
<Router>
<div>
<Switch>
<Route exact path="/" component={Home}/>
<Route exact path="/frameworks" component={Frameworks}/>
<Route exact path="/charts" component={Chart}/>
<Route component={NoMatch}/>
</Switch>
</div>
</Router>
);
}
}
export default App;
<file_sep>import React, { Component } from 'react';
import OwlCarousel from 'react-owl-carousel';
import News from './News';
class NewsList extends Component {
render () {
var newsList = this.props.data.map(function(item) {
//console.log(fanArt);
return (
<News
title={item.attributes.title}
image={item.image}
description={item.attributes.body.value}
time={item.attributes.created}
key={item.attributes.created}
/>
);
}, this);
return (
<div className="newsList">
<OwlCarousel slideSpeed={300} items={2} itemsTablet={[1525,2]} itemsMobile={[700,1]} stopOnHover={true} lazyLoad={true} autoPlay={true} singleItem={false}>
{newsList}
</OwlCarousel>
</div>
);
}
}
export default NewsList;<file_sep>// Testing to check for
// does not receive any props.
// check if component always rendered
// check if the rendered div contains everything else that gets rendered.
import React from 'react';
import ReactDOM from 'react-dom';
import App from './App';
import $ from 'jquery';
it('renders without crashing', () => {
const div = document.createElement('div');
ReactDOM.render(<App />, div);
});
<file_sep>import React, { Component } from 'react';
import Truncate from 'react-truncate';
class Popular extends Component {
//
render () {
return (
<div className="col l4 m4 s12">
<a href={this.props.link} target="_blank">
<h2>{this.props.title}</h2>
</a>
<div className="bumpDown">
<Truncate lines={3} ellipsis={<span>...</span>}>
<p>{this.props.description}</p>
</Truncate>
</div>
</div>
);
}
}
export default Popular;<file_sep>import React, { Component } from 'react';
import '../css/compiled/bundle.css';
import $ from 'jquery';
import FrameworkList from './FrameworkList';
class Frameworks extends Component {
constructor(props) {
super(props);
this.state = {
frameworks: []
};
}
loadData () {
$.when(
$.get("//jarrodsampson.com/api/frameworks/frameworks.php?format=json&version=v1"),
$.get("/data/events.json")
).then(function(frameworks, events) {
this.setState({
frameworks: frameworks[0].data
});
console.log(frameworks[0].data);
}.bind(this));
}
componentDidMount () {
this.loadData();
}
render() {
return (
<div className="App">
<div className="popularBg">
<div className="container">
<div className="wow fadeInLeft col s12 center-align scrollspy" id="popular">
<div className="col s12 spacer-small"></div>
<h1>Most Popular</h1>
<FrameworkList data={this.state.frameworks} />
</div>
<div className="col s12 spacer-small"></div>
</div>
</div>
</div>
);
}
}
export default Frameworks;
<file_sep>import React, { Component } from 'react';
import Event from './Popular';
class PopularList extends Component {
render () {
let index = 0;
var popularList = this.props.data.map(function(item) {
//console.log(fanArt);
if (index >= 3) {
return null;
}
else {
index += 1;
return (
<Event
image={item.image}
description={item.description}
title={item.framework}
link={item.link}
key={item.id}
/>
);
}
}, this);
return (
<div className="popularList row">
{popularList}
</div>
);
}
}
export default PopularList; | 55f128aa302374d82af10c0ade871bce68fa3246 | [
"JavaScript"
] | 11 | JavaScript | planlodge/Technology | ac9e2a46d7d2da675fb986c17846552dcf632d3a | 1a12af66b36b7d7e06fba18e5a9b175c6017dd2d |
refs/heads/master | <repo_name>littleclay/patient-java<file_sep>/src/main/java/org/patient/Patient.java
package org.patient;
import java.time.Duration;
import java.util.Collection;
import java.util.Objects;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.Predicate;
/**
 * An interface defining common methods for waiting patiently for some function
 * to return a non-null and non-false value and the ability to configure what
 * exceptions will be ignored, the amount of time to wait, how frequently the
 * function should be attempted, etc.
 *
 * <p>
 * A {@code null} or {@code false} result from the supplied function is treated
 * as "not yet ready" and the function is retried until the configured timeout
 * elapses.
 *
 * @author <NAME>
 *
 * @param <P>
 *            the type that methods without another return type will return
 *            (allows for fluent style).
 */
public interface Patient<P> {

	/**
	 * @return the object desired, usually a self reference. Used for methods of
	 *         the Patient object to allow for a fluent style.
	 */
	public abstract P getReturn();

	/**
	 * Set a {@link Duration} which should be waited before the function is
	 * executed when one of the until methods is called.
	 * 
	 * @param duration
	 *            the Duration to set as the value.
	 * @return the value of {@link Patient#getReturn()}.
	 * @throws IllegalArgumentException
	 *             if duration is null or negative.
	 */
	public abstract P setSleepFirst(Duration duration);

	/**
	 * Syntactic sugar for {@link Patient#setSleepFirst(Duration)}.
	 * 
	 * @param duration
	 *            the Duration to be set as the value.
	 * @return the value of {@link Patient#getReturn()}.
	 */
	public default P sleepFirst(Duration duration) {
		return setSleepFirst(duration);
	}

	/**
	 * Set a {@link Duration} which sets the end time of the waiting. This is
	 * calculated after the sleepFirst sleep has taken effect, but does not
	 * guarantee an end time if the given function is long-running. Note that a
	 * duration of 0 means that the function will only be attempted once.
	 * 
	 * @param duration
	 *            the Duration to set as the value.
	 * @return the value of {@link Patient#getReturn()}.
	 * @throws IllegalArgumentException
	 *             if duration is null or negative.
	 */
	public abstract P setTryingFor(Duration duration);

	/**
	 * Syntactic sugar for {@link Patient#setTryingFor(Duration)}.
	 * 
	 * @param duration
	 *            the Duration to be set as the value.
	 * @return the value of {@link Patient#getReturn()}.
	 */
	public default P timeout(Duration duration) {
		return setTryingFor(duration);
	}

	/**
	 * Set a {@link Duration} which will be waited after an unsuccessful
	 * function execution before trying again.
	 * 
	 * @param duration
	 *            the Duration to set as the value.
	 * @return the value of {@link Patient#getReturn()}.
	 * @throws IllegalArgumentException
	 *             if duration is null or negative.
	 */
	public abstract P setTryingEvery(Duration duration);

	/**
	 * Syntactic sugar for {@link Patient#setTryingEvery(Duration)}.
	 * 
	 * @param duration
	 *            the Duration to be set as the value.
	 * @return the value of {@link Patient#getReturn()}.
	 */
	public default P polling(Duration duration) {
		return setTryingEvery(duration);
	}

	/**
	 * Add the given class to the set of ignored exceptions.
	 * 
	 * @param clazz
	 *            the class to ignore.
	 * @return the value of {@link Patient#getReturn()}.
	 */
	public abstract P ignoring(Class<? extends Throwable> clazz);

	/**
	 * Add the given classes to the set of ignored exceptions. A {@code null}
	 * collection is treated as a no-op.
	 * 
	 * @param classes
	 *            the classes to ignore.
	 * @return the value of {@link Patient#getReturn()}.
	 */
	public default P ignoringAll(Collection<Class<? extends Throwable>> classes) {
		if (null != classes)
			classes.stream().sequential().forEach(this::ignoring);
		return getReturn();
	}

	/**
	 * Clear the set of already ignored exceptions.
	 * 
	 * @return the value of {@link Patient#getReturn()}.
	 */
	public abstract P clearIgnored();

	/**
	 * Execute the function with the input around the given timing values for
	 * the Patient object. Only non-null, non-false results of the function are
	 * considered valid results to return.
	 * 
	 * @param t
	 *            the input to the function.
	 * @param <T>
	 *            the type of t.
	 * @param <R>
	 *            the type of the return value.
	 * @param function
	 *            the function to execute.
	 * @return the result of applying the inputs to the function.
	 * @throws NullPointerException
	 *             if function is null.
	 * @throws TimeoutException
	 *             if no valid result is found before the maximum timeout is
	 *             reached.
	 */
	public abstract <T, R> R until(T t, Function<T, R> function);

	/*
	 * Use currying to have all until methods share code with the single
	 * argument function method.
	 */

	/**
	 * Execute the function with the inputs around the given timing values for
	 * the Patient object. Only non-null, non-false results of the function are
	 * considered valid results to return.
	 * 
	 * @param t
	 *            the first input of the function.
	 * @param u
	 *            the second input of the function.
	 * @param function
	 *            the function to execute.
	 * @param <T>
	 *            the type of t.
	 * @param <U>
	 *            the type of u.
	 * @param <R>
	 *            the type of the return value.
	 * @return the result of applying the inputs to the function.
	 * @throws NullPointerException
	 *             if function is null.
	 * @throws TimeoutException
	 *             if no valid result is found before the maximum timeout is
	 *             reached.
	 */
	public default <T, U, R> R until(T t, U u, BiFunction<T, U, R> function) {
		Objects.requireNonNull(function, "Cannot call until with a null function.");
		// Curry the second argument so the abstract single-argument overload
		// performs all of the actual waiting/retry work.
		return this.until(t, input -> function.apply(input, u));
	}

	/**
	 * Execute the function with the inputs around the given timing values for
	 * the Patient object. Only non-null, non-false results of the function are
	 * considered valid results to return.
	 * 
	 * @param t
	 *            the first input of the function.
	 * @param u
	 *            the second input of the function.
	 * @param v
	 *            the third input of the function.
	 * @param <T>
	 *            the type of t.
	 * @param <U>
	 *            the type of u.
	 * @param <V>
	 *            the type of v.
	 * @param <R>
	 *            the type of the return value.
	 * @param function
	 *            the function to execute.
	 * @return the result of applying the inputs to the function.
	 * @throws NullPointerException
	 *             if function is null (thrown by
	 *             {@link Objects#requireNonNull(Object, String)}).
	 * @throws TimeoutException
	 *             if no valid result is found before the maximum timeout is
	 *             reached.
	 */
	public default <T, U, V, R> R until(T t, U u, V v, TriFunction<T, U, V, R> function) {
		Objects.requireNonNull(function, "Cannot call until with a null function.");
		// Curry the second and third arguments down to the single-argument
		// overload.
		return this.until(t, input -> function.apply(input, u, v));
	}

	/**
	 * Execute testing the predicate with the input around the given timing
	 * values for the Patient object.
	 * 
	 * @param t
	 *            the input of the predicate.
	 * @param <T>
	 *            the type of t.
	 * @param predicate
	 *            the predicate to test.
	 * @throws NullPointerException
	 *             if predicate is null (thrown by
	 *             {@link Objects#requireNonNull(Object, String)}).
	 * @throws TimeoutException
	 *             if the predicate does not return true before the maximum
	 *             timeout is reached.
	 */
	public default <T> void untilTrue(T t, Predicate<T> predicate) {
		Objects.requireNonNull(predicate, "Cannot call until with a null predicate.");
		// A false predicate result is an invalid value, so until keeps
		// retrying until the predicate passes (or the timeout is reached).
		this.until(t, input -> predicate.test(input));
	}
}
<file_sep>/src/test/java/org/patient/TriFunctionTest.java
package org.patient;
import org.junit.Assert;
import org.junit.Test;
/**
 * Unit tests of the {@link TriFunction} interface.
 *
 * @author <NAME>
 */
public final class TriFunctionTest {

	@Test(expected = NullPointerException.class)
	public void testTriFunctionAndThenThrowsExceptionGivenNullFunction() {
		// Composing with a null after-function must fail fast with an NPE.
		TriFunction<Integer, Integer, Integer, Integer> sumOfThree = (a, b, c) -> a + b + c;
		sumOfThree.andThen(null);
	}

	@Test
	public void testTriFunctionAndThenComposesCorrectly() {
		// (1 + 2 + 3) fed through "+ 1" should yield 7.
		TriFunction<Integer, Integer, Integer, Integer> sumOfThree = (a, b, c) -> a + b + c;
		TriFunction<Integer, Integer, Integer, Integer> sumPlusOne = sumOfThree.andThen(total -> total + 1);
		int composed = sumPlusOne.apply(1, 2, 3);
		Assert.assertTrue("TriFunction.andThen should compose functions properly.",
				composed == 7);
	}
}
<file_sep>/src/main/java/org/patient/ThreadLocalPatientWait.java
package org.patient;
import java.time.Duration;
import java.util.function.Function;
/**
 * An implementation of the {@link Patient} interface that creates a new
 * {@link PatientWait} object for each thread.
 *
 * @author <NAME>
 */
public final class ThreadLocalPatientWait implements Patient<ThreadLocalPatientWait> {

	// Each thread lazily receives its own, independent PatientWait instance.
	private final ThreadLocal<PatientWait> perThreadWait;

	/**
	 * Constructs a new ThreadLocalPatientWait object. The first time a Thread
	 * accesses this object, it will generate a new PatientWait object
	 * specifically for that thread. The new PatientWait object will be set to
	 * it's default timing values.
	 */
	public ThreadLocalPatientWait() {
		perThreadWait = ThreadLocal.withInitial(PatientWait::new);
	}

	/**
	 * Constructs a new ThreadLocalPatientWait object. The first time a Thread
	 * accesses this object, it will generate a new PatientWait object
	 * specifically for that thread. The new PatientWait object will be set to
	 * the given timing values.
	 *
	 * <pre>
	 * PatientWait pw = new PatientWait().setSleepFirst(sleepFirst)
	 *                                   .setTryingFor(tryingFor)
	 *                                   .setTryingEvery(tryingEvery);
	 * </pre>
	 *
	 * @param sleepFirst
	 *            the Duration to set for sleeping first.
	 * @param tryingFor
	 *            the Duration to set for timeout.
	 * @param tryingEvery
	 *            the Duration to set for the polling time.
	 */
	public ThreadLocalPatientWait(Duration sleepFirst, Duration tryingFor, Duration tryingEvery) {
		perThreadWait = ThreadLocal.withInitial(
				() -> new PatientWait().setSleepFirst(sleepFirst)
				                       .setTryingFor(tryingFor)
				                       .setTryingEvery(tryingEvery));
	}

	@Override
	public ThreadLocalPatientWait getReturn() {
		return this;
	}

	@Override
	public ThreadLocalPatientWait setSleepFirst(Duration duration) {
		// Delegate to the current thread's wait, then keep the fluent style.
		perThreadWait.get().setSleepFirst(duration);
		return getReturn();
	}

	@Override
	public ThreadLocalPatientWait setTryingFor(Duration duration) {
		perThreadWait.get().setTryingFor(duration);
		return getReturn();
	}

	@Override
	public ThreadLocalPatientWait setTryingEvery(Duration duration) {
		perThreadWait.get().setTryingEvery(duration);
		return getReturn();
	}

	@Override
	public ThreadLocalPatientWait ignoring(Class<? extends Throwable> clazz) {
		perThreadWait.get().ignoring(clazz);
		return getReturn();
	}

	@Override
	public ThreadLocalPatientWait clearIgnored() {
		perThreadWait.get().clearIgnored();
		return getReturn();
	}

	@Override
	public <T, R> R until(T t, Function<T, R> function) {
		PatientWait currentWait = perThreadWait.get();
		return currentWait.until(t, function);
	}
}
<file_sep>/src/main/java/org/patient/Sleep.java
package org.patient;
import java.time.Duration;
/**
* A simple class with only a single static method for encapsulating a call to
* {@link Thread#sleep(long, int)} but taking in a {@link Duration}.
*
* @author <NAME>
*/
public final class Sleep {
/*
* Disallow the instantiation of this class.
*/
private Sleep() {
throw new AssertionError("No org.patient.Sleep instances for you!");
}
/**
* Put the current thread to sleep for the amount of time requested.
*
* @param duration
* the amount of time to sleep for. A duration of 0 is ignored.
* @throws IllegalArgumentException
* if duration is null or negative.
* @throws ArithmeticException
* if converting the duration to milliseconds overflows a long.
* @throws RuntimeException
* if the Thread is interrupted while sleeping.
*/
public static void sleepFor(Duration duration) {
if (null == duration || duration.isNegative())
throw new IllegalArgumentException("Cannot sleep for a null or negative duration.");
if (!Duration.ZERO.equals(duration)) {
int nanos = duration.getNano() % 1_000_000;
long millis = Math.addExact(duration.getNano() / 1_000_000,
Math.multiplyExact(duration.getSeconds(), 1_000));
try {
Thread.sleep(millis, nanos);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException("Thread performing the Sleep.sleepFor(Duration) call was interrupted.", e);
}
}
}
}
<file_sep>/pom.xml
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
http://maven.apache.org/xsd/maven-4.0.0.xsd">
<!-- Basic Project Information -->
<modelVersion>4.0.0</modelVersion>
<groupId>com.github.littleclay</groupId>
<artifactId>patient-java</artifactId>
<version>1.0</version>
<packaging>jar</packaging>
<name>Patient Java</name>
<description>
Patient Java is a simple library that includes a couple of components.
The main portion is the implementation of a dynamic wait.
You can give the wait object a function that it will call with given input.
If the function returns a null or false value, then it is considered a failed execution and the wait will try it again.
The total amount of time spent retrying the function, the minimum duration between attempts, etc. are all configurable on the fly via a fluent API.
The second portion is parameterized static validation of arguments.
This is similar to the Objects.requireNonNull methods introduced in Java 1.7, but uses predicates as arguments to give more flexibility to users.
Requires: Java 1.8+
</description>
<url>https://github.com/littleclay/patient-java</url>
<!-- Properties -->
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
</properties>
<!-- Dependencies -->
<dependencies>
<!-- Testing Dependencies -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
<scope>test</scope>
</dependency>
</dependencies>
<!-- Build Settings -->
<build>
<plugins>
<!-- Compiler Settings -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.3</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
<!-- Unit Test Settings -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.18.1</version>
</plugin>
<!-- JavaDoc Settings -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>2.10.3</version>
<configuration>
<doctitle>Patient Java API version ${project.version}</doctitle>
<windowtitle>Patient Java API version ${project.version}</windowtitle>
</configuration>
</plugin>
</plugins>
</build>
<!-- Report Settings -->
<reporting>
<plugins>
<!-- Project Report -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-project-info-reports-plugin</artifactId>
<version>2.8</version>
</plugin>
<!-- Unit Test Results -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-report-plugin</artifactId>
<version>2.18.1</version>
</plugin>
<!-- Test / Source code linking -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jxr-plugin</artifactId>
<version>2.5</version>
<reportSets>
<reportSet>
<reports>
<report>jxr</report>
</reports>
</reportSet>
</reportSets>
</plugin>
<!-- There is no code coverage because at this point it doesn't work right with java 8 -->
<!-- JavaDocs -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>2.10.3</version>
<configuration>
<show>public</show>
</configuration>
<reportSets>
<reportSet>
<reports>
<report>javadoc</report>
</reports>
</reportSet>
</reportSets>
</plugin>
</plugins>
</reporting>
</project>
<file_sep>/src/test/java/org/patient/TimeoutExceptionTest.java
package org.patient;
import org.junit.Assert;
import org.junit.Test;
/**
* Unit tests of the {@link TimeoutException} class.
*
* @author <NAME>
*/
public final class TimeoutExceptionTest {

    @Test
    public void testCanInstantiateTimeoutException() {
        TimeoutException exception = new TimeoutException();
        Assert.assertNotNull("Should be able to instantiate a TimeoutException.",
                exception);
    }

    @Test
    public void testTimeoutExceptionIsRuntimeException() {
        TimeoutException exception = new TimeoutException();
        Assert.assertTrue("TimeoutException should be an instance of RuntimeException.",
                exception instanceof RuntimeException);
    }

    @Test
    public void testEmptyArgConstructorHasNullMessageAndNullCause() {
        TimeoutException exception = new TimeoutException();
        Assert.assertNull("Empty arg constructor should have null message.",
                exception.getMessage());
        Assert.assertNull("Empty arg constructor should have null cause.",
                exception.getCause());
    }

    @Test
    public void testStringConstructorHasNullCause() {
        TimeoutException exception = new TimeoutException("message");
        Assert.assertNull("String arg constructor should have null cause.",
                exception.getCause());
    }

    // Renamed from testStringConstructHasGivenMessage for consistency with
    // the other "...Constructor..." test names in this class.
    @Test
    public void testStringConstructorHasGivenMessage() {
        String message = "hello, world!";
        TimeoutException exception = new TimeoutException(message);
        Assert.assertEquals("String arg constructor should have given message.",
                message, exception.getMessage());
    }

    @Test
    public void testCauseConstructorHasGivenCause() {
        Throwable cause = new RuntimeException("whoa");
        TimeoutException exception = new TimeoutException(cause);
        // assertSame: the cause must be the very same object, not merely equal.
        Assert.assertSame("Cause arg constructor should have given cause.",
                cause, exception.getCause());
    }

    @Test
    public void testTwoArgConstructorHasNonNullMessageAndNonNullCause() {
        String message = "hello, world!";
        Throwable cause = new RuntimeException("whoa");
        TimeoutException exception = new TimeoutException(message, cause);
        Assert.assertNotNull("Two arg constructor should have non-null message.",
                exception.getMessage());
        Assert.assertNotNull("Two arg constructor should have non-null cause.",
                exception.getCause());
    }

    @Test
    public void testTwoArgConstructorHasGivenMessageAndGivenCause() {
        String message = "hello, world!";
        Throwable cause = new RuntimeException("whoa");
        TimeoutException exception = new TimeoutException(message, cause);
        Assert.assertEquals("Two arg constructor should have given message.",
                message, exception.getMessage());
        Assert.assertSame("Two arg constructor should have given cause.",
                cause, exception.getCause());
    }
}
<file_sep>/README.md
# Patient Java
Patient Java is a simple library that includes a couple of components. The main portion is the implementation of a dynamic wait. You can give the wait object a function that it will call with given input. If the function returns a null or false value, then it is considered a failed execution and the wait will try it again. The total amount of time spent retrying the function, the minimum duration between attempts, etc. are all configurable on the fly via a fluent API.
The second portion is parameterized static validation of arguments. This is similar to the Objects.requireNonNull methods introduced in Java 1.7, but uses predicates as arguments to give more flexibility to users.
Requires: Java 1.8+
| b43387cbaf5dd09295e3e2ae7772f1c377bffeaf | [
"Markdown",
"Java",
"Maven POM"
] | 7 | Java | littleclay/patient-java | 5157e02dcf602d448c1d5a528f0cd81e21cc5678 | 1c365c9f4ee69330cd9ed012881edba33d7da38b |
refs/heads/master | <repo_name>jonalexander/emoticon-translator-001-prework-web<file_sep>/lib/translator.rb
require 'yaml'
require 'pry'
# Build the two lookup tables used by the translator from a YAML library
# file whose values are [english_emoticon, japanese_emoticon] pairs.
# Returns { "get_meaning"  => { japanese emoticon => meaning },
#           "get_emoticon" => { english emoticon  => japanese emoticon } }.
def load_library(file_path)
  emotions = YAML.load_file(file_path)
  meanings_by_japanese = {}
  japanese_by_english = {}
  emotions.each do |meaning, (english, japanese)|
    meanings_by_japanese[japanese] = meaning
    japanese_by_english[english] = japanese
  end
  { "get_meaning" => meanings_by_japanese, "get_emoticon" => japanese_by_english }
end
# Translate an English emoticon into its Japanese counterpart, or return a
# fallback message when the emoticon is not present in the library.
def get_japanese_emoticon(filePath, emoticon)
  translations = load_library(filePath)["get_emoticon"]
  translations[emoticon] || "Sorry, that emoticon was not found"
end
# Look up the English meaning of a Japanese emoticon, or return a fallback
# message when the emoticon is not present in the library.
def get_english_meaning(filePath, emoticon)
  meanings = load_library(filePath)["get_meaning"]
  meanings[emoticon] || "Sorry, that emoticon was not found"
end
| 6bf4282ed1f03e643050cef7349180b642bc0ff4 | [
"Ruby"
] | 1 | Ruby | jonalexander/emoticon-translator-001-prework-web | c26638542472296a4c00bd018b8e628fa98af75a | 8a637b0d8360eb19556e4d33540ade34978958a9 |
refs/heads/master | <file_sep>apply plugin: 'com.android.application'
apply plugin: 'android-apt'
android {
compileSdkVersion 24
buildToolsVersion "24.0.3"
defaultConfig {
applicationId "com.samyotech.exitpoll"
minSdkVersion 15
targetSdkVersion 24
versionCode 1
versionName "1.0"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
}
ext {
permissionsDispatcherVersion = '2.2.0'
}
dependencies {
compile fileTree(include: ['*.jar'], dir: 'libs')
    // The RecyclerView, CardView and Glide dependencies these comments once
    // labelled are declared below alongside the other compile dependencies.
compile "com.github.hotchemi:permissionsdispatcher:${permissionsDispatcherVersion}"
apt "com.github.hotchemi:permissionsdispatcher-processor:${permissionsDispatcherVersion}"
compile files('libs/gson-1.7.1.jar')
compile 'com.android.support:appcompat-v7:24.2.1'
compile 'com.android.support:recyclerview-v7:24.2.1'
compile 'com.android.support:design:24.2.1'
compile 'de.hdodenhof:circleimageview:2.0.0'
compile 'com.android.support:cardview-v7:24.2.1'
compile 'com.nostra13.universalimageloader:universal-image-loader:1.9.5'
compile 'com.github.bumptech.glide:glide:3.7.0'
compile 'com.cocosw:bottomsheet:1.+@aar'
compile 'com.isseiaoki:simplecropview:1.1.4'
compile 'com.google.android.gms:play-services-location:9.0.1'
compile 'com.google.android.gms:play-services-analytics:9.0.1'
compile 'com.google.firebase:firebase-messaging:9.0.1'
compile 'com.facebook.fresco:fresco:0.14.1'
compile 'me.relex:photodraweeview:1.1.2'
}
apply plugin: 'com.google.gms.google-services'
<file_sep>package com.samyotech.exitpoll.utils;
import android.app.Activity;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AlertDialog;
import android.widget.EditText;
import com.google.android.gms.analytics.HitBuilders;
import com.google.android.gms.analytics.Tracker;
import com.samyotech.exitpoll.R;
import com.samyotech.exitpoll.activity.SysApplication;
import java.text.SimpleDateFormat;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Created by samyotech on 30/11/16.
*/
// Static helpers for input validation, runtime-permission requests and
// Google Analytics screen tracking.  All methods are stateless.
public class ProjectUtils {
    /*validation code here*/
    // Intended to validate a password.
    // NOTE(review): regexStr starts with a literal space and mixes lookaheads
    // with ^...$ anchors, so number.matches(regexStr) can never be true;
    // combined with the 8-10 length check this method currently returns
    // false for EVERY input.  The pattern suggests "8-20 mixed alphanumeric"
    // while the length check demands 8-10 -- confirm the intended rule
    // before relying on this.
    public static boolean isPasswordValid(String number) {
        //String regexStr = "^([0-9\\(\\)\\/\\+ \\-]*)$";
        String regexStr = " (?!^[0-9]*$)(?!^[a-zA-Z]*$)^([a-zA-Z0-9]{8,20})$";
        if (number.length() < 8 || number.length() > 10 || number.matches(regexStr) == false) {
            // Log.d("tag", "Number is not valid");
            return false;
        }
        return true;
    }
    // Returns true only when the whole string matches a conventional
    // user@domain.tld shape (case-insensitive).  Empty or non-matching
    // strings return false.
    public static boolean isEmailValid(String email) {
        String expression = "^[_A-Za-z0-9-]+(\\.[_A-Za-z0-9-]+)*@[A-Za-z0-9]+(\\.[A-Za-z0-9]+)*(\\.[A-Za-z]{2,})$";
        Pattern pattern = Pattern.compile(expression, Pattern.CASE_INSENSITIVE);
        Matcher matcher = pattern.matcher(email);
        if (matcher.matches()) {
            return true;
        } else if (email.equals("")) {
            return false;
        }
        return false;
    }
    // NOTE(review): stub -- the SimpleDateFormat is created but never used
    // and the method unconditionally returns false.
    public boolean validateDate(String date) {
        SimpleDateFormat sdf = new SimpleDateFormat("dd-MM-yyyy");
        return false;
    }
    // Valid when the number is exactly 10 characters AND matches the regex
    // (first digit 0/7/8/9, or a 91/00 prefix, followed by digits).
    public static boolean isPhoneNumberValid(String number) {
        //String regexStr = "^([0-9\\(\\)\\/\\+ \\-]*)$";
        String regexStr = "^((0)|(91)|(00)|[7-9]){1}[0-9]{3,14}$";
        if (number.length() < 10 || number.length() > 10 || number.matches(regexStr) == false) {
            // Log.d("tag", "Number is not valid");
            return false;
        }
        return true;
    }
    // NOTE(review): the regex ^[1-9][0-9]{6}$ matches SEVEN digits, but the
    // length check demands exactly six, so this method currently returns
    // false for every input.  One of the two is off by one -- confirm
    // whether Indian 6-digit PINs were intended ([1-9][0-9]{5}).
    public static boolean isPinCodeValid(String number) {
        //String regexStr = "^([0-9\\(\\)\\/\\+ \\-]*)$";
        String regexStr = "^[1-9][0-9]{6}$";
        if (number.length() < 6 || number.length() > 6 || number.matches(regexStr) == false) {
            // Log.d("tag", "Number is not valid");
            return false;
        }
        return true;
    }
    // True when the field contains at least one non-whitespace character.
    public static boolean isEditTextFilled(EditText text) {
        if (text.getText() != null && text.getText().toString().trim().length() > 0) {
            return true;
        } else {
            return false;
        }
    }
    // True when the field contains more than five non-whitespace characters.
    // NOTE(review): the name does not convey the >5 minimum -- consider
    // renaming at the next interface-breaking opportunity.
    public static boolean isEditTextFilled1(EditText text) {
        if (text.getText() != null && text.getText().toString().trim().length() > 5) {
            return true;
        } else {
            return false;
        }
    }
    // Returns true only when the permission is ALREADY granted.  Otherwise it
    // starts an asynchronous permission request and returns false; the caller
    // must handle the result in onRequestPermissionsResult.
    public static boolean hasPermissionInManifest(Activity activity, int requestCode, String permissionName) {
        if (ContextCompat.checkSelfPermission(activity,
                permissionName)
                != PackageManager.PERMISSION_GRANTED) {
            // No explanation needed, we can request the permission.
            ActivityCompat.requestPermissions(activity,
                    new String[]{permissionName},
                    requestCode);
        } else {
            return true;
        }
        return false;
    }
    // Sends a screen-view hit for the given screen name and enables exception
    // reporting on the application's default Analytics tracker.
    public static void showAnalytics(SysApplication sysApplication, String nameOfScreen) {
        Tracker tracker = sysApplication.getDefaultTracker();
        tracker.setScreenName(nameOfScreen);
        tracker.send(new HitBuilders.ScreenViewBuilder().build());
        tracker.enableExceptionReporting(true);
    }
}
<file_sep>package com.samyotech.exitpoll.adapter;
import android.content.Context;
import android.content.Intent;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import com.nostra13.universalimageloader.core.ImageLoader;
import com.samyotech.exitpoll.R;
import com.samyotech.exitpoll.activity.ViewDetailsActivity;
import com.samyotech.exitpoll.dto.Album;
import java.util.Collections;
import java.util.List;
/**
 * RecyclerView adapter that renders a list of {@link Album} records (name,
 * post, remote image) and opens {@link ViewDetailsActivity} with the full
 * record when a row is tapped.
 */
public class Recycler_View_Adapter extends RecyclerView.Adapter<View_Holder> {
    List<Album> albumList = Collections.emptyList();
    Context context;
    public Recycler_View_Adapter(List<Album> albumList, Context context) {
        this.albumList = albumList;
        this.context = context;
    }
    @Override
    public View_Holder onCreateViewHolder(ViewGroup parent, int viewType) {
        View v = LayoutInflater.from(parent.getContext()).inflate(R.layout.row_layout, parent, false);
        View_Holder holder = new View_Holder(v);
        return holder;
    }
    @Override
    public void onBindViewHolder(View_Holder holder, final int position) {
        holder.title.setText(albumList.get(position).name);
        holder.description.setText(albumList.get(position).post);
        // holder.imageView.setImageResource(albumList.get(position).img);
        // NOTE(review): base URL is hard-coded and uses plain http --
        // consider moving it to a single constant/config value.
        ImageLoader.getInstance().displayImage("http://samyotechlabs.com/exitpoll/" + albumList.get(position).img, holder.imageView);
        holder.layout_root.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Pass every Album field as individual extras to the detail
                // screen.  FLAG_ACTIVITY_NEW_TASK is set -- presumably because
                // `context` may not be an Activity; verify against callers.
                Intent in = new Intent(context, ViewDetailsActivity.class);
                in.putExtra("politicsid", albumList.get(position).id);
                in.putExtra("name", albumList.get(position).name);
                in.putExtra("post", albumList.get(position).post);
                in.putExtra("party", albumList.get(position).partyname);
                in.putExtra("whichpost", albumList.get(position).whichpost);
                in.putExtra("education", albumList.get(position).education);
                in.putExtra("age", albumList.get(position).age);
                in.putExtra("img", albumList.get(position).img);
                in.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                context.startActivity(in);
            }
        });
        animate(holder);
    }
    @Override
    public int getItemCount() {
        return albumList.size();
    }
    // NOTE(review): this override only calls super and could be removed.
    @Override
    public void onAttachedToRecyclerView(RecyclerView recyclerView) {
        super.onAttachedToRecyclerView(recyclerView);
    }
    // Insert a new item to the RecyclerView
    public void insert(int position, Album album) {
        albumList.add(position, album);
        notifyItemInserted(position);
    }
    // Remove a RecyclerView item containing the Data object
    public void remove(Album album) {
        int position = albumList.indexOf(album);
        albumList.remove(position);
        notifyItemRemoved(position);
    }
    // Plays the anticipate/overshoot entry animation on a freshly bound row.
    public void animate(RecyclerView.ViewHolder viewHolder) {
        final Animation animAnticipateOvershoot = AnimationUtils.loadAnimation(context, R.anim.anticipate_overshoot_interpolator);
        viewHolder.itemView.setAnimation(animAnticipateOvershoot);
    }
}
<file_sep>package com.samyotech.exitpoll.dto;
/**
* Created by varun on 30/11/16.
*/
public class Album {
public String id, name, post, partyname, whichpost, education, age, img;
public Album() {
}
public Album(String id, String name, String post, String partyname, String whichpost, String education, String age, String img) {
this.id = id;
this.name = name;
this.post = post;
this.partyname = partyname;
this.whichpost = whichpost;
this.education = education;
this.age = age;
this.img = img;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getPost() {
return post;
}
public void setPost(String post) {
this.post = post;
}
public String getPartyname() {
return partyname;
}
public void setPartyname(String partyname) {
this.partyname = partyname;
}
public String getWhichpost() {
return whichpost;
}
public void setWhichpost(String whichpost) {
this.whichpost = whichpost;
}
public String getEducation() {
return education;
}
public void setEducation(String education) {
this.education = education;
}
public String getAge() {
return age;
}
public void setAge(String age) {
this.age = age;
}
public String getImg() {
return img;
}
public void setImg(String img) {
this.img = img;
}
}
<file_sep>package com.samyotech.exitpoll.activity;
import android.content.DialogInterface;
import android.content.Intent;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.support.v7.widget.SwitchCompat;
import android.util.Log;
import android.view.View;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import com.samyotech.exitpoll.R;
import com.samyotech.exitpoll.sharedpref.SharedPrefrence;
import com.samyotech.exitpoll.utils.ProjectUtils;
/**
 * Settings screen: toggles the notification preference, links to the
 * reset-password and update-profile screens, and performs logout (clearing
 * the cached user/session preferences).
 */
public class Settings extends AppCompatActivity implements View.OnClickListener {
    LinearLayout back;
    RelativeLayout rlLogout, rlResetPass, updatepro;
    SharedPrefrence preference;
    SwitchCompat not_switch;
    // Current persisted notification preference; mirrored into not_switch.
    boolean notifyAlert = true;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_settings);
        preference = SharedPrefrence.getInstance(this);
        ProjectUtils.showAnalytics((SysApplication) getApplication(), "Settings");
        // Read the persisted value before init() so the switch reflects it.
        notifyAlert = preference.getBooleanValue(SharedPrefrence.NOTIFIOCATION_ENABLE);
        init();
    }
    // Binds views and wires click listeners; seeds the switch state.
    public void init() {
        back = (LinearLayout) findViewById(R.id.back);
        rlLogout = (RelativeLayout) findViewById(R.id.rlLogout);
        rlResetPass = (RelativeLayout) findViewById(R.id.rlResetPass);
        updatepro = (RelativeLayout) findViewById(R.id.updatepro);
        not_switch = (SwitchCompat) findViewById(R.id.not_switch);
        not_switch.setChecked(notifyAlert);
        back.setOnClickListener(this);
        rlLogout.setOnClickListener(this);
        rlResetPass.setOnClickListener(this);
        updatepro.setOnClickListener(this);
        not_switch.setOnClickListener(this);
    }
    @Override
    public void onClick(View v) {
        switch (v.getId()) {
            case R.id.back:
                startActivity(new Intent(Settings.this, MainActivity.class));
                overridePendingTransition(R.anim.pull_in_left, R.anim.push_out_right);
                finish();
                break;
            case R.id.rlLogout:
                // Ask for confirmation before clearing the session.
                confirmLogout();
                Log.e("isloginlogout", SharedPrefrence.IS_LOGIN);
                break;
            case R.id.rlResetPass:
                startActivity(new Intent(Settings.this, ResetpassActivity.class));
                finish();
                break;
            case R.id.updatepro:
                startActivity(new Intent(Settings.this, UpdateprofileActivity.class));
                finish();
                break;
            case R.id.not_switch:
                clickSwitch();
                break;
        }
    }
    // Persists the switch state under NOTIFIOCATION_ENABLE (sic -- the
    // constant name is misspelled in SharedPrefrence).
    private void clickSwitch() {
        if (not_switch.isChecked()) {
            preference.setBooleanValue(SharedPrefrence.NOTIFIOCATION_ENABLE, true);
        } else {
            preference.setBooleanValue(SharedPrefrence.NOTIFIOCATION_ENABLE, false);
        }
    }
    // Shows a yes/no dialog; only "Yes" proceeds to logout().
    public void confirmLogout() {
        try {
            new AlertDialog.Builder(this)
                    .setIcon(R.drawable.exitpoll)
                    .setTitle("ExitPoll")
                    .setMessage("Are you sure want to logout?")
                    .setPositiveButton("Yes!", new DialogInterface.OnClickListener() {
                        @Override
                        public void onClick(DialogInterface dialog, int which) {
                            dialog.dismiss();
                            logout();
                        }
                    })
                    .setNegativeButton("No", new DialogInterface.OnClickListener() {
                        @Override
                        public void onClick(DialogInterface dialog, int which) {
                            dialog.dismiss();
                        }
                    })
                    .show();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    // Clears every cached preference tied to the session, then returns to
    // the login screen.
    private void logout() {
        preference.clearPreferences(SharedPrefrence.IS_LOGIN);
        preference.clearPreferences(SharedPrefrence.FAV_RECORD);
        preference.clearPreferences(SharedPrefrence.POLITATION_RECORD);
        preference.clearPreferences(SharedPrefrence.USER_DETAIL);
        preference.clearPreferences(SharedPrefrence.VIEW_POLITATION_RECORD);
        startActivity(new Intent(Settings.this, LoginActivity.class));
        finish();
    }
    @Override
    public void onBackPressed() {
        super.onBackPressed();
        // Hardware back behaves like the toolbar back arrow.
        startActivity(new Intent(Settings.this, MainActivity.class));
        overridePendingTransition(R.anim.pull_in_left, R.anim.push_out_right);
        finish();
    }
}
<file_sep>package com.samyotech.exitpoll.activity;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.os.AsyncTask;
import android.os.Handler;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import com.google.firebase.iid.FirebaseInstanceId;
import com.samyotech.exitpoll.R;
import com.samyotech.exitpoll.dto.Album;
import com.samyotech.exitpoll.https.NetworkTask;
import com.samyotech.exitpoll.sharedpref.SharedPrefrence;
import com.samyotech.exitpoll.utils.Consts;
import com.samyotech.exitpoll.utils.DialogUtility;
import com.samyotech.exitpoll.utils.ProjectUtils;
import java.util.ArrayList;
/**
 * Launch screen: kicks off the initial data download and, after a fixed
 * 3-second delay, routes to MainActivity (if a session exists) or
 * LoginActivity.
 */
public class SplashActivity extends AppCompatActivity {
    SharedPrefrence preference;
    NetworkTask networkTask;
    // Fixed splash display time in milliseconds.
    private static int SPLASH_TIME_OUT = 3000;
    private Context mContext;
    public ArrayList<Album> albumList;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_splash);
        preference = SharedPrefrence.getInstance(this);
        getDATA();
        mContext = SplashActivity.this;
        ProjectUtils.showAnalytics((SysApplication) getApplication(), "SplashActivity");
        // NOTE(review): navigation fires after a fixed 3 s regardless of
        // whether getDATA()'s network call has finished -- the progress
        // dialog it shows may still be visible when this activity finishes.
        new Handler().postDelayed(new Runnable() {
            @Override
            public void run() {
                if (preference.getBooleanValue(SharedPrefrence.IS_LOGIN)) {
                    Log.e("islogin1", SharedPrefrence.IS_LOGIN);
                    startActivity(new Intent(mContext, MainActivity.class));
                    finish();
                } else {
                    startActivity(new Intent(mContext, LoginActivity.class));
                    finish();
                }
            }
        }, SPLASH_TIME_OUT);
    }
    // Fires the DATA_METHOD request; on success, caches the downloaded
    // politician records from preferences into albumList.
    public void getDATA() {
        DialogUtility.showProgressDialog(this, false, "Please wait...");
        networkTask = new NetworkTask(Consts.DATA_METHOD, getParam(), SplashActivity.this);
        networkTask.execute(Consts.POST_METHOD);
        networkTask.setOnTaskFinishedEvent(new NetworkTask.AsyncResponse() {
            @Override
            public void processFinish(boolean output, String message) {
                Log.e("output", String.valueOf(output));
                if (output) {
                    albumList = preference.getList(SharedPrefrence.POLITATION_RECORD);
                    DialogUtility.pauseProgressDialog();
                } else {
                    DialogUtility.pauseProgressDialog();
                }
            }
        });
    }
    // The DATA_METHOD request takes no parameters.
    public ContentValues getParam() {
        ContentValues values = new ContentValues();
        return values;
    }
}
<file_sep>package com.samyotech.exitpoll.activity;
import android.content.ContentValues;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.design.widget.TextInputLayout;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import com.google.firebase.iid.FirebaseInstanceId;
import com.samyotech.exitpoll.R;
import com.samyotech.exitpoll.https.NetworkTask;
import com.samyotech.exitpoll.sharedpref.SharedPrefrence;
import com.samyotech.exitpoll.utils.Consts;
import com.samyotech.exitpoll.utils.DialogUtility;
import com.samyotech.exitpoll.utils.ProjectUtils;
/**
 * Login screen: validates the email/password fields, posts them to the
 * LOGIN_METHOD endpoint, and on success stores the credentials and opens
 * MainActivity.  Also links to sign-up and forgot-password screens.
 */
public class LoginActivity extends AppCompatActivity implements View.OnClickListener {
    private TextView signUpTV, tvForgotPassword;
    private TextInputLayout emailTextInputLayout, passwordTextInputLayout;
    private EditText emailET, passwordET;
    private Button submitBTN;
    NetworkTask networkTask;
    SharedPrefrence preference;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_login);
        preference = SharedPrefrence.getInstance(this);
        initView();
    }
    // Binds views and wires click listeners.
    public void initView() {
        signUpTV = (TextView) findViewById(R.id.signUpTV);
        tvForgotPassword = (TextView) findViewById(R.id.tvForgotPassword);
        emailTextInputLayout = (TextInputLayout) findViewById(R.id.emailTextInputLayout);
        passwordTextInputLayout = (TextInputLayout) findViewById(R.id.passwordTextInputLayout);
        emailET = (EditText) findViewById(R.id.emailET);
        passwordET = (EditText) findViewById(R.id.passwordET);
        submitBTN = (Button) findViewById(R.id.submitBTN);
        signUpTV.setOnClickListener(this);
        tvForgotPassword.setOnClickListener(this);
        submitBTN.setOnClickListener(this);
    }
    @Override
    public void onClick(View v) {
        switch (v.getId()) {
            case R.id.signUpTV:
                startActivity(new Intent(LoginActivity.this, SignupActivity.class));
                overridePendingTransition(R.anim.pull_in_right, R.anim.push_out_left);
                finish();
                break;
            case R.id.tvForgotPassword:
                startActivity(new Intent(LoginActivity.this, ForgetpasswordActivity.class));
                overridePendingTransition(R.anim.pull_in_right, R.anim.push_out_left);
                finish();
                break;
            case R.id.submitBTN:
                submitForm();
                break;
        }
    }
    // Runs both field validations before starting the network login.
    private void submitForm() {
        if (!validateEmail()) {
            return;
        }
        if (!validatePassword()) {
            return;
        } else {
            DialogUtility.showProgressDialog(this, false, "Please wait...");
            getLogin();
        }
    }
    // Posts credentials to LOGIN_METHOD and reacts to the parsed result.
    // NOTE(review): success is read from preference key "loginStatus",
    // presumably written by NetworkTask while parsing the response --
    // confirm against NetworkTask.
    public void getLogin() {
        networkTask = new NetworkTask(Consts.LOGIN_METHOD, getParam(), LoginActivity.this);
        networkTask.execute(Consts.POST_METHOD);
        networkTask.setOnTaskFinishedEvent(new NetworkTask.AsyncResponse() {
            @Override
            public void processFinish(boolean output, String message) {
                Log.e("output", String.valueOf(output));
                if (preference.getBooleanValue("loginStatus")) {
                    preference.setBooleanValue(SharedPrefrence.IS_LOGIN, true);
                    Log.e("isloginlogin", SharedPrefrence.IS_LOGIN);
                    DialogUtility.showToast(getApplicationContext(), message);
                    DialogUtility.pauseProgressDialog();
                    // NOTE(review): the password is persisted in plain text in
                    // shared preferences -- a security concern worth revisiting.
                    preference.setValue(SharedPrefrence.EMAIL, emailET.getText().toString());
                    preference.setValue(SharedPrefrence.PASSWORD, passwordET.getText().toString());
                    startActivity(new Intent(LoginActivity.this, MainActivity.class));
                    finish();
                } else {
                    DialogUtility.showToast(getApplicationContext(), message);
                    DialogUtility.pauseProgressDialog();
                }
            }
        });
    }
    // Request body: email, password and the cached FCM push token.
    public ContentValues getParam() {
        ContentValues values = new ContentValues();
        values.put(Consts.EMAIL, emailET.getText().toString().trim());
        values.put(Consts.PASSWORD, passwordET.getText().toString().trim());
        values.put(Consts.FCMKEY, preference.getValue(SharedPrefrence.TOKAN));
        Log.e("tokensss", preference.getValue(SharedPrefrence.TOKAN));
        return values;
    }
    // NOTE(review): only checks that the field is non-empty; the stricter
    // ProjectUtils.isPasswordValid is not used here.
    private boolean validatePassword() {
        if (!ProjectUtils.isEditTextFilled(passwordET)) {
            passwordTextInputLayout.setError(getString(R.string.err_msg_password));
            passwordET.requestFocus();
            return false;
        } else {
            passwordTextInputLayout.setErrorEnabled(false);
        }
        return true;
    }
    // Shows an inline error and keeps focus on the field when the email
    // does not match ProjectUtils.isEmailValid.
    public boolean validateEmail() {
        if (!ProjectUtils.isEmailValid(emailET.getText().toString().trim())) {
            emailTextInputLayout.setError(getString(R.string.err_msg_email));
            emailET.requestFocus();
            return false;
        } else {
            emailTextInputLayout.setErrorEnabled(false);
            return true;
        }
    }
    @Override
    public void onBackPressed() {
        clickDone();
    }
    // Confirms before sending the app to the background (HOME intent).
    public void clickDone() {
        new AlertDialog.Builder(this)
                .setIcon(R.drawable.exitpoll)
                .setTitle("ExitPoll")
                .setMessage("Are you sure want to close ExitPoll?")
                .setPositiveButton("Yes!", new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        dialog.dismiss();
                        Intent i = new Intent();
                        i.setAction(Intent.ACTION_MAIN);
                        i.addCategory(Intent.CATEGORY_HOME);
                        i.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                        startActivity(i);
                        finish();
                    }
                })
                .setNegativeButton("No", new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        dialog.dismiss();
                    }
                })
                .show();
    }
}
"Java",
"Gradle"
] | 7 | Gradle | samyotech/ExitPoll | 88f85a28c22933dfc6c5db6bb9bb520ff8081f56 | cbbc49e3bf308918b2350457ecea69cad023e00b |
refs/heads/master | <repo_name>Piyush026/DjangoBlog<file_sep>/icode/blog/views.py
from django.shortcuts import render , HttpResponse , redirect
from blog.models import Post, PostComment
from django.contrib import messages
from blog.templatetags import getDict
# Create your views here.
def blogHome(request):
    """Render the blog index page listing every post."""
    posts = Post.objects.all()
    # Template contract: the index template iterates over `allPost`.
    return render(request, 'blog/blog.html', {'allPost': posts})
def blogPost(request, slug):
    """Render a single post identified by ``slug`` with its threaded comments.

    Top-level comments (parent is None) go in ``comments``; replies are
    grouped by the serial number of the comment they answer and passed to the
    template as ``replyDict``.
    """
    post = Post.objects.filter(slug=slug).first()
    comments = PostComment.objects.filter(post=post, parent=None)
    replies = PostComment.objects.filter(post=post).exclude(parent=None)
    replies_by_parent = {}
    for reply in replies:
        replies_by_parent.setdefault(reply.parent.sno, []).append(reply)
    context = {
        "post": post,
        "comments": comments,
        "user": request.user,
        "replyDict": replies_by_parent,
    }
    return render(request, 'blog/blogpost.html', context)
def commentPost(request):
    """Persist a comment submitted from a blog-post page.

    An empty ``parentSno`` field means a new top-level comment; otherwise the
    row is saved as a reply to that parent comment.  (The redirect back to the
    post page follows on the next line.)
    """
    author = request.user
    text = request.POST.get("comment")
    post = Post.objects.get(sno=request.POST.get("postsno"))
    parent_sno = request.POST.get("parentSno")
    if parent_sno == "":
        PostComment(user=author, comment=text, post=post).save()
        messages.success(request, "comment posted!!")
    else:
        parent = PostComment.objects.get(sno=parent_sno)
        PostComment(user=author, comment=text, post=post, parent=parent).save()
        messages.success(request, "reply posted!!")
return redirect(f"/blog/{post.slug}")<file_sep>/icode/blog/urls.py
from django.urls import path, include
from blog import views
# Route table for the blog app.
# Order matters: 'postComment' must precede the catch-all '<str:slug>'
# pattern, otherwise "postComment" would be treated as a post slug.
urlpatterns = [
    path('postComment', views.commentPost, name='commentPost'),
    path('', views.blogHome, name='blogHome'),
    path('<str:slug>', views.blogPost, name='blogPost'),
]<file_sep>/icode/home/urls.py
from django.urls import path, include
from . import views
# Route table for the home app (landing page, static pages, auth views).
# NOTE(review): 'login' has no trailing slash while the other routes do —
# confirm this is intentional (APPEND_SLASH behaviour differs).
urlpatterns = [
    path('', views.index, name='home'),
    path('contact/', views.contact, name='contact'),
    path('about/', views.about,name='about'),
    path('search/',views.search,name="search"),
    path('signup/',views.signup,name="signup"),
    path('login',views.userlogin,name="login"),
    path('logout/',views.userlogout,name="logout"),
]<file_sep>/icode/home/views.py
from django.shortcuts import render , HttpResponse, redirect, HttpResponseRedirect
from .models import Contact
from django.contrib import messages
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.models import User
from blog.models import Post
from django.db import IntegrityError
# Create your views here.
def index(request):
    """Render the site's landing page."""
    return render(request,'home/home.html')
def contact(request):
    """Show the contact form; store POSTed submissions that pass validation.

    A submission is rejected when the name is shorter than 5 characters or
    the phone / email / message are shorter than 10.
    """
    if request.method == 'POST':
        name = request.POST['name']
        phone = request.POST['phone']
        email = request.POST['email']
        msg = request.POST['content']
        if len(name) < 5 or len(phone) < 10 or len(email) < 10 or len(msg) < 10:
            messages.error(request, "please fill Form correctly")
        else:
            entry = Contact(name=name, phone=phone, email=email, content=msg)
            messages.success(request, "form submited")
            entry.save()
    return render(request, 'home/contact.html')
def about(request):
    """Render the static About page."""
    return render(request,'home/about.html')
def search(request):
    """Search posts whose title or body contains the query string.

    The model's title field is (mis)spelled ``tittle`` — keep the lookup in
    sync with the model.  Queries longer than 50 characters skip the
    "try another" flash message but otherwise behave like an empty result.
    """
    query = request.GET["query"]
    title_hits = Post.objects.filter(tittle__icontains=query)
    body_hits = Post.objects.filter(content__icontains=query)
    results = title_hits.union(body_hits)
    if results:
        context = {"allPost": results, "query": query}
    elif len(query) > 50:
        context = {"query": "Data not found"}
    else:
        context = {"query": "Data not found"}
        messages.warning(request, "try another..")
    return render(request, 'home/search.html', context)
def signup(request):
    """Create a new user account from the signup form.

    The two password fields must match and be at least 8 characters.
    Duplicate usernames are reported via the messages framework; any other
    failure yields a plain "404 NOT FOUND" response.
    """
    try:
        if request.method == "POST":
            username = request.POST['username']
            name = request.POST['name']
            email = request.POST['email']
            pass1 = request.POST['pass1']
            pass2 = request.POST['pass2']
            # validation: both password fields must agree ...
            if pass1 != pass2:
                messages.error(request, "password must be same and minimum 8 char")
                return HttpResponseRedirect('/')
            # ... and meet the minimum length.
            if len(pass1) < 8:
                messages.error(request, "password must be minimum 8 char")
                return HttpResponseRedirect('/')
            icoderUser = User.objects.create_user(username, email, pass1)
            # NOTE(review): first_name is set to the username and last_name to
            # the "name" field — confirm these two are not swapped.
            icoderUser.first_name = username
            icoderUser.last_name = name
            icoderUser.save()
            messages.success(request, "User created successfully")
            return HttpResponseRedirect('/')
        else:
            return HttpResponse("404 NOT FOUND")
    except IntegrityError:
        # create_user raises IntegrityError when the username already exists.
        messages.error(request, "Username must be unique")
        return HttpResponseRedirect('/')
    except Exception:
        # Fix: the original bare ``except:`` also swallowed SystemExit and
        # KeyboardInterrupt; catch only ordinary exceptions.
        return HttpResponse("404 NOT FOUND")
    # return render_to_response("template.html", {"message": e.message})
def userlogin(request):
    """Authenticate a user from the login form and start a session."""
    if request.method == 'POST':
        Uname = request.POST['Uname']
        loginpass = request.POST['loginpass']
        # Fix: the submitted password was collected but never handed to
        # authenticate() (the call site held a broken placeholder token),
        # so no login attempt could ever be evaluated.
        user = authenticate(username=Uname, password=loginpass)
        if user is not None:
            login(request, user)
            messages.success(request, "logged in successfully")
            return redirect('home')
        else:
            messages.error(request, "invalid login")
            return redirect('home')
    return HttpResponse("404 not found")
def userlogout(request):
    """End the current session, flash a confirmation, and return home."""
    logout(request)
    messages.success(request,"logout successfully")
return redirect('home') | 6fce2ad9673084525f4fcdd33bccca40308588b8 | [
"Python"
] | 4 | Python | Piyush026/DjangoBlog | e5851f1270e75088c101352b3e99d74c2ec67fe8 | 64c50d25ca96c6a46e815e8994755e122a6c16de |
refs/heads/main | <file_sep>from django.test import TestCase
# Create your tests here.
# Scratch script: write x followed by x+0, x+1, ..., x+99 (so -50 appears
# twice) into ibra.txt as a single Python list literal.
x = -50
with open('ibra.txt', 'w') as f:
    lst = [x] + [offset + x for offset in range(100)]
f.write(str(lst))<file_sep>from django.shortcuts import render,redirect
from django.contrib.auth.models import User
from django.contrib import messages
from django.contrib.auth import login as Login_process ,logout,authenticate
from django.contrib.auth.decorators import login_required
# Create your views here.
@login_required(login_url='login')
def Home(request):
    """Render the main page; anonymous visitors are redirected to login."""
    return render(request, 'webAi/home.html')
@login_required(login_url='login')
def submit(request):
    """Handle the 20-field numeric form.

    Fields input1..input18 are parsed as floats and rounded to two decimal
    places; input19 and input20 are parsed as ints.  The parsed values are
    not used yet — ``val`` is a placeholder until the real model/API call is
    wired in (see the comment below).

    NOTE(review): a GET request falls through without returning a response
    (Django raises ValueError for views returning None); confirm the
    template only ever POSTs to this view.
    """
    if request.method == 'GET':
        pass
    else:
        # Fix: the original copy-pasted ``round(input1, 2)`` into nearly
        # every variable (and parsed input15 from input5), so inputs 2-18
        # all ended up holding input1's rounded value.  Parse and round each
        # field independently instead.
        float_inputs = {}
        for i in range(1, 19):
            field = 'input%d' % i
            float_inputs[field] = round(float(request.POST.get(field)), 2)
        input19 = int(request.POST.get('input19'))
        input20 = int(request.POST.get('input20'))
        # Call Your API or function that will produce graph value using the inputs
        val = .3
        return render(request, 'webAi/new_page.html', {'val': val})
# https://jsfiddle.net/BlackLabel/x9vo0tr6/
def Login(request):
    """Log an existing user in; on failure flash a message and re-show the form."""
    if request.method == 'POST':
        username = request.POST.get('username')
        pass1 = request.POST.get('pass1')
        # Fix: the submitted password (pass1) was collected but never handed
        # to authenticate() (the call site held a broken placeholder token),
        # so no login could ever succeed.
        user = authenticate(request, username=username, password=pass1)
        if user is not None:
            Login_process(request, user)
            return redirect('/')
        else:
            messages.info(request, 'Username OR password is incorrect')
    return render(request, 'webAi/login.html')
def Register(request):
    """Create an account from the registration form.

    On matching passwords the user is created (password stored hashed via
    ``set_password``) and sent to the login page; otherwise an error message
    is flashed and the form is shown again.
    """
    if request.method == 'POST':
        fname = request.POST.get('fname')
        lname = request.POST.get('lname')
        username = request.POST.get('username')
        email = request.POST.get('email')
        pass1 = request.POST.get('pass1')
        pass2 = request.POST.get('pass2')
        if str(pass1) == str(pass2):
            new_user = User(first_name=fname, last_name=lname, email=email,
                            username=username)
            new_user.set_password(pass1)
            new_user.save()
            return redirect('login')
        messages.error(request, 'Both Passwords did not match!')
        return render(request, 'webAi/register.html')
    return render(request, 'webAi/register.html')
def logoutUser(request):
    """Log the current user out, then redirect to the home page."""
    logout(request)
return redirect('/') | 4ce095353d341448d3734d531fab850657126b80 | [
"Python"
] | 2 | Python | Ibrahimkhalill/webAi | 9c8b90a6a5b3846cfaf77b11733f308d2a35a289 | 4705be243a4a582277c5a716f467a450c819dd08 |
refs/heads/master | <file_sep>import os
import re
import operator
import pandas as pd
import numpy as np
from textblob import TextBlob
from textblob.sentiments import NaiveBayesAnalyzer
from sklearn.naive_bayes import MultinomialNB
from nltk.corpus import wordnet
from nltk.corpus import stopwords
from sklearn.pipeline import Pipeline
from nltk import word_tokenize
from nltk.stem import WordNetLemmatizer
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.feature_extraction.text import CountVectorizer
from sklearn import preprocessing
from scipy.sparse import csr_matrix
from sklearn.tree import DecisionTreeClassifier
# Widen the pandas console output so whole frames print on one screen.
pd.set_option('display.width', 192, 'display.max_rows', None)
# constants
SENTIMENT = 'sentiment'
TEXT = 'text'
POSITIVE = 'positive'
NEGATIVE = 'negative'
NEUTRAL = 'neutral'
SPLIT_LINE = '\n'
SPLIT_WORD = ' '
KEYWORD = 'hypernym'
FREQUENCY = 'freq'
FEATURE_KEY = [KEYWORD, FREQUENCY]
import nltk
# Fix: fetch the NLTK corpora *before* they are used.  The original built
# the stopword list first, which raises LookupError on a machine that has
# not yet downloaded the 'stopwords' corpus.
nltk.download('stopwords')
nltk.download('wordnet')
stop = list(set(stopwords.words('english'))) # stopwords
class SongSentiment(object):
    '''
    Turn raw song lyrics into hypernym-frequency features and per-stanza
    sentiment labels.  (The original docstring said "Generic Twitter Class";
    the methods below operate on song lyrics, not tweets.)
    '''
    def __init__(self):
        '''
        Class constructor or initialization method.  No state is set up here.
        '''
    def clean_stanza(self, stanza):
        '''
        Strip @-mentions, URLs and all non-alphanumeric characters from a
        stanza, collapsing runs of whitespace to single spaces.
        '''
        return ' '.join(re.sub("(@[A-Za-z0-9]+)|([^0-9A-Za-z \t])|(\w+:\/\/\S+)", " ", stanza).split())
    def lyrics_analyzer(self, fetched_lyrics, flag = 0, pp = 0):
        '''
        Count WordNet hypernyms over every non-stopword in the lyrics.

        fetched_lyrics -- full lyrics text; stanzas separated by blank lines.
        flag -- 0: return {hypernym: count}; non-zero: return a
                [[hypernym, count], ...] list sorted by descending count.
        pp   -- non-zero prints each word prefixed '+' (used) or '-' (stopword).
        '''
        # empty dictionary to store required params of a lyrics
        # expanding vocabulary: Hypernym approach.
        lyrics_hypernyms = {}
        # iterate over stanza one by one
        for stanza in fetched_lyrics.split('\n\n'):
            if stanza != '':
                # iterate over line by line
                for line in self.clean_stanza(stanza).split(SPLIT_LINE):
                    # iterate over word by word in a line
                    for word in line.split(SPLIT_WORD):
                        word = word.lower()
                        if word not in stop:
                            if pp: print('+', word) # word is being used
                            # find synonyms sets
                            for synset in wordnet.synsets(word):
                                # iterate over synset hypernyms
                                for hypernym in synset.hypernyms():
                                    # extract hypernym name (lemma part of e.g. 'dog.n.01')
                                    hypernym_0 = hypernym.name().split('.')[0]
                                    # if hypernym name already exists then increase the count value
                                    if hypernym_0 in lyrics_hypernyms.keys():
                                        lyrics_hypernyms[hypernym_0] += 1
                                    else:
                                        lyrics_hypernyms[hypernym_0] = 1
                        else:
                            if pp: print('-', word) # word is not being used
        # returning
        if not flag:
            return lyrics_hypernyms
        else:
            return [[word[0],word[1]] for word in sorted(lyrics_hypernyms.items(), key=operator.itemgetter(1), reverse=True)]
    def reduce_noise(self, lyrics_hypernyms, min_count, max_count, length, flag = 0):
        '''
        Keep only hypernyms whose count lies within [min_count, max_count].

        flag -- 0: return a dict (``length`` is ignored); non-zero: return a
                count-sorted list trimmed to at most ``length`` entries.
        '''
        frequent_hypernyms = {}
        for key in lyrics_hypernyms.keys():
            if (lyrics_hypernyms[key] >= min_count) & (lyrics_hypernyms[key] <= max_count):
                frequent_hypernyms[key] = lyrics_hypernyms[key]
        # returning
        if not flag:
            return frequent_hypernyms
        else:
            return [[word[0],word[1]] for word in sorted(frequent_hypernyms.items(), key=operator.itemgetter(1), reverse=True)][:length]
    def get_stanza_sentiment(self, stanza):
        '''
        Classify one stanza as POSITIVE / NEGATIVE / NEUTRAL via TextBlob.

        NOTE(review): the blob is built with NaiveBayesAnalyzer, whose
        ``sentiment`` exposes (classification, p_pos, p_neg) rather than
        polarity/subjectivity — accessing ``.polarity`` below likely raises
        AttributeError.  Confirm the intended analyzer (TextBlob's default
        PatternAnalyzer is the one providing polarity/subjectivity).
        '''
        # create TextBlob object of passed stanza text
        analysis = TextBlob(self.clean_stanza(stanza), analyzer=NaiveBayesAnalyzer())
        print(analysis.sentiment)
        # set sentiment
        if (analysis.sentiment.polarity > 0) & (analysis.sentiment.subjectivity > 0):
            return POSITIVE
        elif (analysis.sentiment.polarity < 0) & (analysis.sentiment.subjectivity > 0):
            return NEGATIVE
        elif (analysis.sentiment.polarity == 0) & (analysis.sentiment.subjectivity > 0):
            return NEUTRAL
        else:
            return None
    def get_lyrics(self, fetched_lyrics):
        '''
        Split the lyrics into stanzas and return a list of
        {TEXT: stanza, SENTIMENT: label} dicts; duplicate stanzas kept once.
        '''
        # empty list to store parsed lyrics
        lyrics = []
        # unique_hypernyms
        unique_hypernyms = set()
        print('The Song lyrics:', type(fetched_lyrics), '\n', fetched_lyrics)
        # parsing lyrics one by one
        for stanza in fetched_lyrics.split('\n\n'):
            # empty dictionary to store required params of a stanza
            parsed_stanza = {}
            print('-------------------------------------------------')
            # saving text of stanza
            parsed_stanza[TEXT] = stanza
            print('saving text of stanza:', parsed_stanza[TEXT])
            # saving sentiment of stanza
            parsed_stanza[SENTIMENT] = self.get_stanza_sentiment(stanza)
            print('saving sentiment of stanza:', parsed_stanza[SENTIMENT])
            # if stanza has relyrics, ensure that it is appended only once
            if parsed_stanza not in lyrics:
                lyrics.append(parsed_stanza)
            # else:
            #     lyrics.append(parsed_stanza)
            print('-------------------------------------------------')
        # return parsed lyrics
        return lyrics
    def label_encoder(self, key_list):
        '''
        Fit a sklearn LabelEncoder on ``key_list`` and keep it on ``self.le``.
        '''
        self.le = preprocessing.LabelEncoder()
        self.le.fit(key_list)
    def label_transform(self, key_list):
        '''
        Encode ``key_list`` with the stored encoder; if ``label_encoder`` was
        never called, fit a new encoder on this very list first.
        '''
        try:
            self.le
        except:
            self.le = preprocessing.LabelEncoder()
            self.le.fit(key_list)
        return self.le.transform(key_list)
    def sparse_matrix(self, freq_hypernyms):
        '''
        Build a dense array from a CSR (data, (row, col)) triplet.

        NOTE(review): ``row`` and ``col`` are never defined in this scope, so
        calling this method raises NameError — the coordinate arrays still
        need to be derived from ``freq_hypernyms``.
        '''
        data = freq_hypernyms[0]
        return csr_matrix((data, (row, col))).toarray()
def main():
    """Train a MultinomialNB mood classifier on the 'Sadness' and 'Anger'
    lyric folders, then predict labels for every file under 'Test'.

    NOTE(review): ``DataFrame.append`` (used below) was removed in pandas
    2.0 — this script requires an older pandas, or ``pd.concat`` instead.
    """
    # creating object of SongSentiment Class
    song = SongSentiment()
    # path = 'Sadness'
    # filename = 'cheap_thrills_sia.txt'
    # file_path = os.path.join(path, filename)
    # # text lyrics
    # song_lyrics = open(file=file_path, mode='r', encoding='ISO-8859-1').read()
    # print('------------------------------------------------------------------------------')
    # # calling function to hypernyms
    # frequent_hypernyms = song.lyrics_analyzer(song_lyrics)
    # print('------------------------------------------------------------------------------')
    # # clean hypernyms data
    # frequent_hypernyms = song.reduce_noise(frequent_hypernyms, 2, 100, 200)
    # print('------------------------------------------------------------------------------')
    # text_clf = Pipeline([('vect', TfidfVectorizer()), ('clf', MultinomialNB(alpha=0.1))])
    # # text_clf = Pipeline([('vect', TfidfVectorizer()), ('tfidf', TfidfTransformer()), ('clf', MultinomialNB(alpha=0.1))])
    SONG_TYPE = ['Sadness', 'Anger']
    # recursively list all song files
    def find_songs(path):
        for root, dirs, files in os.walk(path):
            yield(root, dirs, files)
    # Read an entire lyric file as text (prints the path as progress output).
    def read_content(file_path):
        print(file_path)
        return open(file=file_path, mode='r', encoding='ISO-8859-1').read()
    # --- Training phase: one row per (mood-folder, file) pair ---
    df = pd.DataFrame(columns=['mood', 'filename'])
    for MOOD in SONG_TYPE:
        for song_type, dirs, filenames in find_songs(MOOD):
            for filename in filenames:
                df = df.append(pd.Series([song_type, filename], index=df.columns), ignore_index=True)
    df['lyrics'] = df[['mood','filename']].apply(lambda x: read_content(os.path.join(*x)), axis=1)
    # df['hypernyms'] = df[['lyrics']].apply(lambda x: song.lyrics_analyzer(*x), axis=1)
    df['hypernyms'] = df[['lyrics']].apply(lambda x: song.reduce_noise(song.lyrics_analyzer(*x), 1, 20, 200), axis=1)
    print(df)
    # Union of every hypernym seen in training defines the feature space.
    hypernyms_dict = {}
    [hypernyms_dict.update(hypernyms) for hypernyms in df['hypernyms']]
    features = list(hypernyms_dict.keys())
    print(len(hypernyms_dict))
    print(type(features))
    print('----------------------------------------')
    # One row per song, one column per hypernym; absent counts become 0.
    process_df = pd.DataFrame(columns = features)
    for i in df.index:
        x = pd.DataFrame(df['hypernyms'][i], index=[i])
        process_df = process_df.append(x)
    # process_df['mood'] = df['mood']
    process_df.fillna(0, inplace=True)
    print(process_df)
    # clf = tree.DecisionTreeClassifier()
    clf = MultinomialNB()
    clf.fit(process_df, df['mood'])
    # --- Prediction phase: rebuild the same frames from the 'Test' folder ---
    SONG_TYPE = ['Test']
    df = pd.DataFrame(columns=['mood', 'filename'])
    for MOOD in SONG_TYPE:
        for song_type, dirs, filenames in find_songs(MOOD):
            for filename in filenames:
                df = df.append(pd.Series([song_type, filename], index=df.columns), ignore_index=True)
    df['lyrics'] = df[['mood','filename']].apply(lambda x: read_content(os.path.join(*x)), axis=1)
    df['hypernyms'] = df[['lyrics']].apply(lambda x: song.lyrics_analyzer(*x), axis=1)
    # df['hypernyms'] = df[['lyrics']].apply(lambda x: song.reduce_noise(song.lyrics_analyzer(*x), 2, 40, 200), axis=1)
    print(df)
    process_df = pd.DataFrame(columns = features)
    for i in df.index:
        x = pd.DataFrame(df['hypernyms'][i], index=[i])
        process_df = process_df.append(x)
    # process_df['mood'] = df['mood']
    process_df.fillna(0, inplace=True)
    print(process_df)
    ans = clf.predict(process_df[features])
    print(ans)
    # print('------------------------------------------------------------------------------')
    # song_lyrics = open(file=file_path, mode='r', encoding='ISO-8859-1')
    # clf = TfidfVectorizer()
    # # clf = MultinomialNB()
    # # clf = CountVectorizer()
    # res = clf.fit_transform(song_lyrics)
    # print(res)
    # print(res.shape)
    # print(type(song_lyrics))
    # for i in res:
    #     print(i)
    #     print(type(i))
    #     print(i.shape)
    # print('------------------------------------------------------------------------------')
    # res = text_clf.fit_transform(song_lyrics)
    # print(res)
    # print(res.shape)
    # print(type(song_lyrics))
    # for i in res:
    #     print(i)
    # print('------------------------------------------------------------------------------')
    # X = pd.DataFrame([[ i[0] for i in frequent_hypernyms1], [ i[0] for i in frequent_hypernyms2]])
    # print(X)
    # Y = ['cheap','cold']
    # text_clf.fit_transform(X, Y)
    # ans = text_clf.predict(frequent_hypernyms3)
    # print(ans)
    # print('------------------------------------------------------------------------------')
    # lyrics = song.get_lyrics(song_lyrics)
    # # picking positive lyrics from lyrics
    # positive_lyrics = [stanza for stanza in lyrics if stanza[SENTIMENT] == POSITIVE]
    # # percentage of positive lyrics
    # print("Positive lyrics percentage: {} %".format(100*len(positive_lyrics)/len(lyrics)))
    # # picking negative lyrics from lyrics
    # negative_lyrics = [stanza for stanza in lyrics if stanza[SENTIMENT] == NEGATIVE]
    # # percentage of negative lyrics
    # print("Negative lyrics percentage: {} %".format(100*len(negative_lyrics)/len(lyrics)))
    # # picking negative lyrics from lyrics
    # neutral_lyrics = [stanza for stanza in lyrics if stanza[SENTIMENT] == NEUTRAL]
    # # percentage of neutral lyrics
    # print("Neutral lyrics percentage: {} % ".format(100*len(neutral_lyrics)/len(lyrics)))
    # # printing first 5 positive lyrics
    # print("\n\nPositive lyrics:")
    # for stanza in positive_lyrics[:5]:
    #     print(stanza['text'])
    # # printing first 5 negative lyrics
    # print("\n\nNegative lyrics:")
    # for stanza in negative_lyrics[:5]:
    #     print(stanza['text'])
if __name__ == "__main__":
    # calling main function
    main()
<file_sep># LyricsMood
Music Mood Classification based on the Lyrics using Machine Learning
This is a beginner-level program that focuses on the lyrics of a song to classify the music type.
<file_sep>from sklearn import svm
from sklearn.datasets import samples_generator
from sklearn.feature_selection import SelectKBest
from sklearn.feature_selection import f_regression
from sklearn.pipeline import Pipeline
# Demo script: scikit-learn Pipeline combining ANOVA feature selection with a
# linear SVM (mirrors the example from the sklearn Pipeline documentation).
# NOTE(review): ``sklearn.datasets.samples_generator`` was removed in recent
# scikit-learn releases — confirm the installed version, or import
# make_classification from sklearn.datasets directly.
# generate some data to play with
X, y = samples_generator.make_classification(n_informative=5, n_redundant=0, random_state=42)
# ANOVA SVM-C
anova_filter = SelectKBest(f_regression, k=5)
clf = svm.SVC(kernel='linear')
anova_svm = Pipeline([('anova', anova_filter), ('svc', clf)])
# You can set the parameters using the names issued
# For instance, fit using a k of 10 in the SelectKBest
# and a parameter 'C' of the svm
anova_svm.set_params(anova__k=10, svc__C=.1).fit(X, y)
# Pipeline(memory=None, steps=[('anova', SelectKBest(...)), ('svc', SVC(...))])
prediction = anova_svm.predict(X)
anova_svm.score(X, y)
# 0.829...
# getting the selected features chosen by anova_filter
anova_svm.named_steps['anova'].get_support()
# array([False, False,  True,  True, False, False,  True,  True, False,
#        True, False,  True,  True, False,  True, False,  True,  True,
#       False, False], dtype=bool)
# Another way to get selected features chosen by anova_filter
anova_svm.named_steps.anova.get_support()
# array([False, False,  True,  True, False, False,  True,  True, False,
#        True, False,  True,  True, False,  True, False,  True,  True,
# False, False], dtype=bool)<file_sep># import nltk
# nltk.download('stopwords')
# nltk.download('wordnet')
# import numpy as np
# from sklearn.pipeline import Pipeline
# from nltk import word_tokenize
# from nltk.stem import WordNetLemmatizer
# from sklearn.feature_extraction.text import TfidfVectorizer
# from sklearn.feature_extraction.text import TfidfTransformer
# from sklearn.feature_extraction.text import CountVectorizer
# from scipy.sparse import csr_matrix
# from sklearn.tree import DecisionTreeClassifier
import os
import re
import operator
import pandas as pd
from textblob import TextBlob
from textblob.sentiments import NaiveBayesAnalyzer
from sklearn.naive_bayes import MultinomialNB
from nltk.corpus import wordnet
from nltk.corpus import stopwords
from sklearn import preprocessing
# set display terminal width
pd.set_option('display.width', 192, 'display.max_rows', None)
# constants
SENTIMENT = 'sentiment'
TEXT = 'text'
POSITIVE = 'positive'
NEGATIVE = 'negative'
NEUTRAL = 'neutral'
SPLIT_LINE = '\n'
SPLIT_WORD = ' '
KEYWORD = 'hypernym'
FREQUENCY = 'freq'
FEATURE_KEY = [KEYWORD, FREQUENCY]
# NOTE(review): this requires the NLTK 'stopwords' corpus to be present —
# the nltk.download(...) calls are commented out at the top of this file.
stop = list(set(stopwords.words('english'))) # stopwords
class SongSentiment(object):
    '''
    End-to-end lyrics mood classifier: scans folders of lyric files, builds
    WordNet-hypernym count features, fits a Multinomial Naive Bayes model on
    the training folders and predicts a mood for each test song.
    '''
    def __init__(self, train, test):
        '''
        train / test -- lists of directory names to scan for lyric files.
        The remaining attributes are column-name constants, the file
        encoding, and FREQ (minimum column total kept by train_model_df).
        '''
        self.MOOD = 'mood'
        self.FILENAME = 'filename'
        self.LYRICS = 'lyrics'
        self.HYPERNYMS = 'hypernyms'
        self.ENCODING = 'ISO-8859-1'
        self.FREQ = 3
        self.train = train
        self.test = test
    def preprocessing_df(self, moods):
        # Rebuild self.df with one row per lyric file found under the given
        # mood directories, then attach raw lyrics and hypernym counts.
        # NOTE(review): DataFrame.append was removed in pandas 2.0; this code
        # requires an older pandas (pd.concat is the replacement).
        print('preprocessing_df')
        self.df = pd.DataFrame(columns=[self.MOOD, self.FILENAME])
        for mood in moods:
            for song_type, dirs, filenames in self.find_songs(mood):
                for filename in filenames:
                    self.df = self.df.append(pd.Series([song_type, filename], index=self.df.columns), ignore_index=True)
        self.df[self.LYRICS] = self.df[[self.MOOD, self.FILENAME]].apply(lambda x: self.read_content(os.path.join(*x)), axis=1)
        # self.df[HYPERNYMS] = self.df[[LYRICS]].apply(lambda x: self.lyrics_analyzer(*x), axis=1)
        self.df[self.HYPERNYMS] = self.df[[self.LYRICS]].apply(lambda x: self.reduce_noise(self.lyrics_analyzer(*x), 1, 20, 200), axis=1)
        # self.df[['positiveness','negativeness']] = self.df[['lyrics']].apply(lambda x: self.overall_lyrics_mood(*x), axis=1)
    def train_model_df(self):
        # Expand the per-song hypernym dicts into one column per hypernym and
        # drop rare columns whose total count is <= self.FREQ.
        # NOTE(review): the loop variable ``i`` is unused — the same frame is
        # recomputed once per row; a single apply() would suffice.
        print('train_model_df')
        # create train model dataframe
        for i in self.df.index:
            self.train_df = self.df[self.HYPERNYMS].apply(pd.Series)
        # fill all NaN values to '0'
        self.train_df.fillna(0, inplace=True)
        for column in self.train_df.columns:
            if self.train_df[column].sum() <= self.FREQ:
                self.train_df = self.train_df.drop(column, axis = 1)
        self.features = list(self.train_df.columns)
    def test_model_df(self):
        # Expand the test hypernym dicts and align columns with the training
        # feature set (features unseen in the test data become all zeros).
        # NOTE(review): same unused-loop pattern as train_model_df.
        print('test_model_df')
        # create test model dataframe
        for i in self.df.index:
            self.test_df = self.df[self.HYPERNYMS].apply(pd.Series)
        new_columns = set(self.features) - set(self.test_df.columns)
        for column in new_columns:
            self.test_df[column] = 0
        # fill all NaN values to '0'
        self.test_df.fillna(0, inplace=True)
    def predict(self):
        '''
        Run the fitted classifier on the aligned test features; the predicted
        labels are stored on self.result.
        '''
        print('predict')
        self.result = self.clf.predict(self.test_df[self.features])
    # recursively list all song files
    def find_songs(self, path):
        '''
        Yield (root, dirs, files) triples for every directory under ``path``.
        '''
        for root, dirs, files in os.walk(path):
            yield(root, dirs, files)
    def read_content(self, file_path):
        '''
        Return the whole file as text using the configured encoding.
        NOTE(review): the file handle is never closed explicitly.
        '''
        return open(file=file_path, mode='r', encoding=self.ENCODING).read()
    def clean_stanza(self, stanza):
        '''
        Strip @-mentions, URLs and all non-alphanumeric characters from a
        stanza, collapsing runs of whitespace to single spaces.
        '''
        return ' '.join(re.sub("(@[A-Za-z0-9]+)|([^0-9A-Za-z \t])|(\w+:\/\/\S+)", " ", stanza).split())
    def lyrics_analyzer(self, fetched_lyrics, flag = 0, pp = 0):
        '''
        Count WordNet hypernyms over every non-stopword in the lyrics.

        flag -- 0: return {hypernym: count}; non-zero: return a
                [[hypernym, count], ...] list sorted by descending count.
        pp   -- non-zero prints each word prefixed '+' (used) or '-' (stopword).
        '''
        # empty dictionary to store required params of a lyrics
        # expanding vocabulary: Hypernym approach.
        lyrics_hypernyms = {}
        # iterate over stanza one by one
        for stanza in fetched_lyrics.split('\n\n'):
            if stanza != '':
                # iterate over line by line
                for line in self.clean_stanza(stanza).split(SPLIT_LINE):
                    # iterate over word by word in a line
                    for word in line.split(SPLIT_WORD):
                        word = word.lower()
                        if word not in stop:
                            if pp: print('+', word) # word is being used
                            # find synonyms sets
                            for synset in wordnet.synsets(word):
                                # iterate over synset hypernyms
                                for hypernym in synset.hypernyms():
                                    # extract hypernym name
                                    hypernym_0 = hypernym.name().split('.')[0]
                                    # if hypernym name already exists then increase the count value
                                    if hypernym_0 in lyrics_hypernyms.keys():
                                        lyrics_hypernyms[hypernym_0] += 1
                                    else:
                                        lyrics_hypernyms[hypernym_0] = 1
                        else:
                            if pp: print('-', word) # word is not being used
        # returning
        if not flag:
            return lyrics_hypernyms
        else:
            return [[word[0],word[1]] for word in sorted(lyrics_hypernyms.items(), key=operator.itemgetter(1), reverse=True)]
    def reduce_noise(self, lyrics_hypernyms, min_count, max_count, length, flag = 0):
        '''
        Keep only hypernyms whose count lies within [min_count, max_count].

        flag -- 0: return a dict (``length`` is ignored); non-zero: return a
                count-sorted list trimmed to at most ``length`` entries.
        '''
        frequent_hypernyms = {}
        for key in lyrics_hypernyms.keys():
            if (lyrics_hypernyms[key] >= min_count) & (lyrics_hypernyms[key] <= max_count):
                frequent_hypernyms[key] = lyrics_hypernyms[key]
        # returning
        if not flag:
            return frequent_hypernyms
        else:
            return [[word[0],word[1]] for word in sorted(frequent_hypernyms.items(), key=operator.itemgetter(1), reverse=True)][:length]
    def overall_lyrics_mood(self, fetched_lyrics):
        '''
        Print TextBlob's NaiveBayes sentiment for each stanza.

        NOTE(review): always returns None and ``lyrics`` is never populated;
        the commented-out caller in preprocessing_df expects two columns of
        values — this method is unfinished.
        '''
        # empty list to store parsed lyrics
        lyrics = []
        # parsing lyrics one by one
        for stanza in fetched_lyrics.split('\n\n'):
            # create TextBlob object of passed stanza text
            analysis = TextBlob(self.clean_stanza(stanza), analyzer=NaiveBayesAnalyzer())
            print(analysis.sentiment)
        # return parsed lyrics
        return
    def run_ml_classifier(self):
        # Full pipeline: featurize training folders, fit Naive Bayes,
        # featurize test folders, predict, and print a mood-per-file summary.
        self.preprocessing_df(self.train)
        # self.features_selection()
        self.train_model_df()
        # train model
        self.clf = MultinomialNB()
        self.clf.fit(self.train_df, self.df['mood'])
        self.preprocessing_df(self.test)
        self.test_model_df()
        self.predict()
        # additional task to print the dataframe
        self.df = self.df.drop([self.LYRICS, self.HYPERNYMS], axis=1)
        self.df[self.MOOD] = self.result
        print(self.df)
        return pd.Series(self.result)
def main():
    """Train on the 'Sadness' and 'Anger' folders; predict 'MoodPredict'."""
    # creating object of SongSentiment Class
    song = SongSentiment(['Sadness', 'Anger'], ['MoodPredict'])
    song.run_ml_classifier()
if __name__ == "__main__":
    # calling main function
    main()
| 0aa35d31c4464f00714fa0c848d5b1f0d99b50bf | [
"Markdown",
"Python"
] | 4 | Python | abhayycs/LyricsMood | 40b990cecfe487f29db016d696eafe74175da3ac | 9e28adf4823d9596749ca3b14e1340c907133642 |
HEAD | <repo_name>shusharin/mvc<file_sep>/controllers/error.php
<?php
/**
 * Fallback controller used when routing cannot resolve the requested
 * controller; shows the "file does not exist" error page.
 */
class Error extends Core {
    function __construct() {
        parent::__construct();
        //echo 'This is the error';
        $this->view->msg = 'File does not exist';
        $this->view->render('error/layout');
    }
}
<file_sep>/models/about_model.php
<?php
/**
 * Model backing the About controller; currently only prints a marker.
 */
class About_Model extends Model {
    function __construct() {
        echo 'about model';
    }
}
<file_sep>/controllers/about.php
<?php
/**
 * "About" page controller.
 */
class About {
    function __construct() {
        echo 'We are in About!<br>';
    }
    /**
     * Secondary action: prints a marker and instantiates the About model.
     */
    public function other() {
        echo 'function other! <br>';
        // Fix: require_once prevents a fatal "cannot redeclare class
        // About_Model" error if this action runs more than once per request.
        require_once 'models/about_model.php';
        $model = new About_Model();
    }
}<file_sep>/libs/Core.php
<?php
/**
 * Base controller: wires up the View object shared by all controllers.
 */
class Core {
    function __construct() {
        //echo 'Core<br>';
        $this->view = new View();
    }
}<file_sep>/controllers/welcome.php
<?php
/**
 * Default controller: greets the visitor and renders the welcome layout.
 */
class Welcome extends Core {
    function __construct() {
        parent::__construct();
        echo 'This Welcome!';
        $this->view->render('welcome/layout');
    }
}<file_sep>/libs/Model.php
<?php
/**
 * Base model class; concrete models (e.g. About_Model) extend this.
 */
class Model {
    function __construct() {
        echo 'This is the Model';
    }
} | 6c9b2b78f825e9148fa4b9105115d9b21f5dcdc0 | [
"PHP"
] | 6 | PHP | shusharin/mvc | 63e427857e026c922bfc4e8cb67578351d181b35 | 2ad0ee52600adf8c7524d3029e1ee39534718709 |
refs/heads/main | <repo_name>JR7Z/WebPortJR7<file_sep>/assignment6/assignment6/login.js
window.onload = loginLoad;

// Wire up the login form's submit handler once the page has loaded.
function loginLoad()
{
    const loginForm = document.getElementById("myLogin");
    loginForm.onsubmit = checkLogin;
}
// Submit handler: compares the typed credentials against the username and
// password carried in the current page's query string.  Returning false on a
// mismatch cancels the form submission.
function checkLogin()
{
    const params = new URLSearchParams(window.location.search);
    const expectedUser = params.get('username');
    const expectedPass = params.get('password');
    const form = document.forms['myLogin'];
    const typedUser = form['username'].value;
    const typedPass = form['password'].value;
    if (expectedUser == typedUser && expectedPass == typedPass) {
        alert("Welcome")
    } else {
        alert("Username or Password ไม่ถูกต้องนะจ้ะ")
        return false;
    }
}
<file_sep>/java/80+90.js
// Render the result of 80 + 90 (i.e. 170) into the #name1 element.
function name()
{
    // Fix: the result was previously captured in an unused local variable
    // that shadowed the function's own name.
    document.getElementById("name1").innerHTML = 80 + 90;
}
window.onload=name; | 528374db0e2c0796b5bc56e9358cfa27c17ce7a3 | [
"JavaScript"
] | 2 | JavaScript | JR7Z/WebPortJR7 | 0dbb8c0bd0a32dea3abd869b768eaa4c8b9b102f | cafa6ba664a22fbd3ac2556dc84253e3256d5973 |
refs/heads/master | <repo_name>essamyousry/MyFIT<file_sep>/app/src/main/java/com/myfit/brownies/myfit/CustomAdapter.java
package com.myfit.brownies.myfit;
import android.content.Context;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.TextView;
/**
 * ListView adapter showing one row per logged {@link Food} item: the food's
 * name, its calorie count truncated to an integer, and a delete button that
 * removes the row from both the database and the in-memory list.
 */
public class CustomAdapter extends BaseAdapter {
    // Backing collection of logged foods (project-specific container type).
    private FoodStructure<Food> items;
    private Context context;
    //public constructor
    public CustomAdapter(Context context, FoodStructure<Food> items) {
        this.context = context;
        this.items = items;
    }
    // Swap in a new backing collection.  NOTE(review): callers must still
    // invoke notifyDataSetChanged() for the ListView to refresh.
    public void setListData(FoodStructure<Food> data){
        items = data;
    }
    @Override
    public int getCount() {
        return items.size(); //returns total of items in the list
    }
    @Override
    public Food getItem(int position) {
        // dumpData presumably returns the element at 'position' — confirm in
        // FoodStructure.
        return items.dumpData(position);
    }
    @Override
    public long getItemId(int position) {
        return position;
    }
    /** Remove the item displayed at 'position' from the backing collection. */
    public void remove(int position){
        items.remove(getItem(position));
    }
    @Override
    public View getView(final int position, View convertView, ViewGroup parent) {
        // inflate the layout for each list row
        if (convertView == null) {
            convertView = LayoutInflater.from(context).
                    inflate(R.layout.list_item, parent, false);
        }
        // get the TextView for item name and item description
        TextView textViewItemName = (TextView)
                convertView.findViewById(R.id.name);
        TextView textViewItemDescription = (TextView)
                convertView.findViewById(R.id.calories);
        //sets the text for item name and item description from the current item object
        textViewItemName.setText(getItem(position).getIName());
        textViewItemDescription.setText(Integer.toString(((int) Double.parseDouble(getItem(position).getCalories()))));
        Button deleteBtn = (Button) convertView.findViewById(R.id.delete_btn);
        deleteBtn.setOnClickListener(new View.OnClickListener(){
            @Override
            public void onClick(View v) {
                // Delete from the database first, then drop the row from the
                // in-memory list and redraw.
                FoodDatabase foodDatabaseHelper = new FoodDatabase(context);
                foodDatabaseHelper.deleteFood(getItem(position).getID());
                Log.d("CUSTOMADAPTER POSITION", Integer.toString(position));
                Log.d("CUSTOMADAPTER DATABASE", Integer.toString(getItem(position).getID()));
                remove(position);
                notifyDataSetChanged();
            }
        });
        // returns the view for the current row
        return convertView;
    }
}
<file_sep>/app/src/main/java/com/myfit/brownies/myfit/FoodLog.java
package com.myfit.brownies.myfit;
import android.content.ActivityNotFoundException;
import android.content.Intent;
import android.os.AsyncTask;
import android.os.Bundle;
import android.speech.RecognizerIntent;
import android.support.design.widget.TextInputEditText;
import android.support.design.widget.TextInputLayout;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.Locale;
public class FoodLog extends DashBoardActivity {
TextInputEditText textInputEditTextFood;
TextInputLayout textInputLayoutFood;
TextView ViewDay;
Button GTP;
Button btn_Check;
Button btn_Scan;
ImageButton btnSpeak;
public static CustomAdapter adapter;
ListView lv;
String trial;
Calendar calendar;
static int capacity = 7;
public static FoodStructure<Food> FoodArray = new FoodStructure<>(capacity);
FoodDatabase foodDatabaseHelper = new FoodDatabase(FoodLog.this);
GetFoodTest TestFood;
GetFoodUPC FoodUPC;
public static int getCalories() {
int sum = 0;
for (int i = 0; i < FoodArray.size(); i++) {
sum = sum + (int) Double.parseDouble(FoodArray.get(i).getCalories());
}
return sum;
}
public static int getProteinTotal() {
int sum = 0;
for (int i = 0; i < FoodArray.size(); i++) {
sum = sum + (int) Double.parseDouble(FoodArray.get(i).getProtein());
}
return sum;
}
public static int getCarbsTotal() {
int sum = 0;
for (int i = 0; i < FoodArray.size(); i++) {
sum = sum + (int) Double.parseDouble(FoodArray.get(i).getCarbs());
}
return sum;
}
public static int getFatsTotal() {
int sum = 0;
for (int i = 0; i < FoodArray.size(); i++) {
sum = sum + (int) Double.parseDouble(FoodArray.get(i).getFats());
}
return sum;
}
private final int REQ_CODE_SPEECH_INPUT = 100;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.food_diary);
lv = (ListView) findViewById(R.id.list);
textInputLayoutFood = (TextInputLayout) findViewById(R.id.textInputLayoutFood);
textInputEditTextFood = (TextInputEditText) findViewById(R.id.textInputEditTextFood);
ViewDay = (TextView) findViewById(R.id.ViewDay);
btnSpeak = (ImageButton) findViewById(R.id.Speak);
btn_Check = (Button) findViewById(R.id.btn_CheckFood);
btn_Scan = (Button) findViewById(R.id.btn_ScanFood);
calendar = Calendar.getInstance();
String Day = getCurrentDay();
ViewDay.setText(Day);
GTP = (Button) findViewById(R.id.btn_GTP);
GTP.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent(FoodLog.this, Nutrition.class);
startActivity(intent);
}
});
btnSpeak.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
promptSpeechInput();
}
});
btn_Check.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
trial = textInputEditTextFood.getText().toString().trim();
if (!trial.isEmpty()) {
TestFood = new GetFoodTest();
TestFood.execute(trial);
textInputEditTextFood.setText(null);
}
}
});
btn_Scan.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
FoodUPC = new GetFoodUPC();
String r = QRActivity.getUPC();
FoodUPC.execute(r);
goToQR();
}
});
}
@Override
public void onStart() {
super.onStart();
}
@Override
public void onResume(){
super.onResume();
adapter = new CustomAdapter(FoodLog.this, FoodArray);
List<Food> foodList = foodDatabaseHelper.getAllFood(getDay());
FoodStructure<Food> newList = new FoodStructure<>();
for (int i = 0; i < foodList.size(); i++){
newList.add(foodList.get(i), getDay());
}
adapter.setListData(newList);
lv.setAdapter(adapter);
foodDatabaseHelper.close();
}
public void goToQR(){
Intent intent = new Intent(this, QRActivity.class);
startActivity(intent);
}
@Override
public void onBackPressed(){
Intent intent = new Intent(this, HomeActivity.class);
startActivity(intent);
}
private void promptSpeechInput() {
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
intent.putExtra(RecognizerIntent.EXTRA_PROMPT,
getString(R.string.speech_prompt));
try {
startActivityForResult(intent, REQ_CODE_SPEECH_INPUT);
} catch (ActivityNotFoundException a) {
Toast.makeText(getApplicationContext(),
getString(R.string.speech_not_supported),
Toast.LENGTH_SHORT).show();
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
switch (requestCode) {
case REQ_CODE_SPEECH_INPUT: {
if (resultCode == RESULT_OK && null != data) {
ArrayList<String> result = data
.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
textInputEditTextFood.setText(result.get(0));
}
break;
}
}
}
public String getCurrentDay(){
String daysArray[] = {"Sunday","Monday","Tuesday", "Wednesday","Thursday","Friday", "Saturday"};
calendar = Calendar.getInstance();
int day = calendar.get(Calendar.DAY_OF_WEEK) - 1;
return daysArray[day];
}
public int getDay(){
return calendar.get(Calendar.DAY_OF_WEEK) - 1;
}
public class GetFoodTest extends AsyncTask<String, String, Void> {
private String TAG = "";
@Override
protected void onPreExecute() {
super.onPreExecute();
}
@Override
protected Void doInBackground(String... string) {
GetHTTP sh = new GetHTTP();
String url = "https://api.nutritionix.com/v1_1/search/" + string[0] + "?results=0:1&fields=item_name,brand_name,nf_calories,nf_total_carbohydrate,nf_protein,nf_total_fat&appId=5bdd20e0&appKey=<KEY>";
// Making a request to url and getting response
String jsonStr = sh.makeCall(url);
Log.e(TAG, "Response from url: " + jsonStr);
if (jsonStr != null) {
try {
Food FoodGroup1 = new Food();
JSONObject jsonObj = new JSONObject(jsonStr);
JSONArray Food = jsonObj.getJSONArray("hits");
for (int i = 0; i < Food.length(); i++) {
JSONObject all = Food.getJSONObject(i);
JSONObject fields = all.getJSONObject("fields");
String itemName = fields.getString("item_name");
FoodGroup1.setIName(itemName);
String BrandName = fields.getString("brand_name");
FoodGroup1.setBName(BrandName);
String Calories = fields.getString("nf_calories");
FoodGroup1.setCalories(Calories);
String Protein = fields.getString("nf_protein");
FoodGroup1.setProtein(Protein);
String Carbs = fields.getString("nf_total_carbohydrate");
FoodGroup1.setCarbs(Carbs);
String Fats = fields.getString("nf_total_fat");
FoodGroup1.setFats(Fats);
FoodArray.add(FoodGroup1, getDay());
foodDatabaseHelper.addFood(FoodGroup1);
}
} catch (final JSONException e) {
Log.v(TAG, "Json parsing error: " + e.getMessage());
}
}
return null;
}
@Override
protected void onPostExecute(Void result) {
super.onPostExecute(result);
List<Food> foodList = foodDatabaseHelper.getAllFood(getDay());
FoodStructure<Food> newList = new FoodStructure<>();
for (int i = 0; i < foodList.size(); i++){
newList.add(foodList.get(i), getDay());
}
adapter.setListData(newList);
adapter.notifyDataSetChanged();
}
}
public class GetFoodUPC extends AsyncTask<String, String, Void> {
private String TAG = "";
@Override
protected void onPreExecute() {
super.onPreExecute();
}
@Override
protected Void doInBackground(String...string) {
GetHTTP sh = new GetHTTP();
String url = "https://api.nutritionix.com/v1_1/item?upc=" + string[0] + "&appId=5bdd20e0&appKey=<KEY>";
// Making a request to url and getting response
String jsonStr = sh.makeCall(url);
Log.e(TAG, "Response from url: " + jsonStr);
if (jsonStr != null) {
try {
Food FoodGroup2 = new Food();
JSONObject jsonObj = new JSONObject(jsonStr);
String itemName = jsonObj.getString("item_name");
FoodGroup2.setIName(itemName);
String BrandName = jsonObj.getString("brand_name");
FoodGroup2.setBName(BrandName);
String Calories = jsonObj.getString("nf_calories");
FoodGroup2.setCalories(Calories);
String Protein = jsonObj.getString("nf_protein");
FoodGroup2.setProtein(Protein);
String Carbs = jsonObj.getString("nf_total_carbohydrate");
FoodGroup2.setCarbs(Carbs);
String Fats = jsonObj.getString("nf_total_fat");
FoodGroup2.setFats(Fats);
FoodArray.add(FoodGroup2, getDay());
foodDatabaseHelper.addFood(FoodGroup2);
} catch (final JSONException e) {
Log.v(TAG, "Json parsing error: " + e.getMessage());
}
}
return null;
}
@Override
protected void onPostExecute(Void result) {
super.onPostExecute(result);
List<Food> foodList = foodDatabaseHelper.getAllFood(getDay());
FoodStructure<Food> newList = new FoodStructure<>();
for (int i = 0; i < foodList.size(); i++){
newList.add(foodList.get(i), getDay());
}
adapter.setListData(newList);
adapter.notifyDataSetChanged();
}
}
}<file_sep>/app/src/main/java/com/myfit/brownies/myfit/RegisterActivity.java
package com.myfit.brownies.myfit;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.design.widget.Snackbar;
import android.support.design.widget.TextInputEditText;
import android.support.design.widget.TextInputLayout;
import android.support.v4.widget.NestedScrollView;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.AppCompatButton;
import android.support.v7.widget.AppCompatTextView;
import android.view.View;
public class RegisterActivity extends AppCompatActivity implements View.OnClickListener {
private final AppCompatActivity activity = RegisterActivity.this;
private NestedScrollView nestedScrollView;
private TextInputLayout textInputLayoutName;
private TextInputLayout textInputLayoutEmail;
private TextInputLayout textInputLayoutPassword;
private TextInputLayout textInputLayoutConfirmPassword;
private TextInputLayout textInputLayoutWeight;
private TextInputLayout textInputLayoutActivity;
private TextInputLayout textInputLayoutHeight;
private TextInputLayout textInputLayoutAge;
private TextInputLayout textInputLayoutSex;
private TextInputLayout textInputLayoutGoal;
private TextInputEditText textInputEditTextName;
private TextInputEditText textInputEditTextEmail;
private TextInputEditText textInputEditTextPassword;
private TextInputEditText textInputEditTextConfirmPassword;
private TextInputEditText textInputEditTextWeight;
private TextInputEditText textInputEditTextActivity;
private TextInputEditText textInputEditTextHeight;
private TextInputEditText textInputEditTextAge;
private TextInputEditText textInputEditTextSex;
private TextInputEditText textInputEditTextGoal;
private AppCompatButton appCompatButtonRegister;
private AppCompatTextView appCompatTextViewLoginLink;
private InputValidation inputValidation;
private UserDatabase userDatabaseHelper;
private User user;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_register);
getSupportActionBar().hide();
initViews();
initListeners();
initObjects();
userDatabaseHelper.close();
}
private void initViews() {
nestedScrollView = (NestedScrollView) findViewById(R.id.nestedScrollView);
textInputLayoutName = (TextInputLayout) findViewById(R.id.textInputLayoutName);
textInputLayoutEmail = (TextInputLayout) findViewById(R.id.textInputLayoutEmail);
textInputLayoutPassword = (TextInputLayout) findViewById(R.id.textInputLayoutPassword);
textInputLayoutConfirmPassword = (TextInputLayout) findViewById(R.id.textInputLayoutConfirmPassword);
textInputLayoutWeight = (TextInputLayout) findViewById(R.id.textInputLayoutWeight);
textInputLayoutActivity = (TextInputLayout) findViewById(R.id.textInputLayoutActivity);
textInputLayoutHeight = (TextInputLayout) findViewById(R.id.textInputLayoutHeight);
textInputLayoutAge = (TextInputLayout) findViewById(R.id.textInputLayoutAge);
textInputLayoutSex = (TextInputLayout) findViewById(R.id.textInputLayoutSex);
textInputLayoutGoal = (TextInputLayout) findViewById(R.id.textInputLayoutGoal);
textInputEditTextName = (TextInputEditText) findViewById(R.id.textInputEditTextName);
textInputEditTextEmail = (TextInputEditText) findViewById(R.id.textInputEditTextEmail);
textInputEditTextPassword = (TextInputEditText) findViewById(R.id.textInputEditTextPassword);
textInputEditTextConfirmPassword = (TextInputEditText) findViewById(R.id.textInputEditTextConfirmPassword);
textInputEditTextWeight = (TextInputEditText) findViewById(R.id.textInputEditTextWeight);
textInputEditTextActivity = (TextInputEditText) findViewById(R.id.textInputEditTextActivity);
textInputEditTextHeight = (TextInputEditText) findViewById(R.id.textInputEditTextHeight);
textInputEditTextAge = (TextInputEditText) findViewById(R.id.textInputEditTextAge);
textInputEditTextSex = (TextInputEditText) findViewById(R.id.textInputEditTextSex);
textInputEditTextGoal = (TextInputEditText) findViewById(R.id.textInputEditTextGoal);
appCompatButtonRegister = (AppCompatButton) findViewById(R.id.appCompatButtonRegister);
appCompatTextViewLoginLink = (AppCompatTextView) findViewById(R.id.appCompatTextViewLoginLink);
}
private void initListeners() {
appCompatButtonRegister.setOnClickListener(this);
appCompatTextViewLoginLink.setOnClickListener(this);
}
private void initObjects() {
inputValidation = new InputValidation(activity);
userDatabaseHelper = new UserDatabase(activity);
user = new User();
}
private boolean postDataToSQLite() {
boolean PostSuccessful = true;
if (!inputValidation.isInputEditTextFilled(textInputEditTextName, textInputLayoutName, getString(R.string.error_message_name))) {
return false;
}
if (!inputValidation.isInputEditTextFilled(textInputEditTextEmail, textInputLayoutEmail, getString(R.string.error_message_email))) {
return false;
}
if (!inputValidation.isInputEditTextEmail(textInputEditTextEmail, textInputLayoutEmail, getString(R.string.error_message_email))) {
return false;
}
if (!inputValidation.isInputEditTextFilled(textInputEditTextPassword, textInputLayoutPassword, getString(R.string.error_message_password))) {
return false;
}
if (!inputValidation.isInputEditTextMatches(textInputEditTextPassword, textInputEditTextConfirmPassword,
textInputLayoutConfirmPassword, getString(R.string.error_password_match))) {
return false;
}
if (!userDatabaseHelper.checkUser(textInputEditTextEmail.getText().toString().trim())) {
user.setName(textInputEditTextName.getText().toString().trim());
user.setEmail(textInputEditTextEmail.getText().toString().trim());
user.setPassword(<PASSWORD>EditTextPassword.getText().toString().trim());
user.setWeight(Integer.parseInt(textInputEditTextWeight.getText().toString().trim()));
user.setActivity(textInputEditTextActivity.getText().toString().trim());
user.setHeight(Integer.parseInt(textInputEditTextHeight.getText().toString().trim()));
user.setAge(Integer.parseInt(textInputEditTextAge.getText().toString().trim()));
user.setSex(textInputEditTextSex.getText().toString().trim());
user.setGoal(textInputEditTextGoal.getText().toString().trim());
user.setBMR(user.getBMR(Integer.parseInt(textInputEditTextWeight.getText().toString().trim()), Integer.parseInt(textInputEditTextHeight.getText().toString().trim()), Integer.parseInt(textInputEditTextAge.getText().toString().trim()), textInputEditTextSex.getText().toString().trim(), textInputEditTextActivity.getText().toString().trim(), textInputEditTextGoal.getText().toString().trim()));
userDatabaseHelper.addUser(user);
// Snack Bar to show success message that record saved successfully
Snackbar.make(nestedScrollView, getString(R.string.success_message), Snackbar.LENGTH_LONG).show();
emptyInputEditText();
} else {
// Snack Bar to show error message that record already exists
Snackbar.make(nestedScrollView, getString(R.string.error_email_exists), Snackbar.LENGTH_LONG).show();
return false;
}
userDatabaseHelper.close();
return PostSuccessful;
}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.appCompatButtonRegister:
if (postDataToSQLite()) {
Intent intent = new Intent(getApplicationContext(), LoginActivity.class);
startActivity(intent);
break;
}
else break;
case R.id.appCompatTextViewLoginLink:
Intent b = new Intent(getApplicationContext(), LoginActivity.class);
startActivity(b);
break;
}
}
private void emptyInputEditText() {
textInputEditTextName.setText(null);
textInputEditTextEmail.setText(null);
textInputEditTextPassword.setText(null);
textInputEditTextConfirmPassword.setText(null);
}
}<file_sep>/README.md
# MyFIT
Android Studio (XML for front-end and Java) <br>
Nutritionix API (REST architecture, data parsed and handled in JSON) <br>
SQLite (embedded database within Android Studio to store User information and Food Logs) <br>
This is a simple demonstration of how the application works.
1. User is prompted to login <br>
2. If no account exists, user is taken to a register page where all necessary information is entered to calculate the BMR (Basal Metabollic Rate). <br>
3. User is taken to the main page where two options are shown: Progress and Food Log. <br>
4. The Food Log allows users to add items through text, speech and scan (String query and UPC code are appended to the URL before making the HTTP call to the API, METHOD: "GET"). <br>
5. The Progress section allows users to see all nutrients added together through a progressive bar as well as their daily calorie intake. <br>


<file_sep>/app/src/main/java/com/myfit/brownies/myfit/HomeActivity.java
package com.myfit.brownies.myfit;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
public class HomeActivity extends DashBoardActivity {
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.dashboard_layout);
setHeader(getString(R.string.text_title), false, true);
}
@Override
public void onBackPressed(){};
public void onButtonClicker(View v)
{
Intent intent;
switch (v.getId()) {
case R.id.btn_progress:
intent = new Intent(this, Nutrition.class);
startActivity(intent);
break;
case R.id.btn_diary:
intent = new Intent(this, FoodLog.class);
startActivity(intent);
break;
case R.id.btn_logout:
SharedPreferences sharedpreferences = getSharedPreferences(LoginActivity.MyPREFERENCES, Context.MODE_PRIVATE);
SharedPreferences.Editor editor = sharedpreferences.edit();
editor.clear();
editor.apply();
intent = new Intent(this, LoginActivity.class);
startActivity(intent);
break;
default:
break;
}
}
} | 4082412e0bf171bb27b6c37827a960f6665b0fe7 | [
"Markdown",
"Java"
] | 5 | Java | essamyousry/MyFIT | 4b6d41826d9440f5e427f93601dec4903a36ae6e | 66bf830fc7e03d53361b8580f319148f686e361f |
refs/heads/master | <file_sep>// modules
const
MongoClient = require( "mongodb" ).MongoClient;
class Database {
constructor ( dbUrl = "mongodb://localhost:27017/vi-literal-narratives" ) {
this.dbUrl = dbUrl;
this.connected = false;
this.db = null;
}
set ( word, data ) {
return new Promise( ( resolve, reject ) => {
if ( !this.connected ) {
this.connect().then( () => {
this.insert( word, data ).then( () => {
resolve();
} ).catch( reject );
} ).catch( reject );
} else {
this.insert( word, data ).then( () => {
resolve();
} ).catch( reject );
}
} );
}
exists ( word ) {
return new Promise( ( resolve ) => {
this.get( word ).then( () => {
resolve( true );
} ).catch( () => {
resolve( false );
} );
} );
}
get ( word ) {
return new Promise( ( resolve, reject ) => {
if ( !this.connected ) {
this.connect().then( () => {
this.collection.find( { word: word } ).toArray( ( error, data ) => {
if ( error !== null || !data.length ) {
reject( error );
}
resolve( data );
} );
} ).catch( reject );
} else {
this.collection.find( { word: word } ).toArray( ( error, data ) => {
if ( error !== null ) {
reject( error );
}
resolve( data );
} );
}
} );
}
insert ( word, data ) {
return new Promise( ( resolve, reject ) => {
this.collection.insertOne( {
word: word,
data: data
}, ( error ) => {
if ( error !== null ) {
reject( error );
} else {
resolve();
}
} );
} );
}
connect () {
return new Promise( ( resolve, reject ) => {
MongoClient.connect( this.dbUrl, ( error, db ) => {
if ( error !== null ) {
reject( error );
} else {
this.db = db;
this.connected = true;
this.collection = this.db.collection( "documents" );
resolve();
}
} );
} );
}
}
module.exports = Database;
new Database().exists( "leck mich" ).then( ( exists ) => {
console.log( exists );
} ).catch( ( error ) => {
console.log( "Error:", error );
} );
<file_sep>// modules
import App from "./modules/app.js";
// main variables
new App( {} );<file_sep># VI-Literal-Narratives
FH Potsdam course output…
# Requirements
- Node.js & NPM
- Homebrew (for MongoDB)<file_sep>// modules
const
DudenSearchApi = require( "../../duden-search-api" ),
Database = require( "./database.js" ),
FileSystem = require( "fs" ),
Path = require( "path" ),
NarrativeText = FileSystem.readFileSync( Path.resolve( __dirname, "text/text.txt" ), "utf8" ),
StopWords = JSON.parse( FileSystem.readFileSync( Path.resolve( __dirname, "json/stop-words.json" ), "utf8" ) );
// constants
const RegExWordSplitting = /[^a-zA-Z_öäüß]+/i;
module.exports = class TextAnalyzer {
constructor ( baseText = NarrativeText ) {
this.baseText = baseText;
this.wordList = null;
this.wordDictArray = null;
this.db = null;
this.init();
}
init () {
let splittedText;
splittedText = TextAnalyzer.splitText( this.baseText );
splittedText = TextAnalyzer.removeStopWords( splittedText );
this.wordList = splittedText;
this.wordDictArray = TextAnalyzer.generateDict( this.wordList, true );
this.wordDictArray = TextAnalyzer.removeNumbers( this.wordDictArray );
this.db = new Database();
this.dudenApi = new DudenSearchApi();
}
static generateDict ( wordList, convertToArray = false ) {
let dict = {};
for ( let word of wordList ) {
if ( !dict[ word ] ) {
dict[ word ] = 1;
} else {
dict[ word ]++;
}
}
if ( convertToArray ) {
return Object.keys( dict );
}
return dict;
}
static removeNumbers ( array ) {
let preparedArray = [];
for ( let value of array ) {
if ( !value.match( /[0-9]+/g ) ) {
preparedArray.push( value );
}
}
return preparedArray;
}
findWordsInDB ( words ) {
let wordList = words.slice( 0 ),
wordsData = [];
return new Promise( ( resolve ) => {
let doesExists = () => {
if ( wordList.length ) {
let word = wordList.pop();
this.db.exists( word ).then( ( exists ) => {
wordsData.push( exists );
doesExists();
} ).catch( ( error ) => {
wordsData.push( { exists: false, word: word } );
doesExists();
} );
} else {
resolve( wordsData );
}
};
doesExists();
} );
}
getExistingWords ( words ) {
let wordList = words.slice( 0 ),
wordsData = [];
return new Promise( ( resolve, reject ) => {
let get = () => {
if ( wordList.length ) {
let word = wordList.pop();
this.db.get( word ).then( ( data ) => {
wordsData.push( data );
get();
} ).catch( reject );
} else {
resolve( wordsData );
}
};
get();
} );
}
static cloneObj ( obj ) {
return JSON.parse( JSON.stringify( obj ) );
}
static filterDuplicates ( list ) {
let newList = [];
for ( let item of list ) {
if ( newList.indexOf( item ) === -1 ) {
newList.push( item );
}
}
return newList;
}
searchForWords ( words ) {
return new Promise( ( resolve ) => {
let wordIndex = 0;
this.dudenApi.searchWordList( words, ( data ) => {
resolve( data );
}, ( word, data, current, total, error ) => {
if ( !error && data !== null ) {
return new Promise( ( resolve, reject ) => {
this.db.set( word, data ).then( () => {
wordIndex++;
console.log( "[%s of %s] Added word: [%s]", wordIndex, words.length, word );
resolve( data );
} ).catch( reject );
} );
}
} );
} );
}
enrichWords () {
return new Promise( ( resolve, reject ) => {
let foundEntries = [],
notFoundEntries = [];
this.findWordsInDB( this.wordDictArray ).then( ( data ) => {
data.map( ( item ) => {
if ( item.exists ) {
foundEntries.push( item.word );
} else {
notFoundEntries.push( item.word );
}
} );
if ( !notFoundEntries.length ) {
this.getExistingWords( foundEntries ).then( resolve ).catch( reject );
} else {
let promises = [ this.searchForWords( notFoundEntries ) ];
if ( foundEntries.length ) {
promises.push( this.getExistingWords( foundEntries ) );
}
Promise.all( promises ).then( ( data ) => {
console.log( data );
resolve( TextAnalyzer.flattenArray( data ) );
} ).catch( ( error ) => {
reject( error );
} );
}
} ).catch( reject );
} );
}
static flattenArray ( array ) {
return array.reduce( ( flat, toFlatten ) => {
return flat.concat( Array.isArray( toFlatten ) ? TextAnalyzer.flattenArray( toFlatten ) : toFlatten );
}, [] );
}
static splitText ( text ) {
let splittedWords = text.split( RegExWordSplitting ),
preparedWords = [];
for ( let word of splittedWords ) {
if ( word.trim() !== "" ) {
preparedWords.push( word );
}
}
return preparedWords;
}
static removeStopWords ( wordList ) {
for ( let stopWord of StopWords ) {
for ( let i = 0; i < wordList.length; i++ ) {
if ( stopWord === wordList[ i ].toLowerCase() ) {
wordList.splice( i, 1 );
}
}
}
return wordList;
}
};
<file_sep>{
"name": "VI-Literal-Narratives",
"version": "1.0.0",
"description": "Visualization of narratives literals. Built with Webpack 2 + Treeshaking, ES6, Babel, SCSS & Pug Transpilers",
"main": "src/js/main.js",
"scripts": {
"postinstall": "brew update; brew install mongodb",
"start": "npm run build; npm run watch-sass & npm run watch-pug & npm run watch-webpack & npm run start-browser-sync",
"analyze": "node ./server/main.js",
"start-mongodb": "mongod --dbpath \"${PWD}/server/database/\"",
"start-browser-sync": "browser-sync start --proxy 'vi-literal-narratives.local' --files 'built/**'",
"watch-sass": "node-sass --watch ./src/scss --recursive --output-style nested --output ./built/files/css/",
"watch-webpack": "echo $PWD; webpack --watch --color --config ./webpack.config.js;",
"watch-pug": "pug ./src/pug/ --watch -O ./pug.config.json --out ./built/",
"build-pug": "pug ./src/pug/ -O ./pug.config.json --out ./built/",
"build-postcss": "postcss --use autoprefixer -o built/files/css/*.css built/files/css/*.css",
"build-sass": "node-sass ./src/scss --output-style compressed --output ./built/files/css/",
"test-plugins": "CURRENT_PROJECT=$(pwd); cd ../../Sites/plugins.coderwelsch.com/; npm run test; cd $CURRENT_PROJECT;",
"build": "npm run test-plugins; npm run build-pug; npm run build-sass; npm run build-postcss; webpack --config ./webpack.config.js -p; echo; node ../../Sites/deploy-check.js"
},
"author": "<NAME>, Coderwelsch - Coding & Design",
"license": "MIT",
"devDependencies": {
"autoprefixer": "^6.3.6",
"babel-core": "^6.24.1",
"babel-loader": "^6.4.1",
"babel-polyfill": "^6.16.0",
"babel-preset-env": "^1.4.0",
"babel-preset-es2015": "^6.6.0",
"browser-sync": "^2.18.6",
"expose-loader": "^0.7.1",
"imports-loader": "^0.6.5",
"node-sass": "^3.13.1",
"nodemon": "^1.9.2",
"postcss": "^5.2.11",
"postcss-cli": "^2.5.2",
"pug": "^2.0.0-beta6",
"pug-cli": "^1.0.0-alpha6",
"raw-loader": "^0.5.1",
"webpack": "^1.15.0"
},
"dependencies": {
"duden-search-api": "^1.0.0",
"lowdb": "^0.16.2",
"mongodb": "^2.2.26",
"require-json": "0.0.1",
"require-text": "0.0.1"
}
}
<file_sep>// imports
import $ from "../../../../../Sites/plugins.coderwelsch.com/js/es6/com.coderwelsch.Query.js";
import Data from "../../json/text-data.json";
export default class App {
constructor () {
this.settings = {
classes: {
},
selectors: {
content: "#content"
}
};
// module variables
this.selectors = this.settings.selectors;
this.classes = this.settings.classes;
this.$text = new $( this.selectors.content );
// init
this.initText();
}
initText () {
this.createNodes();
}
createNodes () {
let html = Data.text,
wordDef;
html = html.replace( /([^\W]|[äöü\-_])+/gi, ( string ) => {
for ( let word of Data.splittedWords ) {
if ( string === word ) {
wordDef = this.getWordDef( word );
return `<def data-type="${ wordDef.wordProperties ? wordDef.wordProperties.type : "none" }">${ word }</def>`;
}
}
return string;
} );
this.$text.html( html );
}
getWordDef ( word ) {
let data = Data.wordDefs[ word ].data.data;
return data;
}
}
| bc06397ebae3d310fc7782324539f08e35ee73c6 | [
"JavaScript",
"JSON",
"Markdown"
] | 6 | JavaScript | Coderwelsch/VI-Literal-Narratives | 3e1daca6d4209ecb8e359af7cbf928f5a4d597a6 | dad04acd13b925e32fbb401aea07f8c18a44b9f6 |
refs/heads/master | <repo_name>rhassmus/dados<file_sep>/README.md
# dados
#<NAME>
<file_sep>/src/ejercicio/Dado.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package ejercicio;
/**
*
* @author Estudiantes
*/
public class Dado {
int lado;
public void Lado(){
lado = (int)(Math.random()*5+1);
}
}
| 186f6c6a005f1c5ef446946cd8b4ac8451eb7bf5 | [
"Markdown",
"Java"
] | 2 | Markdown | rhassmus/dados | 6481fa74a1be8c79c403f9b14cce3d77280d8ab5 | bce9089633dea4af7c270a0b997be0a0f3d08169 |
refs/heads/master | <repo_name>uros-5/fasterTranslateForSchool<file_sep>/kopiranje.py
import pyperclip,webbrowser,cyrtranslit,time,os
#print(os.getcwd())
while(True):
try:
n = int(input(">>>"))
break
except:
temp_a = pyperclip.paste()
a = pyperclip.paste()
a = a.replace("\r\n"," ")
webbrowser.open("https://translate.google.rs/#view=home&op=translate&sl=en&tl=sr&text="+str(a))
#pyperclip.copy(a)
while(True):
time.sleep(1)
if(temp_a != pyperclip.paste()):
a = pyperclip.paste()
a = a.replace("\r\n"," ")
a = cyrtranslit.to_latin(a)
pyperclip.copy(a)
if("dokk.txt" not in os.listdir(".")):
fajl = open("dokk.txt","w",encoding="utf-8")
fajl.write(pyperclip.paste()+"\n")
fajl.close()
else:
fajl = open("dokk.txt","a+",encoding="utf-8")
fajl.write(pyperclip.paste()+"\n")
fajl.close()
break
continue
| 5fe73e835fe57d0a3e23487f74bdad9776a9a1a2 | [
"Python"
] | 1 | Python | uros-5/fasterTranslateForSchool | 23372956f38e04bdda896ca9bde4d3cf0b216a67 | 72302ea5693570132592984bb750485fe6ee9899 |
refs/heads/main | <file_sep><?php
print_r($_GET);
// year
$yearNow = date("Y");
// $monthDate = date("m");
if( isset($_GET['month'] ) ) {
$monthDate = $_GET['month'];
}else {
$monthDate = date("n");
}
if( isset($_GET['years'])) {
$yearNow = $_GET['years'];
}else {
$yearNow = date("Y");
}
// Détermine si une variable est déclarée et est différente de null
$month = array(
1 =>"Janvier",
"Février",
"Mars",
"Avril",
"Mai",
"Juin",
"Juillet",
"Août",
"Septembre",
"Octobre",
"Novembre",
"Décembre"
);
$dayWeek = array(
1 => "Lundi",
"Mardi",
"Mercredi",
"Jeudi",
"vendredi",
"Samedi",
"Dimanche");
// Mois et année récupération
$yearsDate = date("Y");
// récupération du nombre de jour dans le mois actuel
$dayNumber = cal_days_in_month(CAL_GREGORIAN, $monthDate, $yearsDate);
//récupération jour actuel
$day = date("j");
?>
<!DOCTYPE html>
<html lang="fr">
<head>
    <meta charset="UTF-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Calendrier</title>
    <link rel="stylesheet" href="assets/style.css">
</head>
<body>
    <div class="grid-container">
        <!-- Header: name of the displayed month -->
        <div class="date">
            <?php echo $month[$monthDate] ;?>
        </div>
        <!-- Navigation arrows -->
        <div class="flecheD">
            <img src="https://img.icons8.com/flat-round/64/000000/arrow-right.png"/>
        </div>
        <div class="flecheG">
            <img src="https://img.icons8.com/flat-round/64/000000/arrow-left.png"/>
        </div>
        <!-- Month / year selection form, submitted via GET -->
        <form action="" method="get">
            <div class="month">
                <label for="monthNumber"></label>
                <select name="month" id="monthNumber">
                    <!-- One option per month of the $month lookup table -->
                    <?php foreach ($month as $index => $name) : ?>
                        <option value="<?php echo $index ?>"><?php echo $name ?></option>
                    <?php endforeach;?>
                </select>
            </div>
            <!-- Year: one century back and forward from the current year.
                 Fixed: the loop previously read $yearNow, which is never
                 defined; it now uses $yearsDate computed above. The stray
                 ";" that was rendered after each option was also removed. -->
            <div class="years">
                <label for="yearsNumber"></label>
                <select name="years" id="yearsNumber">
                    <?php for ($year = $yearsDate - 100; $year <= $yearsDate + 100; $year++) : ?>
                        <option value="<?php echo $year ?>"> <?php echo $year ?></option>
                    <?php endfor; ?>
                </select>
            </div>
            <div class="valide">
                <input type="submit" class="myButton" value="Validé">
            </div>
        </form>
        <!-- Weekday headers; the lower-cased label doubles as the CSS class -->
        <?php foreach ($dayWeek as $index => $name) : ?>
            <div class="<?php echo strtolower($name) ?>">
                <?php echo $name ?>
            </div>
        <?php endforeach;?>
        <!-- Day cells. Fixed: the loop previously ran to a hard-coded 30,
             so 28/29/31-day months rendered incorrectly; it now uses
             $dayNumber from cal_days_in_month(). -->
        <?php for ($day = 1; $day <= $dayNumber; $day++) : ?>
            <div class="<?php echo strtolower($day) ?>">
                <?php echo $day ?>
            </div>
        <?php endfor;?>
    </div>
</body>
</html> | b3888db47ba40777ebdc2281272fc367d0db7b8f | [
"PHP"
] | 1 | PHP | WillemPreterre/CalendrierPhp | 34d786b0a2fef16297e188ce2b8733756999b88c | 5b5ff19e6615b22610ebe04fee986f08b05bb825 |
refs/heads/master | <file_sep>/**
* Created by jianzhiqiang on 2017/5/11.
*/
"use strict";
var models = require('../../server/models');
var request = require('request');
var assert = require('assert');
var nconf = require('nconf');
var fs = require('fs');
describe('controllers', function () {
before(function (done) {
models.drop().then(function () {
models.sync().then(function () {
done();
});
});
});
/* after(function (done) {
models.drop().then(function () {
done();
});
});*/
var baseUrl = 'http://127.0.0.1:9688';
var cid, csid, mid, rid, sid;
describe('#customerSuccess', function () {
describe('POST /register', function () {
it('should response with json', function (done) {
request.post({
url: baseUrl + '/register',
form: {email: '<EMAIL>', passwd: '<PASSWORD>'}
}, function (err, res) {
assert.ifError(err);
var cs = JSON.parse(res.body);
assert.equal(cs.code, 200);
csid = cs.msg.csid;
done();
});
});
});
describe('POST /login', function () {
it('should response with json', function (done) {
request.post({
url: baseUrl + '/login',
form: {email: '<EMAIL>', passwd: '<PASSWORD>'}
}, function (err, res) {
assert.ifError(err);
var cs = JSON.parse(res.body);
assert.equal(cs.code, 200);
done();
});
});
});
describe('POST /logout', function () {
it('should response with json', function (done) {
request.post({
url: baseUrl + '/logout',
form: {email: '<EMAIL>', passwd: '<PASSWORD>'}
}, function (err, res) {
assert.ifError(err);
var cs = JSON.parse(res.body);
assert.equal(cs.code, 200);
done();
});
});
});
describe('PATCH /customersuccesses/:csid', function () {
it('should response with json', function (done) {
request.patch({
url: baseUrl + '/customersuccesses/' + csid,
form: {name: 'suse'}
}, function (err, res) {
assert.ifError(err);
var cs = JSON.parse(res.body);
assert.equal(cs.code, 200);
done();
});
});
});
describe('POST /customersuccesses/:csid/avatar', function () {
it('should response with success', function (done) {
var formData = {
// Pass data via Streams
avatars: fs.createReadStream(__dirname + '/../../client/static/images/contact.png')
};
request.post({
url: baseUrl + '/customersuccesses/' + csid + '/avatar',
formData: formData
}, function (err, res) {
assert.ifError(err);
var customer = JSON.parse(res.body);
assert.equal(customer.code, 200);
done();
});
});
});
describe('GET /customersuccesses/:csid/avatar', function () {
it('should response with success', function (done) {
request.get(baseUrl + '/customersuccesses/' + csid + '/avatar', function (err, res) {
assert.ifError(err);
var customersuccess = JSON.parse(res.body);
assert.equal(customersuccess.code, 200);
done();
});
});
});
describe('PUT /customersuccesses/:csid/passwd', function () {
it('should response with success', function (done) {
request.put({
url: baseUrl + '/customersuccesses/' + csid + '/passwd',
form: {passwd: '<PASSWORD>'}
}, function (err, res) {
assert.ifError(err);
var customersuccess = JSON.parse(res.body);
assert.equal(customersuccess.code, 200);
done();
});
});
});
describe('DELETE /customersuccesses/:csid', function () {
it('should response with json', function (done) {
request.delete({
url: baseUrl + '/customersuccesses/' + csid
}, function (err, res) {
assert.ifError(err);
var cs = JSON.parse(res.body);
assert.equal(cs.code, 200);
done();
});
});
});
});
describe('#customerSession', function () {
describe('POST /customers/', function () {
it('should response with success', function (done) {
request.post({
url: baseUrl + '/customers',
form: {url: 'uuchat.com'}
}, function (err, res) {
assert.ifError(err);
var customer = JSON.parse(res.body);
assert.equal(customer.code, 200);
cid = customer.msg.cid;
done();
});
});
});
describe('GET /customers/:uuid', function () {
it('should response with rate data', function (done) {
request.get(baseUrl + '/customers/' + cid, function (err, res) {
assert.ifError(err);
var data = JSON.parse(res.body);
assert.equal(data.code, 200);
done();
});
});
});
describe('GET /customers/cid/:cid', function () {
it('should response with success', function (done) {
request.get(baseUrl + '/customers/cid/' + cid, function (err, res) {
assert.ifError(err);
var customer = JSON.parse(res.body);
assert.equal(customer.code, 200);
done();
});
});
});
describe('PATCH /customers/:uuid', function () {
it('should response with success', function (done) {
request.patch({
url: baseUrl + '/customers/' + cid,
form: {name: 'customer', ip: '127.0.0.1'}
}, function (err, res) {
assert.ifError(err);
var customer = JSON.parse(res.body);
assert.equal(customer.code, 200);
done();
});
});
});
describe('PATCH /customers/cid/:cid', function () {
it('should response with success', function (done) {
request.patch({
url: baseUrl + '/customers/cid/' + cid,
form: {}
}, function (err, res) {
assert.ifError(err);
var customer = JSON.parse(res.body);
assert.equal(customer.code, 200);
done();
});
});
});
describe('DELETE /customers/:uuid', function () {
it('should response with rate data', function (done) {
request.delete(baseUrl + '/customers/' + cid, function (err, res) {
assert.ifError(err);
var data = JSON.parse(res.body);
assert.equal(data.code, 200);
done();
});
});
});
});
describe('#message', function () {
describe('POST /messages/customer/:cid/cs/customer:csid', function () {
it('should response with success', function (done) {
request.post({
url: baseUrl + '/messages/customer/' + cid + '/cs/' + csid,
form: {message: 'hello'}
}, function (err, res) {
assert.ifError(err);
var message = JSON.parse(res.body);
assert.equal(message.code, 200);
mid = message.msg.uuid;
done();
});
});
});
describe('GET /messages/customer/:cid/cs/customer:csid', function () {
it('should response with object list', function (done) {
request.get(baseUrl + '/messages/customer/' + cid + '/cs/' + csid, function (err, res) {
assert.ifError(err);
var message = JSON.parse(res.body);
assert.equal(message.code, 200);
done();
});
});
});
describe('POST /messages/customer/:cid/cs/:csid/image', function () {
it('should response with success', function (done) {
var formData = {
// Pass data via Streams
image: fs.createReadStream(__dirname + '/../../client/static/images/contact.png')
};
request.post({
url: baseUrl + '/messages/customer/' + cid + '/cs/' + csid + '/image',
formData: formData
}, function (err, res) {
assert.ifError(err);
var message = JSON.parse(res.body);
assert.equal(message.code, 200);
done();
});
});
});
describe('GET /messages/:uuid', function () {
it('should response with message data', function (done) {
request.get(baseUrl + '/messages/' + mid, function (err, res) {
assert.ifError(err);
var data = JSON.parse(res.body);
assert.equal(data.code, 200);
done();
});
});
});
describe('GET /messages/customer/:cid', function () {
it('should response with message data', function (done) {
request.get(baseUrl + '/messages/customer/' + cid, function (err, res) {
assert.ifError(err);
var data = JSON.parse(res.body);
assert.equal(data.code, 200);
done();
});
});
});
describe('DELETE /messages/:uuid', function () {
it('should response with message data', function (done) {
request.delete(baseUrl + '/messages/' + mid, function (err, res) {
assert.ifError(err);
var data = JSON.parse(res.body);
assert.equal(data.code, 200);
done();
});
});
});
});
describe('#rate', function () {
describe('POST /rates/', function () {
it('should response with success', function (done) {
request.post({
url: baseUrl + '/rates',
form: {cid: cid, csid: csid, rate: 80}
}, function (err, res) {
assert.ifError(err);
var rate = JSON.parse(res.body);
assert.equal(rate.code, 200);
rid = rate.msg.uuid;
done();
});
});
});
describe('GET /rates/report', function () {
it('should response with rate data', function (done) {
request.get(baseUrl + '/rates/report', function (err, res) {
assert.ifError(err);
var data = JSON.parse(res.body);
assert.equal(data.code, 200);
//console.log(JSON.stringify(data.msg));
done();
});
});
});
describe('GET /rates/:uuid', function () {
it('should response with rate data', function (done) {
request.get(baseUrl + '/rates/' + rid, function (err, res) {
assert.ifError(err);
var data = JSON.parse(res.body);
assert.equal(data.code, 200);
done();
});
});
});
describe('GET /rates/customer/:cid', function () {
it('should response with rate data', function (done) {
request.get(baseUrl + '/rates/customer/' + cid, function (err, res) {
assert.ifError(err);
var data = JSON.parse(res.body);
done();
});
});
});
describe('GET /rates/customersuccess/:csid', function () {
it('should response with rate data', function (done) {
request.get(baseUrl + '/rates/customersuccess/' + csid, function (err, res) {
assert.ifError(err);
var data = JSON.parse(res.body);
assert.equal(data.code, 200);
done();
});
});
});
describe('PATCH /rates/:uuid', function () {
it('should response with success info', function (done) {
request.patch({
url: baseUrl + '/rates/' + rid,
form: {rate: 90}
}, function (err, res) {
assert.ifError(err);
var data = JSON.parse(res.body);
done();
});
});
});
describe('DELETE /rates/:uuid', function () {
it('should response with rate data', function (done) {
request.delete(baseUrl + '/rates/' + rid, function (err, res) {
assert.ifError(err);
var data = JSON.parse(res.body);
assert.equal(data.code, 200);
done();
});
});
});
});
describe('#offline', function () {
describe('POST /offlines', function () {
it('should response with success', function (done) {
request.post({
url: baseUrl + '/offlines',
form: {name: 'jian', email: '<EMAIL>', content: 'test'}
}, function (err, res) {
assert.ifError(err);
var rate = JSON.parse(res.body);
assert.equal(rate.code, 200);
rid = rate.msg.uuid;
done();
});
});
});
});
describe('#chatHistory', function () {
describe('POST /chathistories/cs/:csid/customer/:cid', function () {
it('should response with success', function (done) {
request.post({
url: baseUrl + '/chathistories/cs/' + csid + '/customer/' + cid,
form: {}
}, function (err, res) {
assert.ifError(err);
var rate = JSON.parse(res.body);
assert.equal(rate.code, 200);
rid = rate.msg.uuid;
done();
});
});
});
describe('GET /chathistories/cs/:csid', function () {
it('should response with list', function (done) {
request.get(baseUrl + '/chathistories/cs/' + csid, function (err, res) {
assert.ifError(err);
var data = JSON.parse(res.body);
assert.equal(data.code, 200);
done();
});
});
});
describe('GET /chathistories/cs/:csid/latestmonth', function () {
it('should response with message data', function (done) {
request.get(baseUrl + '/chathistories/cs/' + csid + '/latestmonth', function (err, res) {
assert.ifError(err);
var data = JSON.parse(res.body);
assert.equal(data.code, 200);
done();
});
});
});
});
describe('#shortcut', function () {
describe('POST /shortcuts', function () {
it('should response with success', function (done) {
request.post({
url: baseUrl + '/shortcuts',
form: {
shortcuts:'hello',
message:'Hello, welcome to uuchat.What is your problem?'
}
}, function (err, res) {
assert.ifError(err);
var shortcut = JSON.parse(res.body);
assert.equal(shortcut.code, 200);
sid = shortcut.msg.uuid;
done();
});
});
});
describe('POST /shortcuts/cs/:csid', function () {
it('should response with success', function (done) {
request.post({
url: baseUrl + '/shortcuts/cs/' + csid,
form: {
shortcuts:'address',
message:'Please send to shenzhen nanshan software park 4B#2F'
}
}, function (err, res) {
assert.ifError(err);
var shortcut = JSON.parse(res.body);
assert.equal(shortcut.code, 200);
done();
});
});
});
describe('GET /shortcuts', function () {
it('should response with list', function (done) {
request.get(baseUrl + '/shortcuts', function (err, res) {
assert.ifError(err);
var data = JSON.parse(res.body);
assert.equal(data.code, 200);
done();
});
});
});
describe('GET /shortcuts/cs/:csid', function () {
it('should response with list', function (done) {
request.get(baseUrl + '/shortcuts/cs/' + csid, function (err, res) {
assert.ifError(err);
var data = JSON.parse(res.body);
assert.equal(data.code, 200);
done();
});
});
});
});
describe('GET /shortcuts/cs/:csid/all', function () {
it('should response with list', function (done) {
request.get(baseUrl + '/shortcuts/cs/' + csid + '/all', function (err, res) {
assert.ifError(err);
var data = JSON.parse(res.body);
assert.equal(data.code, 200);
done();
});
});
});
describe('PATCH /shortcuts/cs/:csid', function () {
it('should response with success', function (done) {
request.patch({
url: baseUrl + '/shortcuts/' + sid,
form: {
shortcuts:'address',
message:'Please send to shenzhen nanshan software park 1B'
}
}, function (err, res) {
assert.ifError(err);
var shortcut = JSON.parse(res.body);
assert.equal(shortcut.code, 200);
done();
});
});
});
describe('DELETE /shortcuts', function () {
it('should response with success', function (done) {
request.delete(baseUrl + '/shortcuts/' + sid, function (err, res) {
assert.ifError(err);
var data = JSON.parse(res.body);
assert.equal(data.code, 200);
done();
});
});
});
});<file_sep>'use strict';
var fs = require("fs");
var path = require("path");
var _ = require("lodash");
var Sequelize = require("sequelize");
var databaseConfig = require('../../config.json').database;
// Build the Sequelize connection. A DATABASE_URL environment variable
// (e.g. on Heroku) wins over the credentials from config.json; in both
// cases the config object supplies the remaining options.
var sequelize = process.env.DATABASE_URL
    ? new Sequelize(process.env.DATABASE_URL, databaseConfig)
    : new Sequelize(databaseConfig.database, databaseConfig.username,
        databaseConfig.password, databaseConfig);

// Registry of loaded models, keyed by model name.
var db = {};
// Load every model definition file in this directory — skipping hidden
// files and this index file itself — register each model on `db` under
// its model name, then wire up the associations each model declares.
var files = fs.readdirSync(__dirname);
for (var i = 0; i < files.length; i++) {
    var file = files[i];
    if (file.indexOf(".") === 0 || file === "index.js") {
        continue;
    }
    var model = sequelize.import(path.join(__dirname, file));
    db[model.name] = model;
}

_.keys(db).forEach(function (name) {
    if ("associate" in db[name]) {
        db[name].associate(db);
    }
});
/**
 * Sync all loaded models to the DB, i.e. create the tables.
 * @param {Object} [options] forwarded to sequelize.sync()
 * @returns {Promise}
 */
function sync(options){
    return sequelize.sync(options);
}

/**
 * Drop the tables represented by the loaded models.
 * @param {Object} [options] forwarded to sequelize.drop()
 * @returns {Promise}
 */
function drop(options){
    return sequelize.drop(options);
}

db.sequelize = sequelize;
db.Sequelize = Sequelize;
db.sync = sync;
db.drop = drop;

//if (global.env === 'production') {
// bug: It will not create correct column when column has a alias name .
// db.sync({ alter: true });
// Create any missing tables at startup. The rejection handler is new:
// a failed sync previously surfaced as an unhandled promise rejection.
db.sync().catch(function (err) {
    console.error('Initial model sync failed:', err);
});
//}
module.exports = db;<file_sep>"use strict";
var nconf = require('nconf');
var _ = require('lodash');
var async = require('async');
var logger = require('../logger');
var utils = require('../utils');
var CustomerSession = require('../database/customerSession');
var customerSessionController = module.exports;
// Look up a single customer session by its primary key (uuid).
customerSessionController.get = function (req, res, next) {
    var onFound = function (err, session) {
        if (err) {
            return next(err);
        }
        res.json({code: 200, msg: session});
    };
    CustomerSession.findById(req.params.uuid, onFound);
};
// Look up the session for a given customer id (cid) rather than by uuid.
customerSessionController.query = function (req, res, next) {
    CustomerSession.findOne({cid: req.params.cid}, function (err, session) {
        if (err) {
            return next(err);
        }
        res.json({code: 200, msg: session});
    });
};
// Create a session record snapshotting the visitor at first contact.
customerSessionController.create = function (req, res, next) {
    var body = req.body;
    // systemName keeps its historical fallback value of 'win10'.
    var record = {
        cid: body.cid,
        ip: body.ip,
        name: body.name,
        email: body.email,
        photo: body.photo,
        browser: body.browser,
        systemName: body.systemName || 'win10',
        version: body.version,
        platform: body.platform,
        os: body.os,
        device: body.device,
        url: body.url
    };
    CustomerSession.insert(record, function (err, created) {
        if (err) {
            return next(err);
        }
        res.json({code: 200, msg: created});
    });
};
// Update session fields; the route may address the session by uuid or cid.
customerSessionController.update = function (req, res, next) {
    var body = req.body;
    var fields = {
        ip: body.ip,
        name: body.name,
        email: body.email,
        browser: body.browser,
        systemName: body.systemName,
        version: body.version,
        platform: body.platform,
        os: body.os,
        url: body.url
    };
    // device is only written when the client actually sent one.
    if (body.device) {
        fields.device = body.device;
    }
    var condition = {};
    if (req.params.uuid) {
        condition.uuid = req.params.uuid;
    }
    if (req.params.cid) {
        condition.cid = req.params.cid;
    }
    CustomerSession.update(fields, condition, function (err) {
        if (err) {
            return next(err);
        }
        res.json({code: 200, msg: 'success update'});
    });
};
// Remove a session by uuid.
customerSessionController.delete = function (req, res, next) {
    CustomerSession.delete({uuid: req.params.uuid}, function (err) {
        if (err) {
            return next(err);
        }
        res.json({code: 200, msg: 'success delete'});
    });
};
customerSessionController.checkMonthlyUploadSize = function (req, res, next) {
var condition = {cid: req.params.cid};
async.waterfall([
function (callback) {
CustomerSession.findOne(condition, callback);
},
function (customer, callback) {
if (!customer) return res.json({code: 2000, message: 'customer not found'});
var today = new Date().toDateString();
var monthlyUploadSize = 0, fileSize = req.file.size || 0;
var day = today;
if (customer.upload) {
day = customer.upload.slice(0, 10);
monthlyUploadSize = utils.parsePositiveInteger(customer.upload.slice(11));
}
// first day of month
if (today.slice(8, 10) === '01') {
if (day === today) {
monthlyUploadSize += fileSize;
} else {
monthlyUploadSize = fileSize;
}
} else {// other days of month
// same month
if (day.slice(0, 7) === today.slice(0, 7)) {
monthlyUploadSize += fileSize;
} else { //different month
monthlyUploadSize = fileSize;
}
}
if (monthlyUploadSize > nconf.get("images:monthlyMaxSize")) {
return res.json({code: 5000, msg: 'EXCEED_MONTHLY_MAX_SIZE'});
}
req.file.monthlyUploadSize = today + 'D' + monthlyUploadSize;
callback(null, req.file.monthlyUploadSize);
},
function (upload, callback) {
var customer = {upload: upload};
var condition = {uuid: customer.uuid};
CustomerSession.update(customer, condition, callback);
}
], function (err, result) {
if (err) return next(err);
next();
});
};<file_sep>"use strict";
var crypto = require('crypto');
var nconf = require('nconf');
/**
 * Compute the md5 signature of a request/response body.
 *
 * Every property except `sign` is collected as a {name, value} pair,
 * sorted by property name, concatenated as name+value, and wrapped on
 * both sides with the shared secret (socket.io:secretKey); the md5 hex
 * digest of that string is passed to `next` (synchronously).
 *
 * Fix: the sort comparator used ~~(a.name > b.name), which returns only
 * 0 or 1 and violates the Array#sort contract (it must return a negative
 * value when a < b), so ordering was engine-dependent. It now returns
 * -1/0/1. NOTE(review): if external clients reproduced the old ordering,
 * their signatures must be regenerated — verify before deploying.
 *
 * @param {Object} body key/value map to sign (any `sign` key is ignored)
 * @param {Function} next callback receiving the hex digest
 */
function hash(body, next) {
    var pairs = [];
    for (var prop in body) {
        if (prop !== 'sign') {
            pairs.push({name: prop, value: body[prop]});
        }
    }
    pairs.sort(function (a, b) {
        if (a.name < b.name) return -1;
        if (a.name > b.name) return 1;
        return 0;
    });
    var secret = nconf.get('socket.io:secretKey');
    var payload = secret;
    pairs.forEach(function (pair) {
        payload += pair.name + pair.value;
    });
    payload += secret;
    next(crypto.createHash('md5').update(payload).digest('hex'));
}
/**
 * Express middleware verifying the `sign` field of req.body against a
 * locally computed signature; rejects with code 10000 on mismatch.
 * @param req
 * @param res
 * @param next
 */
function checksum(req, res, next) {
    hash(req.body, function (expected) {
        if (req.body.sign === expected) {
            next();
        } else {
            // NOTE: the response key 'mesg' (sic) is kept for wire
            // compatibility with existing clients.
            res.json({code: 10000, mesg: 'checksum_failed'});
        }
    });
}
/**
 * Attach a `sign` field to resJson.msg and return the signed object.
 *
 * hash() invokes its callback synchronously, so the mutation completes
 * before this function returns. Fix: the old `return resJson` sat inside
 * the callback and only returned from the callback — sign() itself
 * returned undefined, so callers using the return value got nothing.
 *
 * @param {Object} resJson response wrapper whose .msg will be signed
 * @returns {Object} the same resJson, now carrying msg.sign
 */
function sign(resJson) {
    hash(resJson.msg, function (digest) {
        resJson.msg.sign = digest;
    });
    return resJson;
}
// Register the checksum helpers on the shared middleware registry:
// `checksum` is an Express middleware, `sign` a plain helper.
module.exports = function (middleware) {
    middleware.checksum = checksum;
    middleware.sign = sign;
};<file_sep>/**
* Created by jianzhiqiang on 2017/5/11.
*/
"use strict";
var server = require('../../index');
var models = require('../../server/models');
var request = require('request');
var should = require('chai').should();
var nconf = require('nconf');
var fs = require('fs');
describe('controllers', function () {
describe('#upload', function () {
describe('POST /messages/customer/:cid/cs/:csid/image', function () {
it('should response with success', function (done) {
var formData = {
// Pass data via Streams
image: fs.createReadStream(__dirname + '/../../client/static/images/user_avatar.png')
};
request.post({
url: 'http://127.0.0.1:9688' + '/messages/customer/cid/cs/csid/image',
formData: formData
}, function (err, res) {
should.not.exist(err);
var customer = JSON.parse(res.body);
customer.code.should.equal(200);
done();
});
});
});
});
}); | 1393e4265565e15446d45ca475ff94333de5c2c1 | [
"JavaScript"
] | 5 | JavaScript | jzhwjian/uuchat | dfbf24e5d2f2398dca6bcef1e60d89f741f2b026 | ab13da7173e852d16a5c15be4a5fc7812f60049e |
refs/heads/master | <file_sep># work-03
Work with spring hibernate H2 Metrics - Note to be corrected
Teste prático - Marcos
Requisitos
- Java 1.7.x
- Jboss 6.4 EAP GA
- H2 3.0.1
- Maven 3.x
- Postgres 9.1
1) Execução de script de banco:
Para criação da massa de dados é necessário execução do script sql localizado em:
vectorx/database/script.sql
Obs: O script deve ser executado tanto no banco de dados H2 quanto no postgres.
No banco de dados H2 as credenciais utilizadas foram
- Url: jdbc:h2:file:~/test
- Usuário: sa
- Senha: (<PASSWORD>)
No banco de dados Postgres as credenciais utilizadas foram
- Url: jdbc:postgresql://localhost:5432/postgres
- Usuário: postgres
- Senha: admin
2) Para compilar o projeto gerando o arquivo de instalação WAR é necessário executar o seguinte comando maven:
mvn -T 4 -U clean package
3) Para alteração de banco de dados Postgres/H2 é necessário alterar o arquivo persistence.xml:
Para banco de dados H2 utilizar:
<property name="javax.persistence.jdbc.url" value="jdbc:h2:file:~/test" />
<property name="javax.persistence.jdbc.user" value="sa" />
<property name="javax.persistence.jdbc.driver" value="org.h2.Driver" />
<property name="javax.persistence.jdbc.password" value="" />
<property name="hibernate.dialect" value="org.hibernate.dialect.H2Dialect"/>
Para banco de dados Postgres utilizar:
<property name="javax.persistence.jdbc.url" value="jdbc:postgresql://localhost:5432/postgres" />
<property name="javax.persistence.jdbc.user" value="postgres" />
<property name="javax.persistence.jdbc.driver" value="org.postgresql.Driver" />
<property name="javax.persistence.jdbc.password" value="<PASSWORD>" />
<property name="hibernate.dialect" value="org.hibernate.dialect.PostgreSQLDialect"/>
<file_sep><project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>vectorx</groupId>
<artifactId>vectorx</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>war</packaging>
<name>marcos-test</name>
<properties>
<cxf.version>3.1.1</cxf.version>
</properties>
<dependencies>
<dependency>
<groupId>javax.faces</groupId>
<artifactId>jsf-api</artifactId>
<version>2.0</version>
<scope>provided</scope>
</dependency>
<!-- Spring framework -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>4.0.0.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-web</artifactId>
<version>4.0.0.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>4.0.0.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jdbc</artifactId>
<version>4.0.0.RELEASE</version>
</dependency>
<!-- Hibernate 4.3.6 core library library -->
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core</artifactId>
<version>4.3.6.Final</version>
</dependency>
<!-- Hibernate 4.3.6 JPA support -->
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-entitymanager</artifactId>
<version>4.3.6.Final</version>
</dependency>
<!-- Spring ORM -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-orm</artifactId>
<version>4.0.0.RELEASE</version>
</dependency>
<!-- Spring test -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<version>4.0.0.RELEASE</version>
</dependency>
<!-- Spring Web -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-webmvc</artifactId>
<version>4.0.0.RELEASE</version>
</dependency>
<!-- Dependencies for Eclipse JPA Persistence API -->
<dependency>
<groupId>org.eclipse.persistence</groupId>
<artifactId>eclipselink</artifactId>
<version>2.5.0-RC1</version>
</dependency>
<!-- Servlet api for tests -->
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
<version>2.5</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.security</groupId>
<artifactId>spring-security-core</artifactId>
<version>3.2.7.RELEASE</version>
<exclusions>
<exclusion>
<artifactId>spring-core</artifactId>
<groupId>org.springframework</groupId>
</exclusion>
<exclusion>
<artifactId>spring-expression</artifactId>
<groupId>org.springframework</groupId>
</exclusion>
<exclusion>
<artifactId>spring-context</artifactId>
<groupId>org.springframework</groupId>
</exclusion>
<exclusion>
<artifactId>spring-tx</artifactId>
<groupId>org.springframework</groupId>
</exclusion>
<exclusion>
<artifactId>spring-aop</artifactId>
<groupId>org.springframework</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.openid4java</groupId>
<artifactId>openid4java</artifactId>
<version>0.9.5</version>
</dependency>
<dependency>
<groupId>net.bootsfaces</groupId>
<artifactId>bootsfaces</artifactId>
<version>0.6.6</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.springframework.security</groupId>
<artifactId>spring-security-openid</artifactId>
<version>3.2.7.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework.security</groupId>
<artifactId>spring-security-config</artifactId>
<version>3.2.7.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework.security</groupId>
<artifactId>spring-security-web</artifactId>
<version>3.2.7.RELEASE</version>
</dependency>
<!-- <dependency> <groupId>org.springframework.data</groupId> <artifactId>spring-data-jpa</artifactId>
<version>1.8.1.RELEASE</version> </dependency> -->
<dependency>
<groupId>org.primefaces</groupId>
<artifactId>primefaces</artifactId>
<version>5.2</version>
</dependency>
<dependency>
<groupId>xml-apis</groupId>
<artifactId>xml-apis</artifactId>
<version>1.4.01</version>
</dependency>
<dependency>
<groupId>javax.inject</groupId>
<artifactId>javax.inject</artifactId>
<version>1</version>
</dependency>
<!-- Junit -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.11</version>
</dependency>
<!-- JSR-330 -->
<dependency>
<groupId>javax.inject</groupId>
<artifactId>javax.inject</artifactId>
<version>1</version>
</dependency>
<!-- apache cxf -->
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-frontend-jaxws</artifactId>
<version>${cxf.version}</version>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-frontend-jaxrs</artifactId>
<version>${cxf.version}</version>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-transports-http</artifactId>
<version>${cxf.version}</version>
</dependency>
<dependency>
<groupId>cglib</groupId>
<artifactId>cglib</artifactId>
<version>2.2.2</version>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<version>1.4.187</version>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>9.4-1201-jdbc41</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<version>2.3</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>2.3</version>
</dependency>
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-core</artifactId>
<version>3.1.0</version>
</dependency>
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-servlets</artifactId>
<version>3.1.0</version>
</dependency>
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-servlet</artifactId>
<version>3.1.0</version>
</dependency>
</dependencies>
</project><file_sep>package org.vectorx.reopsitory;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import javax.inject.Named;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.PersistenceUnit;
import javax.persistence.Query;
import javax.persistence.TemporalType;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.vectorx.entity.DayOff;
import org.vectorx.ws.DaysOffServiceImpl;
@Named
public class DayOffRepository {

	// persistence unit
	@PersistenceUnit(name="jpa-persistence")
	private EntityManagerFactory entityManagerFactory;

	// Console logger. Fixed: it was previously created with
	// DaysOffServiceImpl's name, so this class logged under the wrong category.
	static final Logger logger = LogManager.getLogger(DayOffRepository.class.getName());

	// Pattern for parsing incoming date strings. SimpleDateFormat is not
	// thread-safe, so a fresh instance is created per call instead of
	// sharing one field across callers of this bean.
	private static final String DATE_PATTERN = "yyyy-MM-dd HH:mm:ss";

	/**
	 * Returns the next day off strictly after today, formatted as
	 * "&lt;date&gt; &lt;description&gt;", or "Sem resultados" when none is scheduled.
	 */
	public String getNextDayOff() throws Exception {
		EntityManager em = entityManagerFactory.createEntityManager();
		try {
			Date today = new Date();
			// ORDER BY is required: without it the "next" day off was
			// whichever future row the database happened to return first.
			String sql = "SELECT d FROM DayOff d WHERE d.day > :today ORDER BY d.day ASC";
			Query query = em.createQuery(sql);
			query.setParameter("today", today, TemporalType.DATE);
			query.setMaxResults(1);
			List<?> list = query.getResultList();
			if (list != null && !list.isEmpty()) {
				DayOff dayoff = (DayOff) list.get(0);
				return dayoff.getDay() + " " + dayoff.getDescription();
			}
		} finally {
			em.close();
		}
		// No upcoming day off found.
		return "Sem resultados";
	}

	/**
	 * Returns true when the given date matches a registered day off.
	 *
	 * @param date date string in the pattern "yyyy-MM-dd HH:mm:ss"
	 */
	public boolean isDayOff(String date) throws Exception {
		EntityManager em = entityManagerFactory.createEntityManager();
		try {
			Date dayoff = new SimpleDateFormat(DATE_PATTERN).parse(date);
			String sql = "SELECT d FROM DayOff d WHERE d.day = :date";
			Query query = em.createQuery(sql);
			query.setParameter("date", dayoff, TemporalType.DATE);
			return !query.getResultList().isEmpty();
		} finally {
			em.close();
		}
	}

	/**
	 * Returns every registered day off.
	 */
	public List<DayOff> getAllDayOff() throws Exception {
		EntityManager em = entityManagerFactory.createEntityManager();
		try {
			String sql = "SELECT d FROM DayOff d";
			Query query = em.createQuery(sql);
			return query.getResultList();
		} finally {
			em.close();
		}
	}
}
<file_sep>package org.vectorx;
import java.io.Serializable;
import java.util.List;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.SessionScoped;
import javax.inject.Inject;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.vectorx.entity.DayOff;
import org.vectorx.reopsitory.DayOffRepository;
@ManagedBean
@SessionScoped
public class DayOffBean implements Serializable {

	private static final long serialVersionUID = 1L;

	@Inject
	private DayOffRepository dayOffRepository;

	// Console logger for this backing bean.
	static final Logger logger = LogManager.getLogger(DayOffBean.class.getName());

	// Cached list property. NOTE(review): getList() ignores it and always
	// queries the repository, so the view sees fresh data on every call.
	private List<DayOff> list;

	private String name;

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	/**
	 * Returns all registered days off for the view.
	 * On failure the error is logged and an empty list is returned, so JSF
	 * components iterate over nothing instead of receiving null (the old
	 * behavior).
	 */
	public List<DayOff> getList() {
		try {
			return dayOffRepository.getAllDayOff();
		} catch (Exception e) {
			logger.error("Error: ", e);
			return java.util.Collections.emptyList();
		}
	}

	public void setList(List<DayOff> list) {
		this.list = list;
	}
}
<file_sep>package org.vectorx.ws;
import javax.jws.WebMethod;
import javax.jws.WebService;
/**
 * SOAP contract for querying registered days off
 * (presumably implemented by DaysOffServiceImpl over DayOffRepository —
 * confirm against the implementation class, which is outside this excerpt).
 */
@WebService
public interface DaysOffService {
	// True when the given date string matches a registered day off.
	@WebMethod
	public boolean isDayOff(String date);
	// The next upcoming day off formatted as "<date> <description>".
	@WebMethod
	public String getNextDayOff();
}
<file_sep>CREATE TABLE DAYOFF(ID INT PRIMARY KEY,
DAY DATE, DESCRIPTION VARCHAR(255));
-- Seed data: Brazilian national holidays and optional days off
-- ("ponto facultativo") for 2015.
-- NOTE(review): several dates look inconsistent with the named holiday
-- (e.g. Carnaval 2015 fell on Feb 16-17, not Feb 20-21) — verify against
-- the official calendar before relying on this data.
INSERT INTO DAYOFF VALUES(1, DATE '2015-01-01', 'feriado nacional –Confraternização universal');
INSERT INTO DAYOFF VALUES(2, DATE '2015-02-20', 'ponto facultativo –Carnaval');
INSERT INTO DAYOFF VALUES(3, DATE '2015-02-21', 'ponto facultativo –Carnaval');
INSERT INTO DAYOFF VALUES(4, DATE '2015-02-22', 'ponto facultativo até as 14 horas–Quarta-feira de Cinzas');
INSERT INTO DAYOFF VALUES(5, DATE '2015-04-06', 'ponto facultativo –Paixão de Cristo');
INSERT INTO DAYOFF VALUES(6, DATE '2015-04-21', 'feriado nacional –Tiradentes');
INSERT INTO DAYOFF VALUES(7, DATE '2015-05-01', 'feriado nacional –Dia Mundial do Trabalho');
INSERT INTO DAYOFF VALUES(8, DATE '2015-06-07', 'ponto facultativo –Corpus Christi');
INSERT INTO DAYOFF VALUES(9, DATE '2015-09-07', 'feriado nacional –Independência do Brasil');
INSERT INTO DAYOFF VALUES(10, DATE '2015-10-12', 'feriado nacional –Nossa Senhora Aparecida');
INSERT INTO DAYOFF VALUES(11, DATE '2015-10-28', 'ponto facultativo –Diado Servidor Público');
INSERT INTO DAYOFF VALUES(12, DATE '2015-11-02', 'feriado nacional –Finados');
INSERT INTO DAYOFF VALUES(13, DATE '2015-11-15', 'feriado nacional –Proclamação da República');
INSERT INTO DAYOFF VALUES(14, DATE '2015-12-24', 'ponto facultativo –Vésperado Natal');
INSERT INTO DAYOFF VALUES(15, DATE '2015-12-25', 'feriado nacional –Natal');
INSERT INTO DAYOFF VALUES(16, DATE '2015-12-31', 'ponto facultativo –VésperadeAno-Novo');
COMMIT;<file_sep>package org.utils;
import org.hibernate.cfg.AnnotationConfiguration;
import org.hibernate.tool.hbm2ddl.SchemaExport;
/**
 * One-off utility originally intended to generate database tables from
 * annotated entity classes via Hibernate's SchemaExport.
 *
 * NOTE(review): the entire generation logic is commented out, so this class
 * currently does nothing — consider deleting it or restoring the Hibernate
 * calls below.
 */
public class GerarTabelasBD {
public void gerarTabelas() {
try {
// AnnotationConfiguration ac = new AnnotationConfiguration();
// ac.addAnnotatedClass(Livro.class);
// ac.addAnnotatedClass(Autorizacao.class);
// ac.addAnnotatedClass(Usuario.class);
// sessionFactory = ac.configure().buildSessionFactory();
// SchemaExport se = new SchemaExport(ac);
// se.create(true, true);
} catch (Throwable ex) {
// Unreachable while the body above stays commented out.
System.err.println("Initial SessionFactory creation failed." + ex);
throw new ExceptionInInitializerError(ex);
}
}
}
| 165599d1c10547d729a14ac4b208ee9c53ff927b | [
"Markdown",
"Java",
"Maven POM",
"SQL"
] | 7 | Markdown | mcelio/work-03 | fbab4d0dbb05813c6eee18d4a6bc9e0e6416eb3f | 26e0fb3d6c6e2b4209af943a781e2a9dcd9a534e |
refs/heads/master | <file_sep>'use strict';
/**
 * Background controller for the Chrome extension: registers a context-menu
 * entry on links and uploads the linked .torrent file to the Transmission
 * daemon through the Session service.
 *
 * Fixes: removed unused injected dependencies ($http, $base64) and added a
 * network-error handler so failed downloads are no longer silent.
 */
angular.module('angulatransmissionApp')
  .controller('MainCtrl', function ($scope, Session) {
    // NOTE(review): host is hard-coded for development — confirm before release.
    $scope.ipAddress = '192.168.1.80';
    // Transmission RPC session id; refreshed when the daemon answers with one.
    $scope.session = undefined;

    // Sends a torrent (binary string) to the daemon. A string response is a
    // fresh session id: store it and retry once with the new session.
    var addTorrent = function (id, file) {
      Session.addTorrent(id, $scope.ipAddress, file).then(function (data) {
        if (angular.isString(data)) {
          $scope.session = data;
          addTorrent($scope.session, file);
        } else {
          console.log('Success');
        }
      });
    };

    // Downloads the .torrent behind `url` and forwards it to addTorrent.
    var oneButtonUpload = function (url) {
      var oReq = new XMLHttpRequest();
      oReq.open('GET', url, true);
      oReq.responseType = 'blob';
      oReq.onload = function () {
        var reader = new FileReader();
        reader.onload = function () {
          addTorrent($scope.session, reader.result);
        };
        reader.readAsBinaryString(oReq.response);
      };
      // Surface network failures instead of failing silently.
      oReq.onerror = function () {
        console.error('Failed to download torrent from ' + url);
      };
      oReq.send();
    };

    // Installs the "open" context-menu entry for links and wires its handler.
    var chromeInject = function () {
      chrome.contextMenus.create({
        id: 'open',
        title: chrome.i18n.getMessage('openContextMenuTitle'),
        contexts: ['link']
      });
      chrome.contextMenus.onClicked.addListener(function (info, tab) {
        oneButtonUpload(info.linkUrl);
      });
    };

    chromeInject();
  });
document.body.innerHTML = '<div ng-controller=MainCtrl>';
angular.bootstrap(document.body, ['angulatransmissionApp']);
<file_sep>'use strict';
// Development default for the Transmission daemon host.
var devip = '192.168.1.80';

/**
 * Popup controller: renders the torrent list, lets the user start/stop/remove
 * torrents, and polls the daemon for updates.
 *
 * Fixes: the original used a raw setInterval that was never cleared (it kept
 * firing after the popup closed) — replaced with $interval cancelled on
 * $destroy; loose equality (==) made strict for the numeric status codes;
 * removed an unused local addTorrent and the unused $base64 injection.
 */
var myApp = angular.module('angulatransmissionApp')
  .controller('MainCtrl', function ($scope, Session, $localStorage, $interval) {
    // Persisted per-user display settings with sensible defaults.
    $scope.$storage = $localStorage.$default({
      downloadDir: true,
      rateUpload: true,
      eta: false,
      totalSize: true,
      status: true,
      remove: true,
      uploadedEver: true
    });
    $scope.$storage.ipAddress = devip;

    $scope.alerts = [];
    // settingsBuilder is a global helper defined elsewhere in the bundle.
    $scope.listSettings = function () {
      return settingsBuilder($scope.$storage);
    };
    $scope.addAlert = function (text) {
      $scope.alerts.push({msg: text});
    };
    $scope.closeAlert = function (index) {
      $scope.alerts.splice(index, 1);
    };

    // Maps Transmission status codes to a glyphicon:
    // 6 = seeding, 4 = downloading, 3 = queued, 0 = paused.
    $scope.statusFilter = function (num) {
      if (num === 6) {
        return "<span class='glyphicon glyphicon-cloud-upload'></span>";
      } else if (num === 4) {
        return "<span class='glyphicon glyphicon-cloud-download'></span>";
      } else if (num === 3) {
        return "<span class='glyphicon glyphicon-time'></span>";
      } else if (num === 0) {
        return "<span class='glyphicon glyphicon-pause'></span>";
      } else {
        return "Unknown";
      }
    };
    // Paused torrents get a "play" button, everything else a "stop" button.
    $scope.stopStartFilter = function (num) {
      if (num === 0) {
        return "<span class='glyphicon glyphicon-play'></span>";
      } else {
        return "<span class='glyphicon glyphicon-stop'></span>";
      }
    };

    $scope.removeTorrent = function (id) {
      Session.removeTorrent($scope.session, $scope.$storage.ipAddress, id);
    };
    var stopTorrent = function (id) {
      Session.stopTorrent($scope.session, $scope.$storage.ipAddress, id);
    };
    var restartTorrent = function (id) {
      Session.restartTorrent($scope.session, $scope.$storage.ipAddress, id);
    };
    // Restart paused torrents (status 0); stop anything else.
    $scope.torrentStopStarter = function (id, status) {
      if (status === 0) {
        restartTorrent(id);
      } else {
        stopTorrent(id);
      }
    };

    // Fetches the torrent list; a string response means the daemon handed us
    // a new session id, which is stored for the next poll.
    var listTorrents = function () {
      Session.listTorrents($scope.session, $scope.$storage.ipAddress, $scope.listSettings()).then(function (data) {
        if (angular.isString(data)) {
          $scope.session = data;
        } else {
          $scope.torrents = data['arguments']['torrents'];
        }
      });
    };
    $scope.refreshList = function () {
      listTorrents();
    };
    // byteCalc is a global helper that formats byte counts for display.
    $scope.byteCalc = function (bytes) {
      return byteCalc(bytes);
    };
    // Toggles the settings panel (undefined counts as "closed").
    $scope.settingsToggle = function () {
      $scope.setting = $scope.setting !== true;
    };
    $scope.percentCalc = function (inputDouble) {
      var percent = inputDouble * 100;
      return percent.toFixed(2) + '%';
    };

    // Poll the daemon every 420 ms inside the digest cycle; cancel on
    // scope destruction so the timer does not outlive the popup.
    var poller = $interval($scope.refreshList, 420);
    $scope.$on('$destroy', function () {
      $interval.cancel(poller);
    });
  });
<file_sep># Angular Chrome Extension for Transmission Daemon
### Using the [Official Spec](https://trac.transmissionbt.com/browser/trunk/extras/rpc-spec.txt)
A Chrome extension that adds a context-menu entry for torrent links: the link is passed to an Angular service, which downloads the torrent, encodes it as base64, and POSTs it to the transmission-daemon.
The popout consists of one page ~ the torrent list.
The torrent list has buttons to delete the torrent and data, stop torrent.. but aside from that it's not supposed to be a fully fledged client.

| e5ea85480c29b95f4b5869d9784c1765fb054f76 | [
"JavaScript",
"Markdown"
] | 3 | JavaScript | will118/Angulatransmission | 135b636abd25f1ce4451f9ed7e4ef7a157a615b4 | f342fe67102822a5120695f59ac5e9da7d5ce1db |
refs/heads/master | <repo_name>Emethium/starting-with-puppeteer<file_sep>/src/config/logger.js
/**
* Implements everything log related
*
* @module config/logger
*/
const { format, createLogger, transports } = require("winston");
/**
* Creates a derived logger using winston.createLogger.
*
* By default, in production, logs are limited to info.
* This can be overwriten through process.env['LOG_LEVEL'].
*
* Locally all logs should appear in your console, and
* are also persisted to debug.log and error.log.
*
* @type {Logger}
* @static
* @constant
*/
let opts = {
level: "silly",
exitOnError: false
};
const logger = createLogger(opts);
const formatter = format.printf(msg => {
const { timeElapsed, error, timestamp, level, message } = msg;
let out = `${timestamp} (${level}) - ${message}`;
if (timeElapsed) out += ` (+${timeElapsed}ms)`;
if (error && error.stack) out += `\n\n${error.stack}\n`;
if (error && error.code) out += ` (${error.code})`;
return out;
});
// Error logs
logger.add(
new transports.File({
silent: false,
filename: "error.log",
level: "error",
format: format.combine(
format.uncolorize({ all: true }),
format.timestamp(),
formatter
)
})
);
// Debug logs
logger.add(
new transports.File({
silent: false,
filename: "debug.log",
format: format.combine(
format.uncolorize({ all: true }),
format.timestamp(),
formatter
)
})
);
// Console logs
logger.add(
new transports.Console({
format: format.combine(
format.colorize({ all: true }),
format.timestamp({
format: "DD-MM-YYYY HH:mm:ss"
}),
formatter
)
})
);
module.exports = { logger };
<file_sep>/.env.example
AWS_ACCESS_KEY_ID=getyours
AWS_SECRET_ACCESS_KEY=getyours
# AWS_* above: credentials for the S3-compatible store (Minio in docker-compose).
# Destination bucket for uploaded screenshots and HTML snapshots (src/util/upload.js).
BUCKET=mybucket
# Search term used by the Google scraping example (src/core/google.js).
TERM=puppeteer<file_sep>/src/core/google.js
const { logger } = require("../config/logger");
const { takeSnapshot, measureTimeElapsed } = require("../util");
/**
 * Navigates to Google's US/English home page and waits for the load event.
 *
 * @param {*} page - Puppeteer page handle
 * @returns {number} Milliseconds spent loading the page
 */
async function loadPage(page) {
  logger.debug("Loading page...");
  const startedAt = measureTimeElapsed();
  await page.goto("https://www.google.com/?gl=us&hl=en", {
    waitUntil: ["load"]
  });
  const elapsed = measureTimeElapsed(startedAt);
  logger.debug("Page loaded...", { timeElapsed: elapsed });
  return elapsed;
}
// Types the search term into Google's search box (100 ms per keystroke)
// and returns the time spent doing so.
async function fillForm(page, searchTerm) {
  const startedAt = measureTimeElapsed();
  const searchBox = await page.$("[title=Search]");
  await searchBox.type(searchTerm, { delay: 100 });
  const elapsed = measureTimeElapsed(startedAt);
  logger.debug(`Form filled in ${elapsed} ms`);
  return elapsed;
}
// Forces the search form to submit, waits for the results page to finish
// loading, and returns the elapsed time.
async function submitForm(page) {
  const startedAt = measureTimeElapsed();
  // Forces form submission
  await page.$eval("form", form => form.submit());
  await page.waitForNavigation({ waitUntil: ["load"] });
  const elapsed = measureTimeElapsed(startedAt);
  logger.debug(`Form filled and resulting page loaded in ${elapsed} ms`, {
    timeElapsed: elapsed
  });
  return elapsed;
}
// Scrapes the organic result entries from a Google results page.
//
// NOTE(review): relies on Google's current DOM ("#search", ".g" blocks) and
// on each result's innerText splitting into at least five lines — both are
// fragile and will silently yield undefined fields if the layout changes.
async function extractSearchResults(page) {
const rawResults = await page.$("[id=search] > div > [data-async-context]");
// We only care for the text result with links
const filteredResults = await rawResults.$$eval(".g:not(.g-blk)", results =>
Array.from(results)
.map(r => r.innerText)
.filter(r => r !== "")
);
// Each result's innerText is split on newlines; indices 1-4 are assumed to
// be title, header, description and footer respectively.
const parsedResults = filteredResults.map(fr => {
const splittedData = fr.split("\n");
return {
resultTitle: splittedData[1],
resultHeader: splittedData[2],
resultDescription: splittedData[3],
resultFooter: splittedData[4]
};
});
return parsedResults;
}
/**
 * Runs the full scraping flow: load Google, type the search term, submit,
 * snapshot each step, and extract the organic results.
 *
 * @param {*} page - Puppeteer page handle
 * @param {string} searchTerm - Query to search for
 * @returns {{results: Array, perf: Object}|undefined} Parsed results plus
 *   per-step timings, or undefined when any step failed
 */
async function scrap(page, searchTerm) {
  const perf = {};
  try {
    // Load Google's main page
    perf["firstPage"] = await loadPage(page);
    await takeSnapshot(page, { fullPage: true });
    // Fills form with the search term provided
    perf["fillForm"] = await fillForm(page, searchTerm);
    await takeSnapshot(page, { fullPage: true });
    // Submits form and waits for page transition
    perf["submitForm"] = await submitForm(page);
    await takeSnapshot(page, { fullPage: true });
    // Extracts search data
    const results = await extractSearchResults(page);
    logger.info("Completed without errors");
    return { results, perf };
  } catch (error) {
    // The original swallowed failures silently; log them so runs are debuggable.
    logger.error("Scraping failed", { error });
  }
}
module.exports = scrap;
<file_sep>/src/util/browser.js
/**
* Implements everything Puppeteer-launching related
*
* @module util/browser
*/
// Imports
const puppeteer = require("puppeteer");
const puppeteerExtra = require("puppeteer-extra");
const StealthPlugin = require("puppeteer-extra-plugin-stealth");
const AdblockerPlugin = require("puppeteer-extra-plugin-adblocker");
const launchArgs = [
// Required for Docker version of Puppeteer
"--no-sandbox",
"--disable-setuid-sandbox",
// Disable GPU
"--disable-gpu",
// This will write shared memory files into /tmp instead of /dev/shm,
// because Docker’s default for /dev/shm is 64MB
"--disable-dev-shm-usage"
];
/**
 * Initializes a common Puppeteer Browser.
 *
 * @param {string} proxy - String representation of an IPv4 address and a designated port (IPv4:PORT)
 *
 * @returns {Browser} A Puppeteer Browser
 */
async function initializePuppeteer(proxy) {
  // Bug fix: setupProxy() was previously called with no argument, so the
  // proxy parameter was silently dropped and never configured.
  setupProxy(proxy);
  return puppeteer.launch({
    executablePath: "/usr/bin/chromium-browser",
    args: launchArgs,
    defaultViewport: {
      width: 1024,
      height: 768
    }
  });
}
/**
 * Initializes a Puppeteer Browser with steroids (stealth + adblock plugins).
 *
 * @param {string} proxy - String representation of an IPv4 address and a designated port (IPv4:PORT)
 *
 * @returns {Browser} A modified Puppeteer Browser
 */
async function initializeExtraPuppeteer(proxy) {
  // Bug fix: setupProxy() was previously called with no argument, so the
  // proxy parameter was silently dropped and never configured.
  setupProxy(proxy);
  puppeteerExtra.use(StealthPlugin());
  puppeteerExtra.use(AdblockerPlugin());
  return puppeteerExtra.launch({
    executablePath: "/usr/bin/chromium-browser",
    args: launchArgs,
    defaultViewport: {
      width: 1024,
      height: 768
    }
  });
}
/**
 * Appends a proxy flag to the shared launch-argument list when a proxy is
 * provided; does nothing (returns null) otherwise.
 *
 * @param {string} proxy - String representation of an IPv4 address and a designated port (IPv4:PORT)
 * @inner
 * @static
 */
function setupProxy(proxy) {
  if (!proxy) {
    return null;
  }
  return launchArgs.push(`--proxy-server=https=${proxy}`);
}
module.exports = {
initializePuppeteer,
initializeExtraPuppeteer
};
<file_sep>/src/util/upload.js
/**
* Provides helper methods for dealing with file transfer, downloading and copying from remote locations
*
* @module util/file-upload
*
*/
// Imports
const S3 = require("aws-sdk").S3;
const { v4: uuidv4 } = require("uuid");
// Local imports
const bucket = process.env.BUCKET
const { logger } = require("../config/logger");
const { measureTimeElapsed } = require("./time");
/**
 * Uploads a base64 encoded buffer with the specified
 * content type and extension to the configured bucket.
 *
 * When no destination is configured through process.env.BUCKET the request
 * is ignored after logging a warning, and undefined is returned. Upload
 * failures are logged (not rethrown) and also yield undefined.
 *
 * @param {Buffer} buffer - An integer buffer (base64 encoded)
 * @param {Object<string, string>} opts - Additional options for file upload
 * @param {string} opts.contentType - The content type for the buffered file
 * @param {string} opts.ext - File extension (including the leading dot)
 *
 * @returns {string|undefined} The generated object key, or undefined when
 *   the upload was skipped or failed
 *
 * @static
 * @function
 */
async function uploadFile(buffer, { contentType, ext } = {}) {
// Halt execution if no bucket is configured
if (!bucket) {
logger.warn(
"An upload destination was not provided, ignoring file upload...",
{ category: "file-upload" }
);
return;
}
const service = getUploadService();
const key = generateObjectKey(ext);
try {
const start = measureTimeElapsed();
logger.silly(`Uploading object ${key}`, {
category: "file-upload",
key
});
await service
.putObject({
Key: key,
Bucket: bucket,
Body: buffer,
ContentEncoding: "base64",
ContentType: contentType
})
.promise();
const timeElapsed = measureTimeElapsed(start);
logger.silly("Object uploaded", { category: "file-upload", timeElapsed });
return key;
} catch (error) {
// Best-effort: failures are logged and swallowed so scraping continues.
logger.error(`Could not upload file ${key} `, {
category: "file-upload",
error
});
}
}
/**
 * Builds the S3 service object all upload calls go through.
 *
 * The endpoint defaults to the docker-compose Minio container but can be
 * overridden through process.env.S3_ENDPOINT (e.g. to point at real S3) —
 * the default keeps existing behavior unchanged.
 *
 * @returns {AWS.S3} A service object for external calls
 *
 * @private
 * @static
 * @function
 */
function getUploadService() {
  const config = {
    apiVersion: "2006-03-01",
    signatureVersion: "v4",
    s3ForcePathStyle: true,
    endpoint: process.env.S3_ENDPOINT || "http://minio:9000"
  };
  return new S3(config);
}
/**
 * Builds a unique object key: a v4 UUID followed by the file extension.
 *
 * @param {string} [ext=".jpg"] - The file extension (defaults to .jpg)
 * @returns {string} The generated object key, formatted as uuid + ext
 *
 * @private
 * @static
 * @function
 */
function generateObjectKey(ext = ".jpg") {
  return `${uuidv4()}${ext}`;
}
// Exports
module.exports = {
uploadFile
};
<file_sep>/src/util/page.js
/**
* This module implements common helper methods to extract,
* modify and process page information
*
* @module util/page
*
*/
const { logger } = require("../config/logger");
const { uploadFile } = require("./upload")
/**
 * Takes a screenshot of the provided page.
 *
 * @param {*} page - The page a screenshot should be taken from
 * @param {Object.<string, string>} opts - Additional options
 * @param {boolean} [opts.fullPage=false] - When true, takes a screenshot of the full scrollable page.
 * @param {boolean} [opts.skipUpload=false] - When true, skip uploading files to destination.
 * @param {Context} [opts.context={}] - The execution context
 *
 * @returns {string | Buffer | null} The key for the generated file within the
 *   destination; the raw Buffer when opts.skipUpload is true; null on failure.
 *
 * @static @function
 */
async function takeScreenshot(
  page,
  { fullPage = false, skipUpload = false, context = {} } = {}
) {
  try {
    logger.info("Taking screenshot from page", {
      category: "page-manipulation",
      context,
      fullPage,
      skipUpload
    });
    // Bug fix: `buffer` was assigned without declaration, leaking an
    // implicit global.
    const buffer = await page.screenshot({
      fullPage,
      type: "jpeg"
    });
    logger.info("Screenshot taken", {
      category: "page-manipulation",
      context,
      fullPage,
      skipUpload
    });
    return skipUpload
      ? buffer
      : await uploadFile(buffer, { contentType: "image/jpeg", ext: ".jpg" });
  } catch (error) {
    logger.error("Could not take screenshot from page", {
      category: "page-manipulation",
      context,
      error
    });
    return null;
  }
}
/**
 * Takes the innerHTML content from the body of the provided page.
 *
 * @param {*} page - The page from which the content should be taken from
 * @param {Object.<string, string>} opts - Additional options
 * @param {boolean} [opts.skipUpload=false] - When true, skip uploading files to destination.
 * @param {Context} [opts.context={}] - The execution context
 *
 * @returns {string | Buffer | null} The key for the generated file within the
 *   destination; the raw Buffer when opts.skipUpload is true; null on failure.
 *
 * @static @function
 */
// Fixes: the options parameter had no `= {}` default (takeContent(page)
// would throw on destructuring), and `buffer` leaked as an implicit global.
async function takeContent(page, { skipUpload = false, context = {} } = {}) {
  try {
    logger.info("Taking HTML content from page", {
      category: "page-manipulation",
      context,
      skipUpload
    });
    const html = await page.content();
    const buffer = Buffer.from(html);
    logger.info("HTML taken", {
      category: "page-manipulation",
      context,
      skipUpload
    });
    return skipUpload
      ? buffer
      : await uploadFile(buffer, { contentType: "text/html", ext: ".html" });
  } catch (error) {
    logger.error("Could not take HTML from page", {
      category: "page-manipulation",
      context,
      error
    });
    return null;
  }
}
/**
 * Take both the HTML and a screenshot from the provided page.
 *
 * @see takeScreenshot
 * @see takeContent
 *
 * @param {*} page - The page a snapshot should be taken from
 * @param {Object.<string, string>} opts - Additional options
 * @param {boolean} [opts.fullPage=false] - When true, takes a screenshot of the full scrollable page.
 * @param {boolean} [opts.skipUpload=false] - When true, skip uploading files to destination.
 * @param {Context} [opts.context={}] - The execution context
 *
 * @returns {Snapshot} The page snapshot ({ screenshot, html })
 *
 * @static @function
 */
async function takeSnapshot(
  page,
  { fullPage = false, skipUpload = false, context = {} } = {}
) {
  // Capture both artifacts concurrently, then package them together.
  const screenshotPromise = takeScreenshot(page, { fullPage, skipUpload, context });
  const htmlPromise = takeContent(page, { skipUpload, context });
  const [screenshot, html] = await Promise.all([screenshotPromise, htmlPromise]);
  return { screenshot, html };
}
module.exports = {
takeSnapshot,
takeScreenshot,
takeContent
};
<file_sep>/docker-compose.yml
# Compose stack for the Puppeteer examples: two scraper services plus a
# Minio (S3-compatible) store for uploaded snapshots.
# NOTE(review): YAML nesting appears to have lost its indentation in this
# copy of the file — restore the original indentation before running.
version: '3.4'
volumes:
minio-data:
driver: local
services:
# Runs the Google search scraping example (npm run test:google).
scrapper:
build:
context: .
target: development
hostname: scrapper
volumes:
- ./:/app
env_file: .env
command: npm run test:google
depends_on:
- minio
ports:
- 9229:9229
# Runs the puppeteer-extra stealth-detection example.
# NOTE(review): publishes the same host port (9229) as scrapper — the two
# services cannot run simultaneously.
stealth-check:
build:
context: .
target: development
hostname: stealth-check
volumes:
- ./:/app
env_file: .env
command: npm run test:stealth
depends_on:
- minio
ports:
- 9229:9229
# S3-compatible object store receiving screenshots/HTML (src/util/upload.js).
minio:
image: minio/minio:latest
hostname: minio
volumes:
- minio-data:/data
command: server --compat /data
environment:
MINIO_ACCESS_KEY: getyours
MINIO_SECRET_KEY: getyours
ports:
- 1111:9000
# Starting with Puppeteer
This is an example repository to be used as a companion to a series of begginer-friendly posts I plan on write about doing magic stuff with Puppeteer.
> awesome post links will go here, eventually
Reading the articles are non-obligatory (but I'll be very happy if you do) and this repo can be read as it is. A lot of different approaches will be used here as examples and maybe inspire your own implementations.
## Content
All the code here is separated by context modules with their own set of awesome features.
### Util
- **Browser**: You can find here how to launch your `Puppeteer` instance along with using it with superpowers, with all the resources [`pupppeteer-extra`](https://github.com/berstend/puppeteer-extra) provides us.
- **Page**: Provides useful functions of interesting ways of taking your screenshots and scrapping full page's HTML code and uploading somewhere. Maybe a S3 bucket or something?
- **Stealth**: Shows how to perform a scrapper stealth test using the `puppeteer-extra` stealth module and showing up the results.
- **Time**: Functions to be used to check the amount of time used to perform scrapping operations
- **Upload**: Shows a logic to upload all your screenshots and HTML data into a local bucket, customizable to work with S3 as well.
### Core
- **Google**: Really simple example of scrapping Google's first page of results for a keyword search.
### Config
- **Logger**: Custom logger configured using `Winston`. I quite like it, feel free to use as well.
## Building the image
- Run `docker build -t starting-with-puppeteer:latest .`
## Running the scrapping example
- Install all necessary dependencies with a `npm install`
- Create your own `.env` following the variables defined on the `.env.example`
- Run `docker-compose up scrapper`
- Profit
## Running the stealth checking example
- Install all necessary dependencies with a `npm install`
- Create your own `.env` following the variables defined on the `.env.example`
- Run `docker-compose up stealth-check`
- Profit
> You can check all the taken snapshots on Minio, accessible by entering `localhost:1111/minio/bucket/` on your local machine. | c4dc0f57e4577ce141da1687876fb144a44139e0 | [
"JavaScript",
"YAML",
"Markdown",
"Shell"
] | 8 | JavaScript | Emethium/starting-with-puppeteer | 8251f922ca9bf012a7b55499668f6b7803f8c9df | bf3bca0fd18f3943883e0128ab2965008ca51778 |
refs/heads/master | <file_sep>package com.eriochrome.bartime.modelos.entidades;
/**
 * A challenge ("desafio") game hosted by a bar. Players that complete the
 * challenge earn the configured number of points.
 */
public class Desafio extends Juego {

    // Whether the challenge stays available permanently (true) or is one-off.
    private boolean permanente;
    // Human-readable description of the challenge.
    private String desafioTexto;

    public Desafio(String desafioTexto) {
        this.desafioTexto = desafioTexto;
        this.permanente = false;
    }

    /**
     * Required by firebase (no-arg constructor for deserialization).
     */
    public Desafio() {
    }

    public String getDesafioTexto() {
        return desafioTexto;
    }

    /** Tags this game with its concrete type for storage/dispatch. */
    public void asignarTipo() {
        super.asignarTipo("Desafio");
    }

    public void setPermanente(boolean permanente) {
        this.permanente = permanente;
    }

    public boolean isPermanente() {
        return permanente;
    }

    /** Message shown when a user joins this challenge. */
    @Override
    public String getTextoParticipacion(String nombreParticipante) {
        return nombreParticipante
                + " esta ahora participando en el desafio '"
                + desafioTexto
                + "'.";
    }

    /** Message shown to the winner, including the points awarded. */
    @Override
    public String getTextoGanadorDeJuego() {
        String nombreBar = getNombreBar();
        return "Has ganado "
                + puntos
                + " puntos por ganar el desafio '"
                + desafioTexto
                + "' en "
                + nombreBar
                + ".";
    }

    // NOTE: trivial overrides of setID/getPuntos/getTipoDeJuego/getNombreBar
    // that only delegated to super were removed; the inherited
    // implementations are used directly.
}
<file_sep>package com.eriochrome.bartime.vistas;
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.ImageButton;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.contracts.PaginaTriviaContract;
import com.eriochrome.bartime.presenters.PaginaTriviaPresenter;
/**
 * Screen showing a trivia game's summary: game type, description, and
 * winner/participant counts. Acts as the passive view in an MVP pair with
 * PaginaTriviaPresenter, which loads the data asynchronously.
 */
public class PaginaTriviaActivity extends AppCompatActivity implements PaginaTriviaContract.View {
private PaginaTriviaPresenter presenter;
// Shown while data loads; the content container is hidden meanwhile.
private ProgressBar progressBar;
private ImageButton volver;
private RelativeLayout container;
private TextView tipoDeJuego;
private TextView resumenDelJuego;
private TextView cantGanadores;
private TextView cantParticipantes;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_pagina_trivia);
// Bind the presenter and hand it the game carried by the launching intent.
presenter = new PaginaTriviaPresenter();
presenter.bind(this);
presenter.obtenerJuego(getIntent());
progressBar = findViewById(R.id.progressBar);
progressBar.setVisibility(View.GONE);
// "Back" button simply closes this screen.
volver = findViewById(R.id.volver);
volver.setOnClickListener(v -> finish());
container = findViewById(R.id.container_rl);
tipoDeJuego = findViewById(R.id.tipo_de_juego);
resumenDelJuego = findViewById(R.id.resumen_juego);
cantGanadores = findViewById(R.id.cant_ganadores);
cantParticipantes = findViewById(R.id.cant_participantes);
}
// Refresh game and participant data every time the screen becomes visible.
@Override
protected void onResume() {
super.onResume();
presenter.cargarDatosJuego();
presenter.cargarDatosParticipantes();
}
// Detach the presenter before teardown to avoid callbacks into a dead view.
@Override
protected void onDestroy() {
presenter.unbind();
super.onDestroy();
}
@Override
public void cargando() {
progressBar.setVisibility(View.VISIBLE);
container.setVisibility(View.GONE);
}
@Override
public void finCargando() {
progressBar.setVisibility(View.GONE);
container.setVisibility(View.VISIBLE);
}
@Override
public void setGanadores(int ganadores) {
cantGanadores.setText(String.valueOf(ganadores));
}
@Override
public void setParticipantes(int participantes) {
cantParticipantes.setText(String.valueOf(participantes));
}
@Override
public void setTipoDeJuego(String tipoDeJuego) {
this.tipoDeJuego.setText(tipoDeJuego);
}
@Override
public void setResumen(String resumen) {
this.resumenDelJuego.setText(resumen);
}
}
<file_sep>package com.eriochrome.bartime.presenters;
import android.content.Intent;
import com.eriochrome.bartime.contracts.PaginaBarContract;
import com.eriochrome.bartime.modelos.PaginaBarInteraccion;
import com.eriochrome.bartime.modelos.entidades.Bar;
import com.eriochrome.bartime.modelos.entidades.Comentario;
import java.util.ArrayList;
/**
 * MVP presenter for the bar detail page. Mediates between the view and
 * PaginaBarInteraccion (favorites, comments, ratings, points, images).
 *
 * The view is attached with bind() and detached with unbind(). Every
 * asynchronous callback that touches the view is now null-guarded: the
 * original dereferenced {@code view} unconditionally, so results arriving
 * after unbind() crashed with a NullPointerException.
 */
public class PaginaBarPresenter implements PaginaBarContract.CompleteListener {

    private PaginaBarContract.Interaccion interaccion;
    private PaginaBarContract.View view;
    // Cached result of the most recent favorite check.
    private boolean esFav;

    public PaginaBarPresenter() {
        interaccion = new PaginaBarInteraccion(this);
    }

    public void bind(PaginaBarContract.View view) {
        this.view = view;
    }

    public void unbind() {
        view = null;
    }

    /** Reads the Bar passed through the launching intent into the model. */
    public void obtenerBar(Intent intent) {
        Bar bar = (Bar) intent.getSerializableExtra("bar");
        interaccion.setBar(bar);
    }

    public String getNombreDeBar() {
        return interaccion.getNombreDeBar();
    }

    /** Publishes a comment and registers its star rating against the bar. */
    public void enviarComentario(Comentario comentario) {
        interaccion.enviarComentario(comentario);
        calificarBar(comentario.getEstrellas());
    }

    private void calificarBar(int calificacion) {
        interaccion.actualizarEstrellas(calificacion);
    }

    public boolean esFavorito() {
        return esFav;
    }

    public void agregarAFavoritos() {
        interaccion.agregarAFavoritos();
        checkeoFavorito();
    }

    public void quitarDeFavoritos() {
        interaccion.quitarDeFavoritos();
        checkeoFavorito();
    }

    public boolean hayUsuarioConectado() {
        return interaccion.hayUsuarioConectado();
    }

    public void checkeoFavorito() {
        interaccion.checkearFavorito();
    }

    @Override
    public void onStart() {
        if (view != null) view.cargando();
    }

    /** Favorite-check finished: cache the flag, then update the view if attached. */
    @Override
    public void onComplete(boolean esFav) {
        this.esFav = esFav;
        if (view == null) return;
        view.finCargando();
        if (esFav) {
            view.agregadoAFavoritos();
        } else {
            view.quitadoDeFavoritos();
        }
    }

    @Override
    public void comentarioListo() {
        if (view != null) view.comentarioListo();
    }

    public void checkearUsuarioCalificoBar() {
        interaccion.checkearUsuarioCalificoBar();
    }

    @Override
    public void yaCalificoEsteBar() {
        if (view != null) view.yaCalificoElBar();
    }

    @Override
    public void cargaDeComentarios() {
        if (view != null) view.cargaDeComentarios();
    }

    @Override
    public void finCargaDeComentarios() {
        if (view != null) view.finCargaDeComentarios();
    }

    @Override
    public void setPuntos(Integer puntos) {
        if (view == null) return;
        view.finCargando();
        view.setPuntos(puntos);
    }

    @Override
    public void onImageLoaded(String path) {
        if (view != null) view.onImageLoaded(path);
    }

    public Intent enviarBar(Intent i) {
        return i.putExtra("bar", interaccion.getBar());
    }

    public ArrayList<Comentario> getComentarios() {
        return interaccion.getComentarios();
    }

    public void cargarComentarios() {
        interaccion.cargarComentarios();
    }

    public Bar getBar() {
        return interaccion.getBar();
    }

    public void cargarPuntosEnElBar() {
        if (view != null) view.cargando();
        interaccion.cargarPuntosEnElBar();
    }

    public void cargarImagenes() {
        interaccion.cargarImagenes();
    }

    public String getDescripcion() {
        return interaccion.getDescripcion();
    }

    public String getUbicacionDeBar() {
        return interaccion.getUbicacionDeBar();
    }

    public String getTelefonoDeBar() {
        return interaccion.getTelefonoDeBar();
    }

    public boolean esBarConOwner() {
        return interaccion.esBarConOwner();
    }

    public void visitar() {
        interaccion.visitar();
    }
}
<file_sep>package com.eriochrome.bartime.modelos;
import android.net.Uri;
import androidx.annotation.NonNull;
import com.eriochrome.bartime.contracts.BarControlContract;
import com.eriochrome.bartime.modelos.entidades.Bar;
import com.eriochrome.bartime.utils.Utils;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.FirebaseUser;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.ValueEventListener;
import com.google.firebase.storage.FirebaseStorage;
import com.google.firebase.storage.StorageReference;
import com.google.firebase.storage.UploadTask;
public class BarControlInteraccion implements BarControlContract.Interaccion {
private FirebaseUser userAuth;
private DatabaseReference refGlobal;
private BarControlContract.CompleteListener listener;
private Bar bar;
private StorageReference storageReference;
private ValueEventListener valueEventListenerAvisos = new ValueEventListener() {
@Override
public void onDataChange(@NonNull DataSnapshot dataSnapshot) {
if (dataSnapshot.hasChildren()) listener.hayAvisos();
else listener.noHayAvisos();
}
@Override
public void onCancelled(@NonNull DatabaseError databaseError) { }
};
public BarControlInteraccion(BarControlContract.CompleteListener listener) {
this.listener = listener;
storageReference = FirebaseStorage.getInstance().getReference();
userAuth = FirebaseAuth.getInstance().getCurrentUser();
refGlobal = FirebaseDatabase.getInstance().getReference();
}
@Override
public void setupBar() {
listener.onStart();
refGlobal.addListenerForSingleValueEvent(new ValueEventListener() {
@Override
public void onDataChange(@NonNull DataSnapshot dataSnapshot) {
DataSnapshot barAsociadoPath = dataSnapshot.child("usuariosBar").child(userAuth.getUid()).child("barAsociado");
if (barAsociadoPath.exists()) {
String nombreBarAsociado = barAsociadoPath.getValue(String.class);
bar = dataSnapshot.child("bares").child(nombreBarAsociado).getValue(Bar.class);
listener.onComplete(bar);
} else {
listener.onComplete(null);
}
}
@Override
public void onCancelled(@NonNull DatabaseError databaseError) {
}
});
}
@Override
public String getNombreUsuario() {
return userAuth.getDisplayName();
}
@Override
public Bar getBar() {
return bar;
}
@Override
public void subirFoto(Uri path) {
listener.onStart();
String strNumeroDeFoto = "_" + Integer.toString(bar.getCantidadDeFotos() + 1);
String nombreBar = bar.getNombre().replaceAll(" ", "_");
String caminoEnStorage = nombreBar + strNumeroDeFoto + ".jpg";
StorageReference imagenRef = storageReference.child("imagenes").child(caminoEnStorage);
UploadTask uploadTask = imagenRef.putFile(path);
uploadTask.addOnSuccessListener(taskSnapshot -> {
bar.aumentarCantidadDeFotos();
refGlobal.child("bares").child(bar.getNombre()).child("cantidadDeFotos").setValue(bar.getCantidadDeFotos());
listener.onComplete(bar);
});
}
@Override
public void checkearAvisos() {
if (bar != null) {
refGlobal.child("avisos").child(bar.getNombre())
.addValueEventListener(valueEventListenerAvisos);
}
}
@Override
public void dejarDeCheckearAvisos() {
if (bar != null) {
refGlobal.child("avisos").child(bar.getNombre())
.removeEventListener(valueEventListenerAvisos);
}
}
@Override
public void cargarImagenes() {
    // Fetches a download URL for each stored photo of the bar and forwards
    // it to the listener as it resolves. NOTE: the fetches are asynchronous,
    // so delivery order is not guaranteed to match photo index order.
    // Hoisted out of the loop: the sanitized bar name is loop-invariant.
    String nombreBar = bar.getNombre().replaceAll(" ", "_");
    for (int i = 0; i < bar.getCantidadDeFotos(); i++) {
        String path = nombreBar + Utils.getNumeroDeFoto(i) + ".jpg";
        storageReference.child("imagenes").child(path).getDownloadUrl().addOnSuccessListener(uri -> {
            listener.onImageLoaded(uri.toString());
        });
    }
}
// Description text of the resolved bar.
@Override
public String getDescripcion() {
    String descripcion = bar.getDescripcion();
    return descripcion;
}
// Name of the resolved bar (also its key under /bares).
@Override
public String getNombreBar() {
    String nombre = bar.getNombre();
    return nombre;
}
}<file_sep>package com.eriochrome.bartime.vistas;
import android.app.DatePickerDialog;
import android.os.Bundle;
import android.widget.Button;
import android.widget.DatePicker;
import android.widget.EditText;
import androidx.appcompat.app.AppCompatActivity;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.contracts.CrearSorteoContract;
import com.eriochrome.bartime.presenters.CrearSorteoPresenter;
import com.eriochrome.bartime.utils.DateFormatter;
import java.util.Calendar;
import static com.eriochrome.bartime.utils.Utils.toastShort;
/**
 * Screen where a bar owner creates a raffle ("sorteo"): picks an end date
 * and the points awarded to the winner, then submits via the presenter.
 */
public class CrearSorteoActivity extends AppCompatActivity implements
        CrearSorteoContract.View,
        DatePickerDialog.OnDateSetListener {

    private CrearSorteoPresenter presenter;
    private Button fecha;       // displays the chosen end date
    private EditText puntos;    // prize points for the winner
    private Button continuar;
    private final Calendar calendar = Calendar.getInstance();

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_crear_sorteo);

        presenter = new CrearSorteoPresenter();
        presenter.bind(this);
        presenter.obtenerBar(getIntent());

        fecha = findViewById(R.id.fecha);
        puntos = findViewById(R.id.puntos);
        continuar = findViewById(R.id.continuar);

        // The date button opens a picker preset to today's date.
        fecha.setOnClickListener(v -> mostrarSelectorDeFecha());

        // Submit only once both fields are filled in.
        continuar.setOnClickListener(v -> {
            if (datosCompletos()) {
                presenter.enviarSorteo();
            }
        });
    }

    // Opens the DatePickerDialog; the result arrives in onDateSet().
    private void mostrarSelectorDeFecha() {
        DatePickerDialog datePickerDialog = new DatePickerDialog(
                this, CrearSorteoActivity.this,
                calendar.get(Calendar.YEAR), calendar.get(Calendar.MONTH),
                calendar.get(Calendar.DAY_OF_MONTH));
        datePickerDialog.show();
    }

    // Validates the form; shows one toast per missing field so the user
    // sees every problem at once (same order as before: date, then points).
    private boolean datosCompletos() {
        boolean fechaElegida = !fecha.getText().toString().equals(getString(R.string.placeholder_fecha));
        if (!fechaElegida) {
            toastShort(this, getString(R.string.debes_elegir_fecha));
        }
        boolean puntosIngresados = !puntos.getText().toString().equals("");
        if (!puntosIngresados) {
            toastShort(this, getString(R.string.debes_recompensa_ganador));
        }
        return fechaElegida && puntosIngresados;
    }

    @Override
    protected void onDestroy() {
        presenter.unbind();
        super.onDestroy();
    }

    @Override
    public void onDateSet(DatePicker view, int year, int month, int dayOfMonth) {
        // Render the picked date on the button itself.
        fecha.setText(DateFormatter.toString(dayOfMonth, month, year));
    }

    @Override
    public String getFechaFin() {
        return fecha.getText().toString();
    }

    @Override
    public String getPuntos() {
        return puntos.getText().toString();
    }

    @Override
    public void enviado() {
        // Presenter confirmed the upload: notify and close this screen.
        toastShort(this, getString(R.string.sorteo_enviado_exito));
        finish();
    }
}
<file_sep>package com.eriochrome.bartime.modelos.entidades;
import com.eriochrome.bartime.utils.Utils;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
//import com.google.android.gms.maps.model.LatLng;
/**
 * Serializable data model for a bar, stored in Firebase Realtime Database.
 * Firebase deserializes it via reflection, so the public no-arg constructor
 * and the public getters must be kept.
 */
public class Bar implements Serializable {
    private String nombre;
    private String descripcion;
    private String ubicacion;
    private double lat;
    private double lng;
    private float estrellas;                 // running average rating
    private long calificacionesAcumuladas;   // sum of all ratings received
    private int numeroDeCalificaciones;
    // Opening and happy-hour times, keyed by Spanish day name ("Lunes", ...)
    // as produced by inicializarHorarios() / Utils.getStringDiaDeSemana().
    private HashMap<String, Integer> horariosInicial;
    private HashMap<String, Integer> horariosFinal;
    private HashMap<String, Integer> happyhourInicial;
    private HashMap<String, Integer> happyhourFinal;
    private ArrayList<String> metodosDePago;
    private int cantidadDeFotos;
    private String owner;
    private String telefono;
    // Statistics counters.
    private int visitas;
    private int cantidadFavoritos;
    private int cantidadItemsVendidos;
    private int cantidadParticipantesJuegos;
    // Required by the database (Firebase reflective deserialization).
    public Bar() {
    }
    // Creates a bar with the given name and neutral defaults everywhere else.
    public Bar(String nombre) {
        this.nombre = nombre;
        descripcion = "";
        estrellas = 0;
        calificacionesAcumuladas = 0;
        numeroDeCalificaciones = 0;
        ubicacion = "";
        lat = 0; lng = 0;
        horariosInicial = inicializarHorarios();
        horariosFinal = inicializarHorarios();
        happyhourInicial = inicializarHorarios();
        happyhourFinal = inicializarHorarios();
        metodosDePago = new ArrayList<>();
        cantidadDeFotos = 1;
        owner = "";
    }
    public String getNombre() {
        return nombre;
    }
    public String getDescripcion() {
        return descripcion;
    }
    public String getUbicacion() {
        return ubicacion;
    }
    public float getEstrellas() {
        return estrellas;
    }
    public long getCalificacionesAcumuladas() {
        return calificacionesAcumuladas;
    }
    public int getNumeroDeCalificaciones() {
        return numeroDeCalificaciones;
    }
    public HashMap<String, Integer> getHorariosInicial() {
        return horariosInicial;
    }
    public HashMap<String, Integer> getHorariosFinal() {
        return horariosFinal;
    }
    public HashMap<String, Integer> getHappyhourInicial() {
        return happyhourInicial;
    }
    public HashMap<String, Integer> getHappyhourFinal() {
        return happyhourFinal;
    }
    public ArrayList<String> getMetodosDePago() {
        return metodosDePago;
    }
    public double getLat() {
        return lat;
    }
    public double getLng() {
        return lng;
    }
    public int getCantidadDeFotos() {
        return cantidadDeFotos;
    }
    public String getOwner() {
        return owner;
    }
    public String getTelefono() {
        return telefono;
    }
    public int getVisitas() {
        return visitas;
    }
    public int getCantidadFavoritos() {
        return cantidadFavoritos;
    }
    public int getCantidadItemsVendidos() {
        return cantidadItemsVendidos;
    }
    public int getCantidadParticipantesJuegos() {
        return cantidadParticipantesJuegos;
    }
    // Folds a new rating into the running average (float division on purpose).
    public void actualizarEstrellas(int calificacion) {
        calificacionesAcumuladas += calificacion;
        numeroDeCalificaciones++;
        estrellas = (float)calificacionesAcumuladas / numeroDeCalificaciones;
    }
    // Replaces both opening-hour maps wholesale.
    public void agregarHorarios(HashMap<String, Integer> horariosInicial, HashMap<String, Integer> horariosFinal) {
        this.horariosInicial = horariosInicial;
        this.horariosFinal = horariosFinal;
    }
    // Replaces both happy-hour maps wholesale.
    public void agregarHappyhourHorarios(HashMap<String, Integer> happyhourInicial, HashMap<String, Integer> happyhourFinal) {
        this.happyhourInicial = happyhourInicial;
        this.happyhourFinal = happyhourFinal;
    }
    public void agregarMetodosDePago(ArrayList<String> metodosDePago) {
        this.metodosDePago = metodosDePago;
    }
    // True when "now" falls inside today's opening window.
    // NOTE(review): get(diaDeHoy) unboxes to int — if the map is missing
    // today's key (e.g. a bar deserialized without hours) this throws
    // NullPointerException; confirm all stored bars carry full maps.
    private boolean estaAbierto() {
        Calendar ahora = Calendar.getInstance();
        ahora.setTime(new Date());
        String diaDeHoy = Utils.getStringDiaDeSemana(ahora);
        return Utils.estaEntreHoras(horariosInicial.get(diaDeHoy), horariosFinal.get(diaDeHoy), ahora);
    }
    // True when "now" falls inside today's happy-hour window.
    // Same unboxing caveat as estaAbierto().
    private boolean hayHappyHour() {
        Calendar ahora = Calendar.getInstance();
        ahora.setTime(new Date());
        String diaDeHoy = Utils.getStringDiaDeSemana(ahora);
        return Utils.estaEntreHoras(happyhourInicial.get(diaDeHoy), happyhourFinal.get(diaDeHoy), ahora);
    }
    // Fresh schedule map with every day of the week set to hour 0.
    private HashMap<String, Integer> inicializarHorarios() {
        HashMap<String, Integer> devolver = new HashMap<>();
        devolver.put("Domingo", 0);
        devolver.put("Lunes", 0);
        devolver.put("Martes", 0);
        devolver.put("Miercoles", 0);
        devolver.put("Jueves", 0);
        devolver.put("Viernes", 0);
        devolver.put("Sabado", 0);
        return devolver;
    }
    public void setUbicacion(String direccion) {
        this.ubicacion = direccion;
    }
    public void setLatLng(double lat, double lng) {
        this.lat = lat;
        this.lng = lng;
    }
    // Returns true when this bar satisfies every filter the user activated.
    public boolean contieneFiltros(Filtro filtro) {
        boolean contiene = true;
        if (filtro.filtroAbierto()) {
            if (!estaAbierto()) contiene = false;
        }
        // NOTE(review): the happy-hour filter is only evaluated when the
        // "open now" filter is also active — confirm that is intentional.
        if (filtro.filtroAbierto() && filtro.filtroHappyHour()) {
            if (!hayHappyHour()) contiene = false;
        }
        if (filtro.filtroPagoEfectivo()) {
            if(metodosDePago == null || !metodosDePago.contains("efectivo")) contiene = false;
        }
        if (filtro.filtroPagoCredito()) {
            if(metodosDePago == null || !metodosDePago.contains("tarjeta de credito")) contiene = false;
        }
        if (filtro.filtroPagoDebito()) {
            if(metodosDePago == null || !metodosDePago.contains("tarjeta de debito")) contiene = false;
        }
        return contiene;
    }
    public void aumentarCantidadDeFotos() {
        cantidadDeFotos++;
    }
    public void setNombre(String nombreBar) {
        nombre = nombreBar;
    }
    public void setDescripcion(String desc) {
        descripcion = desc;
    }
    // When an owner claims a pre-existing bar, carry over its rating history.
    public void reclamar(Bar bar) {
        estrellas = bar.getEstrellas();
        numeroDeCalificaciones = bar.getNumeroDeCalificaciones();
        calificacionesAcumuladas = bar.getCalificacionesAcumuladas();
    }
    public void setOwner(String displayName) {
        owner = displayName;
    }
    public void setTelefono(String telefono) {
        this.telefono = telefono;
    }
}
<file_sep>package com.eriochrome.bartime.vistas;
import android.content.Intent;
import android.graphics.Typeface;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.RatingBar;
import android.widget.RelativeLayout;
import android.widget.TextView;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import androidx.fragment.app.DialogFragment;
import com.daimajia.slider.library.SliderLayout;
import com.daimajia.slider.library.SliderTypes.BaseSliderView;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.contracts.PaginaBarContract;
import com.eriochrome.bartime.modelos.entidades.Comentario;
import com.eriochrome.bartime.presenters.PaginaBarPresenter;
import com.eriochrome.bartime.utils.MySliderView;
import com.eriochrome.bartime.vistas.dialogs.DialogComentario;
import com.eriochrome.bartime.vistas.dialogs.DialogCrearCuenta;
import com.eriochrome.bartime.vistas.dialogs.DialogMostrarHorarios;
import com.firebase.ui.auth.AuthMethodPickerLayout;
import com.firebase.ui.auth.AuthUI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static com.eriochrome.bartime.utils.Utils.toastShort;
/**
 * Detail page for a single bar: photos slider, description, location,
 * comments, favourites toggle, shop and games entry points.
 * Acts as the View of the PaginaBar MVP triplet and hosts the comment and
 * "create account" dialogs.
 */
public class PaginaBarActivity extends AppCompatActivity implements PaginaBarContract.View, DialogComentario.ComentarioListener, DialogCrearCuenta.Listener {
    /**
     * TODO: the photo ordering bug happens because images load asynchronously.
     * Find a way to synchronize them.
     * Possibility: wait until all of them have loaded (may be slow).
     */
    private static final int RC_SIGN_IN = 1;
    private static final int TAG_NO_COMENTARIOS = 0;
    private RelativeLayout paginaBarRl;
    private TextView nombreBar;
    private TextView descripcion;
    private TextView ubicacion;
    private TextView telefono;
    private TextView verHorarios;
    private Button calificarBar;
    private ImageButton favorito;
    private Button verMas;
    private TextView puntosText;   // "(N puntos)" next to the bar name
    private Button tienda;
    private Button juegos;
    private LinearLayout cajaComentarios;   // holds up to 3 preview comments
    private SliderLayout sliderShow;        // photo carousel
    private ProgressBar progressBar;
    private ImageButton volver;
    private PaginaBarPresenter presenter;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_pagina_bar);
        presenter = new PaginaBarPresenter();
        presenter.bind(this);
        // The bar to display travels in the launching intent.
        presenter.obtenerBar(getIntent());
        progressBar = findViewById(R.id.progressBar);
        progressBar.setVisibility(View.GONE);
        volver = findViewById(R.id.volver);
        volver.setOnClickListener(v -> finish());
        paginaBarRl = findViewById(R.id.pagina_bar_rl);
        nombreBar = findViewById(R.id.nombre_bar);
        descripcion = findViewById(R.id.descripcion);
        ubicacion = findViewById(R.id.ubicacion);
        telefono = findViewById(R.id.telefono);
        verHorarios = findViewById(R.id.horarios);
        calificarBar = findViewById(R.id.calificarBar);
        favorito = findViewById(R.id.favorito);
        verMas = findViewById(R.id.ver_mas);
        puntosText = findViewById(R.id.puntos_text);
        tienda = findViewById(R.id.tienda);
        juegos = findViewById(R.id.juegos);
        cajaComentarios = findViewById(R.id.caja_comentarios);
        sliderShow = findViewById(R.id.slider);
        nombreBar.setText(presenter.getNombreDeBar());
        setupDescripcion();
        ubicacion.setText(presenter.getUbicacionDeBar());
        telefono.setText(presenter.getTelefonoDeBar());
        // Hidden until the user's points for this bar are loaded (onResume).
        puntosText.setVisibility(View.INVISIBLE);
        setupListeners();
        presenter.cargarComentarios();
        presenter.cargarImagenes();
        presenter.visitar();   // count this visit for the bar's statistics
    }
    // Applies the light font and falls back to a placeholder when the bar
    // has no description yet.
    private void setupDescripcion() {
        Typeface tf = Typeface.createFromAsset(getAssets(),"fonts/Lato-Light.ttf");
        descripcion.setTypeface(tf);
        String desc = presenter.getDescripcion();
        if (!desc.equals(""))
            descripcion.setText(desc);
        else
            descripcion.setText(getString(R.string.aun_sin_descripcion));
    }
    @Override
    protected void onStart() {
        super.onStart();
        // Unclaimed bars have no owner-provided phone/hours to show.
        if (!presenter.esBarConOwner()) {
            telefono.setVisibility(View.GONE);
            verHorarios.setVisibility(View.GONE);
        }
    }
    @Override
    protected void onResume() {
        super.onResume();
        // Refresh per-user state each time we come back to the foreground.
        if (presenter.hayUsuarioConectado()) {
            presenter.cargarPuntosEnElBar();
            presenter.checkeoFavorito();
            presenter.checkearUsuarioCalificoBar();
        }
    }
    @Override
    public void agregadoAFavoritos() {
        // Filled heart = favourited.
        favorito.setImageResource(R.drawable.ic_favorite_24dp);
    }
    @Override
    public void quitadoDeFavoritos() {
        // Outlined heart = not favourited.
        favorito.setImageResource(R.drawable.ic_favorite_border_violet_24dp);
    }
    @Override
    public void cargando() {
        // Swap the page content for the spinner while loading.
        progressBar.setVisibility(View.VISIBLE);
        paginaBarRl.setVisibility(View.GONE);
    }
    @Override
    public void finCargando() {
        progressBar.setVisibility(View.GONE);
        paginaBarRl.setVisibility(View.VISIBLE);
    }
    // Wires every button; actions that need an account fall back to the
    // "create account" dialog for anonymous users.
    private void setupListeners() {
        calificarBar.setOnClickListener(view -> {
            if (presenter.hayUsuarioConectado()) {
                DialogFragment comentarioDialog = new DialogComentario();
                comentarioDialog.show(getSupportFragmentManager(), "comentario");
            } else {
                DialogCrearCuenta crearCuentaDialog = new DialogCrearCuenta();
                crearCuentaDialog.setTexto(getString(R.string.necesitas_cuenta_calificar));
                crearCuentaDialog.show(getFragmentManager(), "crearCuentaDialog");
            }
        });
        favorito.setOnClickListener(v -> {
            if (presenter.hayUsuarioConectado()) {
                // Toggle favourite state.
                if (presenter.esFavorito()) {
                    presenter.quitarDeFavoritos();
                } else {
                    presenter.agregarAFavoritos();
                }
            } else {
                DialogCrearCuenta crearCuentaDialog = new DialogCrearCuenta();
                crearCuentaDialog.setTexto(getString(R.string.necesitas_cuenta_favoritos));
                crearCuentaDialog.show(getFragmentManager(), "crearCuentaDialog");
            }
        });
        verMas.setOnClickListener(v -> {
            // Full comment list; the bar rides along in the intent.
            Intent i = new Intent(PaginaBarActivity.this, ComentariosActivity.class);
            i = presenter.enviarBar(i);
            startActivity(i);
        });
        ubicacion.setOnClickListener(v -> {
            Intent i = new Intent(PaginaBarActivity.this, VerMapaActivity.class);
            i = presenter.enviarBar(i);
            startActivity(i);
        });
        verHorarios.setOnClickListener(v -> {
            DialogMostrarHorarios dialogMostrarHorarios = new DialogMostrarHorarios();
            dialogMostrarHorarios.setHorarios(presenter.getBar());
            dialogMostrarHorarios.show(getSupportFragmentManager(), "mostrarHorarios");
        });
        tienda.setOnClickListener(v -> {
            if (presenter.hayUsuarioConectado()) {
                Intent i = new Intent(PaginaBarActivity.this, TiendaActivity.class);
                i = presenter.enviarBar(i);
                startActivity(i);
            } else {
                DialogCrearCuenta dialogCrearCuenta = new DialogCrearCuenta();
                dialogCrearCuenta.setTexto(getString(R.string.necesitas_cuenta_tienda));
                dialogCrearCuenta.show(getFragmentManager(), "crearCuentaDialog");
            }
        });
        juegos.setOnClickListener(v -> {
            Intent i = new Intent(PaginaBarActivity.this, JuegosDelBarActivity.class);
            i = presenter.enviarBar(i);
            startActivity(i);
        });
    }
    // Launches FirebaseUI sign-in with a custom layout (email + Google).
    @Override
    public void login() {
        AuthMethodPickerLayout customLayout = new AuthMethodPickerLayout
                .Builder(R.layout.custom_login_ui)
                .setGoogleButtonId(R.id.google_login)
                .setEmailButtonId(R.id.normal_login)
                .build();
        List<AuthUI.IdpConfig> providers = Arrays.asList(
                new AuthUI.IdpConfig.EmailBuilder().build(),
                new AuthUI.IdpConfig.GoogleBuilder().build());
        startActivityForResult(
                AuthUI.getInstance()
                        .createSignInIntentBuilder()
                        .setAuthMethodPickerLayout(customLayout)
                        .setAvailableProviders(providers)
                        .setTheme(R.style.AppTheme)
                        .setLogo(R.drawable.bar_tap_2)
                        .build(),
                RC_SIGN_IN);
    }
    @Override
    protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == RC_SIGN_IN) {
            if (resultCode == RESULT_OK) {
                // Successful sign-in: close this page without animation so
                // the caller can rebuild it with the signed-in state.
                overridePendingTransition(0,0);
                finish();
            }
        }
    }
    // Callback from DialogComentario with the user's finished review.
    @Override
    public void enviarComentario(Comentario comentario) {
        presenter.enviarComentario(comentario);
        toastShort(this, getString(R.string.enviando));
    }
    @Override
    public void comentarioListo() {
        toastShort(this, getString(R.string.comentario_enviado));
        // One review per user: hide the button once submitted.
        calificarBar.setVisibility(View.GONE);
    }
    @Override
    public void yaCalificoElBar() {
        calificarBar.setVisibility(View.GONE);
    }
    @Override
    public void cargaDeComentarios() {
        // Reset the preview box before (re)filling it.
        cajaComentarios.removeAllViews();
    }
    // Renders up to three preview comments, or a "no comments" placeholder.
    @Override
    public void finCargaDeComentarios() {
        ArrayList<Comentario> listaComentarios = presenter.getComentarios();
        int cantidadDeComentarios = listaComentarios.size();
        if (cantidadDeComentarios == 0) {
            View sinComentariosView = View.inflate(this, R.layout.item_no_hay_comentarios, null);
            sinComentariosView.setTag(TAG_NO_COMENTARIOS);
            cajaComentarios.addView(sinComentariosView);
        } else {
            int i = 0;
            while (i < cantidadDeComentarios && i < 3) {
                View comentarioView = View.inflate(this, R.layout.item_comentario, null);
                ponerValoresAComentario(comentarioView, listaComentarios.get(i));
                comentarioView.setTag(i);
                cajaComentarios.addView(comentarioView);
                i++;
            }
        }
    }
    // Fills one comment row: author, stars, text.
    private void ponerValoresAComentario(View view, Comentario comentario) {
        TextView nombreUsuario = view.findViewById(R.id.nombre_usuario);
        nombreUsuario.setText(comentario.getComentador());
        RatingBar ratingBar = view.findViewById(R.id.rating_bar);
        ratingBar.setRating(comentario.getEstrellas());
        TextView comentarioTexto = view.findViewById(R.id.comentario);
        comentarioTexto.setText(comentario.getEstrellas() == 0 ? comentario.getComentarioText() : comentario.getComentarioText());
    }
    @Override
    public void setPuntos(Integer puntos) {
        // Only show the badge when the user actually has points here.
        if (puntos != 0) {
            String texto = "(" + puntos + " puntos)";
            puntosText.setText(texto);
            puntosText.setVisibility(View.VISIBLE);
        }
    }
    // Called once per photo URL as downloads resolve (order not guaranteed —
    // see the class-level TODO).
    @Override
    public void onImageLoaded(String path) {
        MySliderView sliderView = new MySliderView(this);
        sliderView.image(path)
                .setScaleType(BaseSliderView.ScaleType.CenterInside);
        sliderShow.addSlider(sliderView);
    }
    @Override
    protected void onStop() {
        // Stop the carousel's auto-advance while in the background.
        sliderShow.stopAutoCycle();
        super.onStop();
    }
    @Override
    protected void onDestroy() {
        presenter.unbind();
        super.onDestroy();
    }
}
<file_sep>package com.eriochrome.bartime.vistas.dialogs;
import android.app.Dialog;
import android.content.Context;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.fragment.app.DialogFragment;
import androidx.appcompat.app.AlertDialog;
import android.widget.EditText;
import android.widget.RatingBar;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.modelos.entidades.Comentario;
import static com.eriochrome.bartime.utils.Utils.toastShort;
/**
 * Dialog where a user rates a bar (stars + free-text comment).
 * The host activity must implement {@link ComentarioListener} to receive
 * the finished review.
 */
public class DialogComentario extends DialogFragment {

    /** Implemented by the host activity to receive the finished review. */
    public interface ComentarioListener {
        void enviarComentario(Comentario comentario);
    }

    ComentarioListener listener;

    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
        // Same behaviour as catching ClassCastException: warn via toast when
        // the host does not implement the callback interface.
        if (context instanceof ComentarioListener) {
            listener = (ComentarioListener) context;
        } else {
            toastShort(context, "No se implemento la interfaz");
        }
    }

    @NonNull
    @Override
    public Dialog onCreateDialog(@Nullable Bundle savedInstanceState) {
        AlertDialog.Builder builder = new AlertDialog.Builder(getActivity())
                .setView(R.layout.dialog_comentario)
                .setTitle("Calificar al Bar")
                .setPositiveButton(R.string.listo, (dialog, which) -> {
                    // Collect rating + text and hand them to the host.
                    listener.enviarComentario(crearComentario());
                    dismiss();
                });
        return builder.create();
    }

    // Reads the rating bar and the comment box out of the dialog's view.
    private Comentario crearComentario() {
        AlertDialog dialogo = (AlertDialog) getDialog();
        RatingBar barraEstrellas = dialogo.findViewById(R.id.ratingBar);
        EditText textoComentario = dialogo.findViewById(R.id.comentario);
        Comentario resultado = new Comentario();
        resultado.setEstrellas((int) barraEstrellas.getRating());
        resultado.setComentarioText(textoComentario.getText().toString());
        return resultado;
    }
}
<file_sep>package com.eriochrome.bartime.presenters;
import android.content.Intent;
import com.eriochrome.bartime.contracts.CrearSorteoContract;
import com.eriochrome.bartime.modelos.CrearSorteoInteraccion;
import com.eriochrome.bartime.modelos.entidades.Bar;
/**
 * Presenter for the "create raffle" screen: shuttles form values from the
 * view to the interaccion and relays the completion callback.
 */
public class CrearSorteoPresenter implements CrearSorteoContract.Listener {

    private CrearSorteoContract.Interaccion interaccion;
    private CrearSorteoContract.View view;

    public CrearSorteoPresenter() {
        interaccion = new CrearSorteoInteraccion(this);
    }

    /** Attaches the view (activity) this presenter drives. */
    public void bind(CrearSorteoContract.View view) {
        this.view = view;
    }

    /** Detaches the view; callbacks arriving afterwards become no-ops. */
    public void unbind() {
        view = null;
    }

    /** Reads the form values from the view and forwards them for upload. */
    public void enviarSorteo() {
        String fechaFin = view.getFechaFin();
        String puntos = view.getPuntos();
        interaccion.enviarSorteo(fechaFin, puntos);
    }

    @Override
    public void enviado() {
        // BUG FIX: the interaccion may complete after unbind() (e.g. the
        // activity was destroyed mid-upload); guard against the null view
        // instead of crashing with a NullPointerException.
        if (view != null) {
            view.enviado();
        }
    }

    /** Extracts the Bar carried in the launching intent's "bar" extra. */
    public void obtenerBar(Intent intent) {
        Bar bar = (Bar) intent.getSerializableExtra("bar");
        interaccion.setBar(bar);
    }
}<file_sep>package com.eriochrome.bartime.modelos;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.eriochrome.bartime.contracts.TiendaContract;
import com.eriochrome.bartime.modelos.entidades.Bar;
import com.eriochrome.bartime.modelos.entidades.ComprobanteDeCompra;
import com.eriochrome.bartime.modelos.entidades.ItemTienda;
import com.eriochrome.bartime.utils.CreadorDeAvisos;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.FirebaseUser;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.MutableData;
import com.google.firebase.database.Transaction;
import com.google.firebase.database.ValueEventListener;
import java.util.ArrayList;
/**
 * Model layer for the bar's shop: streams the item catalogue and the user's
 * point balance, and performs purchases (point deduction + receipt + notice).
 */
public class TiendaInteraccion implements TiendaContract.Interaccion {
    private Bar bar;
    private DatabaseReference ref;       // database root
    private FirebaseUser authUser;
    private ArrayList<ItemTienda> itemsTienda;
    private TiendaContract.Listener listener;
    private int misPuntos;               // user's point balance for this bar
    // Streams the shop catalogue and the user's points; re-fires on every
    // database change until dejarDeEscucharCambios() is called.
    private ValueEventListener valueEventListener = new ValueEventListener() {
        @Override
        public void onDataChange(@NonNull DataSnapshot dataSnapshot) {
            itemsTienda.clear();
            DataSnapshot snapItems = dataSnapshot.child("tiendas").child(bar.getNombre());
            for (DataSnapshot ds : snapItems.getChildren()) {
                ItemTienda itemTienda = ds.getValue(ItemTienda.class);
                itemsTienda.add(itemTienda);
            }
            // Points default to 0 for users with no history at this bar.
            DataSnapshot snapPuntos = dataSnapshot.child("puntos").child(authUser.getDisplayName()).child(bar.getNombre());
            Integer puntos;
            if (snapPuntos.exists()) {
                puntos = snapPuntos.getValue(Integer.class);
            } else {
                puntos = 0;
            }
            listener.listo(itemsTienda, puntos);
        }
        @Override
        public void onCancelled(@NonNull DatabaseError databaseError) { }
    };
    // NOTE(review): this class keys user data by display name, assuming it is
    // non-null and unique — confirm that holds for all sign-in providers.
    public TiendaInteraccion(TiendaContract.Listener listener) {
        this.listener = listener;
        ref = FirebaseDatabase.getInstance().getReference();
        authUser = FirebaseAuth.getInstance().getCurrentUser();
        itemsTienda = new ArrayList<>();
    }
    @Override
    public void setBar(Bar bar) {
        this.bar = bar;
    }
    // Registers the catalogue/points listener. Call setBar() first — the
    // listener dereferences this.bar on every change.
    @Override
    public void setupTienda() {
        ref.addValueEventListener(valueEventListener);
    }
    @Override
    public void guardarPuntos(Integer misPuntos) {
        this.misPuntos = misPuntos;
    }
    @Override
    public int getPuntos() {
        return misPuntos;
    }
    // Deducts the item's cost, writes a purchase receipt and notifies the bar.
    // NOTE(review): no affordability check here — presumably the caller
    // verifies misPuntos >= costo before invoking this; confirm, otherwise
    // the balance can go negative.
    @Override
    public void comprarItem(ItemTienda itemTienda) {
        contabilizarCompra();
        misPuntos -= itemTienda.getCosto();
        ref.child("puntos").child(authUser.getDisplayName()).child(bar.getNombre()).setValue(misPuntos);
        ComprobanteDeCompra comprobante = new ComprobanteDeCompra(itemTienda, bar.getNombre(), authUser.getDisplayName());
        ref.child("comprobantesDeCompra").child(authUser.getDisplayName()).child(bar.getNombre())
                .child(String.valueOf(comprobante.getNroComprobante())).setValue(comprobante);
        CreadorDeAvisos creadorDeAvisos = new CreadorDeAvisos();
        creadorDeAvisos.avisarCompraDeDescuento(itemTienda, authUser.getDisplayName(), bar);
    }
    // Atomically bumps the bar's items-sold counter (statistics only).
    private void contabilizarCompra() {
        // For statistics.
        ref.child("bares").child(bar.getNombre()).child("cantidadItemsVendidos").runTransaction(new Transaction.Handler() {
            @NonNull
            @Override
            public Transaction.Result doTransaction(@NonNull MutableData mutableData) {
                Integer valorActual = mutableData.getValue(Integer.class);
                if (valorActual == null) {
                    // Counter did not exist yet: first sale.
                    mutableData.setValue(1);
                }
                else {
                    mutableData.setValue(valorActual + 1);
                }
                return Transaction.success(mutableData);
            }
            @Override
            public void onComplete(@Nullable DatabaseError databaseError, boolean b, @Nullable DataSnapshot dataSnapshot) {}
        });
    }
    // Detaches the catalogue/points listener registered by setupTienda().
    @Override
    public void dejarDeEscucharCambios() {
        ref.removeEventListener(valueEventListener);
    }
}<file_sep>package com.eriochrome.bartime.modelos;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.eriochrome.bartime.contracts.ListadosContract;
import com.eriochrome.bartime.modelos.entidades.Sorteo;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.FirebaseUser;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.MutableData;
import com.google.firebase.database.Transaction;
import com.google.firebase.database.ValueEventListener;
/**
 * Model layer for the listings screen: session queries, notice watching,
 * raffle-referral bookkeeping and user-profile mirroring.
 */
public class ListadosInteraccion implements ListadosContract.Interaccion {

    private final ListadosContract.CompleteListener listener;
    private FirebaseAuth auth;
    private DatabaseReference refGlobal;    // database root
    private DatabaseReference refUsuarios;  // /usuarios

    // Fires whenever the signed-in user's "avisos" node changes; relays
    // whether any pending notices exist.
    private ValueEventListener valueEventListener = new ValueEventListener() {
        @Override
        public void onDataChange(@NonNull DataSnapshot dataSnapshot) {
            if (dataSnapshot.hasChildren()) listener.hayAvisos();
            else listener.noHayAvisos();
        }
        @Override
        public void onCancelled(@NonNull DatabaseError databaseError) { }
    };

    public ListadosInteraccion(ListadosContract.CompleteListener listener) {
        this.listener = listener;
        auth = FirebaseAuth.getInstance();
        refGlobal = FirebaseDatabase.getInstance().getReference();
        refUsuarios = refGlobal.child("usuarios");
    }

    /** True when a Firebase user is currently signed in. */
    @Override
    public boolean estaConectado() {
        return auth.getCurrentUser() != null;
    }

    // NOTE(review): the two methods below assume a signed-in user — callers
    // are expected to check estaConectado() first; confirm at call sites.
    @Override
    public void checkearAvisos() {
        refGlobal.child("avisos").child(auth.getCurrentUser().getDisplayName())
                .addValueEventListener(valueEventListener);
    }

    @Override
    public void dejarDeCheckearAvisos() {
        refGlobal.child("avisos").child(auth.getCurrentUser().getDisplayName())
                .removeEventListener(valueEventListener);
    }

    /**
     * Resolves a referrer UID to a display name and records the referral for
     * the given game. Self-referrals and unknown referrers are ignored.
     */
    @Override
    public void anotarReferrer(String referrerUid, String gameID) {
        refUsuarios.addListenerForSingleValueEvent(new ValueEventListener() {
            @Override
            public void onDataChange(@NonNull DataSnapshot dataSnapshot) {
                String nombre = dataSnapshot.child(referrerUid).child("nombre").getValue(String.class);
                // BUG FIX: guard nombre against null before dereferencing it —
                // an unknown referrer UID yields a null name, and the original
                // called nombre.equals(...) first, throwing NullPointerException.
                if (auth.getCurrentUser() != null && nombre != null
                        && !nombre.equals(auth.getCurrentUser().getDisplayName())) {
                    listener.anotarConNombre(nombre, gameID);
                }
            }
            @Override
            public void onCancelled(@NonNull DatabaseError databaseError) { }
        });
    }

    /** Atomically increments the invite counter under /invitadosSorteo/<gameID>/<nombre>. */
    @Override
    public void anotarConNombre(String nombre, String gameID) {
        refGlobal.child("invitadosSorteo").child(gameID).child(nombre)
                .runTransaction(new Transaction.Handler() {
                    @NonNull
                    @Override
                    public Transaction.Result doTransaction(@NonNull MutableData mutableData) {
                        Integer valorActual = mutableData.getValue(Integer.class);
                        if (valorActual == null) {
                            // First invite recorded for this user.
                            mutableData.setValue(1);
                        } else {
                            mutableData.setValue(valorActual + 1);
                        }
                        return Transaction.success(mutableData);
                    }
                    @Override
                    public void onComplete(@Nullable DatabaseError databaseError, boolean b, @Nullable DataSnapshot dataSnapshot) {}
                });
    }

    /** One-shot read of the Sorteo with the given id; handed to the listener. */
    @Override
    public void obtenerSorteoConId(String gameID) {
        refGlobal.child("juegos").child("Sorteo").addListenerForSingleValueEvent(new ValueEventListener() {
            @Override
            public void onDataChange(@NonNull DataSnapshot dataSnapshot) {
                Sorteo sorteo = dataSnapshot.child(gameID).getValue(Sorteo.class);
                listener.abrirSorteo(sorteo);
            }
            @Override
            public void onCancelled(@NonNull DatabaseError databaseError) {}
        });
    }

    /** Mirrors the signed-in user's display name under /usuarios/<uid>/nombre. */
    @Override
    public void subirUsuarioADatabase() {
        FirebaseUser user = auth.getCurrentUser();
        if (user != null) {
            String uid = user.getUid();
            refUsuarios.child(uid).child("nombre").setValue(user.getDisplayName());
        }
    }

    /** Display name of the current user, or "Invitado" when browsing anonymously. */
    @Override
    public String getNombreUsuario() {
        FirebaseUser user = auth.getCurrentUser();
        if (user != null) {
            return user.getDisplayName();
        } else {
            return "Invitado";
        }
    }
}
<file_sep>package com.eriochrome.bartime.vistas;
import android.content.Context;
import android.os.Bundle;
import androidx.fragment.app.Fragment;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputMethodManager;
import android.widget.EditText;
import android.widget.ProgressBar;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.adapters.EspacioVerticalDecorator;
import com.eriochrome.bartime.adapters.ListaBaresFavoritosAdapter;
import com.eriochrome.bartime.contracts.FavoritosFragmentContract;
import com.eriochrome.bartime.modelos.entidades.Bar;
import com.eriochrome.bartime.presenters.FavoritosFragmentPresenter;
import java.util.ArrayList;
import java.util.Objects;
import static com.eriochrome.bartime.utils.Utils.toastShort;
/**
 * Fragment listing the user's favourite bars with a text search box.
 * Refreshes on every onResume() so changes made elsewhere show up.
 */
public class ListadoFavoritosFragment extends Fragment implements FavoritosFragmentContract.View {

    private EditText buscar;
    private RecyclerView baresRecyclerView;
    private ListaBaresFavoritosAdapter baresAdapter;
    private ProgressBar loading;
    private FavoritosFragmentPresenter presenter;

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_favoritos, container, false);
        presenter = new FavoritosFragmentPresenter();
        presenter.bind(this);

        // Pressing the keyboard's "done" action runs a search over favourites.
        buscar = view.findViewById(R.id.buscar);
        buscar.setOnEditorActionListener((textView, actionId, keyEvent) -> {
            if (actionId == EditorInfo.IME_ACTION_DONE) {
                presenter.buscar(buscar.getText().toString());
                ocultarTeclado();
                return true;   // event consumed
            }
            return false;
        });

        loading = view.findViewById(R.id.progressBar);
        loading.setVisibility(View.GONE);

        baresAdapter = new ListaBaresFavoritosAdapter(getActivity());
        baresRecyclerView = view.findViewById(R.id.recycler_view);
        baresRecyclerView.setHasFixedSize(true);
        setupRecyclerView();
        baresRecyclerView.setAdapter(baresAdapter);
        return view;
    }

    @Override
    public void onResume() {
        super.onResume();
        // Reload the list every time the fragment becomes visible again.
        presenter.mostrarFavoritos();
    }

    @Override
    public void cargando() {
        // Loading started: show the spinner and drop stale results.
        loading.setVisibility(View.VISIBLE);
        baresAdapter.clear();
    }

    @Override
    public void finCargando(ArrayList<Bar> listaBares) {
        // Idiom fix: isEmpty() instead of size() == 0.
        if (listaBares.isEmpty()) {
            toastShort(getActivity(), getString(R.string.no_hay_resultados));
        }
        loading.setVisibility(View.GONE);
        baresAdapter.setItems(listaBares);
    }

    // Vertical list with a fixed gap between rows.
    private void setupRecyclerView() {
        RecyclerView.LayoutManager layoutManager = new LinearLayoutManager(getActivity());
        int espacioVertical = 30;
        EspacioVerticalDecorator espacioVerticalDecorator = new EspacioVerticalDecorator(espacioVertical);
        baresRecyclerView.setLayoutManager(layoutManager);
        baresRecyclerView.addItemDecoration(espacioVerticalDecorator);
    }

    // Hides the soft keyboard after a search is submitted.
    private void ocultarTeclado(){
        InputMethodManager imm = (InputMethodManager) Objects.requireNonNull(getActivity())
                .getSystemService(Context.INPUT_METHOD_SERVICE);
        if (imm != null) {
            imm.toggleSoftInput(InputMethodManager.HIDE_IMPLICIT_ONLY, 0);
        }
    }

    @Override
    public void onDestroy() {
        presenter.unbind();
        super.onDestroy();
    }
}
<file_sep>package com.eriochrome.bartime.modelos;
import com.eriochrome.bartime.contracts.CrearTriviaContract;
import com.eriochrome.bartime.modelos.entidades.Bar;
import com.eriochrome.bartime.modelos.entidades.Trivia;
/**
 * In-memory model for the trivia-creation flow: holds the owning bar and the
 * Trivia object being built up step by step.
 */
public class CrearTriviaInteraccion implements CrearTriviaContract.Interaccion {

    private Bar bar;        // bar that owns the trivia being created
    private Trivia trivia;  // trivia under construction

    /** Starts the flow with a fresh, empty Trivia. */
    public CrearTriviaInteraccion() {
        this.trivia = new Trivia();
    }

    @Override
    public void setBar(Bar bar) {
        this.bar = bar;
    }

    @Override
    public Bar getBar() {
        return this.bar;
    }

    @Override
    public Trivia getTrivia() {
        return this.trivia;
    }

    /** First creation step: record the title and the planned question count. */
    @Override
    public void comenzarCreacionTrivia(String titulo, int cantPreguntas) {
        trivia.setCantPreguntas(cantPreguntas);
        trivia.setTitulo(titulo);
    }
}<file_sep>package com.eriochrome.bartime.modelos;
import com.eriochrome.bartime.contracts.PaginaTriviaContract;
import com.eriochrome.bartime.modelos.entidades.Juego;
import com.eriochrome.bartime.modelos.entidades.Trivia;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.ValueEventListener;
import androidx.annotation.NonNull;
/**
 * Interaction layer for a trivia detail page: reads participant/winner
 * counts for one trivia from Firebase under /juegos/Trivia/&lt;id&gt;.
 */
public class PaginaTriviaInteraccion implements PaginaTriviaContract.Interaccion {
    private Trivia trivia;
    private PaginaTriviaContract.Listener listener;
    // Root node for all trivia games in the realtime database.
    private DatabaseReference ref;
    public PaginaTriviaInteraccion(PaginaTriviaContract.Listener listener) {
        this.listener = listener;
        ref = FirebaseDatabase.getInstance().getReference().child("juegos").child("Trivia");
    }
    // The contract passes a generic Juego; this page only handles Trivia,
    // so the downcast is assumed safe — ClassCastException otherwise.
    @Override
    public void setTrivia(Juego juego) {
        this.trivia = (Trivia) juego;
    }
/**
 * Fetches this trivia's participant and winner counts once and reports
 * them to the listener. Errors/cancellations are silently ignored
 * (preserved from the original — the listener is simply never called).
 */
@Override
public void cargarDatosParticipantes() {
    ref.child(trivia.getID()).addListenerForSingleValueEvent(new ValueEventListener() {
        @Override
        public void onDataChange(@NonNull DataSnapshot dataSnapshot) {
            int cantParticipantes = (int) dataSnapshot.child("participantes").getChildrenCount();
            // getValue() returns null when "cantGanadores" is absent; check
            // explicitly instead of catching the NullPointerException that
            // auto-unboxing used to throw.
            Integer ganadores = dataSnapshot.child("cantGanadores").getValue(Integer.class);
            int cantGanadores = (ganadores != null) ? ganadores : 0;
            listener.onComplete(cantParticipantes, cantGanadores);
        }
        @Override
        public void onCancelled(@NonNull DatabaseError databaseError) { }
    });
}
// Delegates straight to the wrapped Trivia; requires setTrivia() first.
@Override
public String getTipoDeJuego() {
    return trivia.getTipoDeJuego();
}
@Override
public String getResumen() {
    return trivia.getTextoResumen();
}
}<file_sep>package com.eriochrome.bartime.utils;
import android.app.Activity;
import android.app.TimePickerDialog;
import android.content.Context;
import android.text.InputType;
import android.view.View;
import android.view.inputmethod.InputMethodManager;
import android.widget.EditText;
/**
 * Binds a TimePickerDialog to an EditText: tapping or focusing the field
 * hides the soft keyboard and opens an hour picker; the chosen hour is
 * written back into the field.
 */
public class TimePicker implements TimePickerDialog.OnTimeSetListener, View.OnFocusChangeListener, View.OnClickListener {
    private Context context;
    private EditText horarioEditText;

    public TimePicker(EditText horarioEditText, Context context) {
        this.context = context;
        this.horarioEditText = horarioEditText;
        this.horarioEditText.setOnFocusChangeListener(this);
        this.horarioEditText.setOnClickListener(this);
        // TYPE_NULL suppresses the regular text keyboard for this field.
        this.horarioEditText.setInputType(InputType.TYPE_NULL);
    }

    @Override
    public void onTimeSet(android.widget.TimePicker view, int hourOfDay, int minute) {
        // NOTE(review): only the hour is stored; `minute` is deliberately
        // ignored (the dialog is initialised at minute 0) — confirm whole-hour
        // granularity is intended.
        this.horarioEditText.setText(String.valueOf(hourOfDay));
        this.horarioEditText.setTextAlignment(View.TEXT_ALIGNMENT_CENTER);
    }

    @Override
    public void onFocusChange(View v, boolean hasFocus) {
        ocultarTeclado(v);
        if (hasFocus) {
            mostrarDialog();
        }
    }

    @Override
    public void onClick(View v) {
        ocultarTeclado(v);
        mostrarDialog();
    }

    /** Hides the soft keyboard for {@code v} (null-guards getSystemService). */
    private void ocultarTeclado(View v) {
        InputMethodManager imm = (InputMethodManager) context.getSystemService(Activity.INPUT_METHOD_SERVICE);
        if (imm != null) {
            imm.hideSoftInputFromWindow(v.getWindowToken(), 0);
        }
    }

    /** Opens a 24h time picker starting at 00:00. */
    private void mostrarDialog() {
        new TimePickerDialog(context, this, 0, 0, true).show();
    }
}
<file_sep># BarTap
Android app: a game-oriented social network for bars and nightclubs.
<file_sep>package com.eriochrome.bartime.utils;
import com.eriochrome.bartime.modelos.entidades.Bar;
import com.google.android.gms.maps.model.LatLng;
import com.google.maps.android.SphericalUtil;
import java.util.Comparator;
/**
 * Orders bars by great-circle distance from the user's location
 * (nearest first).
 */
public class ComparadorBaresDistancia implements Comparator<Bar> {
    private LatLng latLngUsuario;
    public ComparadorBaresDistancia(LatLng latLngUsuario) {
        this.latLngUsuario = latLngUsuario;
        if (latLngUsuario == null) {
            this.latLngUsuario = new LatLng(-34.603722, -58.381592); // Buenos Aires by default
        }
    }
/**
 * Compares two bars by distance to the user.
 *
 * @return negative if {@code o1} is closer, positive if farther, 0 if equal
 */
@Override
public int compare(Bar o1, Bar o2) {
    double dist1 = SphericalUtil.computeDistanceBetween(latLngUsuario, new LatLng(o1.getLat(), o1.getLng()));
    double dist2 = SphericalUtil.computeDistanceBetween(latLngUsuario, new LatLng(o2.getLat(), o2.getLng()));
    // Double.compare replaces the manual < / > chain and, unlike it, keeps
    // the comparator's total-order contract even if a distance were NaN.
    return Double.compare(dist1, dist2);
}
}<file_sep>package com.eriochrome.bartime.vistas;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Typeface;
import android.net.Uri;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.view.View;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.TextView;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.view.GravityCompat;
import androidx.drawerlayout.widget.DrawerLayout;
import com.daimajia.slider.library.SliderLayout;
import com.daimajia.slider.library.SliderTypes.BaseSliderView;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.contracts.BarControlContract;
import com.eriochrome.bartime.presenters.BarControlPresenter;
import com.eriochrome.bartime.utils.MySliderView;
import com.firebase.ui.auth.AuthUI;
import com.google.android.material.navigation.NavigationView;
import static com.eriochrome.bartime.utils.Utils.toastShort;
/**
 * Control panel for a bar owner: shows the claimed bar (photo slider, name,
 * description) with entry points to edit it, manage games, the shop, photos
 * and notifications. A side drawer offers contact/sales/comments/sign-out.
 */
public class BarControlActivity extends AppCompatActivity implements BarControlContract.View {
    // Request code for the pick-image-from-gallery intent.
    private static final int NUMERO_SOLICITUD_GALERIA = 1;
    private BarControlPresenter presenter;
    private DrawerLayout drawerLayout;
    private ImageButton drawerButton;
    private NavigationView navigationView;
    private ProgressBar loading;
    // Shown when the owner has not claimed/created a bar yet.
    private RelativeLayout sinBarRl;
    private Button sinBarButton;
    // Main panel, shown once a bar is associated.
    private RelativeLayout barControlRl;
    private SliderLayout sliderShow;
    private TextView nombreBar;
    private TextView descripcion;
    private Button editarBar;
    private Button juegos;
    private Button miTienda;
    private Button agregarFotos;
    private ImageButton avisos;
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_bar_control);
    // May launch the one-time intro screen on first run.
    checkPrimeraVez();
    presenter = new BarControlPresenter();
    presenter.bind(this);
    // Bind all views; actual content is filled in updateUI() after setupBar().
    drawerLayout = findViewById(R.id.drawer_layout);
    drawerButton = findViewById(R.id.drawer_button);
    navigationView = findViewById(R.id.nav_drawer);
    loading = findViewById(R.id.progressBar);
    sinBarRl = findViewById(R.id.sin_bar_rl);
    sinBarButton = findViewById(R.id.sin_bar_btn);
    barControlRl = findViewById(R.id.bar_control_rl);
    sliderShow = findViewById(R.id.slider);
    nombreBar = findViewById(R.id.nombre_bar);
    descripcion = findViewById(R.id.descripcion);
    editarBar = findViewById(R.id.editar_bar);
    juegos = findViewById(R.id.juegos);
    miTienda = findViewById(R.id.mi_tienda);
    agregarFotos = findViewById(R.id.agregar_fotos);
    avisos = findViewById(R.id.avisos);
    setupListeners();
}
/**
 * On a background thread, checks the "firstStartBar" preference and — only
 * on the very first launch — shows the intro screen and clears the flag.
 */
private void checkPrimeraVez() {
    new Thread(() -> {
        SharedPreferences prefs = PreferenceManager
                .getDefaultSharedPreferences(getBaseContext());
        if (!prefs.getBoolean("firstStartBar", true)) {
            return; // not the first launch — nothing to do
        }
        // First launch: show the intro (on the UI thread) and persist the flag.
        final Intent intro = new Intent(BarControlActivity.this, IntroduccionBarActivity.class);
        runOnUiThread(() -> startActivity(intro));
        prefs.edit().putBoolean("firstStartBar", false).apply();
    }).start();
}
@Override
protected void onStart() {
    super.onStart();
    // Asks the presenter to (re)load the owner's bar; results arrive via
    // cargando()/finCargando().
    presenter.setupBar();
}
/** Switches between the "no bar yet" prompt and the bar control panel. */
private void updateUI() {
    setupDrawer();
    if (!presenter.hayBarAsociado()) {
        // Owner has no bar yet: only show the claim/create prompt.
        sinBarRl.setVisibility(View.VISIBLE);
        barControlRl.setVisibility(View.GONE);
        return;
    }
    // A bar is associated: populate the panel and reload the photo slider.
    sinBarRl.setVisibility(View.GONE);
    barControlRl.setVisibility(View.VISIBLE);
    nombreBar.setText(presenter.getNombreBar());
    setupDescripcion();
    sliderShow.removeAllSliders();
    presenter.cargarImagenes();
}
/**
 * Applies the light typeface and shows the bar's description, or a
 * placeholder when it is missing. Null-safe: the original compared with
 * equals("") and would NPE on a null description.
 */
private void setupDescripcion() {
    Typeface tf = Typeface.createFromAsset(getAssets(), "fonts/Lato-Light.ttf");
    descripcion.setTypeface(tf);
    String desc = presenter.getDescripcion();
    if (desc != null && !desc.isEmpty()) {
        descripcion.setText(desc);
    } else {
        descripcion.setText(getString(R.string.aun_sin_descripcion));
    }
}
/** Wires every button / drawer interaction of the control panel. */
private void setupListeners() {
    // No bar yet: jump straight into the bar-creation flow.
    sinBarButton.setOnClickListener(v -> {
        startActivity(new Intent(BarControlActivity.this, DatosBarPrincipalActivity.class));
        finish();
    });
    // The three bar-scoped screens all receive the bar via the presenter.
    editarBar.setOnClickListener(v -> {
        Intent i = new Intent(BarControlActivity.this, DatosBarPrincipalActivity.class);
        i = presenter.enviarBar(i);
        startActivity(i);
    });
    juegos.setOnClickListener(v -> {
        Intent i = new Intent(BarControlActivity.this, JuegosGeneralActivity.class);
        i = presenter.enviarBar(i);
        startActivity(i);
    });
    miTienda.setOnClickListener(v -> {
        Intent i = new Intent(BarControlActivity.this, TiendaBarActivity.class);
        i = presenter.enviarBar(i);
        startActivity(i);
    });
    // Pause the slider while the gallery picker is open; it is restarted
    // in onActivityResult().
    agregarFotos.setOnClickListener(v -> {
        sliderShow.stopAutoCycle();
        seleccionarImagenDeGaleria();
    });
    avisos.setOnClickListener(v ->
            startActivity(new Intent(BarControlActivity.this, AvisosBarActivity.class)));
    navigationView.setNavigationItemSelectedListener(menuItem -> {
        drawerLayout.closeDrawers();
        ejecutarOpcionMenu(menuItem.getItemId());
        return false; // return false so the item does not stay selected
    });
    drawerButton.setOnClickListener(v -> drawerLayout.openDrawer(GravityCompat.START));
}
/**
 * Dispatches a drawer menu selection. Sales and comments require an
 * associated bar and share the same launch pattern, extracted below.
 */
private void ejecutarOpcionMenu(int itemId) {
    switch (itemId) {
        case R.id.contacto:
            startActivity(new Intent(this, ContactoActivity.class));
            break;
        case R.id.mis_ventas:
            abrirSiHayBar(ComprasBarActivity.class);
            break;
        case R.id.comentarios:
            abrirSiHayBar(ComentariosActivity.class);
            break;
        case R.id.cerrar_sesion:
            // Sign out, then return to the user-type chooser.
            AuthUI.getInstance()
                    .signOut(this)
                    .addOnCompleteListener(task -> {
                        startActivity(new Intent(BarControlActivity.this, DistincionDeUsuarioActivity.class));
                        finish();
                    });
            break;
        case R.id.salir:
            finishAndRemoveTask();
            break;
    }
}

/** Starts {@code activityClass} with the current bar attached — only when a bar is associated. */
private void abrirSiHayBar(Class<?> activityClass) {
    if (presenter.hayBarAsociado()) {
        Intent i = new Intent(this, activityClass);
        i = presenter.enviarBar(i);
        startActivity(i);
    }
}
/** Shows the signed-in owner's name in the drawer header. */
private void setupDrawer() {
    View header = navigationView.getHeaderView(0);
    TextView usuarioActivo = header.findViewById(R.id.usuario_activo);
    usuarioActivo.setText(presenter.getNombreUsuario());
}
// View callback: bar data is loading — hide both panels, show the spinner.
@Override
public void cargando() {
    sinBarRl.setVisibility(View.GONE);
    barControlRl.setVisibility(View.GONE);
    loading.setVisibility(View.VISIBLE);
}
// View callback: load finished — refresh the whole screen.
@Override
public void finCargando() {
    loading.setVisibility(View.GONE);
    updateUI();
}
// Toggle the bell icon between "active" and "none" states.
@Override
public void hayAvisos() {
    avisos.setImageResource(R.drawable.ic_notifications_active_violet_24dp);
}
@Override
public void noHayAvisos() {
    avisos.setImageResource(R.drawable.ic_notifications_none_violet_24dp);
}
// Called once per bar photo (asynchronously): appends a slide to the slider.
@Override
public void onImageLoaded(String path) {
    MySliderView sliderView = new MySliderView(this);
    sliderView.image(path)
            .setScaleType(BaseSliderView.ScaleType.CenterInside);
    sliderShow.addSlider(sliderView);
}
// Opens the system gallery; the result arrives in onActivityResult().
private void seleccionarImagenDeGaleria() {
    Intent elegirFotoIntent = new Intent(Intent.ACTION_PICK);
    elegirFotoIntent.setType("image/*");
    startActivityForResult(elegirFotoIntent, NUMERO_SOLICITUD_GALERIA);
}
/**
 * Handles the gallery result: uploads the picked image, or shows the
 * appropriate toast when the pick was cancelled or returned no data.
 * The photo slider is restarted regardless of the outcome.
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == NUMERO_SOLICITUD_GALERIA) {
        if (resultCode != RESULT_OK) {
            toastShort(BarControlActivity.this, getString(R.string.no_elegiste_imagen));
        } else if (data == null) {
            toastShort(BarControlActivity.this, getString(R.string.ocurrio_error_inesperado));
        } else {
            presenter.subirFoto(data.getData());
        }
    }
    sliderShow.startAutoCycle();
}
@Override
protected void onStop() {
    // Stop the slider animation while the activity is not visible.
    sliderShow.stopAutoCycle();
    super.onStop();
}
@Override
protected void onDestroy() {
    // Detach the view and stop the notification watcher to avoid leaks.
    presenter.unbind();
    presenter.dejarDeCheckearAvisos();
    super.onDestroy();
}
}
<file_sep>package com.eriochrome.bartime.presenters;
import android.content.Intent;
import com.eriochrome.bartime.contracts.CrearTriviaContract;
import com.eriochrome.bartime.modelos.CrearTriviaInteraccion;
import com.eriochrome.bartime.modelos.entidades.Bar;
/**
 * Presenter for the "create trivia" screen: shuttles the bar and the
 * in-progress Trivia between activities via Intent extras.
 */
public class CrearTriviaPresenter {
    private CrearTriviaContract.Interaccion interaccion;
    private CrearTriviaContract.View view;
    public CrearTriviaPresenter() {
        interaccion = new CrearTriviaInteraccion();
    }
    public void bind(CrearTriviaContract.View view) {
        this.view = view;
    }
    public void unbind() {
        view = null;
    }
    // Restores the bar passed by the previous activity ("bar" extra).
    public void obtenerBar(Intent intent) {
        Bar bar = (Bar) intent.getSerializableExtra("bar");
        interaccion.setBar(bar);
    }
    public Intent enviarBar(Intent i) {
        return i.putExtra("bar", interaccion.getBar());
    }
    public Intent enviarTrivia(Intent i) {
        return i.putExtra("trivia", interaccion.getTrivia());
    }
    // Pulls title and question count from the view into the model.
    public void comenzarCreacionTrivia() {
        String titulo = view.getTitulo();
        int cantPreguntas = view.getCantPreguntas();
        interaccion.comenzarCreacionTrivia(titulo, cantPreguntas);
    }
}<file_sep>package com.eriochrome.bartime.modelos;
import androidx.annotation.NonNull;
import com.eriochrome.bartime.contracts.DistincionContract;
import com.eriochrome.bartime.modelos.entidades.UsuarioBar;
import com.eriochrome.bartime.modelos.entidades.UsuarioBarBasico;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.FirebaseUser;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.ValueEventListener;
/**
 * Interaction layer for the user-type distinction flow: registers the
 * signed-in account as a bar owner under /usuariosBar and checks whether
 * it is already registered there.
 */
public class DistincionInteraccion implements DistincionContract.Interaccion {
    private final DistincionContract.CompleteListener listener;
    private FirebaseAuth auth;
    private DatabaseReference refUsuariosBar;
    public DistincionInteraccion(DistincionContract.CompleteListener listener) {
        this.listener = listener;
        refUsuariosBar = FirebaseDatabase.getInstance().getReference().child("usuariosBar");
        auth = FirebaseAuth.getInstance();
    }
/**
 * Persists the signed-in bar owner under /usuariosBar/&lt;uid&gt;.
 * Guards against a null current user (e.g. expired session), which the
 * original dereferenced unconditionally.
 */
@Override
public void subirUsuarioBarADatabase() {
    FirebaseUser barAuth = auth.getCurrentUser();
    if (barAuth == null) {
        return; // nobody signed in — nothing to upload
    }
    UsuarioBar barUsuario = new UsuarioBarBasico(barAuth.getDisplayName());
    refUsuariosBar.child(barAuth.getUid()).setValue(barUsuario);
}
/**
 * Asynchronously checks whether the current user already has a bar-owner
 * record and reports the result to the listener. Cancellation is ignored.
 */
@Override
public void checkearExiste() {
    refUsuariosBar.addListenerForSingleValueEvent(new ValueEventListener() {
        @Override
        public void onDataChange(@NonNull DataSnapshot dataSnapshot) {
            // NOTE(review): assumes getCurrentUser() is non-null here —
            // verify callers only invoke this while signed in.
            boolean existe = dataSnapshot.hasChild(auth.getCurrentUser().getUid());
            listener.checkearExiste(existe);
        }
        @Override
        public void onCancelled(@NonNull DatabaseError databaseError) {
        }
    });
}
}
<file_sep>package com.eriochrome.bartime.vistas;
import android.annotation.SuppressLint;
import android.content.Intent;
import android.graphics.Typeface;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.contracts.MapaDeBaresContract;
import com.eriochrome.bartime.modelos.entidades.Bar;
import com.eriochrome.bartime.presenters.MapaDeBaresPresenter;
import com.eriochrome.bartime.utils.GlideApp;
import com.google.android.gms.common.api.Status;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.OnMapReadyCallback;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.BitmapDescriptorFactory;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.Marker;
import com.google.android.gms.maps.model.MarkerOptions;
import com.google.android.libraries.places.api.Places;
import com.google.android.libraries.places.api.model.Place;
import com.google.android.libraries.places.widget.AutocompleteSupportFragment;
import com.google.android.libraries.places.widget.listener.PlaceSelectionListener;
import com.google.firebase.storage.FirebaseStorage;
import com.google.firebase.storage.StorageReference;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Objects;
/**
 * Google Map of all bars: each bar gets a violet marker; tapping a marker
 * shows a summary card that opens the bar's page. Includes a Places
 * autocomplete search box for moving the camera.
 */
public class MapaDeBaresActivity extends AppCompatActivity implements
        MapaDeBaresContract.View,
        OnMapReadyCallback,
        GoogleMap.OnMarkerClickListener {
    // Stars are only displayed at/above this rating (see setEstrellas()).
    private static final int MIN_ESTRELLAS = 2;
    private MapaDeBaresPresenter presenter;
    private GoogleMap mMap;
    private SupportMapFragment mapFragment;
    // Container for the bar summary card shown on marker tap.
    private RelativeLayout barRL;
    private final int DEFAULT_ZOOM = 12;
    // Marker hue matching the app's accent violet.
    private final float ACCENT_VIOLET_HUE = 285.15f;
    private final LatLng ubicacionDefault = new LatLng(-34.5916106,-58.4496007); // Buenos Aires
    private StorageReference storageReference = FirebaseStorage.getInstance().getReference();
    // Maps each placed marker back to its bar for click resolution.
    private HashMap<Marker, Bar> marcadores;
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_mapa_de_bares);
    marcadores = new HashMap<>();
    mapFragment = (SupportMapFragment) getSupportFragmentManager().findFragmentById(R.id.map);
    if (mapFragment != null) {
        // The map arrives asynchronously in onMapReady().
        mapFragment.getMapAsync(this);
    }
    barRL = findViewById(R.id.bar_rl);
    barRL.setVisibility(View.GONE);
    presenter = new MapaDeBaresPresenter();
    presenter.bind(this);
    // Places autocomplete: selecting a place just recenters the camera.
    Places.initialize(getApplicationContext(), getString(R.string.google_maps_key));
    Places.createClient(this);
    AutocompleteSupportFragment autocompleteFragment = (AutocompleteSupportFragment)
            getSupportFragmentManager().findFragmentById(R.id.autocomplete_fragment);
    if (autocompleteFragment != null) {
        autocompleteFragment.setPlaceFields(Arrays.asList(Place.Field.NAME, Place.Field.LAT_LNG));
        autocompleteFragment.setOnPlaceSelectedListener(new PlaceSelectionListener() {
            @Override
            public void onPlaceSelected(@NonNull Place place) {
                mMap.moveCamera(CameraUpdateFactory.newLatLngZoom(place.getLatLng(), DEFAULT_ZOOM));
            }
            @Override
            public void onError(@NonNull Status status) {
                Log.d("asds", "An error occurred: " + status);
            }
        });
    }
}
@Override
protected void onDestroy() {
    // Release the presenter's view reference to avoid leaking this activity.
    presenter.unbind();
    super.onDestroy();
}
// Map is ready: set zoom limits, center on the default city, load markers.
@Override
public void onMapReady(GoogleMap googleMap) {
    mMap = googleMap;
    mMap.setMinZoomPreference(8.0f);
    mMap.moveCamera(CameraUpdateFactory.newLatLngZoom(ubicacionDefault, DEFAULT_ZOOM));
    presenter.getPosicionesDeBares();
}
// Markers arrive asynchronously, one call per bar.
@Override
public void marcarBar(Bar bar) {
    // Drop a violet pin at the bar's coordinates and remember which bar
    // the marker belongs to so taps can be resolved in onMarkerClick().
    LatLng posicion = new LatLng(bar.getLat(), bar.getLng());
    MarkerOptions opciones = new MarkerOptions()
            .icon(BitmapDescriptorFactory.defaultMarker(ACCENT_VIOLET_HUE))
            .position(posicion)
            .title(bar.getNombre());
    marcadores.put(mMap.addMarker(opciones), bar);
}
// All markers loaded: only now start handling marker taps.
@Override
public void listo() {
    mMap.setOnMarkerClickListener(this);
}
// Inflates a fresh bar card for the tapped marker and shows it.
// Returns false so the default behavior (camera move + info window) also runs.
@Override
public boolean onMarkerClick(Marker marker) {
    View barView = View.inflate(MapaDeBaresActivity.this, R.layout.item_bar, null);
    barRL.removeAllViews();
    barView.setTag(marker.hashCode());
    ponerValoresAlView(Objects.requireNonNull(marcadores.get(marker)), barView);
    barRL.addView(barView);
    barRL.setVisibility(View.VISIBLE);
    return false;
}
/**
 * Fills the summary card with a bar's name, location, photo and stars,
 * and makes the whole card open the bar's page on tap.
 */
private void ponerValoresAlView(Bar bar, View barView) {
    TextView nombre = barView.findViewById(R.id.nombre_bar);
    nombre.setText(bar.getNombre());
    TextView ubicacion = barView.findViewById(R.id.ubicacion_bar);
    ubicacion.setText(bar.getUbicacion());
    Typeface tfLight = Typeface.createFromAsset(getAssets(), "fonts/Lato-Light.ttf");
    ubicacion.setTypeface(tfLight);
    ImageView imagen = barView.findViewById(R.id.imagen_bar);
    // Storage key convention: spaces in the bar name become underscores.
    String nombreBar = bar.getNombre().replaceAll(" ", "_");
    String imagePath = nombreBar + ".jpg";
    StorageReference imagenRef = storageReference.child("imagenes").child(imagePath);
    GlideApp.with(barView)
            .load(imagenRef).placeholder(R.drawable.placeholder)
            .into(imagen);
    TextView estrellas = barView.findViewById(R.id.estrellas);
    setEstrellas(bar, estrellas);
    barView.setOnClickListener(v -> {
        Intent intent = new Intent(MapaDeBaresActivity.this, PaginaBarActivity.class);
        intent.putExtra("bar", bar);
        startActivity(intent);
    });
}
@SuppressLint("DefaultLocale")
private void setEstrellas(Bar bar, TextView estrellas) {
    // So the bar doesn't look bad, stars are only shown when the rating
    // is at or above the minimum; otherwise a neutral dash is displayed.
    double estrellasDelBar = bar.getEstrellas();
    if (estrellasDelBar >= MIN_ESTRELLAS) {
        estrellas.setText(String.format("%.1f", estrellasDelBar));
    } else {
        estrellas.setText(" -- ");
    }
}
}
<file_sep>package com.eriochrome.bartime.vistas;
import android.content.Intent;
import android.os.Bundle;
import androidx.appcompat.app.AppCompatActivity;
import android.view.View;
import android.widget.ArrayAdapter;
import android.widget.ImageButton;
import android.widget.ListView;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.contracts.PaginaJuegoParticipableContract;
import com.eriochrome.bartime.presenters.PaginaJuegoParticipablePresenter;
import com.eriochrome.bartime.vistas.dialogs.DialogValidarGanador;
import java.util.ArrayList;
import static com.eriochrome.bartime.utils.Utils.toastShort;
/**
 * Bar-owner view of a joinable game: shows the game summary and its
 * participants; tapping a participant opens a confirmation dialog to
 * declare them the winner.
 */
public class PaginaJuegoParticipableActivity extends AppCompatActivity implements PaginaJuegoParticipableContract.View, DialogValidarGanador.Listener {
    private PaginaJuegoParticipablePresenter presenter;
    private ProgressBar progressBar;
    private ImageButton volver;
    private TextView tipoDeJuego;
    private TextView resumenDelJuego;
    private ListView listView;
    private ArrayAdapter<String> adapter;
    // Participant tapped last; confirmed (or not) via DialogValidarGanador.
    private String posibleGanador;
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_pagina_juego_participable);
    presenter = new PaginaJuegoParticipablePresenter();
    presenter.bind(this);
    // Game and bar both arrive as Intent extras from the previous screen.
    presenter.obtenerJuego(getIntent());
    presenter.obtenerBar(getIntent());
    progressBar = findViewById(R.id.progressBar);
    progressBar.setVisibility(View.GONE);
    volver = findViewById(R.id.volver);
    volver.setOnClickListener(v -> finish());
    tipoDeJuego = findViewById(R.id.tipo_de_juego);
    resumenDelJuego = findViewById(R.id.resumen_juego);
    listView = findViewById(R.id.listView);
}
@Override
protected void onStart() {
    super.onStart();
    // Header is synchronous; the participant list loads asynchronously
    // via cargando()/finCargando().
    tipoDeJuego.setText(presenter.getTipoDeJuego());
    resumenDelJuego.setText(presenter.getResumenJuego());
    presenter.setupAdapter();
}
// View callback: participant list is loading.
@Override
public void cargando() {
    progressBar.setVisibility(View.VISIBLE);
}
/**
 * View callback: participants loaded. Warns when nobody has joined, then
 * populates the list and hides the spinner.
 *
 * @param participantes display names of the game's participants
 */
@Override
public void finCargando(ArrayList<String> participantes) {
    // isEmpty() instead of size() == 0 — clearer intent, same behavior.
    if (participantes.isEmpty()) {
        toastShort(this, getString(R.string.no_hay_resultados));
    }
    setupAdapter(participantes);
    progressBar.setVisibility(View.GONE);
}
/** Shows the participants and records which one was tapped. */
private void setupAdapter(ArrayList<String> participantes) {
    adapter = new ArrayAdapter<>(this, android.R.layout.simple_list_item_1, participantes);
    listView.setAdapter(adapter);
    listView.setOnItemClickListener((parent, view, position, id) -> {
        // The presenter triggers the confirmation dialog; the dialog's
        // callback (declararGanador) reads posibleGanador, which is set
        // here before the user can possibly confirm.
        presenter.onClickParticipante();
        posibleGanador = adapter.getItem(position);
    });
}
@Override
public void abrirDialogValidarGanador() {
    DialogValidarGanador dialogValidarGanador = new DialogValidarGanador();
    dialogValidarGanador.show(getFragmentManager(), "validarGanador");
}
// Dialog confirmed: persist the winner and return to the control panel.
@Override
public void declararGanador() {
    presenter.declararGanador(posibleGanador);
    startActivity(new Intent(this, BarControlActivity.class));
    finish();
}
@Override
protected void onDestroy() {
    presenter.unbind();
    super.onDestroy();
}
}
<file_sep>package com.eriochrome.bartime.contracts;
import com.eriochrome.bartime.modelos.entidades.Juego;
/**
 * MVP contract for the main listings screen (regular users): notification
 * state, referral handling and deep-linked raffle ("sorteo") opening.
 */
public interface ListadosContract {
    interface View {
        void hayAvisos();
        void noHayAvisos();
        void abrirSorteo(Juego juego);
    }
    interface Interaccion {
        boolean estaConectado();
        void subirUsuarioADatabase();
        String getNombreUsuario();
        void checkearAvisos();
        void dejarDeCheckearAvisos();
        // Referral flow: credit referrerUid for bringing a player into gameID.
        void anotarReferrer(String referrerUid, String gameID);
        void obtenerSorteoConId(String gameID);
        void anotarConNombre(String nombre, String gameID);
    }
    // Async results from Interaccion are reported back through this.
    interface CompleteListener {
        void hayAvisos();
        void noHayAvisos();
        void abrirSorteo(Juego juego);
        void anotarConNombre(String nombre, String gameID);
    }
}
<file_sep>package com.eriochrome.bartime.modelos.entidades;
/** Base type for bar-owner accounts; concrete subtypes add their own data. */
public abstract class UsuarioBar {
    protected String nombre;
    public String getNombre() {
        return nombre;
    }
}
<file_sep>package com.eriochrome.bartime.adapters;
import android.content.Context;
import android.graphics.Typeface;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.modelos.entidades.Bar;
import com.eriochrome.bartime.utils.GlideApp;
import com.google.firebase.storage.FirebaseStorage;
import com.google.firebase.storage.StorageReference;
import androidx.recyclerview.widget.RecyclerView;
/**
 * RecyclerView holder for a claimable bar row: name, location and photo.
 * The hosting Context must implement {@link Listener} to receive row taps.
 */
public class BarReclamarHolder extends RecyclerView.ViewHolder implements View.OnClickListener {
    private Context context;
    private Bar bar;
    private View view;
    private TextView nombreBar;
    private TextView ubicacionBar;
    private ImageView imagenBar;
    private StorageReference storageReference = FirebaseStorage.getInstance().getReference();
    public interface Listener {
        void onClickBar(Bar bar);
    }
    private Listener listener;
    public BarReclamarHolder(View view, Context context) {
        super(view);
        this.context = context;
        // The hosting Activity is required to implement Listener;
        // ClassCastException here means it does not.
        this.listener = (Listener) context;
        this.view = view;
        this.nombreBar = view.findViewById(R.id.nombre_bar);
        this.ubicacionBar = view.findViewById(R.id.ubicacion_bar);
        this.imagenBar = view.findViewById(R.id.imagen_bar);
        view.setOnClickListener(this);
    }
/**
 * Binds a bar to this row: name, location (light typeface) and photo
 * loaded from Firebase Storage under imagenes/&lt;name_with_underscores&gt;.jpg.
 */
public void bind(Bar bar) {
    Typeface tfLight = Typeface.createFromAsset(context.getAssets(), "fonts/Lato-Light.ttf");
    this.bar = bar;
    this.nombreBar.setText(bar.getNombre());
    this.ubicacionBar.setText(bar.getUbicacion());
    this.ubicacionBar.setTypeface(tfLight);
    // Renamed local so it no longer shadows the nombreBar TextView field.
    String nombreArchivo = bar.getNombre().replaceAll(" ", "_");
    String imagePath = nombreArchivo + ".jpg";
    StorageReference imagenRef = storageReference.child("imagenes").child(imagePath);
    GlideApp.with(this.view)
            .load(imagenRef).placeholder(R.drawable.placeholder)
            .into(this.imagenBar);
}
// Row tap: forward the bound bar to the hosting Activity.
@Override
public void onClick(View v) {
    listener.onClickBar(bar);
}
}
<file_sep>package com.eriochrome.bartime.contracts;
import com.eriochrome.bartime.modelos.entidades.ComprobanteDeCompra;
import java.util.ArrayList;
/** MVP contract for the user's purchase-history screen. */
public interface MisComprasContract {
    interface Interaccion {
        void cargarCompras();
    }
    interface View {
        void cargando();
        void finCargando(ArrayList<ComprobanteDeCompra> compras);
    }
    // Async completion callback from the interaction layer.
    interface Listener {
        void listo(ArrayList<ComprobanteDeCompra> compras);
    }
}
}<file_sep>package com.eriochrome.bartime.vistas.dialogs;
import android.app.Dialog;
import android.app.DialogFragment;
import android.content.Context;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.widget.Button;
import androidx.appcompat.app.AlertDialog;
import com.eriochrome.bartime.R;
import static com.eriochrome.bartime.utils.Utils.toastShort;
/**
 * Dialog offering the three game types (Desafio / Trivia / Sorteo).
 * The hosting Context must implement {@link OnButtonClick}.
 */
public class DialogCrearJuego extends DialogFragment {
    public interface OnButtonClick {
        void crearJuegoConTipo(String tipoDeJuego);
    }
    private OnButtonClick listener;
    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
        try {
            listener = (OnButtonClick) context;
        } catch (ClassCastException e) {
            // Host did not implement the callback interface ("the interface
            // was not implemented") — warn instead of crashing.
            toastShort(context, "No se implemento la interfaz");
        }
    }
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
    AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
    builder.setTitle(getString(R.string.crear_juego));
    LayoutInflater inflater = getActivity().getLayoutInflater();
    builder.setView(inflater.inflate(R.layout.dialog_nuevo_juego, null));
    builder.setNegativeButton(R.string.cancelar, ((dialog, which) -> dismiss()));
    // Button listeners are attached in onStart(), once the views exist.
    return builder.create();
}
@Override
public void onStart() {
    super.onStart();
    // Wire the three game-type buttons here — the dialog's view hierarchy
    // only exists after onCreateDialog() has built it.
    configurarBoton(R.id.crear_desafio, "Desafio");
    configurarBoton(R.id.crear_trivia, "Trivia");
    configurarBoton(R.id.crear_sorteo, "Sorteo");
}

/** Hooks the button with {@code botonId} to create a game of {@code tipoDeJuego}. */
private void configurarBoton(int botonId, String tipoDeJuego) {
    Button boton = ((AlertDialog) getDialog()).findViewById(botonId);
    if (boton != null) {
        // findViewById may return null if the layout changes; guard to
        // avoid the NPE the original would have thrown.
        boton.setOnClickListener(v -> listener.crearJuegoConTipo(tipoDeJuego));
    }
}
}
<file_sep>package com.eriochrome.bartime.contracts;
/**
 * MVP contract for the launcher: routes a starting session to the
 * new-user chooser, the regular-user app, or the bar-owner app.
 */
public interface LauncherContract {
    interface View {
        void startNuevo();
        void startUsuario();
        void startBar();
    }
    interface Interaccion {
        boolean estaConectado();
        // Asynchronously determines account type; answers via CompleteListener.
        void esBar();
    }
    interface CompleteListener {
        void esBar(boolean esBar);
    }
}
<file_sep>package com.eriochrome.bartime.presenters;
import android.content.Intent;
import com.eriochrome.bartime.contracts.DatosBarHorariosContract;
import com.eriochrome.bartime.modelos.DatosBarHorariosInteraccion;
import com.eriochrome.bartime.modelos.entidades.Bar;
import java.util.HashMap;
/**
 * Presenter for the bar opening-hours / happy-hour editing screen.
 * Hour maps are keyed by day name with 0 meaning "closed / no value"
 * (inferred from the zero checks below — confirm against the Bar model).
 */
public class DatosBarHorariosPresenter {
    private DatosBarHorariosContract.Interaccion interaccion;
    private DatosBarHorariosContract.View view;
    public DatosBarHorariosPresenter() {
        interaccion = new DatosBarHorariosInteraccion();
    }
    public void bind(DatosBarHorariosContract.View view) {
        this.view = view;
    }
    public void unbind() {
        view = null;
    }
// Restores the bar from the Intent and pre-fills the form with any
// existing schedule (and happy hour, if set).
public void obtenerBar(Intent intent) {
    Bar bar = (Bar) intent.getSerializableExtra("bar");
    interaccion.setBar(bar);
    if (estaAbierto(bar)) {
        setInputs(bar);
    }
}
private void setInputs(Bar bar) {
    view.setHorarios(bar.getHorariosInicial(), bar.getHorariosFinal());
    if (tieneHappyHour(bar)) {
        view.setHappyHour(bar.getHappyhourInicial(), bar.getHappyhourFinal());
    }
}
// Attaches the edited bar to an outgoing Intent ("bar" extra).
public Intent enviarBar(Intent i) {
    return i.putExtra("bar", interaccion.getBar());
}
// Forward the per-day opening hours / happy-hour ranges to the model.
public void setHorarios(HashMap<String, Integer> horariosInicial, HashMap<String, Integer> horariosFinal) {
    interaccion.setHorarios(horariosInicial, horariosFinal);
}
public void setHappyHour(HashMap<String, Integer> happyhourInicial, HashMap<String, Integer> happyhourFinal) {
    interaccion.setHappyHour(happyhourInicial, happyhourFinal);
}
/**
 * A bar counts as "open" when at least one day has a non-zero opening
 * hour AND at least one day has a non-zero closing hour.
 * anyMatch short-circuits, unlike the original full-scan loops.
 */
private boolean estaAbierto(Bar bar) {
    boolean abreAlgunDia = bar.getHorariosInicial().values().stream()
            .anyMatch(horario -> horario != 0);
    boolean cierraAlgunDia = bar.getHorariosFinal().values().stream()
            .anyMatch(horario -> horario != 0);
    return abreAlgunDia && cierraAlgunDia;
}
/**
 * Happy hour is configured when at least one day has a non-zero start
 * AND at least one day has a non-zero end.
 * anyMatch short-circuits, unlike the original full-scan loops.
 */
private boolean tieneHappyHour(Bar bar) {
    boolean empiezaAlgunDia = bar.getHappyhourInicial().values().stream()
            .anyMatch(hora -> hora != 0);
    boolean terminaAlgunDia = bar.getHappyhourFinal().values().stream()
            .anyMatch(hora -> hora != 0);
    return empiezaAlgunDia && terminaAlgunDia;
}
}<file_sep>package com.eriochrome.bartime.adapters;
import android.content.Context;
import androidx.annotation.NonNull;
import androidx.viewpager.widget.PagerAdapter;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.modelos.entidades.Bar;
import com.eriochrome.bartime.utils.GlideApp;
import com.eriochrome.bartime.utils.Utils;
import com.google.firebase.storage.FirebaseStorage;
import com.google.firebase.storage.StorageReference;
/**
 * ViewPager adapter that shows a bar's photos, loaded from Firebase
 * Storage one page per photo.
 */
public class ViewPagerAdapter extends PagerAdapter {
    private Context context;
    private StorageReference storageReference;
    private Bar bar;
    public ViewPagerAdapter(Context context, Bar bar) {
        this.bar = bar;
        this.context = context;
        storageReference = FirebaseStorage.getInstance().getReference();
    }
@Override
public int getCount() {
return bar.getCantidadDeFotos();
}
@Override
public boolean isViewFromObject(@NonNull View view, @NonNull Object o) {
return view == o;
}
@NonNull
@Override
public Object instantiateItem(@NonNull ViewGroup container, int position) {
LayoutInflater inflater = LayoutInflater.from(context);
ViewGroup layout = (ViewGroup) inflater.inflate(R.layout.item_imagen_viewpager, container, false);
ImageView imageView = layout.findViewById(R.id.imageView);
String nombreBar = bar.getNombre().replaceAll(" ", "_");
String path = nombreBar + Utils.getNumeroDeFoto(position) + ".jpg";
StorageReference imageRef = storageReference.child("imagenes").child(path);
GlideApp.with(layout)
.load(imageRef)
.into(imageView);
container.addView(layout);
return layout;
}
@Override
public void destroyItem(@NonNull ViewGroup container, int position, @NonNull Object object) {
container.removeView((View) object);
}
}
<file_sep>package com.eriochrome.bartime.vistas;
import android.content.Intent;
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.contracts.LauncherContract;
import com.eriochrome.bartime.presenters.LauncherPresenter;
/**
 * Entry activity. The presenter decides (in onStart) which screen the current
 * session should land on: new user, regular user, or bar account.
 */
public class LauncherActivity extends AppCompatActivity implements LauncherContract.View {

    private LauncherPresenter presenter;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_launcher);
        presenter = new LauncherPresenter();
        presenter.bind(this);
    }

    @Override
    protected void onStart() {
        super.onStart();
        presenter.redirigir();
    }

    @Override
    public void startNuevo() {
        navegarA(DistincionDeUsuarioActivity.class);
    }

    @Override
    public void startUsuario() {
        navegarA(ListadosActivity.class);
    }

    @Override
    public void startBar() {
        navegarA(BarControlActivity.class);
    }

    /** Launches the given activity with no transition animation and closes this one. */
    private void navegarA(Class<?> destino) {
        startActivity(new Intent(this, destino));
        overridePendingTransition(0, 0);
        finish();
    }

    @Override
    protected void onDestroy() {
        presenter.unbind();
        super.onDestroy();
    }
}
<file_sep>package com.eriochrome.bartime.contracts;
import com.eriochrome.bartime.modelos.entidades.Bar;
import com.eriochrome.bartime.modelos.entidades.Comentario;
import java.util.ArrayList;
/**
 * MVP contract for the bar detail page: UI callbacks, model-side operations
 * and async completion notifications.
 */
public interface PaginaBarContract {
    /** Callbacks the presenter invokes to drive the UI. */
    interface View {
        void agregadoAFavoritos();
        void quitadoDeFavoritos();
        void cargando();
        void finCargando();
        void comentarioListo();
        void yaCalificoElBar();
        void cargaDeComentarios();
        void finCargaDeComentarios();
        void setPuntos(Integer puntos);
        void onImageLoaded(String path);
    }
    /** Model-side operations the presenter can invoke. */
    interface Interaccion {
        void setBar(Bar bar);
        String getNombreDeBar();
        void actualizarEstrellas(int calificacion);
        void agregarAFavoritos();
        boolean hayUsuarioConectado();
        void quitarDeFavoritos();
        void checkearFavorito();
        void enviarComentario(Comentario comentario);
        void checkearUsuarioCalificoBar();
        Bar getBar();
        ArrayList<Comentario> getComentarios();
        void cargarComentarios();
        void cargarPuntosEnElBar();
        void cargarImagenes();
        String getDescripcion();
        String getUbicacionDeBar();
        String getTelefonoDeBar();
        boolean esBarConOwner();
        void visitar();
    }
    /** Async completion notifications from the model back to the presenter. */
    interface CompleteListener {
        void onStart();
        void onComplete(boolean esFav);
        void comentarioListo();
        void yaCalificoEsteBar();
        void cargaDeComentarios();
        void finCargaDeComentarios();
        void setPuntos(Integer puntos);
        void onImageLoaded(String toString);
    }
}
<file_sep>package com.eriochrome.bartime.presenters;
import android.content.Intent;
import com.eriochrome.bartime.contracts.TriviaContract;
import com.eriochrome.bartime.modelos.TriviaInteraccion;
import com.eriochrome.bartime.modelos.entidades.PreguntaTrivia;
import com.eriochrome.bartime.modelos.entidades.Trivia;
/**
 * Presenter for the trivia screen: holds the trivia passed via the launching
 * intent and feeds questions to the view one at a time.
 */
public class TriviaPresenter {

    private final TriviaContract.Interaccion interaccion;
    private TriviaContract.View view;

    public TriviaPresenter() {
        interaccion = new TriviaInteraccion();
    }

    /** Attaches the view; must be matched by unbind() to avoid leaks. */
    public void bind(TriviaContract.View view) {
        this.view = view;
    }

    public void unbind() {
        view = null;
    }

    /** Reads the "trivia" extra from the launching intent into the model. */
    public void obtenerTrivia(Intent intent) {
        Trivia trivia = (Trivia) intent.getSerializableExtra("trivia");
        interaccion.setTrivia(trivia);
    }

    public boolean eligioOpcionCorrecta(String opcion) {
        return interaccion.eligioOpcionCorrecta(opcion);
    }

    public void actualizarPuntos() {
        interaccion.actualizarPuntos();
    }

    public boolean quedanPreguntas() {
        return interaccion.quedanPreguntas();
    }

    /** Fetches the next question and pushes its text and three options to the view. */
    public void cargarSiguientePregunta() {
        PreguntaTrivia siguiente = interaccion.cargarSiguiente();
        view.llenar(
                siguiente.getPregunta(),
                siguiente.getOpcionA(),
                siguiente.getOpcionB(),
                siguiente.getOpcionC());
    }

    public void agregarGanador() {
        interaccion.agregarGanador();
    }
}<file_sep>package com.eriochrome.bartime.contracts;
import com.eriochrome.bartime.modelos.entidades.Juego;
import com.eriochrome.bartime.modelos.entidades.Trivia;
import java.util.ArrayList;
/**
 * MVP contract for the games list fragment.
 */
public interface JuegosFragmentContract {
    /** Model-side operations the presenter can invoke. */
    interface Interaccion {
        void mostrarJuegosConPalabra(String s);
        ArrayList<Juego> obtenerJuegos();
        void participarDeJuego(Juego juego);
        boolean estaConectado();
        void intentarParticiparDeJuego(Juego juego);
    }
    /** Callbacks the presenter invokes to drive the UI. */
    interface View {
        void cargando();
        void finCargando(ArrayList<Juego> juegos);
        void successParticipando(Juego juego);
        void yaSeParticipo();
        void ingresarATrivia(Trivia trivia);
    }
    /** Async completion notifications from the model back to the presenter. */
    interface Listener {
        void listo();
        void successParticipando(Juego juego);
        void yaSeParticipo();
        void participarDeJuego(Juego juego);
        void ingresarATrivia(Trivia trivia);
    }
}<file_sep>package com.eriochrome.bartime.modelos;
import com.eriochrome.bartime.contracts.CrearPreguntasTriviaContract;
import com.eriochrome.bartime.modelos.entidades.Bar;
import com.eriochrome.bartime.modelos.entidades.PreguntaTrivia;
import com.eriochrome.bartime.modelos.entidades.Trivia;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
/**
 * Builds a Trivia for a bar question by question and uploads the finished
 * trivia under "juegos/Trivia/<pushed-key>" in Firebase.
 */
public class CrearPreguntasTriviaInteraccion implements CrearPreguntasTriviaContract.Interaccion {
    private Bar bar;
    private Trivia trivia;
    // Countdown of questions still to be entered; initialized in setTrivia.
    private int preguntasRestantes;
    private DatabaseReference refJuegos;
    private CrearPreguntasTriviaContract.Listener listener;
    public CrearPreguntasTriviaInteraccion(CrearPreguntasTriviaContract.Listener listener) {
        this.listener = listener;
        refJuegos = FirebaseDatabase.getInstance().getReference().child("juegos");
    }
    @Override
    public void setBar(Bar bar) {
        this.bar = bar;
    }
    @Override
    public void setTrivia(Trivia trivia) {
        this.trivia = trivia;
        this.preguntasRestantes = trivia.getCantPreguntas();
    }
    // NOTE: not a pure query -- each call consumes one remaining question.
    // Callers rely on calling this exactly once per question entered.
    @Override
    public boolean quedanPreguntas() {
        preguntasRestantes--;
        return preguntasRestantes > 0;
    }
    /** Appends one question (text + three options + correct answer) to the trivia. */
    @Override
    public void guardarPregunta(String pregunta, String opcionA, String opcionB, String opcionC, String correcta) {
        PreguntaTrivia preguntaTrivia = new PreguntaTrivia(pregunta, opcionA, opcionB, opcionC, correcta);
        trivia.addPregunta(preguntaTrivia);
    }
    // Awards 50 points per question, tags the trivia with the bar name and a
    // freshly pushed key, writes it, and notifies the listener on success.
    @Override
    public void subirTrivia() {
        int puntos = trivia.getCantPreguntas() * 50;
        trivia.asignarPuntos(puntos);
        trivia.asignarNombreBar(bar.getNombre());
        trivia.asignarTipo();
        String id = refJuegos.push().getKey();
        trivia.setID(id);
        if (id != null) {
            refJuegos.child("Trivia").child(id).setValue(trivia)
                    .addOnSuccessListener(aVoid -> listener.enviado());
        }
    }
}<file_sep>package com.eriochrome.bartime.presenters;
import com.eriochrome.bartime.contracts.MisComprasContract;
import com.eriochrome.bartime.modelos.MisComprasInteraccion;
import com.eriochrome.bartime.modelos.entidades.ComprobanteDeCompra;
import java.util.ArrayList;
/**
 * Presenter for the "my purchases" screen: requests the purchase receipts
 * from the model and hands them to the view when loading finishes.
 */
public class MisComprasPresenter implements MisComprasContract.Listener {

    private final MisComprasContract.Interaccion interaccion;
    private MisComprasContract.View view;

    public MisComprasPresenter() {
        interaccion = new MisComprasInteraccion(this);
    }

    /** Attaches the view; must be matched by unbind() to avoid leaks. */
    public void bind(MisComprasContract.View view) {
        this.view = view;
    }

    public void unbind() {
        view = null;
    }

    /** Shows the loading state and kicks off the async fetch. */
    public void cargarCompras() {
        view.cargando();
        interaccion.cargarCompras();
    }

    /** Model callback: loading finished, pass the receipts on to the view. */
    @Override
    public void listo(ArrayList<ComprobanteDeCompra> compras) {
        view.finCargando(compras);
    }
}<file_sep>package com.eriochrome.bartime.presenters;
import android.content.Intent;
import android.net.Uri;
import com.eriochrome.bartime.contracts.DatosBarPrincipalContract;
import com.eriochrome.bartime.modelos.DatosBarPrincipalInteraccion;
import com.eriochrome.bartime.modelos.entidades.Bar;
/**
 * Presenter for the main bar-data form (name, description, location, photo).
 * If an existing Bar arrives via the intent, the form switches to edit mode
 * and is pre-filled with that bar's data.
 */
public class DatosBarPrincipalPresenter implements DatosBarPrincipalContract.Listener {
    private DatosBarPrincipalContract.Interaccion interaccion;
    private DatosBarPrincipalContract.View view;
    public DatosBarPrincipalPresenter() {
        interaccion = new DatosBarPrincipalInteraccion(this);
    }
    public void bind(DatosBarPrincipalContract.View view) {
        this.view = view;
    }
    public void unbind() {
        view = null;
    }
    public void setNombre(String nombre) {
        interaccion.setNombre(nombre);
    }
    public void setDescripcion(String descripcion) {
        interaccion.setDescripcion(descripcion);
    }
    /** Attaches the bar under construction as the "bar" extra of the given intent. */
    public Intent enviarBar(Intent i) {
        return i.putExtra("bar", interaccion.getBar());
    }
    public void setUbicacion(String ubicacion, double lat, double lng) {
        interaccion.setUbicacion(ubicacion, lat, lng);
    }
    // If the launching intent carries a "bar" extra, we are editing an
    // existing bar: load it into the model and pre-fill the form.
    public void obtenerBar(Intent intent) {
        Bar bar = (Bar) intent.getSerializableExtra("bar");
        if (bar != null) {
            interaccion.setBar(bar);
            setInputs(bar);
            view.setTitleEditar();
            view.esModoEditar();
        }
    }
    /** Pre-fills the form fields and triggers the async image load. */
    private void setInputs(Bar bar) {
        view.setNombreBar(bar.getNombre());
        view.setDescripcion(bar.getDescripcion());
        view.setUbicacion(bar.getUbicacion());
        interaccion.cargarImagen(bar);
    }
    public void subirFoto(Uri path) throws RuntimeException {
        interaccion.subirFoto(path);
    }
    /** Model callback: the bar's image URL is ready for display. */
    @Override
    public void onImageLoaded(String downloadUrl) {
        view.onImageLoaded(downloadUrl);
    }
}<file_sep>package com.eriochrome.bartime.adapters;
import android.content.Context;
import androidx.annotation.NonNull;
import androidx.recyclerview.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.modelos.entidades.Bar;
import java.util.ArrayList;
/**
 * RecyclerView adapter for the bar list. Renders one BarHolder per bar plus a
 * trailing "can't find your bar?" footer row; when the list is empty only the
 * footer is shown.
 */
public class ListaBaresAdapter extends RecyclerView.Adapter<RecyclerView.ViewHolder> {
    // View type for the trailing footer row.
    private static final int FOOTER_VIEW = 1;
    private Context context;
    private ArrayList<Bar> bares;
    public ListaBaresAdapter(Context context) {
        this.context = context;
        this.bares = new ArrayList<>();
    }
    /** Replaces the full contents of the list and refreshes the UI. */
    public void setItems(ArrayList<Bar> listaBares) {
        bares.clear();
        bares.addAll(listaBares);
        notifyDataSetChanged();
    }
    @NonNull
    @Override
    public RecyclerView.ViewHolder onCreateViewHolder(@NonNull ViewGroup viewGroup, int i) {
        View view;
        if (i == FOOTER_VIEW) {
            view = LayoutInflater.from(viewGroup.getContext())
                    .inflate(R.layout.item_no_encontras, viewGroup, false);
            return new FooterHolder(this.context, view);
        }
        else {
            view = LayoutInflater.from(viewGroup.getContext())
                    .inflate(R.layout.item_bar, viewGroup, false);
            return new BarHolder(this.context, view);
        }
    }
    @Override
    public void onBindViewHolder(@NonNull RecyclerView.ViewHolder viewHolder, int i) {
        // The footer row needs no binding; only bar rows carry data.
        try {
            if (viewHolder instanceof BarHolder) {
                BarHolder barHolder = (BarHolder) viewHolder;
                Bar bar = this.bares.get(i);
                barHolder.bindBar(bar);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    @Override
    public int getItemCount() {
        if (this.bares == null) {
            return 0;
        }
        if (this.bares.size() == 0) {
            return 1;
        }
        // One extra row to show the footer
        return this.bares.size() + 1;
    }
    @Override
    public int getItemViewType(int position) {
        // The position just past the last bar is the footer.
        if (position == bares.size()) {
            return FOOTER_VIEW;
        }
        return super.getItemViewType(position);
    }
    /** Empties the list (used while a new search/filter is loading). */
    public void clear() {
        bares.clear();
        notifyDataSetChanged();
    }
}
<file_sep>package com.eriochrome.bartime.presenters;
import android.content.Intent;
import com.eriochrome.bartime.contracts.PaginaJuegoParticipableContract;
import com.eriochrome.bartime.modelos.PaginaJuegoParticipableInteraccion;
import com.eriochrome.bartime.modelos.entidades.Bar;
import com.eriochrome.bartime.modelos.entidades.Juego;
import java.util.ArrayList;
/**
 * Presenter for the game page shown to the bar owner: lists participants and,
 * for validatable games, lets the owner declare a winner.
 */
public class PaginaJuegoParticipablePresenter implements PaginaJuegoParticipableContract.Listener {

    private final PaginaJuegoParticipableContract.Interaccion interaccion;
    private PaginaJuegoParticipableContract.View view;

    public PaginaJuegoParticipablePresenter() {
        interaccion = new PaginaJuegoParticipableInteraccion(this);
    }

    public void bind(PaginaJuegoParticipableContract.View view) {
        this.view = view;
    }

    public void unbind() {
        view = null;
    }

    /** Reads the "juego" extra from the launching intent into the model. */
    public void obtenerJuego(Intent intent) {
        interaccion.setJuego((Juego) intent.getSerializableExtra("juego"));
    }

    public String getTipoDeJuego() {
        return interaccion.getTipoDeJuego();
    }

    public String getResumenJuego() {
        return interaccion.getResumenJuego();
    }

    /** Shows the loading state and asks the model for the participant names. */
    public void setupAdapter() {
        view.cargando();
        interaccion.obtenerParticipantes();
    }

    /** Model callback with the loaded participant names. */
    @Override
    public void onComplete(ArrayList<String> participantes) {
        view.finCargando(participantes);
    }

    /** Only validatable games open the "declare winner" dialog. */
    public void onClickParticipante() {
        if (interaccion.esUnJuegoValidable()) {
            view.abrirDialogValidarGanador();
        }
    }

    public void declararGanador(String ganador) {
        interaccion.declararGanador(ganador);
    }

    /** Reads the "bar" extra from the launching intent into the model. */
    public void obtenerBar(Intent intent) {
        interaccion.setBar((Bar) intent.getSerializableExtra("bar"));
    }
}<file_sep>package com.eriochrome.bartime.adapters;
import android.content.Context;
import androidx.recyclerview.widget.RecyclerView;
import android.view.View;
import android.widget.TextView;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.modelos.entidades.ComprobanteDeCompra;
import static com.eriochrome.bartime.utils.Utils.toastShort;
/**
 * ViewHolder for one purchase-receipt row. When shown to a bar account
 * (esBar == true) the row is clickable and forwards the receipt to the
 * hosting context, which must implement OnComprobanteClickListener.
 */
public class ComprobanteCompraHolder extends RecyclerView.ViewHolder implements View.OnClickListener {
    private Context context;
    private ComprobanteDeCompra comprobanteDeCompra;
    private TextView descripcion;
    private TextView nombreBar;
    private TextView costo;
    private TextView nroComprobante;
    /** Implemented by the hosting context to receive clicks on a receipt row. */
    public interface OnComprobanteClickListener {
        void onClick(ComprobanteDeCompra comprobanteDeCompra);
    }
    private OnComprobanteClickListener clickListener;
    public ComprobanteCompraHolder(View view, Context context, boolean esBar) {
        super(view);
        this.context = context;
        descripcion = view.findViewById(R.id.desc);
        nombreBar = view.findViewById(R.id.nombre_bar);
        costo = view.findViewById(R.id.costo);
        nroComprobante = view.findViewById(R.id.nro_comprobante);
        // Only bar accounts get a clickable row; the context itself acts as listener.
        if (esBar) {
            try {
                clickListener = (OnComprobanteClickListener) context;
            } catch (ClassCastException e) {
                toastShort(context, "No se implemento la interfaz");
            }
            view.setOnClickListener(this);
        }
    }
    /** Binds the receipt's fields into the row's text views. */
    public void bind(ComprobanteDeCompra comprobanteDeCompra) {
        this.comprobanteDeCompra = comprobanteDeCompra;
        descripcion.setText(comprobanteDeCompra.getDescripcion());
        nombreBar.setText(comprobanteDeCompra.getNombreBar());
        String costoText = "Costo: " + comprobanteDeCompra.getCosto();
        costo.setText(costoText);
        String nroComprobanteText = "Nro. Comprobante: " + comprobanteDeCompra.getNroComprobante();
        nroComprobante.setText(nroComprobanteText);
    }
    // NOTE(review): clickListener is only assigned when esBar is true and the
    // cast succeeds; rows are only made clickable in that same branch, so a
    // click should never see a null listener -- confirm if that invariant changes.
    @Override
    public void onClick(View v) {
        clickListener.onClick(comprobanteDeCompra);
    }
}
<file_sep>package com.eriochrome.bartime.utils;
import com.eriochrome.bartime.modelos.entidades.Bar;
import com.eriochrome.bartime.modelos.entidades.ItemTienda;
import com.eriochrome.bartime.modelos.entidades.Juego;
import com.eriochrome.bartime.modelos.entidades.Sorteo;
import com.eriochrome.bartime.modelos.entidades.Trivia;
import com.google.firebase.auth.FirebaseUser;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
/**
 * Writes user-facing notices under "avisos/<recipient>" in the Firebase
 * Realtime Database. Each public method builds the notice text and publishes
 * it through a single shared helper.
 */
public class CreadorDeAvisos {

    private DatabaseReference refAvisos;

    public CreadorDeAvisos() {
        refAvisos = FirebaseDatabase.getInstance().getReference().child("avisos");
    }

    /** Pushes a notice text under the given recipient, if a key could be generated. */
    private void publicar(String destinatario, String texto) {
        String key = refAvisos.child(destinatario).push().getKey();
        if (key != null) {
            refAvisos.child(destinatario).child(key).setValue(texto);
        }
    }

    public void avisarGanadorDeJuego(Juego juego, String ganador) {
        publicar(ganador, juego.getTextoGanadorDeJuego());
    }

    public void avisarParticipacion(String nombreParticipante, Juego juego) {
        // Trivia participations are silent; every other game type notifies the bar.
        if (!juego.getTipoDeJuego().equals("Trivia")) {
            publicar(juego.getNombreBar(), juego.getTextoParticipacion(nombreParticipante));
        }
    }

    public void avisarCompraDeDescuento(ItemTienda itemTienda, String nombreComprador, Bar bar) {
        publicar(bar.getNombre(), getTextoCompraDeDescuento(itemTienda, nombreComprador));
    }

    private String getTextoCompraDeDescuento(ItemTienda itemTienda, String nombreComprador) {
        return nombreComprador
                + " ha comprado: '"
                + itemTienda.getDescripcion()
                + "'.";
    }

    public void avisarPerdedorDeSorteo(Sorteo sorteo, String perdedor) {
        publicar(perdedor, getTextoPerderSorteo(sorteo));
    }

    private String getTextoPerderSorteo(Sorteo sorteo) {
        return "No resultaste ganador del sorteo por "
                + sorteo.getPuntos()
                + " puntos del bar "
                + sorteo.getNombreBar()
                + ".";
    }

    public void avisarComentarioEnBar(String user, String nombreBar) {
        publicar(nombreBar, getTextoComentarioBar(user));
    }

    private String getTextoComentarioBar(String user) {
        return "¡"
                + user
                + " ha calificado tu bar!";
    }
}
<file_sep>package com.eriochrome.bartime.modelos;
import com.eriochrome.bartime.contracts.DatosBarOpcionalesContract;
import com.eriochrome.bartime.modelos.entidades.Bar;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.FirebaseUser;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import java.util.ArrayList;
/**
 * Persists the optional bar data (payment methods, phone) and links the bar
 * to the authenticated bar-user account under "usuariosBar/<uid>/barAsociado".
 */
public class DatosBarOpcionalesInteraccion implements DatosBarOpcionalesContract.Interaccion {
    private Bar bar;
    private DatabaseReference refGlobal;
    private DatabaseReference baresRef;
    private DatosBarOpcionalesContract.Listener listener;
    private FirebaseUser authUser;
    public DatosBarOpcionalesInteraccion(DatosBarOpcionalesContract.Listener listener) {
        this.listener = listener;
        authUser = FirebaseAuth.getInstance().getCurrentUser();
        refGlobal = FirebaseDatabase.getInstance().getReference();
        baresRef = refGlobal.child("bares");
    }
    @Override
    public void setBar(Bar bar) {
        this.bar = bar;
    }
    @Override
    public void setMetodosDePago(ArrayList<String> metodosDePago) {
        bar.agregarMetodosDePago(metodosDePago);
    }
    // Links the bar to the signed-in account, writes it under "bares/<name>",
    // and notifies the listener once the write succeeds.
    // NOTE(review): assumes a user is signed in -- getCurrentUser() may return
    // null, which would NPE here; confirm callers guarantee an active session.
    @Override
    public void subirDatos() {
        refGlobal.child("usuariosBar").child(authUser.getUid()).child("barAsociado").setValue(bar.getNombre());
        bar.setOwner(authUser.getDisplayName());
        baresRef.child(bar.getNombre()).setValue(bar).addOnSuccessListener(aVoid -> listener.listo());
    }
    @Override
    public void setTelefono(String telefono) {
        bar.setTelefono(telefono);
    }
}<file_sep>package com.eriochrome.bartime.modelos.entidades;
/**
 * Immutable value object: a user-facing notice text paired with the database
 * key under which it is stored.
 */
public class Aviso {

    private final String textoAviso;
    private final String id;

    /**
     * @param textoAviso user-facing notice text
     * @param id         database key identifying this notice
     */
    public Aviso(String textoAviso, String id) {
        this.textoAviso = textoAviso;
        this.id = id;
    }

    public String getTextoAviso() {
        return textoAviso;
    }

    public String getId() {
        return id;
    }
}
<file_sep>package com.eriochrome.bartime.modelos;
import com.eriochrome.bartime.contracts.DatosBarReclamarContract;
import com.eriochrome.bartime.modelos.entidades.Bar;
import com.eriochrome.bartime.presenters.DatosBarReclamarPresenter;
import com.eriochrome.bartime.utils.StrCompareUtils;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.FirebaseUser;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.ValueEventListener;
import java.util.ArrayList;
import androidx.annotation.NonNull;
/**
 * Model for the "claim an existing bar" flow: finds unclaimed bars whose name
 * resembles the new bar's, and merges a claimed bar into the new one.
 * Fix: the Firebase callback dereferenced ds.getValue(...) and getOwner()
 * without null checks; malformed entries or a missing owner field would crash
 * the callback. Both are now handled defensively with no change for well-formed data.
 */
public class DatosBarReclamarInteraccion implements DatosBarReclamarContract.Interaccion {
    private DatosBarReclamarContract.Listener listener;
    private FirebaseUser authUser;
    private StrCompareUtils strComparer;
    private DatabaseReference ref;
    private ArrayList<Bar> bares;
    private Bar nuevoBar;

    public DatosBarReclamarInteraccion(DatosBarReclamarContract.Listener listener) {
        this.listener = listener;
        strComparer = new StrCompareUtils();
        ref = FirebaseDatabase.getInstance().getReference().child("bares");
        authUser = FirebaseAuth.getInstance().getCurrentUser();
        bares = new ArrayList<>();
    }

    /** Loads every bar once and reports those that are unclaimed and similarly named. */
    @Override
    public void mostrarBaresParaReclamar() {
        bares.clear();
        ref.addListenerForSingleValueEvent(new ValueEventListener() {
            @Override
            public void onDataChange(@NonNull DataSnapshot dataSnapshot) {
                for (DataSnapshot ds : dataSnapshot.getChildren()) {
                    Bar bar = ds.getValue(Bar.class);
                    // getValue returns null for entries that cannot be
                    // deserialized; skip them instead of crashing the callback.
                    if (bar == null) {
                        continue;
                    }
                    // "".equals(...) is null-safe: bars with a missing owner
                    // field are simply not offered (the original NPE'd here).
                    if ("".equals(bar.getOwner())
                            && strComparer.sonParecidos(nuevoBar.getNombre(), bar.getNombre())) {
                        bares.add(bar);
                    }
                }
                listener.listo(bares);
            }
            @Override
            public void onCancelled(@NonNull DatabaseError databaseError) { }
        });
    }

    @Override
    public void setBar(Bar bar) {
        nuevoBar = bar;
    }

    @Override
    public Bar getBar() {
        return nuevoBar;
    }

    /** Merges the claimed bar into the new one, sets the owner, and removes the old entry. */
    @Override
    public void reclamarBar(Bar bar) {
        nuevoBar.reclamar(bar);
        nuevoBar.setOwner(authUser.getDisplayName());
        ref.child(bar.getNombre()).removeValue();
    }
}<file_sep>package com.eriochrome.bartime.utils;
import android.content.Context;
import android.content.DialogInterface;
import android.content.pm.PackageManager;
import android.os.Build;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.app.AppCompatActivity;
import android.util.Log;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
/**
 * Helper that wraps the Android runtime-permission flow for an activity:
 * checks permissions, requests missing ones, re-asks with a rationale dialog
 * after an initial denial, and reports the final outcome through
 * PermissionResultCallback.
 */
public class PermissionUtils {
    private Context context;
    private AppCompatActivity activityActual;
    private PermissionResultCallback permissionResultCallback;
    // Full list of permissions requested in the current flow.
    private ArrayList<String> listaPermisos = new ArrayList<>();
    // Subset of listaPermisos that has not yet been granted.
    private ArrayList<String> listaPermisosNecesarios = new ArrayList<>();
    // Rationale text shown when the user denies but can be asked again.
    private String contenidoDialogo = "";
    private int requestCode;
    // This overload assumes the context itself implements the callback.
    public PermissionUtils(Context context) {
        this.context = context;
        this.activityActual = (AppCompatActivity) context;
        permissionResultCallback = (PermissionResultCallback) context;
    }
    public PermissionUtils(Context context, PermissionResultCallback callback) {
        this.context = context;
        this.activityActual = (AppCompatActivity) context;
        permissionResultCallback = callback;
    }
    /**
     * Check the API Level & Permission.
     * Below API 23 runtime permissions do not exist, so the callback fires
     * immediately; otherwise missing permissions are requested and the result
     * arrives later in onRequestPermissionsResult.
     */
    public void check_permission(ArrayList<String> permissions, String contenidoDialogo, int requestCode) {
        this.listaPermisos = permissions;
        this.contenidoDialogo = contenidoDialogo;
        this.requestCode = requestCode;
        if (Build.VERSION.SDK_INT >= 23) {
            if (checkAndRequestPermissions(permissions, requestCode)) {
                permissionResultCallback.permissionGranted(requestCode);
                Log.i("all permissions", "granted");
                Log.i("proceed", "to callback");
            }
        }
        else {
            permissionResultCallback.permissionGranted(requestCode);
            Log.i("all permissions", "granted");
            Log.i("proceed", "to callback");
        }
    }
    /**
     * Check and request the Permissions.
     * Returns true when everything is already granted; returns false after
     * firing an async request (result delivered to onRequestPermissionsResult).
     */
    private boolean checkAndRequestPermissions(ArrayList<String> permissions, int request_code) {
        if (permissions.size() > 0) {
            listaPermisosNecesarios = new ArrayList<>();
            for (int i = 0; i < permissions.size(); i++) {
                int hasPermission = ContextCompat.checkSelfPermission(activityActual, permissions.get(i));
                if (hasPermission != PackageManager.PERMISSION_GRANTED) {
                    listaPermisosNecesarios.add(permissions.get(i));
                }
            }
            if (!listaPermisosNecesarios.isEmpty()) {
                ActivityCompat.requestPermissions(activityActual, listaPermisosNecesarios.toArray(new String[listaPermisosNecesarios.size()]), request_code);
                return false;
            }
        }
        return true;
    }
    public void onRequestPermissionsResult(int requestCode, String permissions[], int[] grantResults) {
        switch (requestCode) {
            // NOTE(review): only request code 1 is handled here although
            // check_permission accepts arbitrary codes -- confirm callers use 1.
            case 1:
                if (grantResults.length > 0) {
                    Map<String, Integer> perms = new HashMap<>();
                    for (int i = 0; i < permissions.length; i++) {
                        perms.put(permissions[i], grantResults[i]);
                    }
                    final ArrayList<String> pending_permissions = new ArrayList<>();
                    for (int i = 0; i < listaPermisosNecesarios.size(); i++) {
                        if (perms.get(listaPermisosNecesarios.get(i)) != PackageManager.PERMISSION_GRANTED) {
                            if (ActivityCompat.shouldShowRequestPermissionRationale(activityActual, listaPermisosNecesarios.get(i)))
                                pending_permissions.add(listaPermisosNecesarios.get(i));
                            else {
                                // Denied with "don't ask again": only the system
                                // settings screen can re-enable the permission.
                                Log.i("Go to settings", "and enable permissions");
                                permissionResultCallback.neverAskAgain(this.requestCode);
                                Toast.makeText(activityActual, "Go to settings and enable permissions", Toast.LENGTH_LONG).show();
                                return;
                            }
                        }
                    }
                    if (pending_permissions.size() > 0) {
                        // Some permissions were denied but may be re-requested:
                        // show the rationale and offer to ask again.
                        showMessageOKCancel(contenidoDialogo,
                                (dialog, which) -> {
                                    switch (which) {
                                        case DialogInterface.BUTTON_POSITIVE:
                                            check_permission(listaPermisos, contenidoDialogo, this.requestCode);
                                            break;
                                        case DialogInterface.BUTTON_NEGATIVE:
                                            Log.i("permisson", "not fully given");
                                            if (listaPermisos.size() == pending_permissions.size())
                                                permissionResultCallback.permissionDenied(this.requestCode);
                                            else
                                                permissionResultCallback.partialPermissionGranted(this.requestCode, pending_permissions);
                                            break;
                                    }
                                });
                    }
                    else {
                        Log.i("all", "permissions granted");
                        Log.i("proceed", "to next step");
                        permissionResultCallback.permissionGranted(this.requestCode);
                    }
                }
                break;
        }
    }
    /**
     * Explain why the app needs permissions.
     * Both buttons share the same listener; the branch on `which` decides.
     */
    private void showMessageOKCancel(String message, DialogInterface.OnClickListener okListener) {
        new AlertDialog.Builder(activityActual)
                .setMessage(message)
                .setPositiveButton("Ok", okListener)
                .setNegativeButton("Cancel", okListener)
                .create()
                .show();
    }
    /** Receiver of the final permission outcome. */
    public interface PermissionResultCallback
    {
        void permissionGranted(int requestCode);
        void partialPermissionGranted(int requestCode, ArrayList<String> granted_permissions);
        void permissionDenied(int requestCode);
        void neverAskAgain(int requestCode);
    }
}
<file_sep>package com.eriochrome.bartime.presenters;
import android.content.Intent;
import com.eriochrome.bartime.contracts.PaginaSorteoContract;
import com.eriochrome.bartime.modelos.PaginaSorteoInteraccion;
import com.eriochrome.bartime.modelos.entidades.Juego;
/**
 * Presenter for the raffle (sorteo) page: loads the participant count, runs
 * the draw, and deletes the raffle once a winner has been picked.
 */
public class PaginaSorteoPresenter implements PaginaSorteoContract.Listener {

    private final PaginaSorteoContract.Interaccion interaccion;
    private PaginaSorteoContract.View view;

    public PaginaSorteoPresenter() {
        interaccion = new PaginaSorteoInteraccion(this);
    }

    public void bind(PaginaSorteoContract.View view) {
        this.view = view;
    }

    public void unbind() {
        view = null;
    }

    /** Reads the "juego" extra from the launching intent into the model. */
    public void obtenerJuego(Intent intent) {
        interaccion.setSorteo((Juego) intent.getSerializableExtra("juego"));
    }

    /** Shows the loading state and runs the draw asynchronously. */
    public void sortear() {
        view.cargando();
        interaccion.sortear();
    }

    /** Model callback: announce the winner, then remove the finished raffle. */
    @Override
    public void finSorteo(String participanteGanador) {
        view.finSorteo(participanteGanador);
        interaccion.borrarSorteo();
    }

    /** Model callback with the number of registered participants. */
    @Override
    public void setCantParticipantes(long childrenCount) {
        view.setCantParticipantes((int) childrenCount);
        view.finCargando();
    }

    /** Populates the summary text and triggers the participant count load. */
    public void cargarDatos() {
        view.cargando();
        view.setResumenJuego(interaccion.getResumenJuego());
        interaccion.cargarCantidadDeParticipantes();
    }
}<file_sep>package com.eriochrome.bartime.vistas;
import android.app.DialogFragment;
import android.content.Context;
import android.content.Intent;
import android.location.Location;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputMethodManager;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.ProgressBar;
import android.widget.RadioGroup;
import android.widget.Switch;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AlertDialog;
import androidx.fragment.app.Fragment;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import com.crashlytics.android.Crashlytics;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.adapters.EspacioVerticalDecorator;
import com.eriochrome.bartime.adapters.ListaBaresAdapter;
import com.eriochrome.bartime.contracts.BaresFragmentContract;
import com.eriochrome.bartime.modelos.entidades.Bar;
import com.eriochrome.bartime.presenters.BaresFragmentPresenter;
import com.eriochrome.bartime.vistas.dialogs.DialogSeleccionFiltros;
import java.util.ArrayList;
import java.util.Objects;
import static com.eriochrome.bartime.utils.Utils.toastShort;
public class ListadoBaresFragment extends Fragment implements BaresFragmentContract.View{
private Button filtrar;
private Button verEnMapa;
private EditText buscar;
private RecyclerView baresRecyclerView;
private ListaBaresAdapter baresAdapter;
private ProgressBar loading;
BaresFragmentPresenter presenter;
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.fragment_listado_bares, container, false);
filtrar = view.findViewById(R.id.filtrar);
verEnMapa = view.findViewById(R.id.ver_en_mapa);
buscar = view.findViewById(R.id.buscar);
loading = view.findViewById(R.id.progressBar);
loading.setVisibility(View.GONE);
baresRecyclerView = view.findViewById(R.id.recycler_view);
return view;
}
@Override
public void onViewCreated(@NonNull View view, Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
presenter = new BaresFragmentPresenter();
presenter.bind(this);
baresAdapter = new ListaBaresAdapter(getActivity());
baresRecyclerView.setHasFixedSize(true);
setupRecyclerView();
baresRecyclerView.setAdapter(baresAdapter);
setupListeners();
}
@Override
public void onStart() {
super.onStart();
presenter.mostrarPrimerOrdenBares();
}
@Override
public void cargando() {
loading.setVisibility(View.VISIBLE);
baresAdapter.clear();
}
@Override
public void finCargando(ArrayList<Bar> listaBares) {
if (listaBares.size() == 0) {
toastShort(getActivity(), getString(R.string.no_hay_resultados));
}
loading.setVisibility(View.GONE);
baresAdapter.setItems(listaBares);
}
@Override
public boolean filtroHappyHour(AlertDialog dialog) {
Switch happyhour = dialog.findViewById(R.id.happyhour);
return happyhour != null && happyhour.isChecked();
}
@Override
public boolean filtroAbierto(AlertDialog dialog) {
Switch abierto = dialog.findViewById(R.id.abierto);
return abierto != null && abierto.isChecked();
}
@Override
public boolean filtroEfectivo(AlertDialog dialog) {
CheckBox efectivo = dialog.findViewById(R.id.efectivo);
return efectivo != null && efectivo.isChecked();
}
@Override
public boolean filtroCredito(AlertDialog dialog) {
CheckBox credito = dialog.findViewById(R.id.credito);
return credito != null && credito.isChecked();
}
@Override
public boolean filtroDebito(AlertDialog dialog) {
CheckBox debito = dialog.findViewById(R.id.debito);
return debito != null && debito.isChecked();
}
@Override
public String getOrdenamiento(AlertDialog dialog) {
String ordenamiento = "";
RadioGroup ordenamientoId = dialog.findViewById(R.id.ordenar_group);
if (ordenamientoId != null) {
switch (ordenamientoId.getCheckedRadioButtonId()) {
case R.id.distancia:
ordenamiento = "distancia";
break;
case R.id.estrellas:
ordenamiento = "estrellas";
break;
case R.id.nombre:
ordenamiento = "nombre";
break;
}
}
return ordenamiento;
}
private void ocultarTeclado(){
InputMethodManager imm = (InputMethodManager) Objects.requireNonNull(getActivity()).getSystemService(Context.INPUT_METHOD_SERVICE);
if (imm != null) {
imm.toggleSoftInput(InputMethodManager.HIDE_IMPLICIT_ONLY, 0);
}
}
private void mostrarFiltros() {
DialogFragment filtros = new DialogSeleccionFiltros();
filtros.show(Objects.requireNonNull(getActivity()).getFragmentManager(), "filtros");
}
// Applies the dialog's filters. When sorting by distance, the freshest device
// location is handed to the presenter first; if none is available the presenter
// keeps its default location and the user is told.
void aplicarFiltros(AlertDialog dialog, Location ultimaUbicacion) {
    boolean ordenaPorDistancia = getOrdenamiento(dialog).equals("distancia");
    if (ordenaPorDistancia) {
        if (ultimaUbicacion == null) {
            toastShort(getActivity(), "No se pudo obtener su ubicacion, se usara la establecida por defecto.");
        } else {
            presenter.setUltimaUbicacion(ultimaUbicacion);
        }
    }
    presenter.mostrarConFiltros(dialog);
}
// Configures the bar list as a vertical list with a fixed gap between rows.
private void setupRecyclerView() {
    final int espacioEntreFilas = 60;
    baresRecyclerView.setLayoutManager(new LinearLayoutManager(getActivity()));
    baresRecyclerView.addItemDecoration(new EspacioVerticalDecorator(espacioEntreFilas));
}
// Wires up the three user actions of this screen: filtering, switching to the
// map view, and searching by keyword from the IME "done" action.
private void setupListeners() {
    filtrar.setOnClickListener(view -> mostrarFiltros());
    verEnMapa.setOnClickListener(view ->
            startActivity(new Intent(getActivity(), MapaDeBaresActivity.class)));
    buscar.setOnEditorActionListener((textView, actionId, keyEvent) -> {
        if (actionId != EditorInfo.IME_ACTION_DONE) {
            return false;
        }
        // Run the keyword search and dismiss the keyboard; returning true
        // marks the action as consumed.
        presenter.buscarConPalabra(buscar.getText().toString());
        ocultarTeclado();
        return true;
    });
}
@Override
public void onDestroy() {
    /*
     * Detach the presenter so it does not leak this fragment.
     * Fix: the original caught NullPointerException around unbind() and
     * logged it to Crashlytics — exceptions used as control flow for the
     * "presenter was never created" case. A plain null check expresses the
     * same intent directly and no longer masks genuine NPEs from unbind().
     */
    if (presenter != null) {
        presenter.unbind();
    }
    super.onDestroy();
}
}
<file_sep>package com.eriochrome.bartime.contracts;
import com.eriochrome.bartime.modelos.entidades.Bar;
import com.eriochrome.bartime.modelos.entidades.Juego;
import com.eriochrome.bartime.modelos.entidades.Trivia;
import java.util.ArrayList;
/**
 * MVP contract for the screen listing the games offered by a single bar.
 */
public interface JuegosDelBarContract {
// Model-side operations (data access and participation rules).
interface Interaccion {
// Sets the bar whose games are managed.
void setBar(Bar bar);
// Triggers the (async) load of the bar's games.
void mostrarJuegos();
// Whether a user session exists.
boolean estaConectado();
// Attempts to join a game; may be rejected (see Listener.yaSeParticipo).
void intentarParticiparDeJuego(Juego juego);
// Joins the game unconditionally.
void participarDeJuego(Juego juego);
}
// View-side callbacks driven by the presenter.
interface View {
// Show a loading state while games are fetched.
void cargando();
// Loading finished; render the fetched games.
void finCargando(ArrayList<Juego> juegos);
// The user had already joined this game.
void yaSeParticipo();
// Joining the game succeeded.
void successParticipando();
// Navigate into the given trivia game.
void ingresarATrivia(Trivia trivia);
}
// Callbacks from the model back to the presenter.
interface Listener {
void onJuegosCargados(ArrayList<Juego> juegos);
void yaSeParticipo();
void participarDeJuego(Juego juego);
void successParticipando();
void ingresarATrivia(Trivia trivia);
}
}<file_sep>package com.eriochrome.bartime.vistas;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.adapters.BarReclamarHolder;
import com.eriochrome.bartime.adapters.EspacioVerticalDecorator;
import com.eriochrome.bartime.adapters.ListaBaresAReclamarAdapter;
import com.eriochrome.bartime.contracts.DatosBarReclamarContract;
import com.eriochrome.bartime.modelos.entidades.Bar;
import com.eriochrome.bartime.presenters.DatosBarReclamarPresenter;
import com.eriochrome.bartime.vistas.dialogs.DialogReclamarBar;
import java.util.ArrayList;
import androidx.appcompat.app.AppCompatActivity;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
/**
 * Wizard step where a bar owner can claim an existing, user-created bar that
 * matches the name they entered, instead of creating a duplicate. If no
 * candidate bars exist the screen skips itself (see finCargando).
 */
public class DatosBarReclamarActivity extends AppCompatActivity implements
DatosBarReclamarContract.View,
BarReclamarHolder.Listener,
DialogReclamarBar.Listener {
private DatosBarReclamarPresenter presenter;
private ProgressBar progressBar;
private RecyclerView recyclerView;
private ListaBaresAReclamarAdapter adapter;
// "I don't want to claim any bar" — continues the wizard without claiming.
private Button noDeseoReclamar;
private TextView aclaracion1;
private TextView aclaracion2;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_datos_bar_reclamar);
recyclerView = findViewById(R.id.recyclerView);
recyclerView.setHasFixedSize(true);
adapter = new ListaBaresAReclamarAdapter(this);
recyclerView.setAdapter(adapter);
setupRecyclerView();
// Explanatory texts and the skip button stay hidden until candidates load.
aclaracion1 = findViewById(R.id.acl_1);
aclaracion1.setVisibility(View.GONE);
aclaracion2 = findViewById(R.id.acl_2);
aclaracion2.setVisibility(View.GONE);
noDeseoReclamar = findViewById(R.id.no_deseo_reclamar);
noDeseoReclamar.setVisibility(View.GONE);
progressBar = findViewById(R.id.progressBar);
progressBar.setVisibility(View.GONE);
presenter = new DatosBarReclamarPresenter();
presenter.bind(this);
// The bar under construction travels through the wizard via intent extras.
presenter.obtenerBar(getIntent());
noDeseoReclamar.setOnClickListener(v -> seguir());
presenter.mostrarBaresParaReclamar();
}
// Advances to the next wizard step (opening hours), forwarding the bar.
private void seguir() {
Intent i = new Intent(DatosBarReclamarActivity.this, DatosBarHorariosActivity.class);
i = presenter.enviarBar(i);
startActivity(i);
finish();
}
@Override
protected void onResume() {
super.onResume();
adapter.notifyDataSetChanged();
}
// Vertical list with a fixed 30px gap between rows.
private void setupRecyclerView() {
RecyclerView.LayoutManager layoutManager = new LinearLayoutManager(this);
int espacioVertical = 30;
EspacioVerticalDecorator espacioVerticalDecorator = new EspacioVerticalDecorator(espacioVertical);
recyclerView.setLayoutManager(layoutManager);
recyclerView.addItemDecoration(espacioVerticalDecorator);
}
@Override
protected void onDestroy() {
presenter.unbind();
super.onDestroy();
}
// Row tap: ask for confirmation before claiming the bar.
@Override
public void onClickBar(Bar bar) {
DialogReclamarBar dialogReclamarBar = new DialogReclamarBar();
dialogReclamarBar.setBar(bar);
dialogReclamarBar.show(getSupportFragmentManager(), "reclamar");
}
// Confirmed in the dialog: claim the bar, then continue the wizard.
@Override
public void reclamarBar(Bar bar) {
presenter.reclamarBar(bar);
seguir();
}
@Override
public void cargando() {
adapter.clear();
progressBar.setVisibility(View.VISIBLE);
}
// Candidates loaded. With no candidates this step is skipped entirely;
// otherwise the list, the explanations and the skip button become visible.
@Override
public void finCargando(ArrayList<Bar> nuevosBares) {
if (nuevosBares.isEmpty()) {
seguir();
}
adapter.setItems(nuevosBares);
progressBar.setVisibility(View.GONE);
aclaracion1.setVisibility(View.VISIBLE);
aclaracion2.setVisibility(View.VISIBLE);
noDeseoReclamar.setVisibility(View.VISIBLE);
}
}
<file_sep>package com.eriochrome.bartime.contracts;
import com.eriochrome.bartime.modelos.entidades.PreguntaTrivia;
import com.eriochrome.bartime.modelos.entidades.Trivia;
/**
 * MVP contract for playing a trivia game question by question.
 */
public interface TriviaContract {
// Model-side operations for advancing through a trivia.
interface Interaccion {
// Sets the trivia being played.
void setTrivia(Trivia trivia);
// Advances to and returns the next question.
PreguntaTrivia cargarSiguiente();
// Whether the chosen option is the correct answer.
boolean eligioOpcionCorrecta(String opcion);
// Commits the points earned so far.
void actualizarPuntos();
// Whether there are more questions to play.
boolean quedanPreguntas();
// Registers the current player as a winner of this trivia.
void agregarGanador();
}
// View-side rendering of one question and its three options.
interface View {
void llenar(String pregunta, String opA, String opB, String opC);
}
}<file_sep>package com.eriochrome.bartime.presenters;
import com.eriochrome.bartime.contracts.LauncherContract;
import com.eriochrome.bartime.modelos.LauncherInteraccion;
/**
 * Presenter for the launcher screen. Decides which home screen to open:
 * anonymous users go to the sign-up flow, logged-in accounts are checked
 * asynchronously for bar status and routed accordingly.
 */
public class LauncherPresenter implements LauncherContract.CompleteListener{
    private LauncherContract.Interaccion interaccion;
    private LauncherContract.View view;

    public LauncherPresenter() {
        interaccion = new LauncherInteraccion(this);
    }

    /** Attaches the view; callbacks are ignored while no view is bound. */
    public void bind(LauncherContract.View view) {
        this.view = view;
    }

    /** Detaches the view so async callbacks cannot leak it. */
    public void unbind() {
        view = null;
    }

    /** Routes the user based on session state. */
    public void redirigir() {
        if (!interaccion.estaConectado()) {
            // No session: straight to the sign-up / new-user screen.
            if (view != null) {
                view.startNuevo();
            }
            return;
        }
        // Session exists: the bar/user decision arrives via esBar(boolean).
        interaccion.esBar();
    }

    /** Async answer to the "is this account a bar?" question. */
    @Override
    public void esBar(boolean esBar) {
        if (view == null) {
            return;
        }
        if (esBar) {
            view.startBar();
        } else {
            view.startUsuario();
        }
    }
}
<file_sep>package com.eriochrome.bartime.utils;
import com.eriochrome.bartime.modelos.entidades.Juego;
import info.debatty.java.stringsimilarity.NormalizedLevenshtein;
/**
 * String-comparison helpers: substring search over a game's bar name and
 * type, and near-duplicate detection for bar names via normalized
 * Levenshtein similarity.
 */
public class StrCompareUtils {
    private NormalizedLevenshtein nl;
    // Minimum normalized similarity for two names to count as "alike".
    private static final double MIN_SIM = 0.6;

    public StrCompareUtils() {
        nl = new NormalizedLevenshtein();
    }

    /**
     * @param juego un juego de un bar
     * @param busqueda query
     * @return true si el query esta contenido en la concatenacion
     * del tipo de juego y el nombre del bar.
     */
    public boolean juegoContiene(Juego juego, String busqueda) {
        String tipoYBar = juego.getNombreBar().toLowerCase() + juego.getTipoDeJuego().toLowerCase();
        // Fix: the haystack is lower-cased but the query was not, so any
        // upper-case character in the search text could never match. Lower-case
        // the query as well to make the search genuinely case-insensitive.
        return tipoYBar.contains(busqueda.toLowerCase());
    }

    /**
     * @param query el nombre que intenta poner al nuevo bar
     * @param nombreBar un nombre de un bar existente
     * @return true si la semejanza de levenshetein normalizada es mayor a cierta cantidad.
     */
    public boolean sonParecidos(String query, String nombreBar) {
        return nl.similarity(query, nombreBar) >= MIN_SIM;
    }
}
<file_sep>package com.eriochrome.bartime.modelos.entidades;
/**
 * Plain data holder for a bar review: a star rating, the review text, who
 * wrote it, which bar it belongs to, and its storage id. Getters and setters
 * are paired per field; the implicit no-arg constructor is kept as-is.
 */
public class Comentario {
    private int estrellas;
    private String comentarioText;
    private String comentador;
    private String nombreBar;
    private String comentarioID;

    // --- rating ---
    public int getEstrellas() {
        return estrellas;
    }
    public void setEstrellas(int estrellas) {
        this.estrellas = estrellas;
    }

    // --- review body ---
    public String getComentarioText() {
        return comentarioText;
    }
    public void setComentarioText(String comentarioText) {
        this.comentarioText = comentarioText;
    }

    // --- author ---
    public String getComentador() {
        return comentador;
    }
    public void setComentador(String comentador) {
        this.comentador = comentador;
    }

    // --- owning bar ---
    public String getNombreBar() {
        return nombreBar;
    }
    public void setNombreBar(String nombreBar) {
        this.nombreBar = nombreBar;
    }

    // --- storage id ---
    public String getID() {
        return comentarioID;
    }
    public void setID(String comentarioID) {
        this.comentarioID = comentarioID;
    }
}
<file_sep>package com.eriochrome.bartime.vistas;
import android.Manifest;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.net.Uri;
import android.provider.MediaStore;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.TextView;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.contracts.DatosBarPrincipalContract;
import com.eriochrome.bartime.presenters.DatosBarPrincipalPresenter;
import com.eriochrome.bartime.utils.GlideApp;
import java.io.IOException;
import static com.eriochrome.bartime.utils.Utils.toastShort;
/**
 * First step of the bar creation/edition wizard: name, description, location
 * and main picture. "Continuar" validates the form, pushes the data into the
 * presenter and launches the next step (claim step when creating, opening
 * hours when editing).
 */
public class DatosBarPrincipalActivity extends AppCompatActivity implements DatosBarPrincipalContract.View {
    private static final int NUMERO_SOLICITUD_UBICACION = 1;
    private DatosBarPrincipalPresenter presenter;
    // True when editing an existing bar (set via esModoEditar()).
    private boolean modoEditar;
    private static final int NUMERO_SOLICITUD_GALERIA = 2;
    private static final int CODIGO_REQUEST_EXTERNAL_STORAGE = 100;
    private Button continuar;
    private Button seleccionarUbicacion;
    private Button seleccionarImagen;
    private TextView tituloActivity;
    private EditText nombreBar;
    private EditText descripcion;
    private ImageView imagenBar;
    // Gallery picture chosen in this session; null until one is picked.
    Uri returnUri;
    // True when an existing bar already has a stored photo (edit mode).
    private boolean tieneFoto;
    private double lat;
    private double lng;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_datos_bar_principal);
        presenter = new DatosBarPrincipalPresenter();
        presenter.bind(this);
        tituloActivity = findViewById(R.id.titulo);
        nombreBar = findViewById(R.id.nombre_bar);
        descripcion = findViewById(R.id.desc);
        seleccionarUbicacion = findViewById(R.id.seleccionar_ubicacion);
        seleccionarImagen = findViewById(R.id.seleccionar_imagen_principal);
        imagenBar = findViewById(R.id.imagen_bar);
        continuar = findViewById(R.id.continuar);
        tieneFoto = false;
        modoEditar = false;
        // In edit mode the presenter calls back into the set* View methods.
        presenter.obtenerBar(getIntent());
        // Clear hints while focused so they do not overlap typed text.
        nombreBar.setOnFocusChangeListener((v, hasFocus) -> {
            if (hasFocus) nombreBar.setHint("");
            else nombreBar.setHint(getResources().getString(R.string.nombre));
        });
        descripcion.setOnFocusChangeListener((v, hasFocus) -> {
            if (hasFocus) descripcion.setHint("");
            else descripcion.setHint(getResources().getString(R.string.descripcion));
        });
        seleccionarUbicacion.setOnClickListener(v -> {
            Intent intentUbicacion = new Intent(DatosBarPrincipalActivity.this, SeleccionarUbicacionActivity.class);
            startActivityForResult(intentUbicacion, NUMERO_SOLICITUD_UBICACION);
        });
        seleccionarImagen.setOnClickListener(v -> {
            if (tienePermisos())
                seleccionarImagenDeGaleria();
            else
                pedirPermisosUbicacion();
        });
        continuar.setOnClickListener(v -> {
            Intent i;
            if (modoEditar) {
                i = new Intent(DatosBarPrincipalActivity.this, DatosBarHorariosActivity.class);
            } else {
                i = new Intent(DatosBarPrincipalActivity.this, DatosBarReclamarActivity.class);
            }
            if (datosListos()) {
                presenter.setNombre(nombreBar.getText().toString());
                presenter.setDescripcion(descripcion.getText().toString());
                presenter.setUbicacion(seleccionarUbicacion.getText().toString(), lat, lng);
                //Ya subo la foto al storage porque total si no se crea el bar despues se puede sobreescribir
                try {
                    if (returnUri != null) {
                        presenter.subirFoto(returnUri);
                    }
                } catch (RuntimeException e) {
                    toastShort(this, getString(R.string.ocurrio_error_inesperado));
                }
                i = presenter.enviarBar(i);
                startActivity(i);
            }
        });
    }

    // Launches the system gallery picker for the main picture.
    private void seleccionarImagenDeGaleria() {
        Intent cameraIntent = new Intent(Intent.ACTION_PICK, MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
        if (cameraIntent.resolveActivity(this.getPackageManager()) != null) {
            startActivityForResult(cameraIntent, NUMERO_SOLICITUD_GALERIA);
        }
    }

    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == NUMERO_SOLICITUD_GALERIA) {
            // Fix: also require data != null before dereferencing it. The
            // location branch below already guarded against a null Intent but
            // this branch called data.getData() unguarded, which could NPE.
            if (resultCode == RESULT_OK && data != null) {
                returnUri = data.getData();
                Bitmap bitmapImage = null;
                try {
                    bitmapImage = MediaStore.Images.Media.getBitmap(getContentResolver(), returnUri);
                } catch (IOException e) {
                    e.printStackTrace();
                }
                imagenBar.setImageBitmap(bitmapImage);
            } else {
                toastShort(this, getString(R.string.no_elegiste_imagen));
            }
        }
        else if (requestCode == NUMERO_SOLICITUD_UBICACION) {
            if (resultCode == RESULT_OK) {
                if (data != null) {
                    lat = data.getDoubleExtra("latitud", 0);
                    lng = data.getDoubleExtra("longitud", 0);
                    String direccion = data.getStringExtra("direccion");
                    seleccionarUbicacion.setText(direccion);
                }
            }
        }
    }

    // Whether we may read from external storage (needed for the gallery pick).
    private boolean tienePermisos() {
        return (ContextCompat.checkSelfPermission(this, Manifest.permission.READ_EXTERNAL_STORAGE) ==
                PackageManager.PERMISSION_GRANTED);
    }

    private void pedirPermisosUbicacion() {
        ActivityCompat.requestPermissions(this, new String[]{
                Manifest.permission.READ_EXTERNAL_STORAGE
        }, CODIGO_REQUEST_EXTERNAL_STORAGE);
    }

    /**
     * Validates the form, toasting the first missing field it finds.
     * A field still equal to its hint string counts as empty.
     */
    private boolean datosListos() {
        boolean listo = true;
        if (nombreBar.getText().toString().equals("") ||
                nombreBar.getText().toString().equals(getString(R.string.nombre))) {
            listo = false;
            toastShort(this, getString(R.string.falta_nombre_bar));
        }
        else if (descripcion.getText().toString().equals("") ||
                descripcion.getText().toString().equals(getString(R.string.descripcion))) {
            listo = false;
            toastShort(this, getString(R.string.falta_descripcion_para_bar));
        }
        else if (seleccionarUbicacion.getText().equals("") ||
                seleccionarUbicacion.getText().equals(getString(R.string.seleccionar_ubicacion))) {
            listo = false;
            toastShort(this, getString(R.string.falta_ubicacion_bar));
        }
        else if (returnUri == null && !tieneFoto) {
            // No new picture and no pre-existing one either.
            listo = false;
            toastShort(this, getString(R.string.se_debe_asignar_imagen_principal));
        }
        return listo;
    }

    @Override
    protected void onDestroy() {
        presenter.unbind();
        super.onDestroy();
    }

    // --- View callbacks used by the presenter to prefill the form ---

    @Override
    public void setTitleEditar() {
        tituloActivity.setText(getString(R.string.editar_bar));
    }

    @Override
    public void setNombreBar(String nombre) {
        nombreBar.setText(nombre);
    }

    @Override
    public void setDescripcion(String descripcion) {
        this.descripcion.setText(descripcion);
    }

    @Override
    public void setUbicacion(String ubicacion) {
        this.seleccionarUbicacion.setText(ubicacion);
    }

    @Override
    public void onImageLoaded(String downloadUrl) {
        GlideApp.with(this)
                .load(downloadUrl)
                .placeholder(R.drawable.barra_progreso_circular)
                .into(imagenBar);
    }

    @Override
    public void esModoEditar() {
        tieneFoto = true;
        modoEditar = true;
    }
}
<file_sep>package com.eriochrome.bartime.vistas.dialogs;
import android.app.Dialog;
import android.app.DialogFragment;
import android.content.Context;
import android.os.Bundle;
import androidx.appcompat.app.AlertDialog;
import com.eriochrome.bartime.R;
/**
 * Confirmation dialog shown when an action requires a logged-in account.
 * The hosting activity must implement Listener to receive the login request.
 * NOTE(review): extends the framework android.app.DialogFragment — migrating
 * to androidx.fragment.app.DialogFragment would change the hosts' call sites.
 */
public class DialogCrearCuenta extends DialogFragment {
// Implemented by the host; invoked when the user accepts.
public interface Listener {
void login();
}
// Message body, injected by the creator before show().
private String texto;
private Listener listener;
public void setTexto(String texto) {
this.texto = texto;
}
@Override
public void onAttach(Context context) {
super.onAttach(context);
// Fail fast if the host does not implement the callback.
try {
listener = (Listener) context;
} catch (ClassCastException e) {
throw new ClassCastException(context.toString()
+ " must implement DialogCrearCuenta");
}
}
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
builder.setMessage(texto)
.setTitle(getString(R.string.se_requiere_cuenta));
// Accept: delegate to the host's login flow, then close.
builder.setPositiveButton(R.string.continuar, (dialog, which) -> {
listener.login();
dismiss();
});
builder.setNegativeButton(R.string.cancelar, (dialog, which) -> {
dismiss();
});
return builder.create();
}
}
<file_sep>package com.eriochrome.bartime.vistas;
import android.content.Intent;
import android.content.res.Resources;
import android.os.Bundle;
import androidx.annotation.Nullable;
import androidx.fragment.app.Fragment;
import com.eriochrome.bartime.R;
import com.github.paolorotolo.appintro.AppIntro;
import com.github.paolorotolo.appintro.AppIntroFragment;
/**
 * Three-slide onboarding intro for regular users (AppIntro library).
 * Both "skip" and "done" land on the main listings screen.
 * NOTE(review): res.getColor(int) is the pre-API-23 overload — confirm the
 * project's minSdk before switching to ContextCompat.getColor.
 */
public class IntroduccionUsuarioActivity extends AppIntro {
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Resources res = getResources();
// One slide per feature highlight: title, description, image, background color.
addSlide(AppIntroFragment.newInstance(res.getString(R.string.intro_u1_tit), res.getString(R.string.intro_u1_des), R.drawable.intro2, res.getColor(R.color.colorPrimaryDark)));
addSlide(AppIntroFragment.newInstance(res.getString(R.string.intro_u2_tit), res.getString(R.string.intro_u2_des), R.drawable.intro3, res.getColor(R.color.colorPrimary)));
addSlide(AppIntroFragment.newInstance(res.getString(R.string.intro_u3_tit), res.getString(R.string.intro_u3_des), R.drawable.intro5, res.getColor(R.color.colorAccent)));
}
@Override
public void onSkipPressed(Fragment currentFragment) {
super.onSkipPressed(currentFragment);
startActivity(new Intent(this, ListadosActivity.class));
finish();
}
@Override
public void onDonePressed(Fragment currentFragment) {
super.onDonePressed(currentFragment);
startActivity(new Intent(this, ListadosActivity.class));
finish();
}
}
<file_sep>package com.eriochrome.bartime.vistas;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.Bundle;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.contracts.AgregarBarUsuarioContract;
import com.eriochrome.bartime.presenters.AgregarBarUsuarioPresenter;
import java.io.FileNotFoundException;
import java.io.InputStream;
import de.hdodenhof.circleimageview.CircleImageView;
import static com.eriochrome.bartime.utils.Utils.toastShort;
/**
 * Screen where a regular user suggests a new bar: name, location (picked on a
 * map) and a picture. On "listo" the form is validated and the bar is pushed
 * through the presenter; success navigates to the confirmation screen.
 */
public class AgregarBarUsuarioActivity extends AppCompatActivity implements AgregarBarUsuarioContract.View {
private TextView volver;
private Button listo;
private CircleImageView imagenBar;
private EditText nombre;
private TextView ubicacion;
private final int NUMERO_SOLICITUD_UBICACION = 1;
// Coordinates returned by the location picker.
private double lat;
private double lng;
private TextView botonFoto;
// Gallery picture chosen by the user; null until one is picked.
private Uri path;
private final int NUMERO_SOLICITUD_GALERIA = 2;
AgregarBarUsuarioPresenter presenter;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_agregar_bar_usuario);
volver = findViewById(R.id.volver);
nombre = findViewById(R.id.nombre);
ubicacion = findViewById(R.id.ubicacion);
botonFoto = findViewById(R.id.foto);
imagenBar = findViewById(R.id.imagen_bar);
listo = findViewById(R.id.listo);
setupListeners();
// Clear the hint while focused so it does not overlap typed text.
nombre.setOnFocusChangeListener((v, hasFocus) -> {
if (hasFocus) nombre.setHint("");
else nombre.setHint(getString(R.string.nombre));
});
presenter = new AgregarBarUsuarioPresenter();
presenter.bind(this);
imagenBar.setImageDrawable(getDrawable(R.drawable.placeholder));
}
// Wires back navigation, the location picker, the gallery picker and submit.
private void setupListeners() {
volver.setOnClickListener(v -> finish());
ubicacion.setOnClickListener(v -> {
Intent intentUbicacion = new Intent(AgregarBarUsuarioActivity.this, SeleccionarUbicacionActivity.class);
startActivityForResult(intentUbicacion, NUMERO_SOLICITUD_UBICACION);
});
botonFoto.setOnClickListener(v -> seleccionarImagenDeGaleria());
listo.setOnClickListener(v -> {
String nombreBar = nombre.getText().toString();
if (datosCompletos()) {
presenter.crearBar(nombreBar);
presenter.agregarImagen(path);
presenter.agregarUbicacion(ubicacion.getText().toString(), lat, lng);
presenter.subirBar();
}
});
}
// Validates the form, toasting the first missing field found.
// A field still equal to its hint string counts as empty.
private boolean datosCompletos() {
boolean listo = true;
if (nombre.getText().toString().equals("") ||
nombre.getText().toString().equals(getString(R.string.nombre))) {
listo = false;
toastShort(this, getString(R.string.falta_nombre_bar));
}
else if (ubicacion.getText().equals("") ||
ubicacion.getText().equals(getString(R.string.seleccionar_ubicacion))) {
listo = false;
toastShort(this, getString(R.string.falta_ubicacion_bar));
}
else if (path == null) {
listo = false;
toastShort(this, getString(R.string.se_debe_asignar_imagen_principal));
}
return listo;
}
// Launches the system gallery picker for the bar picture.
private void seleccionarImagenDeGaleria() {
Intent elegirFotoIntent = new Intent(Intent.ACTION_PICK);
elegirFotoIntent.setType("image/*");
startActivityForResult(elegirFotoIntent, NUMERO_SOLICITUD_GALERIA);
}
// Handles both picker results: gallery (decodes and previews the image)
// and location (stores coordinates and shows the address).
@Override
protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == NUMERO_SOLICITUD_GALERIA) {
if (resultCode == RESULT_OK) {
try {
if (data != null) {
path = data.getData();
final InputStream imageStream = getContentResolver().openInputStream(path);
final Bitmap selectedImage = BitmapFactory.decodeStream(imageStream);
imagenBar.setImageBitmap(selectedImage);
}
} catch (FileNotFoundException e) {
e.printStackTrace();
toastShort(AgregarBarUsuarioActivity.this, "Algo fallo");
}
}
else {
toastShort(AgregarBarUsuarioActivity.this, "No elegiste ninguna imagen.");
}
}
else if (requestCode == NUMERO_SOLICITUD_UBICACION) {
if (resultCode == RESULT_OK) {
if (data != null) {
lat = data.getDoubleExtra("latitud", 0);
lng = data.getDoubleExtra("longitud", 0);
String direccion = data.getStringExtra("direccion");
ubicacion.setText(direccion);
}
}
}
}
// Presenter callback: the bar was submitted, show the confirmation screen.
@Override
public void startConfirmacion() {
Intent i = new Intent(AgregarBarUsuarioActivity.this, ConfirmarNuevoBarActivity.class);
startActivity(i);
}
@Override
protected void onDestroy() {
presenter.unbind();
super.onDestroy();
}
}
<file_sep>package com.eriochrome.bartime.vistas;
import android.os.Bundle;
import android.view.View;
import android.widget.ImageButton;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.adapters.EspacioVerticalDecorator;
import com.eriochrome.bartime.adapters.ItemTiendaHolder;
import com.eriochrome.bartime.adapters.ListaItemsTiendaAdapter;
import com.eriochrome.bartime.contracts.TiendaContract;
import com.eriochrome.bartime.modelos.entidades.ItemTienda;
import com.eriochrome.bartime.presenters.TiendaPresenter;
import com.eriochrome.bartime.vistas.dialogs.DialogComprarItemTienda;
import java.util.ArrayList;
import androidx.appcompat.app.AppCompatActivity;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import static com.eriochrome.bartime.utils.Utils.toastShort;
/**
 * A bar's points store: lists purchasable items, shows the user's point
 * balance, and lets them buy an item through a confirmation dialog.
 */
public class TiendaActivity extends AppCompatActivity implements
TiendaContract.View,
ItemTiendaHolder.ItemTiendaClickListener,
DialogComprarItemTienda.ComprarListener{
private TiendaPresenter presenter;
private RecyclerView recyclerView;
private ListaItemsTiendaAdapter adapter;
// Shows the user's current point balance.
private TextView puntosText;
private ProgressBar progressBar;
private ImageButton volver;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_tienda);
presenter = new TiendaPresenter();
presenter.bind(this);
// The bar whose store is shown travels in via intent extras.
presenter.obtenerBar(getIntent());
progressBar = findViewById(R.id.progressBar);
progressBar.setVisibility(View.GONE);
volver = findViewById(R.id.volver);
volver.setOnClickListener(v -> finish());
recyclerView = findViewById(R.id.recycler_view);
recyclerView.setHasFixedSize(true);
setupRecyclerView();
adapter = new ListaItemsTiendaAdapter(this, false);
adapter.setListener(this);
recyclerView.setAdapter(adapter);
puntosText = findViewById(R.id.puntosText);
}
// Reload the store on every resume so purchases elsewhere are reflected.
@Override
protected void onResume() {
super.onResume();
presenter.setupTienda();
}
@Override
public void cargando() {
progressBar.setVisibility(View.VISIBLE);
adapter.clear();
}
// Items and balance loaded; render both.
@Override
public void finCargando(ArrayList<ItemTienda> items, Integer misPuntos) {
if (items.size() == 0) {
toastShort(this, getString(R.string.no_hay_resultados));
}
adapter.setItems(items);
puntosText.setText(String.format("%s puntos.", String.valueOf(misPuntos)));
progressBar.setVisibility(View.GONE);
}
// Row tap: open the purchase-confirmation dialog for the item.
@Override
public void onClickItemTienda(ItemTienda itemTienda) {
DialogComprarItemTienda dialogComprarItemTienda = new DialogComprarItemTienda();
int misPuntos = presenter.getPuntos();
dialogComprarItemTienda.setup(misPuntos, itemTienda);
dialogComprarItemTienda.show(getFragmentManager(), "comprarItem");
}
// Dialog confirmed: execute the purchase and acknowledge it.
@Override
public void onItemComprado(ItemTienda itemTienda) {
presenter.comprarItem(itemTienda);
toastShort(this, getString(R.string.tu_compra_fue_un_exito));
}
// Vertical list with a fixed 30px gap between rows.
private void setupRecyclerView() {
RecyclerView.LayoutManager layoutManager = new LinearLayoutManager(this);
int espacioVertical = 30;
EspacioVerticalDecorator espacioVerticalDecorator = new EspacioVerticalDecorator(espacioVertical);
recyclerView.setLayoutManager(layoutManager);
recyclerView.addItemDecoration(espacioVerticalDecorator);
}
@Override
protected void onPause() {
super.onPause();
presenter.onPause();
}
@Override
protected void onDestroy() {
presenter.unbind();
super.onDestroy();
}
}
<file_sep>package com.eriochrome.bartime.contracts;
import android.net.Uri;
import com.eriochrome.bartime.modelos.entidades.Juego;
import java.util.ArrayList;
/**
 * MVP contract for the "my games" screen: games the user participates in,
 * leaving a game, and inviting friends to a raffle via a share link.
 */
public interface MisJuegosContract {
// Model-side operations.
interface Interaccion {
// Async load of the user's games.
void mostrarJuegos();
// Removes the user from the game.
void dejarDeParticipar(Juego juego);
// Builds an invitation link for the raffle.
void invitarASorteo(Juego juego);
}
// View-side callbacks.
interface View {
void cargando();
void finCargando(ArrayList<Juego> juegos);
// Receives the generated short invitation link.
void setInvitationUrl(Uri shortLink);
}
// Model-to-presenter callbacks.
interface Listener {
void listo(ArrayList<Juego> juegos);
void setInvitationUrl(Uri shortLink);
}
}<file_sep>package com.eriochrome.bartime.modelos;
import androidx.annotation.NonNull;
import com.eriochrome.bartime.contracts.MapaDeBaresContract;
import com.eriochrome.bartime.modelos.entidades.Bar;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.ValueEventListener;
/**
 * Model for the bar map: reads every bar once from the Firebase "bares" node
 * and streams each one to the listener, then signals completion.
 */
public class MapaDeBaresInteraccion implements MapaDeBaresContract.Interaccion {
private MapaDeBaresContract.Listener listener;
// Root reference to the "bares" node in the realtime database.
private DatabaseReference ref;
public MapaDeBaresInteraccion(MapaDeBaresContract.Listener listener) {
this.listener = listener;
ref = FirebaseDatabase.getInstance().getReference().child("bares");
}
// One-shot read: marcarBar() per bar, then listo() once all were delivered.
@Override
public void getPosicionesDeBares() {
ref.addListenerForSingleValueEvent(new ValueEventListener() {
@Override
public void onDataChange(@NonNull DataSnapshot dataSnapshot) {
for (DataSnapshot ds : dataSnapshot.getChildren()) {
Bar bar = ds.getValue(Bar.class);
listener.marcarBar(bar);
}
listener.listo();
}
// NOTE(review): database errors are silently swallowed here — the view
// never learns the load failed. Consider surfacing this to the listener.
@Override
public void onCancelled(@NonNull DatabaseError databaseError) {
}
});
}
}<file_sep>package com.eriochrome.bartime.vistas.dialogs;
import android.app.Dialog;
import android.app.DialogFragment;
import android.content.Intent;
import android.os.Bundle;
import androidx.appcompat.app.AlertDialog;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.vistas.ListadosActivity;
/**
 * End-of-trivia dialog: shows a result message and, on "continuar", returns
 * the user to the main listings screen, finishing the trivia activity.
 */
public class DialogTerminoTrivia extends DialogFragment {
// Result message, injected by the creator before show().
private String texto;
public void setTexto(String texto) {
this.texto = texto;
}
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
builder.setMessage(texto);
builder.setPositiveButton(R.string.continuar, ((dialog, which) -> {
startActivity(new Intent(getActivity(), ListadosActivity.class));
getActivity().finish();
}));
return builder.create();
}
}
<file_sep>package com.eriochrome.bartime.adapters;
import android.content.Context;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.recyclerview.widget.RecyclerView;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.modelos.entidades.Juego;
/**
 * Row holder for one game in a bar's game list: type logo, name and summary.
 * A tap on the row forwards the bound game to the hosting context, which is
 * expected to implement OnJuegoHolderClickListener.
 */
public class JuegoDelBarHolder extends RecyclerView.ViewHolder implements View.OnClickListener {
    private Juego juego;
    private Context context;
    private ImageView imagenDelJuego;
    private TextView nombreJuego;
    private TextView resumenJuego;

    /** Implemented by the host; receives the tapped game. */
    public interface OnJuegoHolderClickListener {
        void onClickJuego(Juego juego);
    }

    private OnJuegoHolderClickListener clickListener;

    public JuegoDelBarHolder(@NonNull View view, Context context) {
        super(view);
        this.context = context;
        imagenDelJuego = view.findViewById(R.id.imagen_juego);
        nombreJuego = view.findViewById(R.id.nombre_juego);
        resumenJuego = view.findViewById(R.id.resumen_juego);
        // A host that does not implement the callback is tolerated: the cast
        // failure is only logged, leaving clickListener null.
        try {
            clickListener = (OnJuegoHolderClickListener) context;
        } catch (ClassCastException e) {
            e.printStackTrace();
        }
        view.setOnClickListener(this);
    }

    @Override
    public void onClick(View v) {
        clickListener.onClickJuego(juego);
    }

    /** Binds a game to this row, refreshing logo, name and summary. */
    public void bindJuego(Juego juego) {
        this.juego = juego;
        setImagen(juego.getTipoDeJuego());
        nombreJuego.setText(juego.getTipoDeJuego());
        resumenJuego.setText(juego.getTextoResumen());
    }

    // Each known game type has a fixed logo; unknown types leave the image as-is.
    private void setImagen(String tipoDeJuego) {
        if (tipoDeJuego.equals("Desafio")) {
            imagenDelJuego.setImageResource(R.drawable.logodesafio);
        } else if (tipoDeJuego.equals("Trivia")) {
            imagenDelJuego.setImageResource(R.drawable.logotrivia);
        } else if (tipoDeJuego.equals("Sorteo")) {
            imagenDelJuego.setImageResource(R.drawable.logomision);
        }
    }
}
<file_sep>package com.eriochrome.bartime.adapters;
import android.content.Context;
import androidx.annotation.NonNull;
import androidx.recyclerview.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.modelos.entidades.ComprobanteDeCompra;
import java.util.ArrayList;
/**
 * RecyclerView adapter for a list of purchase receipts. The esBar flag is
 * forwarded to every row holder so it can render for a bar account or a user.
 */
public class ListaComprasAdapter extends RecyclerView.Adapter<RecyclerView.ViewHolder> {
    private Context context;
    private boolean esBar;
    private ArrayList<ComprobanteDeCompra> compras;

    public ListaComprasAdapter(Context context, boolean esBar) {
        this.context = context;
        this.esBar = esBar;
        compras = new ArrayList<>();
    }

    @NonNull
    @Override
    public RecyclerView.ViewHolder onCreateViewHolder(@NonNull ViewGroup viewGroup, int i) {
        // Inflate one receipt row and wrap it in its holder.
        LayoutInflater inflater = LayoutInflater.from(viewGroup.getContext());
        View fila = inflater.inflate(R.layout.item_comprobante_compra, viewGroup, false);
        return new ComprobanteCompraHolder(fila, context, esBar);
    }

    @Override
    public void onBindViewHolder(@NonNull RecyclerView.ViewHolder viewHolder, int i) {
        ((ComprobanteCompraHolder) viewHolder).bind(compras.get(i));
    }

    @Override
    public int getItemCount() {
        return compras.size();
    }

    /** Replaces the current receipts with nuevasCompras and refreshes. */
    public void setItems(ArrayList<ComprobanteDeCompra> nuevasCompras) {
        compras.clear();
        compras.addAll(nuevasCompras);
        notifyDataSetChanged();
    }

    /** Empties the list and refreshes. */
    public void clear() {
        compras.clear();
        notifyDataSetChanged();
    }
}
<file_sep>package com.eriochrome.bartime.contracts;
import com.eriochrome.bartime.modelos.entidades.Juego;
/**
 * MVP contract for a raffle's detail page: shows a summary and participant
 * count, draws a winner, and can delete the raffle.
 */
public interface PaginaSorteoContract {
// Model-side operations.
interface Interaccion {
// Sets the raffle being shown.
void setSorteo(Juego juego);
// Draws a winner among the participants.
void sortear();
String getResumenJuego();
// Async load of how many users joined.
void cargarCantidadDeParticipantes();
void borrarSorteo();
}
// View-side callbacks.
interface View {
void cargando();
// Draw finished; display the winner.
void finSorteo(String participanteGanador);
void setResumenJuego(String resumenJuego);
void setCantParticipantes(int cantParticipantes);
void finCargando();
}
// Model-to-presenter callbacks.
interface Listener {
void finSorteo(String participanteGanador);
void setCantParticipantes(long childrenCount);
}
}<file_sep>package com.eriochrome.bartime.presenters;
import android.content.Intent;
import com.eriochrome.bartime.contracts.PaginaTriviaContract;
import com.eriochrome.bartime.modelos.PaginaTriviaInteraccion;
import com.eriochrome.bartime.modelos.entidades.Juego;
/**
 * Presenter for the trivia detail page. Bridges the view with
 * {@link PaginaTriviaInteraccion} and receives its async callbacks.
 */
public class PaginaTriviaPresenter implements PaginaTriviaContract.Listener{

    private PaginaTriviaContract.Interaccion interaccion;
    private PaginaTriviaContract.View view;

    public PaginaTriviaPresenter() {
        interaccion = new PaginaTriviaInteraccion(this);
    }

    /** Attaches the view; call before triggering any load. */
    public void bind(PaginaTriviaContract.View view) {
        this.view = view;
    }

    /** Detaches the view; pending async callbacks become no-ops. */
    public void unbind() {
        view = null;
    }

    /** Extracts the serialized game from the launching intent and hands it to the model. */
    public void obtenerJuego(Intent intent) {
        Juego juego = (Juego)intent.getSerializableExtra("juego");
        interaccion.setTrivia(juego);
    }

    /** Shows the loading state and requests the participant counters. */
    public void cargarDatosParticipantes() {
        view.cargando();
        interaccion.cargarDatosParticipantes();
    }

    /**
     * Async callback with the participant/winner counts.
     *
     * Fix: guarded against a null view — the database may answer after
     * unbind(), which previously caused a NullPointerException.
     */
    @Override
    public void onComplete(int participantes, int ganadores) {
        if (view == null) {
            return; // view already unbound; drop the late callback
        }
        view.setGanadores(ganadores);
        view.setParticipantes(participantes);
        view.finCargando();
    }

    /** Pushes the static game data (type and summary) to the view. */
    public void cargarDatosJuego() {
        view.setTipoDeJuego(interaccion.getTipoDeJuego());
        view.setResumen(interaccion.getResumen());
    }
}<file_sep>package com.eriochrome.bartime.adapters;
import android.content.Context;
import androidx.annotation.NonNull;
import androidx.recyclerview.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.modelos.entidades.Juego;
import java.util.ArrayList;
/**
 * Adapter for the games list. Depending on {@code esCreadoPorBar} it inflates
 * either the bar-owner row (item_juego_del_bar, bound by JuegoDelBarHolder)
 * or the customer row (item_juego, bound by JuegoHolder).
 */
public class ListaJuegosAdapter extends RecyclerView.Adapter<RecyclerView.ViewHolder> {

    private ArrayList<Juego> juegos;
    private Context context;
    private boolean esCreadoPorBar;

    public ListaJuegosAdapter(Context context, boolean esCreadoPorBar) {
        this.context = context;
        this.juegos = new ArrayList<>();
        this.esCreadoPorBar = esCreadoPorBar;
    }

    @NonNull
    @Override
    public RecyclerView.ViewHolder onCreateViewHolder(@NonNull ViewGroup viewGroup, int i) {
        LayoutInflater inflater = LayoutInflater.from(viewGroup.getContext());
        if (esCreadoPorBar) {
            View fila = inflater.inflate(R.layout.item_juego_del_bar, viewGroup, false);
            return new JuegoDelBarHolder(fila, context);
        }
        View fila = inflater.inflate(R.layout.item_juego, viewGroup, false);
        return new JuegoHolder(fila, context);
    }

    @Override
    public void onBindViewHolder(@NonNull RecyclerView.ViewHolder viewHolder, int i) {
        Juego juego = juegos.get(i);
        if (esCreadoPorBar) {
            ((JuegoDelBarHolder) viewHolder).bindJuego(juego);
        } else {
            ((JuegoHolder) viewHolder).bindJuego(juego);
        }
    }

    @Override
    public int getItemCount() {
        return juegos.size();
    }

    // Positions double as stable ids / view types for this list.
    @Override
    public long getItemId(int position) {
        return position;
    }

    @Override
    public int getItemViewType(int position) {
        return position;
    }

    /** Replaces the backing list with {@code nuevosJuegos} and refreshes. */
    public void setItems(ArrayList<Juego> nuevosJuegos) {
        juegos.clear();
        juegos.addAll(nuevosJuegos);
        notifyDataSetChanged();
    }

    /** Empties the list and refreshes. */
    public void clear() {
        juegos.clear();
        notifyDataSetChanged();
    }
}
<file_sep>package com.eriochrome.bartime.vistas.dialogs;
import android.app.Dialog;
import android.app.DialogFragment;
import android.os.Bundle;
import androidx.appcompat.app.AlertDialog;
import android.view.LayoutInflater;
import android.widget.TextView;
import com.eriochrome.bartime.R;
import com.super_rabbit.wheel_picker.WheelPicker;
/**
 * Dialog with two wheel pickers for choosing the happy-hour start and end
 * times (0-23). On confirmation the chosen range is written into the
 * TextView supplied via {@link #setTextView(TextView)} as "start - end".
 */
public class DialogHappyHourPicker extends DialogFragment {

    private WheelPicker desdeNP;
    private WheelPicker hastaNP;
    private String horaInicial;
    private String horaFinal;
    private TextView textView;

    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
        LayoutInflater inflater = getActivity().getLayoutInflater();
        builder.setView(inflater.inflate(R.layout.dialog_hh_picker, null));
        builder.setPositiveButton(R.string.ok, (dialog, which) -> {
            String str = horaInicial + " - " + horaFinal;
            textView.setText(str);
            dismiss();
        });
        builder.setNegativeButton(R.string.cancelar, ((dialog, which) -> dismiss()));
        return builder.create();
    }

    @Override
    public void onStart() {
        super.onStart();
        // The pickers only exist once the dialog view hierarchy is attached.
        desdeNP = ((AlertDialog) getDialog()).findViewById(R.id.desde_np);
        hastaNP = ((AlertDialog) getDialog()).findViewById(R.id.hasta_np);
        // Defaults cover the whole day, matching the pickers' initial positions.
        horaInicial = "0";
        horaFinal = "23";
        configurarPicker(desdeNP);
        desdeNP.setOnValueChangeListener((picker, oldVal, newVal) -> horaInicial = newVal);
        configurarPicker(hastaNP);
        hastaNP.setOnValueChangeListener((picker, oldVal, newVal) -> horaFinal = newVal);
    }

    /** Shared wheel configuration: 0-23 range, wrap-around, accent highlight. */
    private void configurarPicker(WheelPicker picker) {
        picker.setMin(0);
        picker.setMax(23);
        picker.setSelectorRoundedWrapPreferred(true);
        picker.setSelectedTextColor(R.color.colorAccent);
    }

    /** Target TextView that will receive the "start - end" text on OK. */
    public void setTextView(TextView textView) {
        this.textView = textView;
    }
}
<file_sep>package com.eriochrome.bartime.contracts;
import com.eriochrome.bartime.modelos.entidades.Bar;
/**
 * MVP contract for the "create raffle" screen used by bar owners.
 */
public interface CrearSorteoContract {

    // Model side: builds the raffle and pushes it to the database.
    interface Interaccion {
        void enviarSorteo(String fechaFin, String puntos);
        void setBar(Bar bar);
    }

    // View side: exposes the form fields and the success notification.
    interface View {
        String getFechaFin();
        String getPuntos();
        void enviado();
    }

    // Async callback fired once the raffle has been stored.
    interface Listener {
        void enviado();
    }
}<file_sep>package com.eriochrome.bartime.vistas.dialogs;
import android.app.Dialog;
import android.app.DialogFragment;
import android.content.Context;
import android.content.DialogInterface;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AlertDialog;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.modelos.entidades.ItemTienda;
import static com.eriochrome.bartime.utils.Utils.toastShort;
public class DialogComprarItemTienda extends DialogFragment {
public interface ComprarListener {
void onItemComprado(ItemTienda itemTienda);
}
private ItemTienda itemTienda;
private int misPuntos;
private boolean puntosSuficientes;
private ComprarListener listener;
private RelativeLayout rlNormal;
private TextView textViewPuntosInsuficientes;
private TextView misPuntosText;
private TextView costoText;
private TextView totalText;
@Override
public void onAttach(Context context) {
super.onAttach(context);
try {
listener = (ComprarListener) context;
} catch (ClassCastException e) {
toastShort(context, "No se implemento la interfaz");
}
}
@NonNull
@Override
public Dialog onCreateDialog(@Nullable Bundle savedInstanceState) {
AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
LayoutInflater inflater = getActivity().getLayoutInflater();
builder.setView(inflater.inflate(R.layout.dialog_comprar_item_tienda, null));
builder.setPositiveButton(R.string.comprar, (dialogInterface, i) -> {
listener.onItemComprado(itemTienda);
dismiss();
});
builder.setNegativeButton(R.string.cancelar, (dialogInterface, i) -> dismiss());
AlertDialog dialog = builder.create();
dialog.setOnShowListener(dialog1 -> {
if (!puntosSuficientes)
((AlertDialog)dialog1).getButton(DialogInterface.BUTTON_POSITIVE).setEnabled(false);
});
return dialog;
}
public void setup(int misPuntos, ItemTienda itemTienda) {
this.itemTienda = itemTienda;
this.misPuntos = misPuntos;
}
@Override
public void onStart() {
super.onStart();
misPuntosText = ((AlertDialog)getDialog()).findViewById(R.id.actual);
costoText = ((AlertDialog)getDialog()).findViewById(R.id.costo);
totalText = ((AlertDialog)getDialog()).findViewById(R.id.total);
textViewPuntosInsuficientes = ((AlertDialog)getDialog()).findViewById(R.id.puntos_insuficientes);
textViewPuntosInsuficientes.setVisibility(View.GONE);
rlNormal = ((AlertDialog)getDialog()).findViewById(R.id.rl_normal);
rlNormal.setVisibility(View.VISIBLE);
puntosSuficientes = (misPuntos - itemTienda.getCosto()) > 0;
if (puntosSuficientes) {
textViewPuntosInsuficientes.setVisibility(View.GONE);
misPuntosText.setText(String.valueOf(misPuntos));
costoText.setText(String.valueOf(-itemTienda.getCosto()));
totalText.setText(String.valueOf(misPuntos-itemTienda.getCosto()));
} else {
rlNormal.setVisibility(View.GONE);
textViewPuntosInsuficientes.setVisibility(View.VISIBLE);
}
}
}
<file_sep>package com.eriochrome.bartime.contracts;
import com.eriochrome.bartime.modelos.entidades.Bar;
import com.eriochrome.bartime.modelos.entidades.Trivia;
/**
 * MVP contract for the "create trivia" screen used by bar owners.
 */
public interface CrearTriviaContract {

    // Model side: holds the owning bar and builds the trivia skeleton.
    interface Interaccion {
        void setBar(Bar bar);
        Bar getBar();
        Trivia getTrivia();
        void comenzarCreacionTrivia(String titulo, int cantPreguntas);
    }

    // View side: exposes the form fields.
    interface View {
        String getTitulo();
        int getCantPreguntas();
    }
}<file_sep>package com.eriochrome.bartime.vistas;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.location.Location;
import android.net.Uri;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.Menu;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ImageButton;
import android.widget.Spinner;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import androidx.core.view.GravityCompat;
import androidx.drawerlayout.widget.DrawerLayout;
import androidx.fragment.app.Fragment;
import androidx.fragment.app.FragmentTransaction;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.adapters.JuegoHolder;
import com.eriochrome.bartime.contracts.ListadosContract;
import com.eriochrome.bartime.modelos.entidades.Juego;
import com.eriochrome.bartime.presenters.ListadosPresenter;
import com.eriochrome.bartime.vistas.dialogs.DialogCrearCuenta;
import com.eriochrome.bartime.vistas.dialogs.DialogResumenJuego;
import com.eriochrome.bartime.vistas.dialogs.DialogSeleccionFiltros;
import com.firebase.ui.auth.AuthMethodPickerLayout;
import com.firebase.ui.auth.AuthUI;
import com.google.android.gms.location.FusedLocationProviderClient;
import com.google.android.gms.location.LocationServices;
import com.google.android.gms.tasks.Task;
import com.google.android.material.navigation.NavigationView;
import com.google.firebase.dynamiclinks.FirebaseDynamicLinks;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static com.eriochrome.bartime.utils.Utils.toastShort;
/**
 * Main landing screen for regular users. Hosts the Bars / Games / Favorites
 * list fragments (switched via the top spinner), the navigation drawer,
 * FirebaseUI login, location permission handling, the notifications badge
 * and the dynamic-link entry point for raffle invitations.
 */
public class ListadosActivity extends AppCompatActivity implements ListadosContract.View,
        DialogSeleccionFiltros.FiltrosListener,
        JuegoHolder.OnJuegoHolderClickListener,
        ActivityCompat.OnRequestPermissionsResultCallback,
        DialogResumenJuego.Listener,
        DialogCrearCuenta.Listener{

    // Drawer and toolbar widgets.
    private DrawerLayout drawerLayout;
    private ImageButton drawerButton;
    private NavigationView navigationView;
    private ImageButton avisos;
    private ImageButton share;
    private Spinner spinner;
    private ArrayAdapter<String> spinnerAdapter;

    // Request code for the FirebaseUI sign-in flow.
    private static final int RC_SIGN_IN = 1;

    private ListadosPresenter presenter;

    // Location state used to filter bars by distance.
    private Location ultimaUbicacion;
    private boolean mLocationPermissionGranted;
    private FusedLocationProviderClient mFusedLocationProviderClient;
    private static final int CODIGO_REQUEST_LOCATION = 123;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_listados);
        // Possibly launch the first-run app intro before anything else.
        checkPrimeraVez();
        drawerLayout = findViewById(R.id.drawer_layout);
        drawerButton = findViewById(R.id.drawer_button);
        avisos = findViewById(R.id.avisos);
        share = findViewById(R.id.share);
        navigationView = findViewById(R.id.nav_drawer);
        spinner = findViewById(R.id.spinner_listado);
        presenter = new ListadosPresenter();
        presenter.bind(this);
        mFusedLocationProviderClient = LocationServices.getFusedLocationProviderClient(this);
        setupListeners();
    }

    /**
     * This activity can be reached from outside through a dynamic link.
     * This is the handler.
     * TODO: once its purpose is final, document it here.
     */
    private void checkDynamicLink() {
        FirebaseDynamicLinks.getInstance()
                .getDynamicLink(getIntent())
                .addOnSuccessListener(this, pendingDynamicLinkData -> {
                    // Get deep link from result (may be null if no link is found)
                    Uri deepLink;
                    if (pendingDynamicLinkData != null) {
                        deepLink = pendingDynamicLinkData.getLink();
                        if (deepLink != null) {
                            startFragment(new ListadoJuegosFragment());
                            spinner.setSelection(spinnerAdapter.getPosition("Juegos"));
                            String referrerUid = deepLink.getQueryParameter("invitedby");
                            String gameID = deepLink.getQueryParameter("gameId");
                            // The referrer is recorded right here: the user might not
                            // actually participate, but opening the link is taken as
                            // intent to participate.
                            presenter.anotarReferrer(referrerUid, gameID);
                            presenter.obtenerSorteoConId(gameID);
                        }
                    }
                })
                .addOnFailureListener(this,
                        e -> Log.w("Fail DynLink", "getDynamicLink:onFailure", e));
    }

    /**
     * Checks whether this is the user's first run, to show the app intro.
     */
    private void checkPrimeraVez() {
        Thread t = new Thread(() -> {
            // Initialize SharedPreferences
            SharedPreferences getPrefs = PreferenceManager
                    .getDefaultSharedPreferences(getBaseContext());
            // Create a new boolean and preference and set it to true
            boolean isFirstStart = getPrefs.getBoolean("firstStartUser", true);
            // If the activity has never started before...
            if (isFirstStart) {
                // Launch app intro
                final Intent i = new Intent(ListadosActivity.this, IntroduccionUsuarioActivity.class);
                runOnUiThread(() -> startActivity(i));
                // Make a new preferences editor
                SharedPreferences.Editor e = getPrefs.edit();
                // Edit preference to make it false because we don't want this to run again
                e.putBoolean("firstStartUser", false);
                // Apply changes
                e.apply();
            }
        });
        t.start();
    }

    // Rebuilds the drawer and spinner; their contents depend on the auth state.
    private void updateUI() {
        setupDrawer();
        setupSpinner();
    }

    @Override
    protected void onStart() {
        super.onStart();
        // Make sure we have (or request) the location permission.
        checkLocation();
    }

    /**
     * Starts listening for new notifications and uploads the user's name
     * to the database if it is not there yet.
     */
    @Override
    protected void onResume() {
        super.onResume();
        updateUI();
        if (presenter.estaConectado()) {
            presenter.subirUsuarioADatabase();
            presenter.checkearAvisos();
        }
        checkDynamicLink();
        getLastLocation();
    }

    /**
     * Requests the fine-location permission if it is not granted yet.
     */
    private void checkLocation() {
        if (!mLocationPermissionGranted) {
            if (ContextCompat.checkSelfPermission(
                    this.getApplicationContext(),
                    android.Manifest.permission.ACCESS_FINE_LOCATION) == PackageManager.PERMISSION_GRANTED) {
                mLocationPermissionGranted = true;
            } else {
                ActivityCompat.requestPermissions(
                        this,
                        new String[]{android.Manifest.permission.ACCESS_FINE_LOCATION},
                        CODIGO_REQUEST_LOCATION);
            }
        }
    }

    /**
     * Runs after the user interacts with the location permission dialog.
     */
    @Override
    public void onRequestPermissionsResult(int requestCode,
                                           @NonNull String[] permissions,
                                           @NonNull int[] grantResults) {
        mLocationPermissionGranted = false;
        if (requestCode == CODIGO_REQUEST_LOCATION) {
            // If request is cancelled, the result arrays are empty.
            if (grantResults.length > 0
                    && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                mLocationPermissionGranted = true;
            }
        }
    }

    /**
     * Tries to obtain the user's last known location.
     */
    private void getLastLocation() {
        try {
            if (mLocationPermissionGranted) {
                Task<Location> locationResult = mFusedLocationProviderClient.getLastLocation();
                // NOTE(review): getResult() is read without checking
                // task.isSuccessful(); a failed task would make it throw —
                // confirm this is acceptable here.
                locationResult.addOnCompleteListener(task -> ultimaUbicacion = task.getResult());
            }
        } catch (SecurityException e) {
            e.printStackTrace();
        }
    }

    /**
     * Executes the action for the selected menu option.
     * @param id identifier of the selected option
     */
    private void ejecutarOpcionMenu(int id) {
        switch (id) {
            case R.id.iniciar_sesion:
                loginUsuario();
                break;
            case R.id.agregar_bar:
                startActivity(new Intent(ListadosActivity.this, AgregarBarUsuarioActivity.class));
                break;
            case R.id.juegos:
                startActivity(new Intent(ListadosActivity.this, MisJuegosActivity.class));
                break;
            case R.id.compras:
                startActivity(new Intent(ListadosActivity.this, MisComprasActivity.class));
                break;
            case R.id.guardados:
                startFragment(new ListadoFavoritosFragment());
                spinner.setSelection(spinnerAdapter.getPosition("Mis Favoritos"));
                break;
            case R.id.contacto:
                startActivity(new Intent(ListadosActivity.this, ContactoActivity.class));
                break;
            case R.id.cerrar_sesion:
                // Sign out, then restart from the user-type selection screen.
                AuthUI.getInstance()
                        .signOut(this)
                        .addOnCompleteListener(task -> {
                            startActivity(new Intent(ListadosActivity.this, DistincionDeUsuarioActivity.class));
                            finish();
                        });
                break;
            case R.id.salir:
                finishAndRemoveTask();
                break;
        }
    }

    /**
     * Creates and launches the user login activity.
     * Supports login with Google or with email.
     */
    private void loginUsuario() {
        AuthMethodPickerLayout customLayout = new AuthMethodPickerLayout
                .Builder(R.layout.custom_login_ui)
                .setEmailButtonId(R.id.normal_login)
                .setGoogleButtonId(R.id.google_login)
                .build();
        List<AuthUI.IdpConfig> providers = Arrays.asList(
                new AuthUI.IdpConfig.EmailBuilder().build(),
                new AuthUI.IdpConfig.GoogleBuilder().build());
        startActivityForResult(
                AuthUI.getInstance()
                        .createSignInIntentBuilder()
                        .setAuthMethodPickerLayout(customLayout)
                        .setAvailableProviders(providers)
                        .setTheme(R.style.AppTheme)
                        .setLogo(R.drawable.bar_tap_2)
                        .build(),
                RC_SIGN_IN);
    }

    /**
     * Runs on return from the login activity.
     * On success the user is connected and created in the database if new.
     */
    @Override
    protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == RC_SIGN_IN) {
            if (resultCode == RESULT_OK) {
                presenter.subirUsuarioADatabase();
                updateUI();
            } else {
                toastShort(ListadosActivity.this, "Ocurrio un error. Intente nuevamente");
            }
        }
    }

    /**
     * Click handlers for:
     * drawerButton, navigationView, avisos, share, spinner
     */
    private void setupListeners() {
        drawerButton.setOnClickListener(v -> drawerLayout.openDrawer(GravityCompat.START));
        navigationView.setNavigationItemSelectedListener(menuItem -> {
            drawerLayout.closeDrawers();
            ejecutarOpcionMenu(menuItem.getItemId());
            return false; // Return false so the item does not stay selected.
        });
        avisos.setOnClickListener(v -> {
            if (presenter.estaConectado()) {
                startActivity(new Intent(ListadosActivity.this, AvisosActivity.class));
            }
        });
        share.setOnClickListener(v -> {
            // Plain-text share intent with the app invitation text.
            Intent sharingIntent = new Intent(Intent.ACTION_SEND);
            sharingIntent.setType("text/plain");
            sharingIntent.putExtra(Intent.EXTRA_SUBJECT, getString(R.string.app_name));
            String invitacion = getString(R.string.share_text);
            sharingIntent.putExtra(Intent.EXTRA_TEXT, invitacion);
            String chooserText = getString(R.string.compartir);
            startActivity(Intent.createChooser(sharingIntent, chooserText));
        });
        spinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
            @Override
            public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
                // Positions follow the order built in setupSpinner():
                // 0 = bars, 1 = games, 2 = favorites (signed-in only).
                switch (position) {
                    case 0:
                        startFragment(new ListadoBaresFragment());
                        break;
                    case 1:
                        startFragment(new ListadoJuegosFragment());
                        break;
                    case 2:
                        startFragment(new ListadoFavoritosFragment());
                        break;
                }
            }
            @Override
            public void onNothingSelected(AdapterView<?> parent) {}
        });
    }

    /**
     * Initializes the top spinner to switch between bars, games and favorites.
     */
    private void setupSpinner() {
        ArrayList<String> listaFragments = new ArrayList<>();
        listaFragments.add(getString(R.string.bares));
        listaFragments.add(getString(R.string.juegos));
        if (presenter.estaConectado()) {
            listaFragments.add(getString(R.string.mis_favoritos));
        }
        spinnerAdapter = new ArrayAdapter<>(this,
                R.layout.spinner_text, listaFragments);
        spinnerAdapter.setDropDownViewResource(R.layout.spinner_text_dropdown);
        spinner.setAdapter(spinnerAdapter);
    }

    /**
     * Wrapper for starting a fragment.
     */
    private void startFragment(Fragment fragment) {
        FragmentTransaction fragmentTransaction = getSupportFragmentManager().beginTransaction();
        fragmentTransaction.replace(R.id.fragment_container, fragment);
        fragmentTransaction.addToBackStack(null);
        fragmentTransaction.commit();
    }

    /**
     * Initializes the drawer (side menu).
     */
    private void setupDrawer() {
        // Header
        View header = navigationView.getHeaderView(0);
        TextView usuarioActivo = header.findViewById(R.id.usuario_activo);
        usuarioActivo.setText(presenter.getNombreUsuario());
        // Options
        Menu menu = navigationView.getMenu();
        setupItems(menu, presenter.estaConectado());
    }

    /**
     * Assigns the drawer items; they vary depending on the sign-in state.
     * @param conectado true if the auth user is not null
     */
    private void setupItems(Menu menu, boolean conectado) {
        if (conectado) {
            menu.findItem(R.id.iniciar_sesion).setVisible(false);
            menu.findItem(R.id.agregar_bar).setVisible(true);
            menu.findItem(R.id.juegos).setVisible(true);
            menu.findItem(R.id.compras).setVisible(true);
            menu.findItem(R.id.guardados).setVisible(true);
            menu.findItem(R.id.contacto).setVisible(true);
            menu.findItem(R.id.cerrar_sesion).setVisible(true);
            menu.findItem(R.id.salir).setVisible(false);
        } else {
            menu.findItem(R.id.iniciar_sesion).setVisible(true);
            menu.findItem(R.id.agregar_bar).setVisible(false);
            menu.findItem(R.id.juegos).setVisible(false);
            menu.findItem(R.id.compras).setVisible(false);
            menu.findItem(R.id.guardados).setVisible(false);
            menu.findItem(R.id.contacto).setVisible(false);
            menu.findItem(R.id.cerrar_sesion).setVisible(false);
            menu.findItem(R.id.salir).setVisible(true);
        }
    }

    // Back exits the app entirely rather than popping the fragment stack.
    @Override
    public void onBackPressed() {
        finishAndRemoveTask();
    }

    /**
     * Applies the filters selected in the bar search.
     * Handled here but delegated to ListadoBaresFragment.
     * @param dialog the filters dialog
     */
    @Override
    public void aplicarFiltros(AlertDialog dialog) {
        Fragment f = getSupportFragmentManager().findFragmentById(R.id.fragment_container);
        if (f instanceof ListadoBaresFragment) {
            ((ListadoBaresFragment) f).aplicarFiltros(dialog, ultimaUbicacion);
        }
    }

    /**
     * Handles the click event on a game in the list.
     * Handled here but delegated to ListadoJuegosFragment.
     * @param juego the clicked game
     */
    @Override
    public void onClickJuego(Juego juego) {
        Fragment f = getSupportFragmentManager().findFragmentById(R.id.fragment_container);
        if (f instanceof ListadoJuegosFragment) {
            ((ListadoJuegosFragment) f).onClickJuego(juego);
        }
    }

    /**
     * Handler for DialogCrearCuenta.
     */
    @Override
    public void login() {
        loginUsuario();
    }

    /**
     * Sets the icon showing that there are notifications.
     */
    @Override
    public void hayAvisos() {
        avisos.setImageResource(R.drawable.ic_notifications_active_violet_24dp);
    }

    /**
     * Sets the icon showing that there are no notifications.
     */
    @Override
    public void noHayAvisos() {
        avisos.setImageResource(R.drawable.ic_notifications_none_violet_24dp);
    }

    /**
     * Opens the raffle that arrived via a participation dynamic link.
     * @param juego the raffle
     */
    @Override
    public void abrirSorteo(Juego juego) {
        DialogResumenJuego dialogResumenJuego = new DialogResumenJuego();
        dialogResumenJuego.setJuego(juego);
        dialogResumenJuego.show(getFragmentManager(), "sorteoPorDynLink");
    }

    /**
     * Joins the selected game if the conditions hold
     * (e.g. not already participating, not a trivia already played...).
     * Handled here but delegated to ListadoJuegosFragment.
     * @param juego the clicked game
     */
    @Override
    public void intentarParticiparDeJuego(Juego juego) {
        Fragment f = getSupportFragmentManager().findFragmentById(R.id.fragment_container);
        if (f instanceof ListadoJuegosFragment) {
            ((ListadoJuegosFragment) f).intentarParticiparDeJuego(juego);
        }
    }

    /**
     * Stops listening for new notifications on destroy.
     */
    @Override
    protected void onDestroy() {
        if (presenter.estaConectado()) {
            presenter.dejarDeCheckearAvisos();
        }
        presenter.unbind();
        super.onDestroy();
    }
}
<file_sep>package com.eriochrome.bartime.modelos;
import com.eriochrome.bartime.contracts.DatosBarHorariosContract;
import com.eriochrome.bartime.modelos.entidades.Bar;
import java.util.HashMap;
/**
 * Model-side implementation for the bar opening-hours screen. Holds the Bar
 * under construction and records its weekly opening and happy-hour schedules
 * (maps keyed by day, hour values).
 */
public class DatosBarHorariosInteraccion implements DatosBarHorariosContract.Interaccion {

    private Bar bar;

    /** Stores the bar whose schedules are being edited. */
    @Override
    public void setBar(Bar nuevoBar) {
        this.bar = nuevoBar;
    }

    /** Returns the bar with whatever schedules were recorded so far. */
    @Override
    public Bar getBar() {
        return bar;
    }

    /** Records the weekly opening/closing hours on the bar. */
    @Override
    public void setHorarios(HashMap<String, Integer> inicio, HashMap<String, Integer> fin) {
        bar.agregarHorarios(inicio, fin);
    }

    /** Records the weekly happy-hour start/end hours on the bar. */
    @Override
    public void setHappyHour(HashMap<String, Integer> inicio, HashMap<String, Integer> fin) {
        bar.agregarHappyhourHorarios(inicio, fin);
    }
}<file_sep>package com.eriochrome.bartime.vistas;
import androidx.appcompat.app.AppCompatActivity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.contracts.PaginaSorteoContract;
import com.eriochrome.bartime.presenters.PaginaSorteoPresenter;
import static com.eriochrome.bartime.utils.Utils.toastShort;
/**
 * Bar-owner screen for a raffle: shows the game summary and participant
 * count, and lets the owner run the draw (only when there is at least one
 * participant).
 */
public class PaginaSorteoActivity extends AppCompatActivity implements PaginaSorteoContract.View {

    private PaginaSorteoPresenter presenter;
    private ProgressBar progressBar;
    private ImageButton volver;
    private RelativeLayout container;
    private TextView resumenDelJuego;
    private TextView cantParticipantes;
    private Button sortear;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_pagina_sorteo);
        presenter = new PaginaSorteoPresenter();
        presenter.bind(this);
        presenter.obtenerJuego(getIntent());
        progressBar = findViewById(R.id.progressBar);
        progressBar.setVisibility(View.GONE);
        volver = findViewById(R.id.volver);
        volver.setOnClickListener(v -> finish());
        container = findViewById(R.id.container_rl);
        resumenDelJuego = findViewById(R.id.resumen_juego);
        cantParticipantes = findViewById(R.id.cant_participantes);
        sortear = findViewById(R.id.sortear);
        sortear.setOnClickListener(v -> {
            // Fix: getText() returns a CharSequence whose equals() is not
            // guaranteed to be content-based against a String; compare the
            // String content instead.
            if (!cantParticipantes.getText().toString().equals("0")) {
                presenter.sortear();
            } else {
                toastShort(PaginaSorteoActivity.this, getString(R.string.no_sortear_sin_participantes));
            }
        });
        presenter.cargarDatos();
    }

    @Override
    protected void onDestroy() {
        presenter.unbind();
        super.onDestroy();
    }

    /** Shows the spinner and hides the content while loading. */
    @Override
    public void cargando() {
        progressBar.setVisibility(View.VISIBLE);
        container.setVisibility(View.GONE);
    }

    /** Draw finished: hands the winner off to FinSorteoActivity and closes. */
    @Override
    public void finSorteo(String participanteGanador) {
        progressBar.setVisibility(View.GONE);
        container.setVisibility(View.VISIBLE);
        Intent i = new Intent(PaginaSorteoActivity.this, FinSorteoActivity.class);
        i.putExtra("ganador", participanteGanador);
        startActivity(i);
        finish();
    }

    /** Displays the raffle summary text. */
    @Override
    public void setResumenJuego(String resumenJuego) {
        resumenDelJuego.setText(resumenJuego);
    }

    /** Displays the current participant count. */
    @Override
    public void setCantParticipantes(int cantParticipantes) {
        this.cantParticipantes.setText(String.valueOf(cantParticipantes));
    }

    /** Hides the spinner and shows the content. */
    @Override
    public void finCargando() {
        progressBar.setVisibility(View.GONE);
        container.setVisibility(View.VISIBLE);
    }
}
<file_sep>package com.eriochrome.bartime.presenters;
import android.content.Intent;
import com.eriochrome.bartime.contracts.CrearDesafioContract;
import com.eriochrome.bartime.modelos.entidades.Bar;
import com.eriochrome.bartime.modelos.CrearDesafioInteraccion;
/**
 * Presenter for the "create challenge" screen. Reads the form from the view
 * and submits the challenge through {@link CrearDesafioInteraccion}.
 */
public class CrearDesafioPresenter implements CrearDesafioContract.Callback{

    private CrearDesafioContract.Interaccion interaccion;
    private CrearDesafioContract.View view;

    public CrearDesafioPresenter() {
        interaccion = new CrearDesafioInteraccion(this);
    }

    /** Attaches the view. */
    public void bind(CrearDesafioContract.View view) {
        this.view = view;
    }

    /** Detaches the view; pending async callbacks become no-ops. */
    public void unbind() {
        view = null;
    }

    /** Extracts the serialized bar from the launching intent. */
    public void obtenerBar(Intent intent) {
        Bar bar = (Bar) intent.getSerializableExtra("bar");
        interaccion.setBar(bar);
    }

    /** Reads the form fields from the view and submits the challenge. */
    public void enviarDesafio() {
        String desafio = view.getDesafioText();
        String dificultad = view.getDificultad();
        boolean unicoGanador = view.esDeUnicoGanador();
        interaccion.enviarDesafio(desafio, dificultad, unicoGanador);
    }

    /**
     * Async confirmation of the database write.
     *
     * Fix: guarded against a null view — the write may complete after
     * unbind(), which previously caused a NullPointerException.
     */
    @Override
    public void enviado() {
        if (view != null) {
            view.enviado();
        }
    }
}<file_sep>package com.eriochrome.bartime.modelos;
import com.eriochrome.bartime.contracts.CrearSorteoContract;
import com.eriochrome.bartime.modelos.entidades.Bar;
import com.eriochrome.bartime.modelos.entidades.Sorteo;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
/**
 * Model-side implementation for creating a raffle ("sorteo"): builds the
 * Sorteo entity and stores it under juegos/Sorteo/&lt;pushKey&gt; in Firebase,
 * notifying the listener on success.
 */
public class CrearSorteoInteraccion implements CrearSorteoContract.Interaccion {

    private DatabaseReference refJuegos;
    private final CrearSorteoContract.Listener listener;
    private Bar bar;

    public CrearSorteoInteraccion(CrearSorteoContract.Listener listener) {
        this.listener = listener;
        refJuegos = FirebaseDatabase.getInstance().getReference().child("juegos");
    }

    /**
     * Builds the raffle and pushes it to the database.
     *
     * @param fechaFin end date as entered in the form
     * @param puntos   points value as text; assumed numeric (the form should
     *                 validate this — a non-numeric value would throw
     *                 NumberFormatException here)
     */
    @Override
    public void enviarSorteo(String fechaFin, String puntos) {
        Sorteo sorteo = new Sorteo();
        sorteo.setFechaFin(fechaFin);
        sorteo.asignarPuntos(Integer.valueOf(puntos));
        sorteo.asignarNombreBar(bar.getNombre());
        sorteo.asignarTipo();
        String desafioID = refJuegos.push().getKey();
        // Fix: only assign the id and write once we actually have a key;
        // previously setID(null) could run before the null check.
        if (desafioID != null) {
            sorteo.setID(desafioID);
            refJuegos.child("Sorteo").child(desafioID).setValue(sorteo)
                    .addOnSuccessListener(aVoid -> listener.enviado());
        }
    }

    /** Stores the bar the raffle belongs to; must be set before enviarSorteo. */
    @Override
    public void setBar(Bar bar) {
        this.bar = bar;
    }
}<file_sep>package com.eriochrome.bartime.vistas.dialogs;
import android.app.Dialog;
import android.app.DialogFragment;
import android.content.Context;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AlertDialog;
import com.eriochrome.bartime.R;
/**
 * Simple yes/no dialog asking the bar owner to confirm declaring a winner.
 * The host activity must implement {@link Listener}.
 */
public class DialogValidarGanador extends DialogFragment {

    /** Host callback fired when the owner confirms. */
    public interface Listener {
        void declararGanador();
    }

    private DialogValidarGanador.Listener listener;

    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
        try {
            listener = (DialogValidarGanador.Listener) context;
        } catch (ClassCastException e) {
            // The host must implement the callback; fail loudly otherwise.
            throw new ClassCastException(context.toString()
                    + " must implement interface");
        }
    }

    @NonNull
    @Override
    public Dialog onCreateDialog(@Nullable Bundle savedInstanceState) {
        String texto = getString(R.string.deseas_declarar_ganador);
        AlertDialog.Builder builder = new AlertDialog.Builder(getActivity())
                .setMessage(texto)
                .setPositiveButton(R.string.ok, (dialog, which) -> {
                    listener.declararGanador();
                    dismiss();
                })
                .setNegativeButton(R.string.cancelar, (dialog, which) -> dismiss());
        return builder.create();
    }
}
<file_sep>package com.eriochrome.bartime.vistas;
import android.annotation.SuppressLint;
import android.content.Intent;
import android.os.Bundle;
import androidx.appcompat.app.AppCompatActivity;
import android.widget.Button;
import android.widget.TextView;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.contracts.DatosBarHorariosContract;
import com.eriochrome.bartime.presenters.DatosBarHorariosPresenter;
import com.eriochrome.bartime.vistas.dialogs.DialogHappyHourPicker;
import com.eriochrome.bartime.vistas.dialogs.DialogHourPicker;
import java.util.ArrayList;
import java.util.HashMap;
import static com.eriochrome.bartime.utils.Utils.toastShort;
public class DatosBarHorariosActivity extends AppCompatActivity implements DatosBarHorariosContract.View {
private DatosBarHorariosPresenter presenter;
private Button continuar;
/**
* h: horario
* hh: happy hour
*/
private TextView hLunes, hhLunes;
private TextView hMartes, hhMartes;
private TextView hMiercoles, hhMiercoles;
private TextView hJueves, hhJueves;
private TextView hViernes, hhViernes;
private TextView hSabado, hhSabado;
private TextView hDomingo, hhDomingo;
private ArrayList<TextView> listaHorarios;
private boolean tieneHappyHour;
@Override
protected void onCreate(Bundle savedInstanceState) {
    // Screen where the bar owner fills in weekly opening hours (and,
    // optionally, happy-hour ranges) before moving on to the optional data.
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_datos_bar_horarios);
    listaHorarios = new ArrayList<>();
    presenter = new DatosBarHorariosPresenter();
    presenter.bind(this);
    // Wires up the per-day TextViews and their hour-picker dialogs.
    setupHorarios();
    continuar = findViewById(R.id.continuar);
    presenter.obtenerBar(getIntent());
    continuar.setOnClickListener(v -> {
        if (completoHorarios()) {
            Intent i = new Intent(DatosBarHorariosActivity.this, DatosBarOpcionalesActivity.class);
            // Push the collected schedules into the presenter before handing off.
            presenter.setHorarios(getHorariosInicial(), getHorariosFinal());
            if (tieneHappyHour)
                presenter.setHappyHour(getHappyhourInicial(), getHappyhourFinal());
            // enviarBar attaches the bar to the intent for the next screen.
            i = presenter.enviarBar(i);
            startActivity(i);
        } else {
            toastShort(this, getString(R.string.debes_completar_horarios_antes_de_continuar));
        }
    });
}
private boolean completoHorarios() {
for (TextView horario : listaHorarios) {
if (horario.getText().equals(" - ") || horario.getText().equals("0 - 0"))
return false;
}
return true;
}
private void setupHorarios() {
hLunes = findViewById(R.id.hLunes);
listaHorarios.add(hLunes);
findViewById(R.id.horario_lunes).setOnClickListener(v -> openHourPicker(hLunes));
hMartes = findViewById(R.id.hMartes);
listaHorarios.add(hMartes);
findViewById(R.id.horario_martes).setOnClickListener(v -> openHourPicker(hMartes));
hMiercoles = findViewById(R.id.hMiercoles);
listaHorarios.add(hMiercoles);
findViewById(R.id.horario_miercoles).setOnClickListener(v -> openHourPicker(hMiercoles));
hJueves = findViewById(R.id.hJueves);
listaHorarios.add(hJueves);
findViewById(R.id.horario_jueves).setOnClickListener(v -> openHourPicker(hJueves));
hViernes = findViewById(R.id.hViernes);
listaHorarios.add(hViernes);
findViewById(R.id.horario_viernes).setOnClickListener(v -> openHourPicker(hViernes));
hSabado = findViewById(R.id.hSabado);
listaHorarios.add(hSabado);
findViewById(R.id.horario_sabado).setOnClickListener(v -> openHourPicker(hSabado));
hDomingo = findViewById(R.id.hDomingo);
listaHorarios.add(hDomingo);
findViewById(R.id.horario_domingo).setOnClickListener(v -> openHourPicker(hDomingo));
hhLunes = findViewById(R.id.hhLunes);
findViewById(R.id.hh_lunes).setOnClickListener(v -> openHHPicker(hhLunes));
hhMartes = findViewById(R.id.hhMartes);
findViewById(R.id.hh_martes).setOnClickListener(v -> openHHPicker(hhMartes));
hhMiercoles = findViewById(R.id.hhMiercoles);
findViewById(R.id.hh_miercoles).setOnClickListener(v -> openHHPicker(hhMiercoles));
hhJueves = findViewById(R.id.hhJueves);
findViewById(R.id.hh_jueves).setOnClickListener(v -> openHHPicker(hhJueves));
hhViernes = findViewById(R.id.hhViernes);
findViewById(R.id.hh_viernes).setOnClickListener(v -> openHHPicker(hhViernes));
hhSabado = findViewById(R.id.hhSabado);
findViewById(R.id.hh_sabado).setOnClickListener(v -> openHHPicker(hhSabado));
hhDomingo = findViewById(R.id.hhDomingo);
findViewById(R.id.hh_domingo).setOnClickListener(v -> openHHPicker(hhDomingo));
}
private void openHourPicker(TextView tv) {
DialogHourPicker hourPicker = new DialogHourPicker();
hourPicker.setTextView(tv);
hourPicker.show(getFragmentManager(), "hourPicker");
}
private void openHHPicker(TextView tv) {
DialogHappyHourPicker hhPicker = new DialogHappyHourPicker();
hhPicker.setTextView(tv);
hhPicker.show(getFragmentManager(), "hhPicker");
tieneHappyHour = true;
}
private HashMap<String, Integer> getHorariosInicial() {
HashMap<String, Integer> devolver = new HashMap<>();
devolver.put("Domingo", getHorarioInicial(hDomingo));
devolver.put("Lunes", getHorarioInicial(hLunes));
devolver.put("Martes", getHorarioInicial(hMartes));
devolver.put("Miercoles", getHorarioInicial(hMiercoles));
devolver.put("Jueves", getHorarioInicial(hJueves));
devolver.put("Viernes", getHorarioInicial(hViernes));
devolver.put("Sabado", getHorarioInicial(hSabado));
return devolver;
}
private HashMap<String, Integer> getHorariosFinal() {
HashMap<String, Integer> devolver = new HashMap<>();
devolver.put("Domingo", getHorarioFinal(hDomingo));
devolver.put("Lunes", getHorarioFinal(hLunes));
devolver.put("Martes", getHorarioFinal(hMartes));
devolver.put("Miercoles", getHorarioFinal(hMiercoles));
devolver.put("Jueves", getHorarioFinal(hJueves));
devolver.put("Viernes", getHorarioFinal(hViernes));
devolver.put("Sabado", getHorarioFinal(hSabado));
return devolver;
}
private HashMap<String, Integer> getHappyhourInicial() {
HashMap<String, Integer> devolver = new HashMap<>();
devolver.put("Domingo", getHorarioInicial(hhDomingo));
devolver.put("Lunes", getHorarioInicial(hhLunes));
devolver.put("Martes", getHorarioInicial(hhMartes));
devolver.put("Miercoles", getHorarioInicial(hhMiercoles));
devolver.put("Jueves", getHorarioInicial(hhJueves));
devolver.put("Viernes", getHorarioInicial(hhViernes));
devolver.put("Sabado", getHorarioInicial(hhSabado));
return devolver;
}
private HashMap<String, Integer> getHappyhourFinal() {
HashMap<String, Integer> devolver = new HashMap<>();
devolver.put("Domingo", getHorarioFinal(hhDomingo));
devolver.put("Lunes", getHorarioFinal(hhLunes));
devolver.put("Martes", getHorarioFinal(hhMartes));
devolver.put("Miercoles", getHorarioFinal(hhMiercoles));
devolver.put("Jueves", getHorarioFinal(hhJueves));
devolver.put("Viernes", getHorarioFinal(hhViernes));
devolver.put("Sabado", getHorarioFinal(hhSabado));
return devolver;
}
private Integer getHorarioInicial(TextView tv) {
try {
String str = tv.getText().toString();
if (str.equals("Cerrado")) return 0;
return Integer.valueOf(str.split(" - ")[0]);
} catch (RuntimeException e) {
if (!tieneHappyHour) toastShort(this, "Ocurrio un error inesperado.");
finish();
return 0;
}
}
private Integer getHorarioFinal(TextView tv) {
try {
String str = tv.getText().toString();
if (str.equals("Cerrado")) return 0;
return Integer.valueOf(str.split(" - ")[1]);
} catch (RuntimeException e) {
if (!tieneHappyHour) toastShort(this, "Ocurrio un error inesperado.");
finish();
return 0;
}
}
@Override
protected void onDestroy() {
presenter.unbind();
super.onDestroy();
}
@Override
public void setHorarios(HashMap<String, Integer> horariosInicial, HashMap<String, Integer> horariosFinal) {
hLunes.setText(formatHorario(horariosInicial.get("Lunes"), horariosFinal.get("Lunes")));
hMartes.setText(formatHorario(horariosInicial.get("Martes"), horariosFinal.get("Martes")));
hMiercoles.setText(formatHorario(horariosInicial.get("Miercoles"), horariosFinal.get("Miercoles")));
hJueves.setText(formatHorario(horariosInicial.get("Jueves"), horariosFinal.get("Jueves")));
hViernes.setText(formatHorario(horariosInicial.get("Viernes"), horariosFinal.get("Viernes")));
hSabado.setText(formatHorario(horariosInicial.get("Sabado"), horariosFinal.get("Sabado")));
hDomingo.setText(formatHorario(horariosInicial.get("Domingo"), horariosFinal.get("Domingo")));
}
@Override
public void setHappyHour(HashMap<String, Integer> happyhourInicial, HashMap<String, Integer> happyhourFinal) {
hhLunes.setText(formatHorario(happyhourInicial.get("Lunes"), happyhourFinal.get("Lunes")));
hhMartes.setText(formatHorario(happyhourInicial.get("Martes"), happyhourFinal.get("Martes")));
hhMiercoles.setText(formatHorario(happyhourInicial.get("Miercoles"), happyhourFinal.get("Miercoles")));
hhJueves.setText(formatHorario(happyhourInicial.get("Jueves"), happyhourFinal.get("Jueves")));
hhViernes.setText(formatHorario(happyhourInicial.get("Viernes"), happyhourFinal.get("Viernes")));
hhSabado.setText(formatHorario(happyhourInicial.get("Sabado"), happyhourFinal.get("Sabado")));
hhDomingo.setText(formatHorario(happyhourInicial.get("Domingo"), happyhourFinal.get("Domingo")));
tieneHappyHour = true;
}
@SuppressLint("DefaultLocale")
private String formatHorario(Integer ini, Integer fin) {
if (ini == 0 && fin == 0) return getString(R.string.cerrado);
return String.format("%d - %d", ini, fin);
}
}
<file_sep>package com.eriochrome.bartime.adapters;
import android.graphics.Rect;
import androidx.annotation.NonNull;
import androidx.recyclerview.widget.RecyclerView;
import android.view.View;
/**
 * RecyclerView decoration that inserts a fixed vertical gap below every item
 * except the last one, so the list does not end with trailing blank space.
 */
public class EspacioVerticalDecorator extends RecyclerView.ItemDecoration {

    // Gap, in pixels, to place between consecutive rows.
    private final int espacioVertical;

    /**
     * @param espacioVertical vertical gap in pixels between rows.
     */
    public EspacioVerticalDecorator(int espacioVertical) {
        this.espacioVertical = espacioVertical;
    }

    @Override
    public void getItemOffsets(@NonNull Rect outRect, @NonNull View view, @NonNull RecyclerView parent, @NonNull RecyclerView.State state) {
        // Fix: RecyclerView.getAdapter() may return null when no adapter is
        // attached; the original dereferenced it unconditionally and could NPE.
        RecyclerView.Adapter adapter = parent.getAdapter();
        if (adapter == null) {
            return;
        }
        // Every item except the last gets the bottom gap.
        if (parent.getChildAdapterPosition(view) != adapter.getItemCount() - 1) {
            outRect.bottom = espacioVertical;
        }
    }
}
<file_sep>package com.eriochrome.bartime.modelos.entidades;
/**
 * A redeemable reward in a bar's points store: a textual description plus its
 * cost in loyalty points. An identifier can be attached later via
 * {@link #setID(String)}.
 */
public class ItemTienda {

    private String id;
    private String descripcion;
    private int costo;

    /** No-argument constructor. */
    public ItemTienda() {
    }

    /**
     * @param descripcion human-readable description of the reward.
     * @param costo       price of the reward, in points.
     */
    public ItemTienda(String descripcion, int costo) {
        this.descripcion = descripcion;
        this.costo = costo;
    }

    /** @return the reward's description text. */
    public String getDescripcion() {
        return descripcion;
    }

    /** @return the reward's cost in points. */
    public int getCosto() {
        return costo;
    }

    /** @return the stored identifier, or {@code null} if none was set. */
    public String getID() {
        return id;
    }

    /** Stores the identifier for this item. */
    public void setID(String id) {
        this.id = id;
    }
}
<file_sep>package com.eriochrome.bartime.vistas.dialogs;
import android.app.Dialog;
import android.app.DialogFragment;
import android.content.Context;
import android.os.Bundle;
import androidx.appcompat.app.AlertDialog;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.EditText;
import android.widget.Spinner;
import com.eriochrome.bartime.R;
import com.eriochrome.bartime.modelos.entidades.ItemTienda;
import java.util.ArrayList;
import static com.eriochrome.bartime.utils.Utils.toastShort;
/**
 * Dialog for creating a new store item. The slightly convoluted handling of
 * price and description exists because the user may override the default
 * price suggested by the selected list item.
 */
public class DialogCrearItemTienda extends DialogFragment {
    /** Implemented by the host to receive the newly created item. */
    public interface CrearItemListener {
        void crearItem(ItemTienda itemTienda);
    }
    // NOTE(review): if the host does not implement the interface, onAttach
    // only shows a toast and leaves this null, so the positive button below
    // would NPE. Consider throwing instead (as DialogValidarGanador does).
    CrearItemListener listener;
    // Preset reward options shown in the spinner (see cargarPosibilidades).
    private ArrayList<ItemTienda> itemsTienda;
    private ArrayAdapter<String> adapter;
    private EditText precio;
    private Spinner spinnerItems;
    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
        try {
            listener = (CrearItemListener) context;
        } catch (ClassCastException e) {
            toastShort(context, "No se implemento la interfaz");
        }
    }
    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
        LayoutInflater inflater = getActivity().getLayoutInflater();
        builder.setView(inflater.inflate(R.layout.dialog_crear_item_tienda, null));
        builder.setTitle(getString(R.string.crear_nuevo_item));
        builder.setPositiveButton(R.string.crear, (dialogInterface, i) -> {
            listener.crearItem(getItem());
            dismiss();
        });
        builder.setNegativeButton(R.string.cancelar, (dialogInterface, i) -> dismiss());
        return builder.create();
    }
    /**
     * Builds the item from the current UI state: the spinner's description
     * plus whatever price is in the EditText (possibly user-edited).
     * NOTE(review): Integer.valueOf throws NumberFormatException when the
     * price field is empty or non-numeric -- confirm the field cannot be
     * submitted empty, or validate before parsing.
     */
    private ItemTienda getItem() {
        int precioSeleccionado = Integer.valueOf(precio.getText().toString());
        String descripcionSeleccionada = (String) spinnerItems.getSelectedItem();
        return new ItemTienda(descripcionSeleccionada, precioSeleccionado);
    }
    @Override
    public void onStart() {
        super.onStart();
        // View lookups happen here (not onCreateDialog) because the dialog's
        // content view only exists once the dialog is shown.
        cargarPosibilidades();
        precio = ((AlertDialog)getDialog()).findViewById(R.id.precio);
        spinnerItems = ((AlertDialog)getDialog()).findViewById(R.id.spinner_items);
        adapter = new ArrayAdapter<>(getActivity(), android.R.layout.simple_list_item_1, getItemsParaSpinner());
        spinnerItems.setAdapter(adapter);
        // Selecting a preset pre-fills its default price into the EditText.
        spinnerItems.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
            @Override
            public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
                String precioText = Integer.toString(itemsTienda.get(position).getCosto());
                precio.setText(precioText);
            }
            @Override
            public void onNothingSelected(AdapterView<?> parent) {
                precio.setText("");
            }
        });
    }
    /** Descriptions of the preset items, in spinner order. */
    private ArrayList<String> getItemsParaSpinner() {
        ArrayList<String> devolver = new ArrayList<>();
        for (ItemTienda item : itemsTienda) {
            devolver.add(item.getDescripcion());
        }
        return devolver;
    }
    /** Hard-coded preset rewards (to be replaced by real data in the future). */
    private void cargarPosibilidades() {
        //En un futuro cambiar esto
        itemsTienda = new ArrayList<>();
        itemsTienda.add(new ItemTienda("10% de descuento en la siguiente compra.", 150));
        itemsTienda.add(new ItemTienda("25% de descuento en la siguiente compra.", 250));
        itemsTienda.add(new ItemTienda("50% de descuento en la siguiente compra.", 400));
        itemsTienda.add(new ItemTienda("2x1 en la siguiente compra.", 600));
    }
}
<file_sep>package com.eriochrome.bartime.modelos.entidades;
/**
 * A raffle ("sorteo") game: users enter for a prize of {@code puntos} points
 * and the raffle closes on {@code fechaFin}. Extends the generic
 * {@code Juego} base type.
 */
public class Sorteo extends Juego {

    private String fechaFin;

    /**
     * Required by firebase.
     */
    public Sorteo() {
    }

    public String getFechaFin() {
        return fechaFin;
    }

    public void setFechaFin(String fechaFin) {
        this.fechaFin = fechaFin;
    }

    /** Delegates to the parent implementation. */
    public void setID(String id) {
        super.setID(id);
    }

    /** Tags this game with its concrete type name. */
    public void asignarTipo() {
        super.asignarTipo("Sorteo");
    }

    /** Message announcing that a user has joined this raffle. */
    @Override
    public String getTextoParticipacion(String nombreParticipante) {
        return nombreParticipante
                + " esta ahora participando en el sorteo por "
                + puntos
                + " puntos.";
    }

    /** Message shown to the raffle winner. */
    @Override
    public String getTextoGanadorDeJuego() {
        return "¡Has ganado el sorteo de "
                + getNombreBar()
                + " por "
                + puntos
                + "!";
    }

    /** One-line summary of the raffle, for listings. */
    public String getDescripcionSorteo() {
        return "Sorteo por "
                + puntos
                + " puntos. Finaliza el "
                + fechaFin;
    }

    // The overrides below delegate unchanged to Juego. They are kept verbatim
    // (they may exist to widen access or expose properties to reflection --
    // confirm against Juego before removing).
    @Override
    public int getPuntos() {
        return super.getPuntos();
    }

    @Override
    public String getTipoDeJuego() {
        return super.getTipoDeJuego();
    }

    @Override
    public String getNombreBar() {
        return super.getNombreBar();
    }
}
<file_sep>package com.eriochrome.bartime.presenters;
import android.content.Intent;
import com.eriochrome.bartime.contracts.TiendaContract;
import com.eriochrome.bartime.modelos.entidades.Bar;
import com.eriochrome.bartime.modelos.entidades.ItemTienda;
import com.eriochrome.bartime.modelos.TiendaInteraccion;
import java.util.ArrayList;
public class TiendaPresenter implements TiendaContract.Listener {
private TiendaContract.Interaccion interaccion;
private TiendaContract.View view;
public TiendaPresenter() {
interaccion = new TiendaInteraccion(this);
}
public void bind(TiendaContract.View view) {
this.view = view;
}
public void unbind() {
view = null;
}
public void obtenerBar(Intent intent) {
Bar bar = (Bar) intent.getSerializableExtra("bar");
interaccion.setBar(bar);
}
public void setupTienda() {
view.cargando();
interaccion.setupTienda();
}
@Override
public void listo(ArrayList<ItemTienda> items, Integer misPuntos) {
view.finCargando(items, misPuntos);
interaccion.guardarPuntos(misPuntos);
}
public int getPuntos() {
return interaccion.getPuntos();
}
public void comprarItem(ItemTienda itemTienda) {
interaccion.comprarItem(itemTienda);
}
public void onPause() {
interaccion.dejarDeEscucharCambios();
}
} | 34b1dd6ab6a1c306f66805c878f92a6ef27a7ee0 | [
"Markdown",
"Java"
] | 82 | Java | sberoch/BarTap | e1bec69b70a27a91c7b2c6c967b1f05fba3d589f | 24e7713deb0d2d4bf8227a47c115b9fd40755d41 |
refs/heads/master | <file_sep>import Vue from "vue";
import Vuex from "vuex";
import { loginUser } from "../api";
import {
saveAuthToCookie,
saveUserToCookie,
getUserFromCookie,
deleteCookie,
} from "../utils/cookies.js";
// import { truncate } from "fs";
Vue.use(Vuex);
// Global auth store: holds the logged-in user and their API token, mirrored
// to cookies so the session survives page reloads.
export default new Vuex.Store({
  state: {
    user: {},
    token: "",
  },
  getters: {
    // Fix: coerce to a real boolean. The original `!!state.token || getUserFromCookie()`
    // returned the raw cookie string when only the cookie was present,
    // leaking storage details to consumers. Truthiness is unchanged, so all
    // existing conditional usage behaves identically.
    isLoggedIn(state) {
      return !!(state.token || getUserFromCookie());
    },
    userToken(state) {
      return state.token;
    },
    userData(state) {
      return state.user;
    },
  },
  mutations: {
    SET_USER(state, user) {
      state.user = user;
    },
    SET_TOKEN(state, token) {
      state.token = token;
    },
    // Clears in-memory credentials and both auth cookies.
    LOGOUT(state) {
      state.user = null;
      state.token = null;
      deleteCookie("vtn_auth");
      deleteCookie("vtn_user");
    },
  },
  actions: {
    // Logs in via the API; on statusCode 200 the user/token are committed to
    // the store and persisted to cookies. The raw response is returned so
    // callers can inspect the status themselves.
    async LOGIN({ commit }, data) {
      const response = await loginUser(data);
      if (response.data.statusCode == 200) {
        commit("SET_USER", response.data.user);
        commit("SET_TOKEN", response.data.token);
        saveUserToCookie(JSON.stringify(response.data.user));
        saveAuthToCookie(response.data.token);
      }
      return response;
    },
  },
});
<file_sep>import Vue from 'vue';
import Router from 'vue-router';
import LoginModal from '../components/LoginModal.vue';
import List from '../components/mainview/List.vue';
import OfficeInspection from '../components/mainview/OfficeInspection.vue';
import OnSiteInspection from '../components/mainview/OnSiteInspection.vue';
Vue.use(Router)
// Route table: each screen of the app maps to exactly one component.
const routes = [
  { path: "/", name: "Login", component: LoginModal },
  { path: "/list", name: "List", component: List },
  { path: "/office", name: "OfficeInspection", component: OfficeInspection },
  { path: "/onsite", name: "OnsiteInspection", component: OnSiteInspection },
];

// HTML5 history mode: URLs without the # fragment.
export default new Router({
  mode: "history",
  routes,
});
import store from "../store/index.js";
import { getAuthFromCookie } from "../utils/cookies.js";
// Base endpoint for every API call in this module.
const APP_URL =
  "https://saqw696o5l.execute-api.ap-northeast-2.amazonaws.com/vtn-usedphone/";

/**
 * Build a plain (unauthenticated) axios instance rooted at `url`.
 * Any extra axios settings in `options` are merged over the base URL.
 */
function create(url, options) {
  return axios.create({ baseURL: url, ...options });
}
/**
 * Build an axios instance that attaches auth on every request.
 * Request interceptor: sets the Authorization header from the Vuex store,
 * falling back to the auth cookie, and forces a multipart/form-data
 * Content-Type.
 * NOTE(review): forcing multipart on *every* request through this instance
 * looks intended for file uploads (see uploadphonedata), but verify no JSON
 * endpoint is ever routed through `vtn`.
 * Response interceptor: successes pass through; failures reject with the
 * server response object.
 */
function createWithAuth(url, options) {
  const instance = axios.create(
    Object.assign(
      {
        baseURL: url,
      },
      options
    )
  );
  instance.interceptors.request.use(
    config => {
      config.headers.Authorization =
        store.getters["userToken"] || getAuthFromCookie();
      config.headers["Content-Type"] = 'multipart/form-data';
      return config;
    },
    error => {
      return Promise.reject(error.response);
    }
  );
  instance.interceptors.response.use(
    config => {
      return config;
    },
    error => {
      return Promise.reject(error.response);
    }
  );
  return instance;
}
// `auth`: unauthenticated instance (login); `vtn`: authenticated instance.
const auth = create(APP_URL);
const vtn = createWithAuth(APP_URL);
// users
/**
 * POST the login payload to the `login` endpoint; returns the axios promise.
 * NOTE(review): this try/catch is dead code for HTTP failures -- `auth.post`
 * returns a promise, so rejections bypass the synchronous catch and
 * propagate to the caller. Confirm whether callers rely on that propagation
 * before converting to async/await.
 */
function loginUser(data) {
  try {
    return auth.post("login", JSON.stringify(data));
  } catch (error) {
    console.log(error);
    return error;
  }
}
/**
 * POST to the `manageusedphonedata` endpoint; returns the axios promise.
 * NOTE(review): uses the unauthenticated `auth` instance, unlike
 * uploadphonedata -- confirm this endpoint really needs no Authorization
 * header. The try/catch is dead for promise rejections (see loginUser).
 */
function manageUsedPhoneData(data) {
  try {
    return auth.post("manageusedphonedata", JSON.stringify(data));
  } catch (error) {
    console.log(error);
    return error;
  }
}
/**
 * POST to the `uploadphonedata` endpoint via the authenticated `vtn`
 * instance (Authorization header + multipart/form-data Content-Type set by
 * its request interceptor); returns the axios promise.
 * NOTE(review): the try/catch is dead for promise rejections (see loginUser).
 */
function uploadphonedata(data) {
  try {
    return vtn.post("uploadphonedata", data);
  } catch (error) {
    console.log(error);
    return error;
  }
}
// Public API surface of this module.
export {
  loginUser,
  manageUsedPhoneData,
  uploadphonedata
};
| 0db1399189a0bed4d8d3cbd1859f2575ffc63d00 | [
"JavaScript"
] | 3 | JavaScript | labongbong/vtn-usedphone-list-app2 | fb1ff4e47df5cc054b31a9c8b2c5865a8e5507b6 | 84e6daf58bfe45fc134891b8f0775df23c3392b5 |
refs/heads/master | <file_sep><?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Number;
use Carbon\Carbon;
class DepanController extends Controller
{
// public function index()
// {
// return view('index');
// }
protected function allData()
{
$now = Carbon::now('+5:30')->format('Y-m-d');
$tomm = Carbon::now('+5:30')->add(1, 'day')->format('Y-m-d');
$yest = Carbon::yesterday('+5:30')->format('Y-m-d');
$data = $this->getByDate();
$sayaka = $this->hirota();
$sayaka1 = $this->hirota1();
$sayaka2 = $this->hirota2();
$sayaka3 = $this->hirota3();
$sayaka4 = $this->hirota4();
$sayaka5 = $this->hirota5();
$sayaka6 = $this->hirota6();
return view('index', compact('data', 'now', 'yest', 'sayaka', 'sayaka1', 'sayaka2', 'sayaka3', 'sayaka4', 'sayaka5', 'sayaka6', 'tomm'));
}
public function getByDate()
{
$data = Number::all();
$today = Carbon::now('+5:30')->format('H:i:s a');
$morning = Carbon::createFromTime(9, 30, 00, '+5:30')->format('H:i:s a');
$morning1 = Carbon::createFromTime(23, 59, 59, '+5:30')->format('H:i:s a');
$morning2 = Carbon::createFromTime(00, 00, 00, '+5:30')->format('H:i:s a');
$morning3 = Carbon::createFromTime(9, 29, 59, '+5:30')->format('H:i:s a');
// $times = '';
if($today >= $morning && $today <= $morning1)
{
$data = Number::whereDate('tanggal', Carbon::now('+5:30')->format('Y-m-d'))->get();
}
elseif ($today >= $morning2 && $morning3)
{
$data = Number::whereDate('tanggal', Carbon::yesterday('+5:30')->format('Y-m-d'))->get();
}
return $data;
}
public function hirota()
{
return Number::all();
}
public function hirota1()
{
return Number::whereDate('tanggal', Carbon::yesterday('+5:30')->add(-1, 'day')->format('Y-m-d'))->get();
}
public function hirota2()
{
return Number::whereDate('tanggal', Carbon::yesterday('+5:30')->add(-2, 'day')->format('Y-m-d'))->get();
}
public function hirota3()
{
return Number::whereDate('tanggal', Carbon::yesterday('+5:30')->add(-3, 'day')->format('Y-m-d'))->get();
}
public function hirota4()
{
return Number::whereDate('tanggal', Carbon::yesterday('+5:30')->add(-4, 'day')->format('Y-m-d'))->get();
}
public function hirota5()
{
return Number::whereDate('tanggal', Carbon::yesterday('+5:30')->add(-5, 'day')->format('Y-m-d'))->get();
}
public function hirota6()
{
return Number::whereDate('tanggal', Carbon::yesterday('+5:30')->add(-6, 'day')->format('Y-m-d'))->get();
}
}
<file_sep>function addLeadingZero(n) {
return n < 10 ? '0' + n : n;
}
function windTheClock(timeZoneOffset)
{
var d = new Date();
d.setHours(d.getUTCHours() + timeZoneOffset); // set time zone offset
var h = d.getHours();
var m = d.getMinutes();
var s = d.getSeconds();
var ampm = h >= 12 ? 'PM' : 'AM';
h = h % 12;
h = h ? h : 12; // replace '0' w/ '12'
h = addLeadingZero(h);
m = addLeadingZero(m);
s = addLeadingZero(s);
document.all["jam"].innerHTML = h + ':' + m + ':' + s
+ ' ' + ampm;
setTimeout(function(){ windTheClock(timeZoneOffset) }, 1000);
}
window.onload = function() {
windTheClock(-6);
} | 0b8d7fba90017605e46902a81157c1e6de6d695e | [
"JavaScript",
"PHP"
] | 2 | PHP | putralangkat97/newdelhit093L | 1e64de51a690a284fbd56c8b53d7c8c97f4c2c8e | 16c085abea9c12b7077bd357b2cdf8c246a0368f |
refs/heads/master | <repo_name>priscalefawane/python<file_sep>/chap2/tiy64_2-2.py
message = "The world of Python!"
print(message)
message = "Hello Python class!"
print(message)<file_sep>/chap8/tiy215_8-12.py
def make_sandwich(*items):
"""Making sandwiches with items."""
print("\nMaking a delicious sandwich with the following items:")
for item in items:
print(f"-{item}")
print("You sandwich is ready for collection!")
make_sandwich('egg', 'tomato')
make_sandwich('tuna')
make_sandwich('cheese', 'tomato')
<file_sep>/chap4/tiy116_4-11.py
# Copy,add food to the list and print using for loop
my_pizzas = ['chicken & pine', 'pepperoni', 'supreme']
friend_pizzas = my_pizzas[:]
my_pizzas.append('chicken & mushroom')
friend_pizzas.append('something meaty')
# My favorite pizzas
print("\nMy favorite pizzas are:")
for pizza in my_pizzas:
print(my_pizzas)
# My friend's favorite pizzas.
print("\nMy friend's favorite pizzas are:")
for pizza in friend_pizzas:
print(friend_pizzas)
<file_sep>/chap6/tiy164_6-6.py
# Looping through the Keys in a Dictionary
favorite_languages = {
'john': 'python',
'sarah': 'c',
'lisa': 'ruby',
'phil': 'python',
'james': 'sql'
}
learners = [
'james',
'sarah',
'paul',
]
for name in favorite_languages.keys():
print(name.title())
if name in learners:
print(f"\t{name.title()}, thank you for responding.")
if 'paul' not in favorite_languages.keys():
print(f"Paul, you are welcome to take our poll.")<file_sep>/chap2/tiy76_2-10.py
# print name in uppercase and title case
name = "<NAME>"
print(name.title())
# Prisca 6/8/2020 reminds me of my sister' child
famous_person = "<NAME>"
message = f"I wish {famous_person} was still alive."
print(message)
<file_sep>/chap3/tiy82_3-2.py
names = ['mpho', 'neo', 'lesego', 'tshiamo']
message = f"I love you {names[0].title()}."
print(message)
message = f"I love you {names[1].title()}."
print(message)
message = f"I love you {names[2].title()}."
print(message)
message = f"I love you {names[3].title()}."
print(message)
<file_sep>/chap2/tiy71_2-5.py
author = "<NAME>"
famous_quote = f'{author} once said, "freedom has came to our land."'
print(famous_quote)
author = "<NAME>"
famous_quote = f'"freedom has came to our land."'
print(f"{author} once said, {famous_quote}")<file_sep>/chap4/tiy122_4-15.py
# Three kinds of carnivorous animals.
animals = [
'leopard',
'lion',
'cheetah']
for animal in animals:
print(animal.title())
print(f"A {animal} is one of "
f"carnivorous animals.\n")
print("Any of these animals "
"is a meat eating animal.\n")
# using for loop to print numbers from 1 to 20.
for value in range(1, 21):
print(value)
# My favorite pizzas
pizzas = [
'chicken & pine',
'pepperoni',
'supreme']
for pizza in pizzas:
print(f"\nI like {pizza} pizza!")
print(f"\nI like chicken & pine with lots of pines on it."
f"\nDouble cheese is always the way on pepperoni."
f"\nWow!!! The sauces on supreme, so delicious!"
f"\n\nI really like pizza! ")
<file_sep>/chap8/printing_models.py
import printing_function as pf
unprinted_designs = ['phone case', 'robot pendant', 'dodecahedron']
completed_models = []
pf.print_models(unprinted_designs, completed_models)
pf.show_completed_models(completed_models)
pf.print_models(unprinted_designs[:], completed_models)<file_sep>/chap5/tiy140_5-7.py
# Testing multiple conditions.
favorite_fruits = ['kiwis', "watermelons", 'grapefruits']
if 'watermelons' in favorite_fruits:
print("I really like watermelons!")
if 'apples' in favorite_fruits:
print("I really like apples!")
if 'oranges' in favorite_fruits:
print("I really like oranges!")
if 'kiwis' in favorite_fruits:
print("I really like kiwis!")
if 'grapes' in favorite_fruits:
print("I really like grapes!")<file_sep>/chap8/tiy206_8-6.py
# Returning a value.
def city_country(city, country):
"""Return a string formatted like 'Santiago, Chile'."""
return (f"{city.title()}, {country.title()}")
city = city_country('pretoria', 'south africa')
print(city)
city = city_country('gaborne', 'botswana')
print(city)
city = city_country('maseru', 'lesotho')
print(city)<file_sep>/chap3/tiy89_3-5.py
# People to invite for dinner
guests = ['grace', 'john', 'lisa']
print(guests)
message = f"Hi {guests[0].title()}, you are kindly invited for dinner at my place this Friday 19:30."
print(message)
message = f"Hello {guests[1].title()}! you are kindly invited for dinner, \n\tDay: Friday \n\tVaneu: 479 Umfuyaneni Sec \n\tTime: 19:30"
print(message)
message = f"Hi {guests[2].title()} my friend!! I'm inviting you for dinner on Friday 19:30 at my place."
print(message)
# Guest who can't make it
print(f"Unfortunately {guests[1].title()} can't make it.")
# replacing the name of the guest who can't make it
guests[1] = 'joe'
print(guests)
# invitation messages for people on the new list
message = f"Hi {guests[0].title()}, you are kindly invited for dinner at my place this Friday 19:30."
print(message)
message = f"Hi {guests[1].title()}, you are kindly invited for dinner at my place this Friday 19:30."
print(message)
message = f"Hi {guests[2].title()}, you are kindly invited for dinner at my place this Friday 19:30."
print(message)
<file_sep>/chap4/tiy110_4-7.py
# Multiples of 3 from 3 to 30 using for loop
threes = list(range(3, 31, 3))
print(threes)
for number in threes:
print(number)
<file_sep>/chap6/tiy171_6-12.py
# Information about a particular alien
alien_0 = {
'color': 'green',
'points': 5,
}
print(alien_0['color'])
print(alien_0['points'])
# adding items in dictionary
alien_0['year'] = 16
alien_0['name'] = "bialar"
# Checking if 'year' is present in dictionary
if 'year' in alien_0:
print(f"\nWow! Bialar is now older.\n")
# changing the color
alien_0['color'] = 'yellow'
for key, value in alien_0.items():
print(f"{key.title()}: \n\t{value}\n")
# deleting point from the dictionary
# Printing the dictionary
del alien_0['points']
print(alien_0)
# The number of items in dictionary
print("\nThe number of items is:")
print(len(alien_0))
<file_sep>/chap4/tiy110_4-4.py
for value in range(1, 1000001):
numbers = list(range(value))
print(numbers)<file_sep>/chap5/tiy131_5-1.py
# Conditional tests
fruit = 'apple'
print("Is fruit == 'apple'? "
"I predict True.")
print(fruit == 'apple')
print("Is fruit == 'orange'? "
"I predict False.")
print(fruit == 'orange')
motorcycle = 'kawasaki'
print("\nIs motorcycle == 'kawasaki'? "
"I predict True.")
print(motorcycle == 'kawasaki')
print("Is motorcycle == 'honda'? "
"I predict false.")
print(motorcycle == 'honda')
food = 'rice'
print("\nIs food == 'rice'?"
" I predict True.")
print(food == 'rice')
print("Is food == 'meat'? "
"I predict False.")
print(food == 'meat')
pet = 'cat'
print("\nIs pet == 'cat'?"
" I predict True.")
print(pet == 'cat')
print("Is pet == 'dog'? "
"I predict False.")
print(pet == 'dog')
number = 1
print("\nIs number == 1? "
"I predict True.")
print(number == 1)
print("Is number == 10? "
"I predict False.")
print(number == 10)<file_sep>/chap7/tiy185_7-7.py
# Infinite Loops
current_number = 1
while current_number <= 9:
print(current_number)
<file_sep>/chap8/tiy221_8-17.py
# Defining a function.
def display_message():
"""Display message about
what I'm leaning in this chapter."""
print(f"I'm leaning to write functions.")
display_message()
def make_sandwich(*items):
"""Making sandwiches with items."""
print("\nMaking a delicious sandwich "
"with the following items:")
for item in items:
print(f"-{item}")
print("You sandwich is ready for collection!")
make_sandwich('cucumber', 'tomato', 'lettuce')
make_sandwich('tuna', 'mayonise')
make_sandwich('cheese', 'tomato', 'egg', 'bacon')
# Using a function with a while loop.
print("\n")
def make_album(artist_name, title):
"""Return a dictionary describing a music album"""
music_album = {'artist': artist_name.title(),
'album': title.title()}
return music_album
artist_name_prompt = "\nWhich artist do you know? "
title_prompt = "Name one of that artist's album. "
# letting the user when to quit
print("Enter 'q' at any time to quit.")
while True:
a_name = input(artist_name_prompt)
if a_name == 'q':
break
title = input(title_prompt)
if title == 'q':
break
album = make_album(a_name, title )
print(album)
<file_sep>/chap8/tiy200_8-3.py
# Calling a function using positional and keyword arguments.
def make_shirt(shirt_size, shirt_message):
"""Display information on a shirt."""
print(f"I have a white {shirt_size} shirt.")
print(f"My white {shirt_size} shirt is written, "
f"'{shirt_message.title()}' in front of it.\n")
make_shirt('medium', '<i love python!/>')
make_shirt(shirt_size='medium', shirt_message='<i love python!/>')
<file_sep>/chap7/tiy189_7-10.py
# Filling a Dictionary with user input
responses = {}
# Set a flag to indicate that polling is active.
polling_active = True
while polling_active:
# prompt for user name and response.
name = input("what is your name? ")
place = input("If you could visit one place in the world,"
"where would you go? ")
# Store the response in the dictionary.
responses[name] = place
# Find out if anyone else is going to take the poll.
repeat = input("Would you like to let another person respond? "
"(yes/ no) ")
if repeat == 'no':
polling_active = False
# Polling is complete. Show the results.
print(f"\n---Poll Results---")
for name, place in responses.items():
print(f"{name.title()} would like to visit {place.title()}.")<file_sep>/chap2/tiy71_2-6.py
famous_person = "<NAME>"
message = f"I wish {famous_person} was still alive."
print(message)<file_sep>/chap5/tiy145_5-10.py
# Loop through the new_users list to see
# if each new username has already been used.
current_users = ['John', 'neo', 'admin', 'sam', 'lisa']
new_users = ['Neo', 'eva', 'Sam', 'joe', 'NELLY']
current_users_lower = [user.lower() for user in current_users]
for new_user in new_users:
if new_user.lower() in current_users_lower:
print(f"Sorry {new_user.title()}, that name is taken.")
else:
print(f"{new_user.title()} is available.")
<file_sep>/chap7/tiy178_7-3.py
# multiple of ten
number = input("Enter a number, and I'll let you know weather "
"the number is multiple of 10 or not: ")
number = int(number)
if number % 10 == 0:
print(f"\nThe number {number} is a multiple of 10.")
else:
print(f"\nThe number {number} is not a multiple of 10.")<file_sep>/chap8/tiy193_8-1.py
# Defining a function.
def display_message():
    """Print a one-line summary of what this chapter covers."""
    chapter_summary = f"I'm leaning to write functions."
    print(chapter_summary)
display_message()<file_sep>/chap4/tiy105_4-1.py
# My favorite pizzas
pizzas = ['chicken & pine', 'pepperoni', 'supreme']
for pizza in pizzas:
print(f"I like {pizza} pizza!")
print(f"\nI like chicken & pine with lots of pines on it."
f"\nDouble cheese is always the way on pepperoni."
f"\nWow!!! The sauces on supreme, so delicious!"
f"\n\nI really like pizza! ")
<file_sep>/chap5/tiy139_5-4.py
# Version that runs the if block.
alien_color = "green"
if alien_color == 'green':
print("You just earned 5 points "
"for shooting the alien!")
else:
print("The player just earned 10 points!")
# Version of this program that runs the else block.
alien_color = 'yellow'
if alien_color == 'green':
print("The player earned 10 points "
"for shooting the alien!")
else:
print("The player just earned 10 points!")
<file_sep>/chap7/tiy185_7-4.py
# Using while loop to let the user choose when to quit
prompt = "Enter pizza_topping:"
prompt += "\nEnter 'quit' when done. "
pizza_topping = ""
while pizza_topping != 'quit':
pizza_topping = input(prompt)
if pizza_topping != 'quit':
print(f"\nI'll add {pizza_topping} to the pizza!\n")
else:
break
<file_sep>/chap6/tiy156_6-1.py
# Using a dictionary to store information
my_friend = {
'first_name': 'mpho',
'last_name': 'mafanyolle',
'age': 38,
'city': 'cape town',
'kids': 3,
}
print(my_friend['first_name'].title())
print(my_friend['last_name'].title())
print(my_friend['age'])
print(my_friend['city'].title())
print(my_friend['kids'])
<file_sep>/chap3/tiy94_3-10.py
my_boxes = ['shoes', 'socks', 'fruits', 'medications', 'keys']
print(my_boxes)
print(my_boxes[2].title())
print(my_boxes)
print(my_boxes[4])
print(my_boxes[-2])
print("\nOriginal list:")
print(my_boxes)
print("Sorted list:")
print(my_boxes)
my_boxes.reverse()
print(my_boxes)
print("Sorted reversed list:")
print(my_boxes)
my_boxes.sort()
print(my_boxes)
my_boxes[3] = 'soups'
print(my_boxes)
my_boxes.append('blankets')
print(my_boxes)
del my_boxes[2]
print(my_boxes)
my_boxes.insert(2, 'cups')
print(my_boxes)
my_boxes.remove('blankets')
print(my_boxes)
my_boxes.sort(reverse=True)
print(my_boxes)
popped_my_boxes = my_boxes.pop()
print(my_boxes)
print(popped_my_boxes)
message = f"\nThe length of my original list is 5."
print(message)
<file_sep>/chap4/hello_world.py
# Unnecessary indent
message = "Hello Python world"
print(message)<file_sep>/chap3/tiy89_3-4.py
guests = ['grace', 'john', 'lisa']
print(guests)
message = f"Hi {guests[0].title()}, you are kindly invited for dinner at my place this Friday 19:30."
print(message)
message = f"Hello {guests[1].title()}! you are kindly invited for dinner, \n\tDay: Friday \n\tVaneu: 479 Umfuyaneni Sec \n\tTime: 19:30"
print(message)
message = f"Hi {guests[2].title()} my friend!! I'm inviting you for dinner on Friday 19:30 at my place."
print(message)<file_sep>/chap6/tiy156_6-2.py
# Using a dictionary to store people’s favorite numbers
favorite_numbers = {"tshiamo": 5,
'lesego': 1,
'neo': 3,
'mpho': 6,
'caspri': 1,
'grace': 9,
'tumi': 2,
}
# Printing each person’s name and their favorite number
for key, value in favorite_numbers.items():
print(key)
print(value)
<file_sep>/chap8/tiy215_8-14.py
def make_car(manufacture, model_name, **car_info):
"""Storing information about a car in a dictionary."""
car_info['manufacture'] = manufacture.title()
car_info['model_name'] = model_name.title()
return car_info
car = make_car('subaru', 'outback',
color='blue',
tow_package=True)
print(car)
my_car = make_car('kia', 'picanto',
color='green',
year=2018,
sun_roof=True)
print(my_car)<file_sep>/chap5/tiy144_5-9.py
# Adding if test to make sure the list is not empty.
usernames = []
if usernames:
for username in usernames:
if username == 'admin':
print("Hello admin, would you like to "
"see a status report?")
else:
print(f"Hello {username.title()},"
f" thanks for logging in again.")
else:
print("We need to find some users!")<file_sep>/chap6/tiy163_6-4.py
glossary = {
'variable': 'a storage address of information referred as value.',
'immutable': 'values cannot be changed once they have been assigned.',
'indentation': 'space between the edge of the editor and start of the code.',
'string': 'anything inside quotes.',
'float': 'any number with decimal point.',
}
for k, v in glossary.items():
print(f"\nkey: {k}")
print(f"value: {v}")
# Automatically adding more terms to glossary.
print("current glossary is: ", glossary)
glossary['syntax'] = "a set of rules that defines how program will be written."
glossary['integers'] = 'positive or negative whole numbers with no decimal point.'
glossary['inequality'] = 'two items that does not have the same value.'
glossary['constant'] = 'type of variable that holds values which cannot change.'
glossary['comment'] = 'text that does not affect the outcome of a code.'
print("updated glossary is: ", glossary)
for k, v in glossary.items():
print(f"\nkey: {k}")
print(f"value: {v}")
<file_sep>/chap3/tiy89_3-7.py
# People to invite for dinner
guests = ['grace', 'john', 'lisa']
print(guests)
message = f"Hi {guests[0].title()}, you are kindly invited for dinner at my place this Friday 19:30."
print(message)
message = f"Hello {guests[1].title()}! you are kindly invited for dinner, \n\tDay: Friday \n\tVaneu: 479 Umfuyaneni Sec \n\tTime: 19:30"
print(message)
message = f"Hi {guests[2].title()} my friend!! I'm inviting you for dinner on Friday 19:30 at my place."
print(message)
# Guest who can't make it
print(f"Unfortunately {guests[1].title()} can't make it.")
# replacing the name of the guest who can't make it
guests[1] = 'joe'
print(guests)
# invitation messages for people on the new list
message = f"Hi {guests[0].title()}, you are kindly invited for dinner at my place this Friday 19:30."
print(message)
message = f"Hi {guests[1].title()}, you are kindly invited for dinner at my place this Friday 19:30."
print(message)
message = f"Hi {guests[2].title()}, you are kindly invited for dinner at my place this Friday 19:30."
print(message)
print(f"Hello {guests[0].title()}! I found a bigger dinner table.")
print(f"Hello {guests[1].title()}! I found a bigger dinner table.")
print(f"Hello {guests[2].title()}! I found a bigger dinner table.")
# Three more people to add on my guest list
guests.insert(0, 'neo')
print(guests)
guests.insert(2, 'linah')
print(guests)
guests.insert(5, 'sam')
print(guests)
# Invitation messages for all in the list
message = f"Hi {guests[0].title()}, you are kindly invited for dinner at my place this Friday 19:30."
print(message)
message = f"Hi {guests[1].title()}, you are kindly invited for dinner at my place this Friday 19:30."
print(message)
message = f"Hi {guests[2].title()}, you are kindly invited for dinner at my place this Friday 19:30."
print(message)
message = f"Hi {guests[3].title()}, you are kindly invited for dinner at my place this Friday 19:30."
print(message)
message = f"Hi {guests[4].title()}, you are kindly invited for dinner at my place this Friday 19:30."
print(message)
message = f"Hi {guests[5].title()}, you are kindly invited for dinner at my place this Friday 19:30."
print(message)
# Dinner table won't arrive in time for dinner
message = f"\nBecause of space,I can invite only two people."
print(message)
# People that I have to cancel the invite with because of the space
message = f"I'm really sorry {guests[5].title()}, I can't invite you to the dinner."
print(message)
guests.pop(5)
print(guests)
message = f"I'm really sorry {guests[0].title()}, I can't invite you to the dinner."
print(message)
guests.pop(0)
print(guests)
message = f"I'm really sorry {guests[1].title()}, I can't invite you to the dinner."
print(message)
guests.pop(1)
print(guests)
message = f"I'm really sorry {guests[1].title()}, I can't invite you to the dinner."
print(message)
guests.pop(1)
print(guests)
message = f"Hi {guests[0].title()}, you are still invited to dinner."
print(message)
message = f"Hi {guests[1].title()}, you are still invited to dinner."
print(message)
del guests[0]
print(guests)
del guests[0]
print(guests)
print(guests)
<file_sep>/chap4/tiy110_4-6.py
odd_numbers = list(range(1, 21, 2))
print(odd_numbers)
for number in odd_numbers:
print(number)<file_sep>/chap8/tiy200_8-4.py
# Equivalent function calls.
def make_shirt(shirt_size='large', shirt_message='i love python'):
    """Describe a white shirt; size and message both have defaults."""
    styled_message = shirt_message.title()
    print(f"I have a white {shirt_size} shirt.")
    print(f"My white {shirt_size} shirt is written, "
          f"'{styled_message}' in front of it.\n")


make_shirt()
make_shirt(shirt_size='medium')
make_shirt(shirt_size='extra large', shirt_message='python is the simplest language')
animals = ['leopard', 'lion', 'cheetah']
for animal in animals:
print(animal.title())
print(f"A {animal} is one of carnivorous animals.\n")
print("Any of these animals is a meat eating animal.")<file_sep>/chap2/tiy71_2-4.py
name = "prisca"
print(name.title())<file_sep>/chap8/tiy206_8-8.py
# Using a function with a while loop.
def make_album(artist_name, title):
    """Return a dictionary describing a music album.

    Both the artist name and the album title are stored title-cased
    under the keys 'artist' and 'album'.
    """
    music_album = {'artist': artist_name.title(), 'album': title.title()}
    return music_album
artist_name_prompt = "\nWhich artist do you know? "
title_prompt = "Name one of that artist's album. "
print("Enter 'q' at any time to quit.")
# Keep collecting albums until the user enters the 'q' sentinel
# at either prompt.
while True:
    a_name = input(artist_name_prompt)
    if a_name == 'q':
        break
    title = input(title_prompt)
    if title == 'q':
        break
    album = make_album(a_name, title, )
    print(album)
<file_sep>/chap8/tiy215_8-13.py
def build_profile(first, last, **my_info):
    """Build a dictionary describing a person.

    Extra keyword arguments are kept as given; the two name fields are
    stored title-cased under 'first_name' and 'last_name'.
    """
    profile = dict(my_info)
    profile['first_name'] = first.title()
    profile['last_name'] = last.title()
    return profile


my_profile = build_profile('prisca', 'lefawane',
                           gender='female',
                           location='tembisa',
                           age=33,
                           study='python', )
print(my_profile)<file_sep>/chap4/players.py
# Slicing a list (to output element from the list)
players =['charles', 'martina', 'michael', 'florance', 'eli']
print(players[0:3])
print(players[:5])
print(players[2:])
print(players[-3:])
print(players[:-2])
print(players[0:5])
# Using slice in a for loop
players =['charles', 'martina', 'michael', 'florance', 'eli']
print("\nHere are my three players on my team:")
for player in players[:3]:
print(player.title())
print("\nMy last two players on my team:")
for player in players[3:]:
print(player.title())<file_sep>/chap5/conditional_tests.py
# Test using equality and inequality
animal = 'Lion'
print("\nIs animal.lower() == 'lion'? I predict True.")
print(animal.lower() == 'lion')
print("Is animal.lower == 'Lion'? I predict False.")
print(animal == 'lion')
print("\nIs animal.lower() != 'Lion'? I predict True.")
print(animal.lower() != 'Lion')
print("Is animal.lower != 'lion'? I predict False.")
print(animal == 'lion')
# Numerical test using equality and inequality
year = 2020
print("\nIs year == 2020? That's True.")
print(year == 2020)
print("Is year == 2002? I predict false.")
print(year == 2002)
print("\nIs year != 2002? That's True.")
print(year == 2020)
print("Is year != 2020? I predict false.")
print(year == 2002)
# Numerical test using greater than and less than
digit = 5
print("\nIs digit > 3? That's True.")
print(digit > 3)
print("Is digit > 7? I predict false.")
print(digit > 7)
print("\nIs digit < 9? That's True.")
print(digit < 9)
print("Is digit < 1? I predict false.")
print(digit < 1)
# Numerical test using greater than or equal to
age = 33
print("\nIs age >= 33? That's True.")
print(age >= 33)
print("Is age >= 48? I predict false.")
print(age >= 48)
# Numerical test using less than or equal to
print("\nIs age <= 33? That's True.")
print(age <= 33)
print("Is age <= 25? I predict false.")
print(age <= 25)
# Checking weather both boxes have 10 items, using and keyword
box_1 = 12
box_2 = 5
print("\nIs box_1 >= 10 and box_2 >=10?")
print(box_1 >= 10 and box_2 >= 10)
box_2 = 12
print("Is box_1 >= 10 and box_2 >=10?")
print(box_1 >= 10 and box_2 >= 10)
# Checking weather both boxes have 10 items, using or keyword
box_1 = 12
box_2 = 5
print("\nIs box_1 >= 10 or box_2 >=10?")
print(box_1 >= 10 or box_2 >= 10)
box_1 = 5
print("Is box_1 >= 10 or box_2 >=10?")
print(box_1 >= 10 or box_2 >= 10)
# Testing weather an item is not in the list
first_teams = ['orange',
'blue',
'red']
team = 'green'
if team not in first_teams:
print(f"\nTeam {team}, you are not going to the semi final.")
<file_sep>/chap8/tiy211_8-9.py
def show_messages(msgs):
    """Print each text message, title-cased, one per line."""
    for index in range(len(msgs)):
        print(msgs[index].title())


text_messages = [
    'sam is not feeling well',
    'i will be late',
    'i am not coming',
]
show_messages(text_messages)<file_sep>/chap4/first_numbers.py
# Using range() Function
for value in range(1, 5):
print(value)
# making list of numbers using range()
numbers = list(range(1, 6))
print(numbers)<file_sep>/chap3/tiy95_3-11.py
# My favorite cars.
# Bug fix: the original referenced `cars` without ever defining it,
# which raised NameError on the very first line. A five-element list is
# needed because the script indexes cars[0], cars[1], cars[2] and cars[4].
cars = ['vw', 'toyota', 'kia', 'nissan', 'bmw']
statement = f"I would like to buy a {cars[0].title()} car."
print(statement)
statement = f"I would like to own a {cars[1].title()} car."
print(statement)
statement = f"I own a {cars[2].title()} car."
print(statement)
statement = f"I own a {cars[4].title()} car."
print(statement)
print(statement)<file_sep>/chap8/tiy211_8-11.py
def show_messages(text_messages):
    """Print every text message in the list, one per line."""
    for text_message in text_messages:
        print(text_message)


def send_messages(text_messages):
    """Simulate sending: drain the list, then refill it with the sent copies.

    The list ends up in reverse order because pop() removes from the end.
    Returns the same (mutated) list object it was given.
    """
    sent_messages = []
    while text_messages:
        sent_messages.append(text_messages.pop())
    text_messages.extend(sent_messages)
    return text_messages


text_messages = [
    'Sam is not feeling well',
    'I will be late',
    'I am not coming',
]
# Show the originals, "send" a copy, then show both lists.
show_messages(text_messages)
print("\nSent messages:")
sent_messages = send_messages(text_messages[:])
show_messages(sent_messages)
print("\nOriginal messages:")
show_messages(text_messages)
<file_sep>/chap6/tiy156_6-3.py
# Using dictionary to make a glossary
glossary = {
'variable': 'a storage address of information referred as value.',
'immutable': 'values cannot be changed once they have been assigned.',
'indentation': 'space between the edge of the editor and start of the code.',
'string': 'anything inside quotes',
'float': 'any number with decimal point.',
}
for key, value in glossary.items():
print(f"{key}: \n{value}\n")<file_sep>/chap4/tiy111_4-9.py
# Making comprehension list
cubes = [number**3 for number in range(1, 11)]
print(cubes)
for number in cubes:
print(number)<file_sep>/chap7/tiy178_7-2.py
# Using int() to Accept Numerical Input
restaurant_seating = input("How many people are in you dinner group? ")
restaurant_seating = int(restaurant_seating)
if restaurant_seating > 8:
print(f"\nSorry, you'll have to wait for a table.")
else:
print(f"\nYour table is ready!")
<file_sep>/chap4/tiy116_4-10.py
cubes = []
for value in range(1, 11):
cube = value ** 3
cubes.append(cube)
print(cubes)
for value in cubes:
print(value)
print("\n\nThe first three items in the list are:")
print(cubes[:3])
print("\nThree items from the middle of the list are:")
print(cubes[4:7])
print("\nThe last three items from the list are:")
print(cubes[7:])<file_sep>/chap6/tiy171_6-10.py
# Making a list in a dictionary
favorite_numbers = {"tshiamo": [5, 6, 9],
'lesego': [1, 2],
'neo': [3, 7],
'mpho': [4, 6, 8, 11],
'caspri': [1, 2, 12],
'grace': [9, 13, 20],
'tumi': [2, 7],
}
for name, numbers in favorite_numbers.items():
print(f"\n{name.title()}'s favorite numbers are:")
for number in numbers:
print(f"\t{number}")
<file_sep>/chap3/tiy89_3-6.py
# People to invite for dinner
guests = ['grace', 'john', 'lisa']
print(guests)
message = f"Hi {guests[0].title()}, you are kindly invited for dinner at my place this Friday 19:30."
print(message)
message = f"Hello {guests[1].title()}! you are kindly invited for dinner, \n\tDay: Friday \n\tVaneu: 479 Umfuyaneni Sec \n\tTime: 19:30"
print(message)
message = f"Hi {guests[2].title()} my friend!! I'm inviting you for dinner on Friday 19:30 at my place."
print(message)
# Guest who can't make it
print(f"Unfortunately {guests[1].title()} can't make it.")
# replacing the name of the guest who can't make it
guests[1] = 'joe'
print(guests)
# invitation messages for people on the new list
message = f"Hi {guests[0].title()}, you are kindly invited for dinner at my place this Friday 19:30."
print(message)
message = f"Hi {guests[1].title()}, you are kindly invited for dinner at my place this Friday 19:30."
print(message)
message = f"Hi {guests[2].title()}, you are kindly invited for dinner at my place this Friday 19:30."
print(message)
# Three more people to add on my guest list
print(f"Hello guys! I found a bigger dinner table.")
guests.insert(0, 'neo')
print(guests)
guests.insert(2, 'linah')
print(guests)
guests.insert(5, 'sam')
print(guests)
# Invitation messages for all in the list
message = f"Hi {guests[0].title()}, you are kindly invited for dinner at my place this Friday 19:30."
print(message)
message = f"Hi {guests[1].title()}, you are kindly invited for dinner at my place this Friday 19:30."
print(message)
message = f"Hi {guests[2].title()}, you are kindly invited for dinner at my place this Friday 19:30."
print(message)
message = f"Hi {guests[3].title()}, you are kindly invited for dinner at my place this Friday 19:30."
print(message)
message = f"Hi {guests[4].title()}, you are kindly invited for dinner at my place this Friday 19:30."
print(message)
message = f"Hi {guests[5].title()}, you are kindly invited for dinner at my place this Friday 19:30."
print(message)
<file_sep>/chap4/tiy119_4-13.py
# Using for loop to print tuple items
buffet_foods = ('ribs', 'nuggets', 'sandwich', 'fruit salad', 'chips')
for food in buffet_foods:
    print(food.title())
# Replacing tuple items (rebinding the name to a brand-new tuple is fine).
print("\nOriginal buffet_foods:")
for food in buffet_foods:
    print(food.title())
buffet_foods = ('ribs', 'nuggets', 'buns', 'vegetable salad', 'chips')
print("\nModified buffet_foods:")
for food in buffet_foods:
    print(food)
# Trying to modify one of the items, and making sure that python rejects the change.
# Bug fix: the bare assignment raised TypeError and crashed the script,
# so the final loop below never ran. Catch the expected TypeError instead,
# which demonstrates tuple immutability without aborting.
try:
    buffet_foods[3] = ('rice',)
except TypeError:
    print("Tuples are immutable: items cannot be reassigned.")
for food in buffet_foods:
    print(food.title())
<file_sep>/chap2/tiy64_2-1.py
message = "Welcome to the world of Python!"
print(message)<file_sep>/chap6/tiy171_6-11.py
# Using a dictionary in a dictionary
cities = {
'johannesburg': {
'country': 'south africa',
'population': '5,783,000',
'fact': 'the busiest airport in africa',
},
'bloemfontein': {
'country': 'south africa',
'population': '567,000',
'fact': 'is the capital city of free state',
},
'durban': {
'country': 'south africa',
'population': '3,158,313',
'fact': 'famous for being the busiest'
' port in south africa ',
},
}
for city, city_information in cities.items():
print(f"\nCity: {city.title()}")
population = city_information['population']
fact = city_information['fact']
print(f"\tPopulation: {population}")
print(f"\tFact: {fact.title()}.")<file_sep>/chap6/tiy164_6-5.py
# Three major rivers and the country each river runs through
major_rivers = {
'mississippi': 'united state',
'zambezi': 'mozambique',
'amazon': 'brazil',
}
for key, value in major_rivers.items():
print(f"\nThe {key.title()} runs through {value.title()}.")
print(key.title())
print(value.title())
<file_sep>/chap8/tiy206_8-7.py
# Returning a dictionary.
def make_album(artist_name, title):
"""Return a dictionary describing a music album"""
music_album = {'artist': artist_name.title(), 'album': title.title()}
return music_album
album = make_album('l<NAME>', 'never too much', )
print(album)
album = make_album('<NAME>', 'Power of love')
print(album)
album = make_album('maria carey', 'caution')
print(album)
print("\n")
# Dictionary with number of songs.
def make_album(artist_name, title, tracks=None):
"""Return a dictionary describing a music album"""
music_album = {'artist': artist_name.title(), 'album': title.title()}
if tracks:
music_album['tracks'] = tracks
return music_album
album = make_album('<NAME>', 'amadlozi', 8)
print(album)
album = make_album('toni braxton', 'the final frontier')
print(album)
album = make_album('o<NAME>', 'afrika')
print(album)
<file_sep>/chap3/tiy82_3-1.py
names = ['mpho', 'neo', 'lesego', 'tshiamo']
print(names[0].title())
print(names[1].title())
print(names[2].title())
print(names[3].title())
<file_sep>/chap2/tiy71_2-3.py
name = "Mpho"
message = f"Hello {name},would you like to learn some Python today?"
print(message)
<file_sep>/chap5/tiy144_5-8.py
# Looping through the list
usernames = ['john', 'neo',
'admin', 'sam', 'lisa']
for username in usernames:
if username == 'admin':
print("Hello admin, would you like to see a status report?")
else:
print(f"Hello {username.title()}, thanks for logging in again.")
<file_sep>/chap6/tiy171_6-7.py
# making a list of dictionaries
my_friend = {
'first_name': 'mpho',
'last_name': 'mafanyolle',
'age': 38,
'city': 'cape town',
'kids': 3,
}
print(my_friend['first_name'].title())
print(my_friend['last_name'].title())
print(my_friend['age'])
print(my_friend['city'].title())
print(my_friend['kids'])
my_kid = {
'first_name': 'neo',
'last_name': 'lefawane',
'age': 12, 'gender': 'female',
'grade': 7,
}
my_sister = {
'first_name': 'maggy',
'last_name': 'lefawane',
'age': 41,
'city': 'lephalale',
'kids': 2,
}
people = [my_friend, my_kid, my_sister]
for person in people:
print(person)<file_sep>/chap6/tiy171_6-8.py
# making a list of dictionaries
pet_0 = {
'kind': 'dog',
'owner': 'phil',
'color': 'brown',
}
pet_1 = {
'kind': 'rabbit',
'owner': 'loyd',
'color': 'white',
}
pet_2 = {
'kind': 'hamster',
'owner': 'callie',
'color': 'gray & white',
}
pet_3 = {
'kind': 'cat',
'owner': 'joyce',
'color': 'black',
}
pets = [pet_0, pet_1, pet_2, pet_3]
for pet in pets:
print(pet)
<file_sep>/chap5/hello_admin.py
users = ['john', 'neo','admin', 'sam', 'lisa']
if user in users:
print(users)<file_sep>/chap4/even_numbers.py
# Listing even numbers between 1 and 10 (using range())
even_numbers = list(range(2, 11, 2))
print(even_numbers)
even_numbers = list(range(1, 11, 2))
print(even_numbers)
<file_sep>/chap8/tiy221_8-16.py
import pets
pets.describe_pet('hamster', 'harry')
pets.describe_pet('dog', 'willie')
from pets import describe_pet
describe_pet('hamster', 'harry')
describe_pet('dog', 'willie')
from pets import describe_pet as dp
dp('hamster', 'harry')
dp('dog', 'willie')
import pets as p
p.describe_pet('hamster', 'harry')
p.describe_pet('dog', 'willie')
from pets import *
describe_pet('hamster', 'harry')
describe_pet('dog', 'willie')
<file_sep>/chap3/tiy93_3-8.py
locations = ['mauritius', 'paris', 'zanzibar', 'disneyland', 'china']
print(locations)
print("Sorted list:")
print(sorted(locations))
print("Here is the original list:")
print(locations)
print("\nHere is the sorted reversed list:")
print(sorted(locations))
print(locations)
locations.reverse()
print(locations)
locations.reverse()
print(locations)
locations.sort()
print(locations)
locations.sort(reverse=True)
print(locations)<file_sep>/chap6/tiy171_6-9.py
# Making a list in a dictionary
favorite_places = {
'neo': ['school', 'mall', 'library'],
'callie': ['mall', 'tavern', 'casino' ],
'tshia': ['cinema', 'home', 'theatre'],
'mpho': ['gym', 'work', 'home'],
'lisa': ['pub', 'club', 'tavern'],
}
for name, places in favorite_places.items():
print(f"\n{name.title()}'s favorite places are:")
for place in places:
print(f"{place.title()}")<file_sep>/chap8/tiy193_8-2.py
# Passing information to a function.
def favorite_book(title):
    """Announce one favorite book, title-cased."""
    formatted_title = title.title()
    print(f"{formatted_title} is one of my favorite books.")
favorite_book('comford herself')<file_sep>/chap7/tiy189_7-9.py
# Starting with sandwiched that has been ordered,
# and an empty list to hold finished sandwiches.
sandwich_orders = [
'chicken & mayo',
'pastrami',
'egg & tomato',
'cheese & tomato',
'pastrami',
'bacon & egg',
'pastrami',
]
finished_sandwiches = []
print(f"Sorry, we are out of pastrami today.\n")
# Removing pastrami from the order list.
while 'pastrami' in sandwich_orders:
sandwich_orders.remove('pastrami')
# Verify each ordered sandwich until there are no more sandwich orders.
# Move each ordered sandwich into the list of finished sandwiches.
while sandwich_orders:
current_sandwich = sandwich_orders.pop()
print(f"I'm making your {current_sandwich} sandwich.")
finished_sandwiches.append(current_sandwich)
# Display all finished sandwiches.
print("\n")
for finished_sandwich in finished_sandwiches:
print(f"I made a {finished_sandwich} sandwich.")
<file_sep>/chap3/tiy82_3-3.py
cars = ['vw', 'toyota', 'Kia', 'nissan']
statement = f"I would like to buy a {cars[0].title()} car."
print(statement)
statement = f"I would like to own a {cars[1].title()} car."
print(statement)
statement = f"I own a {cars[2].title()} car."
print(statement)
statement = f"I own a {cars[3].title()} car."
print(statement)<file_sep>/chap8/tiy200_8_5.py
# Default value and equivalent function calls
def describe_city(name, country='south africa'):
"""Display a city information"""
print(f"{name.title()} is in {country.title()}.")
describe_city('cape town')
describe_city('pretoria')
describe_city('gaborone', 'botswana')
<file_sep>/chap7/tiy185_7-6.py
# Using while statement to stop the loop
prompt = "Enter pizza_topping:"
prompt += "\nEnter 'quit' when done. "
pizza_topping = ""
while pizza_topping != 'quit':
pizza_topping = input(prompt)
if pizza_topping != 'quit':
print(f"\n\tI'll add {pizza_topping} to the pizza!\n")
# Using an active variable to control
# how long the loop runs.
active = True
while active:
pizza_topping = input(prompt)
if pizza_topping == 'quit':
active = False
else:
print(f"\n\t{pizza_topping.title()} is horrible!\n")
# Use a break statement to exit the loop
# when the user enters a 'quit' value.
while True:
pizza_topping = input(prompt)
if pizza_topping == 'quit':
break
else:
print(f"\n\tWow, {pizza_topping} is so delicious!\n")
| f67178df9daa92bd01bb4366edd2f140aed481fd | [
"Python"
] | 74 | Python | priscalefawane/python | 406d4fc07ff1c5086832734b68d4c3af4b1422ea | 81821ec0520b60ea96d22b40c6a36703700ea333 |
refs/heads/master | <repo_name>alexnevolin/ymap<file_sep>/js/yMap.js
var yMapApp = angular.module("YMap", ["kendo.directives"])
.controller("MarkCtrl", function($scope, $http) {
$scope.marksJSON;
$scope.idList = [];
$scope.counterList = [];
$scope.chosenFeatures = [];
$scope.featuresList = [];
var INEXACT_COORD = 0;
var COLOUR_MARK = 1;
var NUMBER_VALUE = 2;
var CHART = 3;
var SPEC_EVENT = 4;
$http.get('data/mark2.json').success(function(data) {
$scope.marksJSON = data;
});
$scope.colorCriteria;
var defaultColors = [
{"Класс дома" : ['#ddd0ca', '#672B10']},
{"Материал стен дома" : ['#f5dada', '#BF0909']},
{"Максимальная этажность дома" : ['#ebebff', '#0000FF']},
{"Год постройки дома" : ['#dae7d8', '#146302']},
{"Когорта дома Объекта" : ['#eddeed', '#8C278C']}
];
var arrColors = ['#ffffff'];
var flag = false;
var chosenCriteria;
$http.get('data/colors.json').success(function(response) {
$scope.colorCriteria = response;
});
function paint(count, from, to, target)
{
    // Number of gradient steps to generate.
    var steps = count;
    // Split "#RRGGBB" into ["RR", "GG", "BB"] channel pairs.
    var startChannels = from.replace("#", "").match(/.{2}/g);
    var finishChannels = to.replace("#", "").match(/.{2}/g);
    // Fill the shared palette (arrColors) with interpolated colours 1..steps.
    // NOTE(review): `target` is accepted but unused, as in the original.
    for (var step = 1; step <= steps; step++) {
        arrColors[step] = getFadeMiddleColor(step, steps, startChannels, finishChannels);
    }
}
function getFadeMiddleColor(i, n, aRGBStart, aRGBFinish)
{
    // Fraction of the finish colour blended in at step i of n (0..1].
    var endWeight = i / n;
    // Remaining fraction contributed by the start colour.
    var startWeight = 1 - endWeight;
    // Blend one channel: the two-character hex strings are coerced to
    // numbers via the "0x" prefix, then the weighted sum is floored.
    var mix = function(channel) {
        return Math.floor(('0x' + aRGBStart[channel]) * startWeight +
                          ('0x' + aRGBFinish[channel]) * endWeight);
    };
    return 'rgb(' + mix(0) + ',' + mix(1) + ',' + mix(2) + ')';
}
function getMaxPriorities(criteria){
    // Look up the priority map registered under the given criteria name.
    var priorityMap;
    for (var name in $scope.colorCriteria) {
        if (name == criteria) {
            priorityMap = $scope.colorCriteria[name];
        }
    }
    // Scan the map for the largest priority value (0 when nothing matches).
    var highest = 0;
    for (var entry in priorityMap) {
        if (priorityMap[entry] > highest) {
            highest = priorityMap[entry];
        }
    }
    return highest;
}
function getParamsInterface(string, returnCriteria){
    // Walk every criteria group looking for an entry named `string`.
    var groups = $scope.colorCriteria;
    for (var groupName in groups) {
        var group = groups[groupName];
        for (var entryName in group) {
            if (entryName == string) {
                // Caller chooses: the owning group's name, or the entry's value.
                return returnCriteria ? groupName : group[entryName];
            }
        }
    }
}
function getDefaultColorsByCriteria(criteria){
    // defaultColors is an array of single-key objects; return the
    // [startColour, endColour] pair stored under `criteria`, if any.
    for (var idx = 0; idx < defaultColors.length; idx++) {
        var pair = defaultColors[idx][criteria];
        if (pair !== undefined) {
            return pair;
        }
    }
}
// Build one Yandex-Maps placemark per house in the loaded JSON and add it
// to the map. Each placemark gets a custom layout whose build() callback
// draws a ring chart and toggles special-event / inexact-coordinate markers.
// NOTE(review): `markNum` is advanced inside the deferred build callbacks,
// so correct behaviour relies on the layouts being built in insertion order.
function putMarks(map) {
    var marksCount = $scope.marksJSON.houses;
    var markNum = 0;
    // Per-mark lookup tables keyed by mark id.
    var specEvents = {};
    var greyBorders = {};
    for (var i = 0; i < marksCount.length; i++) {
        // Populate $scope.chosenFeatures from this house's data.
        bindFeatures(marksCount[i]);
        var id = marksCount[i].mark_id;
        var coords = marksCount[i].mark_coords;
        var counter = $scope.chosenFeatures[NUMBER_VALUE];
        // Rebuild the gradient palette for the active colour criteria,
        // then pick this mark's colour by its priority index.
        var defaultColor = getDefaultColorsByCriteria(getParamsInterface($scope.chosenFeatures[COLOUR_MARK], true));
        paint(getMaxPriorities(chosenCriteria), defaultColor[0], defaultColor[1]);
        var colour = arrColors[getParamsInterface($scope.chosenFeatures[COLOUR_MARK], false)];
        specEvents[id] = $scope.chosenFeatures[SPEC_EVENT];
        greyBorders[id] = $scope.chosenFeatures[INEXACT_COORD];
        $scope.idList[i] = id;
        $scope.counterList[i] = $scope.chosenFeatures[CHART];
        // Deferred callback run by ymaps when the mark's DOM is created.
        var chartBuild = function() {
            markLayout.superclass.build.call(this);
            // DOM ids follow the "#id_/", "#br_/", "#vs_" + mark-id convention
            // produced by getMarkTemplate (defined elsewhere — not in this file).
            var markId = "#id_" + $scope.idList[markNum];
            var borderId = "#br_" + $scope.idList[markNum];
            var pinId = "#vs_" + $scope.idList[markNum];
            var currentMark = $(markId);
            var greyBorder = $(borderId);
            var pinBorder = $(pinId);
            // MarkChart is an external helper; presumably renders a ring chart
            // into the mark's element — TODO confirm against its source.
            var chart = new MarkChart($scope.idList[markNum]);
            chart.chartType = "ring";
            // Ring shows counter vs. the remainder out of a fixed total of 8.
            chart.data = [+$scope.counterList[markNum], 8 - $scope.counterList[markNum]];
            chart.colors = ['#0FFF2B', '#00ffff'];
            chart.draw();
            // Show the special-event badge only when the flag equals "1".
            if(currentMark.data('id') == $scope.idList[markNum] ){
                var specEvent;
                for(var key in specEvents){
                    if(key == $scope.idList[markNum]){
                        specEvent = specEvents[key];
                    }
                }
                specEvent == "1" ? currentMark.css('visibility','visible') : currentMark.css('visibility','hidden');
            }
            // Inexact coordinates get a grey halo plus a visible pin border.
            if(greyBorder.data('id') == $scope.idList[markNum] ){
                var uncoords;
                for(var key in greyBorders){
                    if(key == $scope.idList[markNum]){
                        uncoords = greyBorders[key];
                    }
                }
                if(uncoords) {
                    greyBorder.css('box-shadow', '#ad9c94 0 0 0 2px');
                    pinBorder.css('visibility', 'visible');
                }
            }
            // Advance to the next mark; wraps to 0 past the end.
            (markNum < $scope.marksJSON.houses.length + 1) ? markNum++ : markNum = 0;
        };
        var markLayout = ymaps.templateLayoutFactory.createClass(getMarkTemplate(marksCount[i].type,id), {
            build: chartBuild
        });
        // var balloonLayout = '';
        var polygonPlacemark = new ymaps.Placemark(
            [coords.x, coords.y], {
                // balloonContent: balloonLayout,
                name: '<NAME>',
                chartCount: counter,
                colour: colour
            }, {
                iconLayout: markLayout
            }
        );
        // Right-click handler; balloon opening is currently stubbed out.
        polygonPlacemark.events.add('contextmenu', function(e) {
            console.log("Показать балун");
            //polygonPlacemark.balloon.open(e.get('coords'), 'asd');
        });
        map.geoObjects.add(polygonPlacemark);
    }
}
function putChosenFeatures(data, features, i) {
    // The first call made after a "mark_colors" key was seen records
    // which colour criteria is currently active (shared `flag` latch).
    if (flag) {
        chosenCriteria = features[i];
        flag = false;
    }
    // Copy the value matching the selected feature name into the
    // next slot of $scope.chosenFeatures (note the pre-increment).
    for (var prop in data) {
        if (prop == features[i]) {
            $scope.chosenFeatures[++i] = data[prop];
        }
    }
}
function bindFeatures(dataMark) {
    // Ordered list of feature names chosen in the settings window.
    var features = $scope.featuresList;
    var position = 0;
    for (var prop in dataMark) {
        // Seeing the colour block arms the latch read by putChosenFeatures.
        if (prop == "mark_colors") {
            flag = true;
        }
        // Service fields are skipped; every other key maps onto a feature.
        if (prop != "mark_coords" && prop != "mark_id" && prop != "type" && prop != "mark_uncoords") {
            putChosenFeatures(dataMark[prop], features, position);
            position++;
        }
        // Inexact-coordinate flag goes into its fixed slot directly.
        if (prop == "mark_uncoords") {
            $scope.chosenFeatures[INEXACT_COORD] = dataMark[prop];
        }
    }
}
// Create the map, define a custom map control that hosts the Kendo
// "features" window, and wire up the UI handlers that redraw the marks.
function init() {
    var map = new ymaps.Map("map_container", {
        center: [55.73, 37.58],
        zoom: 10
    }),
    // Custom ymaps control; augmented with lifecycle methods below.
    CustomControlClass = function(options) {
        CustomControlClass.superclass.constructor.call(this, options);
        this._$content = null;
        this._geocoderDeferred = null;
    };
    $scope.map = map;
    ymaps.util.augment(CustomControlClass, ymaps.collection.Item, {
        onAddToMap: function(map) {
            CustomControlClass.superclass.onAddToMap.call(this, map);
            this._lastCenter = null;
            this.getParent().getChildElement(this).then(this._onGetChildElement, this);
        },
        onRemoveFromMap: function(oldMap) {
            this._lastCenter = null;
            if (this._$content) {
                this._$content.remove();
                this._mapEventGroup.removeAll();
            }
            CustomControlClass.superclass.onRemoveFromMap.call(this, oldMap);
        },
        _onGetChildElement: function(parentDomContainer) {
            // templateWindowFeatures is not defined in this file —
            // presumably a jQuery fragment created elsewhere; verify.
            this._$content = templateWindowFeatures.appendTo(parentDomContainer);
            this._mapEventGroup = this.getMap().events.group();
            var myWindow = $("#window"),
                features = $("#features"),
                mapContainer = $("#map_container");
            // Snapshot of the window markup, restored on "refresh".
            var cache = myWindow.html();
            features.click(function() {
                myWindow.data("kendoWindow").open();
                features.fadeOut();
            });
            myWindow.on('click', '#refreshFeatures', function() {
                myWindow.html(cache);
            });
            // Clicking the map closes the window when the opener is hidden.
            mapContainer.click(function() {
                if (features.css('display') == "none") {
                    myWindow.data("kendoWindow").close();
                }
            });
            // Collect the currently checked radio values into the scope.
            function defaultFeatures() {
                var vals = $('.k-radio').map(function(i, el) {
                    if ($(el).prop('checked')) {
                        return $(el).val();
                    }
                }).get();
                $scope.featuresList = vals;
            }
            // Apply button: re-read features, clear marks and redraw.
            // NOTE(review): `markNum` here assigns an implicit global —
            // putMarks declares its own local markNum; confirm intent.
            myWindow.on('click', '#sendFeatures', function() {
                defaultFeatures();
                map.geoObjects.removeAll();
                markNum = 0;
                putMarks(map);
            });
            function onClose() {
                features.fadeIn();
            }
            // Title (Russian): "Set the characteristics for displaying marks".
            myWindow.kendoWindow({
                visible: false,
                title: "Задать характеристики для отображения меток",
                actions: [
                    "Close"
                ],
                close: onClose
            }).data("kendoWindow");
            // Initial render with the default feature selection.
            defaultFeatures();
            putMarks(map);
        }
    });
    var buttonControl = new CustomControlClass();
    map.controls.add(buttonControl, {
        float: 'right',
        position: {
            top: 50,
            right: 10
        }
    });
}
ymaps.ready(init);
});
<file_sep>/README.md
# This project requires a web server to run properly; the nginx server is recommended.<file_sep>/js/template.js
// jQuery-wrapped markup for the "Характеристики" (display settings) control:
// a trigger button plus a Kendo UI window with four radio-button groups
// (mark colour source, centre number, pie-chart data, red-circle indicator)
// and cancel/apply buttons. Injected into the map control in app.js
// (_onGetChildElement) and read back there via the .k-radio inputs' value
// attributes — the Russian value strings double as data keys, so do not edit
// them without updating the mark data.
var templateWindowFeatures = $('<button class="k-button" id="features">Характеристики</button>' +
'<div class="features-window" id="window">' +
'<div class="list-section k-content">' +
'<span>Основной цвет фигуры</span>'+
'<ul class="fieldlist">' +
'<li><input type="radio" name="colour-mark" id="colourm1" value="Класс дома" class="k-radio" checked="checked">' +
'<label class="k-radio-label" for="colourm1">Класс дома</label></li>' +
'<li><input type="radio" name="colour-mark" id="colourm2" value="Материал стен дома" class="k-radio">' +
'<label class="k-radio-label" for="colourm2">Материал стен дома</label></li>' +
'<li><input type="radio" name="colour-mark" id="colourm3" value="Максимальная этажность дома" class="k-radio">' +
'<label class="k-radio-label" for="colourm3">Максимальная этажность дома</label></li>' +
'<li><input type="radio" name="colour-mark" id="colourm4" value="Год постройки дома" class="k-radio">' +
'<label class="k-radio-label" for="colourm4">Год постройки дома</label></li>' +
'<li><input type="radio" name="colour-mark" id="colourm5" value="Когорта дома Объекта" class="k-radio">' +
'<label class="k-radio-label" for="colourm5">Когорта дома Объекта </label></li>' +
'</ul>' +
'<span>Числовое значение в середине метки</span>'+
'<ul class="fieldlist">' +
'<li><input type="radio" name="number-mark" id="numberm1" value="Количество Аналогов/Объявлений в доме" class="k-radio" checked="checked">' +
'<label class="k-radio-label" for="numberm1">Количество Аналогов/Объявлений в доме</label></li>' +
'<li><input type="radio" name="number-mark" id="numberm2" value="Максимальная этажность дома" class="k-radio">' +
'<label class="k-radio-label" for="numberm2">Максимальная этажность дома</label></li>' +
'<li><input type="radio" name="number-mark" id="numberm3" value="Год постройки дома" class="k-radio">' +
'<label class="k-radio-label" for="numberm3">Год постройки дома</label></li>' +
'<li><input type="radio" name="number-mark" id="numberm4" value="Количество описанных квартир в доме по Росреестру" class="k-radio">' +
'<label class="k-radio-label" for="numberm4">Количество описанных квартир в доме по Росреестру</label></li>' +
'<li><input type="radio" name="number-mark" id="numberm5" value="Средняя удельная цена Аналогов/Объявлений (тыс.руб.)" class="k-radio">' +
'<label class="k-radio-label" for="numberm5">Средняя удельная цена Аналогов/Объявлений (тыс.руб.)</label></li>' +
'<li><input type="radio" name="number-mark" id="numberm6" value="Удельная Кадастровая стоимость (тыс.руб.)" class="k-radio">' +
'<label class="k-radio-label" for="numberm6">Удельная Кадастровая стоимость (тыс.руб.)</label></li>' +
'</ul>' +
'<span>Круговая диаграмма</span>'+
'<ul class="fieldlist">' +
'<li><input type="radio" name="diagram-mark" id="diagramm1" value="Доли Аналогов/Объявлений в доме по комнатности (1, 2, 3 , 4+)" class="k-radio" checked="checked">' +
'<label class="k-radio-label" for="diagramm1">Доли Аналогов/Объявлений в доме по комнатности (1, 2, 3 , 4+)</label></li>' +
'<li><input type="radio" name="diagram-mark" id="diagramm2" value="Доли Аналогов/Объявлений в доме по классу (дихотомия: доминирующий класс и остальные)" class="k-radio">' +
'<label class="k-radio-label" for="diagramm2">Доли Аналогов/Объявлений в доме по классу (дихотомия: доминирующий класс и остальные)</label></li>' +
'</ul>' +
'<span>Дополнительный знак "Красный круг"</span>'+
'<ul class="fieldlist">' +
'<li><input type="radio" name="special-mark" id="specialm1" value="В доме выданы кредиты: да/нет (информация о выданных кредитах в доме)" class="k-radio" checked="checked">' +
'<label class="k-radio-label" for="specialm1">В доме выданы кредиты: да/нет (информация о выданных кредитах в доме)</label></li>' +
'<li><input type="radio" name="special-mark" id="specialm2" value="Описание дома рассогласовано: да/нет (характеристики рассогласованности)" class="k-radio">' +
'<label class="k-radio-label" for="specialm2">Описание дома рассогласовано: да/нет (характеристики рассогласованности)</label></li>' +
'</ul>' +
'</div>' +
'<button class="k-button controlFeatures" id="refreshFeatures">Отменить</button>' +
'<button class="k-button controlFeatures" id="sendFeatures">Показать</button>' +
'</div>');
/**
 * Returns the HTML layout template for one map placemark.
 *
 * Fix: the 'landmark', 'landmark_home_ak' and 'landmark_home_aknew' templates
 * emitted a doubled quote after the canvas id (`id="..."">`), producing a
 * malformed attribute; the stray quote is removed.
 *
 * @param {string} type - placemark kind: 'zc', 'home', 'landmark', 'object',
 *   'analog', 'landmark_home_ak', 'landmark_home_aknew', 'landmark_zc_ak' or
 *   'landmark_zc_aknew'; any other value yields an empty string.
 * @param {string|number} id - unique mark id, used for the <canvas> element id
 *   and the data-id hooks the chart/visibility code looks up later.
 * @returns {string} HTML fragment with {{properties.*}} placeholders, consumed
 *   by ymaps.templateLayoutFactory.createClass().
 */
var getMarkTemplate = function(type, id) {
    var tmpl = '';
    switch (type) {
        case 'zc':
            tmpl = '<div class="placemark_layout_container">'+
                '<div class="zc_layout" style="background: {{properties.colour}};">' +
                '<div class="zc_layout_left" style="border-right: 10px solid {{properties.colour}};"></div>' +
                '<div class="zc_layout_right" style="border-left: 10px solid {{properties.colour}};"></div>' +
                '<canvas id="' + id + '" class="canvas" width="70" height="70"></canvas>' +
                '<div class="pin" style="border-top: 20px solid {{properties.colour}};"></div>' +
                '<span class="text">{{ properties.chartCount }}</span>'+
                '<div id="id_'+id+'" data-id="'+id+'" class="zc_trigger"></div>'+
                '</div></div>';
            break;
        case 'home':
            tmpl = '<div class="placemark_layout_container">' +
                '<div class="home_layout" style="border-color: {{properties.colour}};">' +
                '<div class="home_layout_pin pin" style="border-top: 23px solid {{properties.colour}};"></div>' +
                '<span class="home_layout_text text">{{ properties.chartCount }}</span>' +
                '<canvas id="' + id + '" width="70" height="70" class="home_layout_canvas canvas"></canvas>' +
                '</div></div>';
            break;
        case 'landmark':
            // Hidden canvas keeps the chart-drawing code uniform across types.
            tmpl = '<div class="landmark"><div class="landmark_center"></div></div><canvas style="display:none" id="' + id + '"></canvas>';
            break;
        case 'object':
            tmpl = '<div class="placemark_layout_container">' +
                '<div class="sq_layout" style="background: {{properties.colour}};">' +
                '<div class="sq_layout_pin pin" style="border-top: 22px solid {{properties.colour}};"></div>' +
                '<span class="sq_layout_text text">{{ properties.chartCount }}</span>' +
                '<canvas id="' + id + '" width="70" height="70" class="sq_layout_canvas canvas"></canvas>' +
                '</div></div>';
            break;
        case 'analog':
            tmpl = '<div class="placemark_layout_container">' +
                '<div class="analog_layout" id="br_'+id+'" data-id="'+id+'" style="background: {{properties.colour}}; ">' +
                '<span class="analog_layout_text text">{{ properties.chartCount }}</span>' +
                '<canvas id="' + id + '" width="70" height="70" class="analog_layout_canvas canvas"></canvas>' +
                '<div class="analog_uncoord pin" id="vs_'+id+'" data-id="'+id+'" style="visibility: hidden;"></div>'+
                '<div class="analog_layout_pin pin" style="border-top: 23px solid {{properties.colour}};"></div>'+
                '</div></div>';
            break;
        case 'landmark_home_ak':
            tmpl = '<div class="landmark_home"><div class="landmark_home_icon fa fa-home"></div></div><canvas style="display:none" id="' + id + '"></canvas>';
            break;
        case 'landmark_home_aknew':
            tmpl = '<div class="landmark_home"><div class="landmark_home_aknew_icon fa fa-building"></div></div><canvas style="display:none" id="' + id + '"></canvas>';
            break;
        case 'landmark_zc_ak':
            tmpl = '<div class="placemark_layout_container">'+
                '<div class="landmark_zc_layout" style="background: #000;">' +
                '<div class="landmark_zc_layout_left" style="border-right: 6px solid #000;"></div>' +
                '<div class="landmark_zc_layout_right" style="border-left: 6px solid #000;"></div>' +
                '<canvas id="' + id + '" class="canvas" width="70" height="70" style="display:none;"></canvas>' +
                '<div class="landmark_zc_pin" style="border-top: 16px solid #000;"></div>' +
                '<span class="landmark_zc_ak_text"><i class="landmark_zc_ak_icon fa fa-home"></i></span>'+
                '<div id="id_'+id+'" data-id="'+id+'" class="zc_trigger" style="display:none;"></div>'+
                '</div></div>';
            break;
        case 'landmark_zc_aknew':
            tmpl = '<div class="placemark_layout_container">'+
                '<div class="landmark_zc_layout" style="background: #000;">' +
                '<div class="landmark_zc_layout_left" style="border-right: 6px solid #000;"></div>' +
                '<div class="landmark_zc_layout_right" style="border-left: 6px solid #000;"></div>' +
                '<canvas id="' + id + '" class="canvas" width="70" height="70" style="display:none;"></canvas>' +
                '<div class="landmark_zc_pin" style="border-top: 16px solid #000;"></div>' +
                '<span class="landmark_zc_ak_text"><i class="landmark_zc_aknew_icon fa fa-building"></i></span>'+
                '<div id="id_'+id+'" data-id="'+id+'" class="zc_trigger" style="display:none;"></div>'+
                '</div></div>';
            break;
    }
    return tmpl;
};
"JavaScript",
"Markdown"
] | 3 | JavaScript | alexnevolin/ymap | bb01c8e906289015a004626a819bec6cef06a994 | 27470e15749ee49f1b2b9d7a89efe81b38bc2d03 |
refs/heads/main | <repo_name>dgnewsom/Unreal_Engine_C-<file_sep>/SimpleShooter/Source/SimpleShooter/SimpleShooterPlayerController.h
// Copyright <NAME> 2021
#pragma once
#include "CoreMinimal.h"
#include "GameFramework/PlayerController.h"
#include "SimpleShooterPlayerController.generated.h"
// Player controller for SimpleShooter: owns the in-game HUD widget's lifecycle
// and swaps it for a win/lose screen when the game mode ends the match.
UCLASS()
class SIMPLESHOOTER_API ASimpleShooterPlayerController : public APlayerController
{
GENERATED_BODY()
public:
// Removes the HUD, shows the win or lose screen, and schedules a level restart.
virtual void GameHasEnded(AActor* EndGameFocus, bool bIsWinner) override;
private:
// Widget classes to instantiate; assigned per-blueprint in the editor.
UPROPERTY(EditAnywhere, Category="UI Classes")
TSubclassOf<class UUserWidget> HUDClass;
UPROPERTY(EditAnywhere, Category="UI Classes")
TSubclassOf<class UUserWidget> LoseScreenClass;
UPROPERTY(EditAnywhere, Category="UI Classes")
TSubclassOf<class UUserWidget> WinScreenClass;
// Live HUD instance created in BeginPlay; may stay null if creation fails.
UPROPERTY()
UUserWidget* HUDScreen;
// Seconds to wait after the match ends before restarting the level.
UPROPERTY(EditAnywhere)
float RestartDelay = 5.f;
FTimerHandle RestartTimer;
protected:
// Creates and shows the HUD widget.
virtual void BeginPlay() override;
};
<file_sep>/SimpleShooter/Source/SimpleShooter/Gun.h
// Copyright <NAME> 2021
#pragma once
#include "CoreMinimal.h"
#include "GameFramework/Actor.h"
#include "Gun.generated.h"
// Hitscan weapon actor: a skeletal mesh under a scene root. Firing performs a
// line trace from the owning pawn's viewpoint and applies point damage.
UCLASS()
class SIMPLESHOOTER_API AGun : public AActor
{
GENERATED_BODY()
public:
// Sets default values for this actor's properties
AGun();
// Fires one shot: effects, trace, and damage application.
void PullTrigger();
protected:
// Called when the game starts or when spawned
virtual void BeginPlay() override;
public:
// Called every frame
virtual void Tick(float DeltaTime) override;
private:
UPROPERTY(VisibleAnywhere)
USceneComponent* GunRoot;
UPROPERTY(VisibleAnywhere)
USkeletalMeshComponent* GunMesh;
// Maximum hitscan distance in Unreal units.
UPROPERTY(EditAnywhere, Category = "Gun Stats")
float MaxRange = 1000.f;
// Damage dealt per hit.
UPROPERTY(EditAnywhere, Category = "Gun Stats")
float DamageAmount = 10.f;
UPROPERTY(EditAnywhere, Category = "Effects")
UParticleSystem* MuzzleFlash;
UPROPERTY(EditAnywhere, Category = "Effects")
UParticleSystem* HitEffect;
UPROPERTY(EditAnywhere, Category = "Sounds")
USoundBase* MuzzleSound;
UPROPERTY(EditAnywhere, Category = "Sounds")
USoundBase* HitSound;
// Line trace from the owner's viewpoint; HitRotation receives the reversed
// view direction (see Gun.cpp GunTrace). Returns true on a blocking hit.
bool GunTrace(FHitResult &Hit, FVector &HitRotation);
// Controller of the owning pawn, or nullptr when unowned.
AController* GetOwnerController() const;
};
<file_sep>/ToonTanks/Config/DefaultGame.ini
[/Script/EngineSettings.GeneralProjectSettings]
ProjectID=52B42AE740CC6830E80445A56D5FF608
ProjectName=Toon Tanks
CompanyName=<NAME>
CopyrightNotice=Copyright <NAME> 2021
[StartupActions]
bAddPacks=False
<file_sep>/SimpleShooter/Source/SimpleShooter/ShooterAIController.h
// Copyright <NAME> 2021
#pragma once
#include "CoreMinimal.h"
#include "AIController.h"
#include "ShooterAIController.generated.h"
/**
*
*/
// AI controller for enemy shooters: runs an editor-assigned behavior tree and
// exposes a death check used by the game mode / BT services.
UCLASS()
class SIMPLESHOOTER_API AShooterAIController : public AAIController
{
GENERATED_BODY()
public:
AShooterAIController();
virtual void Tick(float DeltaSeconds) override;
// True when the controlled pawn is dead (or gone) — definition not in view.
bool IsDead() const;
protected:
// Starts the behavior tree.
virtual void BeginPlay() override;
private:
// Behavior tree asset assigned in the editor.
UPROPERTY(EditAnywhere)
class UBehaviorTree* AIBehavior;
// Cached player pawn reference; presumably set in BeginPlay — confirm in .cpp.
APawn* PlayerPawn;
/*UPROPERTY(EditAnywhere)
float AcceptanceRadius = 300.f;*/
};
<file_sep>/BuildingEscape/Config/DefaultGame.ini
[/Script/EngineSettings.GeneralProjectSettings]
ProjectID=9C4B4B0A4C93C28A34396692B6D982BA
CopyrightNotice=Copyright - <NAME> 2021
ProjectName=Building Escape
<file_sep>/BullCowGame/Source/BullCowGame/BullCowCartridge.cpp
// Fill out your copyright notice in the Description page of Project Settings.
#include "BullCowCartridge.h"
#include "Misc/FileHelper.h"
#include "Misc/Paths.h"
#include "Math/UnrealMathUtility.h"
// Called once when the cartridge starts: load the word list from disk, then
// set up the first round.
void UBullCowCartridge::BeginPlay() // When the game starts
{
Super::BeginPlay();
PopulateWordLists();
SetupGame();
}
// Console input hook: after a finished round any input starts a new game;
// otherwise the entered text is treated as the player's guess.
void UBullCowCartridge::OnInput(const FString& PlayerInput) // When the player hits enter
{
    if (!bGameOver)
    {
        ProcessGuess(PlayerInput);
        return;
    }
    SetupGame();
}
// Evaluates one guess against HiddenWord.
// Check order: exact (case-insensitive) match wins; wrong length and repeated
// letters are rejected WITHOUT penalty; only a well-formed wrong guess costs a
// life and is scored as bulls/cows.
// NOTE(review): compares Guess.ToUpper() to HiddenWord directly, so it assumes
// HiddenWord is stored uppercase (SetupGame does this) — verify if that changes.
void UBullCowCartridge::ProcessGuess(const FString& Guess)
{
if (Guess.ToUpper() == HiddenWord)
{
PrintLine(TEXT("Correct! - The word was %s"), *HiddenWord);
EndGame();
return;
}
// Wrong length: prompt again, no life lost.
if (Guess.Len() != HiddenWord.Len())
{
PrintLine(TEXT("Please choose a %i letter word!"), HiddenWord.Len());
return;
}
// Guesses with repeated letters are rejected, no life lost.
if (!CheckIsogram(Guess))
{
PrintLine(TEXT("No repeating letters!\nTry Again"));
return;
}
// Valid but wrong: deduct a life, then report the score.
RemoveLife();
FBullCowCount Score = GetBullCows(Guess.ToUpper());
PrintLine(TEXT("You have %i Bulls and %i Cows."), Score.Bulls, Score.Cows);
}
const bool UBullCowCartridge::CheckIsogram(const FString& Word)
{
for (int32 index = 0; index < Word.Len(); index++)
{
for (int32 comparison = index+1; comparison < Word.Len(); comparison++)
{
if (Word[index] == Word[comparison])
{
return false;
}
}
}
return true;
}
// Starts a new round: clears the screen, picks a random hidden word (stored
// uppercase), grants two lives per letter, and prints the welcome/debug text.
// NOTE(review): indexes Words with no empty-list guard — if the word file
// failed to load this will crash; confirm the asset always ships.
void UBullCowCartridge::SetupGame()
{
ClearScreen();
// (This assignment is immediately overwritten below; it appears redundant.)
HiddenWord = "";
HiddenWord = Words[FMath::RandRange(0,Words.Num()-1)].ToUpper();
Lives = HiddenWord.Len()*2;
bGameOver = false;
DisplayWelcomeMessage();
DebugPrintouts();
}
// Prints the round intro: word length, starting lives and the input prompt.
const void UBullCowCartridge::DisplayWelcomeMessage()
{
PrintLine(TEXT("Welcome to Bull Cows!"));
PrintLine(TEXT("Guess the %i letter word!"), HiddenWord.Len());
PrintLine(TEXT("You have %i lives"), Lives);
PrintLine(TEXT("Type your guess.\nPress enter to continue..."));
}
// Marks the round finished; the next input restarts the game via OnInput().
void UBullCowCartridge::EndGame()
{
bGameOver = true;
PrintLine(TEXT("Press enter to play again"));
}
// Deducts one life; reports the remaining count, or ends the game at zero.
void UBullCowCartridge::RemoveLife()
{
    --Lives;
    if (Lives > 0)
    {
        DisplayLives();
    }
    else
    {
        GameOver();
    }
}
// Reports the remaining lives, choosing singular or plural phrasing.
const void UBullCowCartridge::DisplayLives()
{
if (Lives == 1)
{
PrintLine(TEXT("Incorrect Guess!\nYou have 1 life remaining"));
}
else
{
PrintLine(TEXT("Incorrect Guess!\nYou have %i lives remaining"), Lives);
}
}
// Reveals the hidden word and finishes the round.
void UBullCowCartridge::GameOver()
{
PrintLine(TEXT("Game Over - The correct word was %s"), *HiddenWord);
EndGame();
}
// Loads Content/WordsList/ukenglish.txt into Words (one word per line), then
// keeps only the playable subset (4-8 letter isograms, see GetValidWords).
void UBullCowCartridge::PopulateWordLists()
{
const FString WordListPath = FPaths::ProjectContentDir() / TEXT("WordsList/ukenglish.txt");
FFileHelper::LoadFileToStringArray(Words, *WordListPath);
Words = GetValidWords(Words);
}
// Filters WordList down to playable hidden words: isograms of 4-8 letters.
// Length is checked first so CheckIsogram only runs on plausible candidates.
TArray<FString> UBullCowCartridge::GetValidWords(const TArray<FString>& WordList)
{
    TArray<FString> Playable;
    for (const FString& Candidate : WordList)
    {
        const bool bLengthOk = Candidate.Len() >= 4 && Candidate.Len() <= 8;
        if (bLengthOk && CheckIsogram(Candidate))
        {
            Playable.Emplace(Candidate);
        }
    }
    return Playable;
}
// Development aid: prints the word-list size and reveals the hidden word.
// NOTE(review): this leaks the answer — presumably meant to be removed or
// gated behind a build flag before release.
const void UBullCowCartridge::DebugPrintouts()
{
PrintLine(TEXT("****Debug Info****"));
PrintLine(TEXT("Number of valid words = %i"), Words.Num());
PrintLine(TEXT("The hidden word is %s"), *HiddenWord);
}
// Scores Guess against HiddenWord: one Bull per position match, otherwise one
// Cow per guess letter found elsewhere in the word (break stops double counts;
// HiddenWord is an isogram so each letter matches at most once).
// Assumes Guess is already uppercased and the same length as HiddenWord —
// ProcessGuess guarantees both; confirm before adding new callers.
const FBullCowCount UBullCowCartridge::GetBullCows(const FString& Guess)
{
FBullCowCount Count;
for (int32 i = 0; i < Guess.Len(); i++)
{
// Exact positional match: a Bull, and this letter is done.
if (HiddenWord[i] == Guess[i])
{
Count.Bulls++;
continue;
}
// Otherwise scan the whole hidden word for the letter: a Cow if present.
for (int32 j = 0; j < HiddenWord.Len(); j++)
{
if (Guess[i] == HiddenWord[j])
{
Count.Cows++;
break;
}
}
}
return Count;
}
<file_sep>/SimpleShooter/Config/DefaultGame.ini
[/Script/EngineSettings.GeneralProjectSettings]
ProjectID=7D8B5EC1476625D96737D388D869D6A2
CopyrightNotice=Copyright <NAME> 2021
ProjectName=Simple Shooter
<file_sep>/BullCowGame/Source/BullCowGame/BullCowCartridge.h
// Fill out your copyright notice in the Description page of Project Settings.
#pragma once
#include "CoreMinimal.h"
#include "Console/Cartridge.h"
#include "BullCowCartridge.generated.h"
// Per-guess score: Bulls = letters in the correct position,
// Cows = correct letters in the wrong position (see GetBullCows).
struct FBullCowCount
{
int32 Bulls = 0;
int32 Cows = 0;
};
// Console "cartridge" implementing the Bulls & Cows word game: the player
// guesses an isogram; each wrong valid guess costs a life and is scored.
// NOTE(review): the `const` on the by-value return types below (const bool /
// const void / const FBullCowCount) has no effect; kept to match the .cpp.
UCLASS(ClassGroup=(Custom), meta=(BlueprintSpawnableComponent))
class BULLCOWGAME_API UBullCowCartridge : public UCartridge
{
GENERATED_BODY()
public:
virtual void BeginPlay() override;
virtual void OnInput(const FString& PlayerInput) override;
// Evaluates one guess: win, reject (length/repeats), or score and lose a life.
void ProcessGuess(const FString& Guess);
// True when Word has no repeated characters.
const bool CheckIsogram(const FString& Word);
// Starts a new round: random hidden word, 2 lives per letter.
void SetupGame();
const void DisplayWelcomeMessage();
void EndGame();
void RemoveLife();
const void DisplayLives();
void GameOver();
// Loads and filters the word list from Content/WordsList/ukenglish.txt.
void PopulateWordLists();
// Keeps only 4-8 letter isograms.
TArray<FString> GetValidWords(const TArray<FString>& WordList);
const void DebugPrintouts();
const FBullCowCount GetBullCows(const FString& Guess);
// Your declarations go below!
private:
TArray<FString> Words;
FString HiddenWord;
int32 Lives;
bool bGameOver;
};
<file_sep>/SimpleShooter/Source/SimpleShooter/SimpleShooterPlayerController.cpp
// Copyright <NAME> 2021
#include "SimpleShooterPlayerController.h"
#include "TimerManager.h"
#include "Blueprint/UserWidget.h"
// Called by the game mode when the match ends. Removes the in-game HUD, shows
// the win or lose screen, and schedules a full level restart after
// RestartDelay seconds.
// EndGameFocus - actor the camera should focus on (forwarded to Super).
// bIsWinner    - true when this controller's player won.
void ASimpleShooterPlayerController::GameHasEnded(AActor* EndGameFocus, bool bIsWinner)
{
    Super::GameHasEnded(EndGameFocus, bIsWinner);

    // Fix: BeginPlay only assigns HUDScreen when widget creation succeeds, so
    // guard against a null pointer instead of crashing at match end.
    if (HUDScreen != nullptr)
    {
        HUDScreen->RemoveFromViewport();
    }

    // Pick the end-of-match screen; either class may be left unset in the
    // editor, in which case CreateWidget yields nullptr and nothing is shown.
    TSubclassOf<UUserWidget> EndScreenClass = bIsWinner ? WinScreenClass : LoseScreenClass;
    UUserWidget* EndScreen = CreateWidget(this, EndScreenClass);
    if (EndScreen != nullptr)
    {
        EndScreen->AddToViewport();
    }

    GetWorldTimerManager().SetTimer(RestartTimer, this, &APlayerController::RestartLevel, RestartDelay);
}
// Creates the persistent HUD widget from the editor-assigned HUDClass and adds
// it to the viewport. HUDScreen stays null if creation fails (e.g. class unset).
void ASimpleShooterPlayerController::BeginPlay()
{
Super::BeginPlay();
HUDScreen = CreateWidget(this,HUDClass);
if(HUDScreen != nullptr)
{
HUDScreen->AddToViewport();
}
}
<file_sep>/SimpleShooter/Source/SimpleShooter/Gun.cpp
// Copyright <NAME> 2021
#include "Gun.h"
#include "Components/SkeletalMeshComponent.h"
#include "Particles/ParticleSystemComponent.h"
#include "Kismet/GameplayStatics.h"
#include "DrawDebugHelpers.h"
// Sets default values
// Constructor: builds the component hierarchy — a plain scene root with the
// skeletal gun mesh attached beneath it — and enables ticking.
AGun::AGun()
{
// Set this actor to call Tick() every frame. You can turn this off to improve performance if you don't need it.
PrimaryActorTick.bCanEverTick = true;
GunRoot = CreateDefaultSubobject<USceneComponent>(TEXT("Gun Root"));
SetRootComponent(GunRoot);
GunMesh = CreateDefaultSubobject<USkeletalMeshComponent>("Gun Mesh");
GunMesh->SetupAttachment(GunRoot);
}
// Fires one hitscan shot: plays muzzle flash/sound at the MuzzleFlashSocket,
// line-traces from the owner's viewpoint (GunTrace), and on a hit spawns
// impact effects and applies point damage to the hit actor.
void AGun::PullTrigger()
{
UGameplayStatics::SpawnEmitterAttached(MuzzleFlash, GunMesh, TEXT("MuzzleFlashSocket"));
UGameplayStatics::SpawnSoundAttached(MuzzleSound, GunMesh, TEXT("MuzzleFlashSocket"));
FHitResult Hit;
FVector ShotDirection;
bool bSuccessful = GunTrace(Hit,ShotDirection);
if(bSuccessful)
{
// ShotDirection points from the impact back toward the shooter (GunTrace
// negates the view direction), so the effects face the shooter.
UGameplayStatics::SpawnEmitterAtLocation(GetWorld(),HitEffect,Hit.Location,ShotDirection.Rotation());
UGameplayStatics::SpawnSoundAtLocation(GetWorld(),HitSound,Hit.Location,ShotDirection.Rotation());
AActor* HitActor = Hit.GetActor();
if(HitActor != nullptr)
{
// No instigator controller means no one to credit the damage to — skip.
AController * OwnerController = GetOwnerController();
if(OwnerController == nullptr){return;}
FPointDamageEvent DamageEvent(DamageAmount, Hit, ShotDirection, nullptr);
HitActor->TakeDamage(DamageAmount,DamageEvent,OwnerController,this);
}
}
}
// Called when the game starts or when spawned
void AGun::BeginPlay()
{
Super::BeginPlay();
}
// Called every frame
// Called every frame. Currently only forwards to the base class.
void AGun::Tick(float DeltaTime)
{
Super::Tick(DeltaTime);
}
// Performs the weapon line trace from the owner's viewpoint out to MaxRange on
// ECC_GameTraceChannel1 (a project-defined "bullet" channel — TODO confirm in
// DefaultEngine.ini). The gun and its owner are excluded from the trace.
// Hit           [out] - the blocking hit; valid only when the function returns true.
// ShotDirection [out] - the REVERSED view direction (points back at the shooter),
//                       used by PullTrigger to orient impact effects.
// Returns false when the gun has no owning controller or nothing was hit.
bool AGun::GunTrace(FHitResult& Hit, FVector& ShotDirection)
{
AController * OwnerController = GetOwnerController();
if(OwnerController == nullptr){return false;}
FRotator PlayerViewRotation;
FVector PlayerViewLocation;
OwnerController->GetPlayerViewPoint(PlayerViewLocation,PlayerViewRotation);
ShotDirection = -PlayerViewRotation.Vector();
FVector End = PlayerViewLocation + PlayerViewRotation.Vector() * MaxRange;
FCollisionQueryParams Params;
Params.AddIgnoredActor(this);
Params.AddIgnoredActor(GetOwner());
return GetWorld()->LineTraceSingleByChannel(Hit,PlayerViewLocation,End,ECC_GameTraceChannel1,Params);
}
// Resolves the controller driving the pawn that owns this gun.
// Returns nullptr when the gun is unowned or its owner is not a pawn.
AController* AGun::GetOwnerController() const
{
    APawn* OwningPawn = Cast<APawn>(GetOwner());
    return OwningPawn == nullptr ? nullptr : OwningPawn->GetController();
}
<file_sep>/TripleX/tripleX.cpp
#include <iostream>
#include <ctime>
// Prints the level banner and code prompt for the given difficulty level.
void PrintIntroduction(int DifficultyLevel)
{
    std::cout << "\nYou are a secret agent breaking into a level " << DifficultyLevel
              << " secure server...\n"
              << "Enter the correct code to continue...\n";
}
// Runs one level: prints the intro, generates a 3-number code where each
// number is in [DifficultyLevel, 2*DifficultyLevel - 1], shows the code's sum
// and product as clues, reads the player's three guesses, and reports success.
// Returns true when the guess matches the clues.
// NOTE(review): ANY triple with the same sum AND product as the code is
// accepted — only the clues are checked, which appears to be the game's rule.
// NOTE(review): non-numeric input leaves the Guess variables uninitialized and
// puts std::cin into a fail state; main() clears the stream after each round.
bool PlayGame(int DifficultyLevel)
{
PrintIntroduction(DifficultyLevel);
//Declare code numbers
const int CodeA = rand() % DifficultyLevel + DifficultyLevel, CodeB = rand() % DifficultyLevel + DifficultyLevel, CodeC = rand() % DifficultyLevel + DifficultyLevel;
//Calculate sum and product
int CodeSum = CodeA + CodeB + CodeC;
int CodeProduct = CodeA * CodeB * CodeC;
//Print CodeSum and CodeProduct to console
std::cout << "\n\tThere are 3 numbers in the code\n";
std::cout << "\tThe sum of the numbers in the code is " << CodeSum << "\n";
std::cout << "\tThe product of the numbers in the code is " << CodeProduct << "\n";
//Get player guess
int GuessA, GuessB, GuessC;
std::cout << "\nEnter code (three digits separated by spaces)\n<<<< ";
std::cin >> GuessA;
std::cin >> GuessB;
std::cin >> GuessC;
//Calculate player guess sum and product
int GuessSum = GuessA + GuessB + GuessC;
int GuessProduct = GuessA * GuessB * GuessC;
//Compare player guess to code
if(GuessSum == CodeSum && GuessProduct == CodeProduct)
{
std::cout << "\nCongratulations you're in!\n";
return true;
}
else
{
std::cout << "\nUnlucky - Try Again!\n";
return false;
}
}
// Runs levels 1..MaxDifficulty, advancing only when the player cracks the
// level's code, then prints the victory message.
int main()
{
    srand(time(NULL)); // seed once so each run generates different codes

    int Difficulty = 1;
    const int MaxDifficulty = 9;

    while (Difficulty <= MaxDifficulty)
    {
        bool bLevelComplete = PlayGame(Difficulty);

        // Recover from bad input: clear any fail state and discard the rest of
        // the line. (Fix: the previous bare ignore() dropped only ONE
        // character, leaving stray input buffered after a non-numeric entry.)
        std::cin.clear();
        std::cin.ignore(1024, '\n');

        if (bLevelComplete)
        {
            ++Difficulty;
        }
    }

    std::cout << "Congratulations you have completed the game!";
    return 0;
}
| 5cfc51db04e750dd33c0953a72075fdef528834a | [
"C++",
"INI"
] | 11 | C++ | dgnewsom/Unreal_Engine_C- | a3a9cdbae25b32a963263a14ee5b297dcf6c6ddf | 4db15d4123eb13cf0503abaaff9d57028d41a079 |
refs/heads/master | <repo_name>snehasishmohanta/finding_the_query_whose_edit_distance_is_less-than_2<file_sep>/gettig_data.py
"""a program to find all the query whose edit distance is less than 2
Input:-
00c1ae5c-b881-404d-a041-148c4a4addfa , nicer dicer plus chopper
00c1ae5c-b881-404d-a041-148c4a4addfa , nicer dicer plus chopper
00c1ae5c-b881-404d-a041-148c4a4addfa , nicer dicer plus chopper
0d7dbb56-eafa-421a-9a15-6bf794441727 , watches men
0d7dbb56-eafa-421a-9a15-6bf794441727 , watches men&WOMEN
0d7dbb56-eafa-421a-9a15-6bf794441727 , watches men&WOMEN
0d7dbb56-eafa-421a-9a15-6bf794441727 , watches men
0d7dbb56-eafa-421a-9a15-6bf794441727 , watches men&WOMEN
0d7dbb56-eafa-421a-9a15-6bf794441727 , watches men&WOMEN
0d7dbb56-eafa-421a-9a15-6bf794441727 , watches men&WOMEN
0d7dbb56-eafa-421a-9a15-6bf794441727 , watches men
0d7dbb56-eafa-421a-9a15-6bf794441727 , watches men&WOMEN
0d7dbb56-eafa-421a-9a15-6bf794441727 , watches men&WOMEN
0d7dbb56-eafa-421a-9a15-6bf794441727 , watches men&WOMEN
0d7dbb56-eafa-421a-9a15-6bf794441727 , watches men&WOMEN
0d7dbb56-eafa-421a-9a15-6bf794441727 , watches men&WOMEN
1a6d56dd-8883-464c-9b96-de460c507ab8 , kalamkari
1c5df50a-7684-4572-b1c2-278b44a292f2 , sparx
1c5df50a-7684-4572-b1c2-278b44a292f2 , sparx
1cb587b1-268c-4662-9c61-e2437d3cb338 , mamy poko pants xl
1d7eb183-7627-41d8-9eec-cb960d8030c2 , jbl flip 2
1e3afa6e-bbab-49cb-dbe8-3a3a979b16d9 , beauty product
1f6a819c-8af9-4932-d440-b7fa821d3901 , nike dry fit t shirt
1f6a819c-8af9-4932-d440-b7fa821d3901 , nike dry fit t shirt
1f6a819c-8af9-4932-d440-b7fa821d3901 , nike dry 90 t shirt
1f6ef121-f462-4be2-f164-d12fc997938f , spy pen
1f6ef121-f462-4be2-f164-d12fc997938f , spy pen
1f6ef121-f462-4be2-f164-d12fc997938f , spy pen
1fec318b-25a9-4088-aff8-d69d163cc7d4 , Lcd display for micromax bolt A27
1fec318b-25a9-4088-aff8-d69d163cc7d4 , Lcd display for micromax bolt A27
2b97faeb-ef31-45ae-bd58-84b4708f82e2 , bean bags filled with beans
2dbdd57d-4c34-42c5-f93e-17831fce72b2 , vans shoes
2e5e1f5e-bc22-4c81-cc93-18b161658181 , hand tools
2e5e1f5e-bc22-4c81-cc93-18b161658181 , hand tools
2fb09a5e-a73c-4f6b-b6bf-34f594b4b4e6 , backpacks
3e2a5027-57db-44a6-ddac-1754dd92714d , mamy poko pants medium
3e2a5027-57db-44a6-ddac-1754dd92714d , mamy poko pants medium
3e2a5027-57db-44a6-ddac-1754dd92714d , mamy poko pants medium
3e2a5027-57db-44a6-ddac-1754dd92714d , mamy poko pants medium
3e2a5027-57db-44a6-ddac-1754dd92714d , mamy poko pants medium
3e2a5027-57db-44a6-ddac-1754dd92714d , mamy poko pants medium
3e2a5027-57db-44a6-ddac-1754dd92714d , mamy poko pants medium
3e2a5027-57db-44a6-ddac-1754dd92714d , mamy poko pants medium
3e2a5027-57db-44a6-ddac-1754dd92714d , mamy poko pants medium
3fa51f52-9379-4c7c-83cc-e7e70c251484 , iphone 6 64 gb
3fa51f52-9379-4c7c-83cc-e7e70c251484 , iphone 6 64 gb
3fa51f52-9379-4c7c-83cc-e7e70c251484 , iphone 6 64 gb
3fa51f52-9379-4c7c-83cc-e7e70c251484 , iphone 6 64 gb
3fa51f52-9379-4c7c-83cc-e7e70c251484 , iphone 6 64 gb
3fa51f52-9379-4c7c-83cc-e7e70c251484 , iphone 6 64 gb
3fa51f52-9379-4c7c-83cc-e7e70c251484 , iphone 6 64 gb
03fcfd37-7ab5-4acd-fd47-7df412c10ad8 , xiaomi redme 1s screenguard
3fd96987-e2f4-4596-8731-e6bed9163c1f , cash on delivery sun glasses medium size
3fd96987-e2f4-4596-8731-e6bed9163c1f , cash on delivery sun glasses
4cebccb5-12eb-48f8-8966-321a65c91fa4 , reebok shoe
4cebccb5-12eb-48f8-8966-321a65c91fa4 , easytone
4cebccb5-12eb-48f8-8966-321a65c91fa4 , reebok
4cebccb5-12eb-48f8-8966-321a65c91fa4 , reebok
4cebccb5-12eb-48f8-8966-321a65c91fa4 , reebok
4d67373f-ca45-4137-efd0-0da69c78123d , movie
4d67373f-ca45-4137-efd0-0da69c78123d , movie
4d67373f-ca45-4137-efd0-0da69c78123d , bookmy show
4d67373f-ca45-4137-efd0-0da69c78123d , bookmy show
4d67373f-ca45-4137-efd0-0da69c78123d , book my show
4d67373f-ca45-4137-efd0-0da69c78123d , book my show
4d67373f-ca45-4137-efd0-0da69c78123d , book my show
4d67373f-ca45-4137-efd0-0da69c78123d , book my show
4d67373f-ca45-4137-efd0-0da69c78123d , book my show
4f3605f1-bbbe-441a-96d9-2326f7bac50a , bedsheets & pillow
4f3605f1-bbbe-441a-96d9-2326f7bac50a , bedsheets & pillow
4f3605f1-bbbe-441a-96d9-2326f7bac50a , bedsheets & pillow
4f3605f1-bbbe-441a-96d9-2326f7bac50a , bedsheets & pillow
5ad7ad39-75a7-483e-a223-c200e38ae92f , Friends adult diapers - large
5ad7ad39-75a7-483e-a223-c200e38ae92f , Friends adult diapers - large
5ad7ad39-75a7-483e-a223-c200e38ae92f , Friends adult diapers - large
5ad7ad39-75a7-483e-a223-c200e38ae92f , Friends adult diapers - large
5ad7ad39-75a7-483e-a223-c200e38ae92f , Friends adult diapers - large
5ad7ad39-75a7-483e-a223-c200e38ae92f , Friends adult diapers - large
5ca5e493-ead4-459f-efc8-9085703d197a , philips bread toaster
5ca5e493-ead4-459f-efc8-9085703d197a , philips bread toaster
5ca5e493-ead4-459f-efc8-9085703d197a , philips bread toaster
5ca5e493-ead4-459f-efc8-9085703d197a , philips bread toaster
5ca5e493-ead4-459f-efc8-9085703d197a , philips bread toaster
5ca5e493-ead4-459f-efc8-9085703d197a , philips bread toaster
5ca5e493-ead4-459f-efc8-9085703d197a , philips bread toaster
5ca5e493-ead4-459f-efc8-9085703d197a , philips bread toaster
5ca5e493-ead4-459f-efc8-9085703d197a , philips bread toaster
5ca5e493-ead4-459f-efc8-9085703d197a , philips bread toaster
5ca5e493-ead4-459f-efc8-9085703d197a , philips bread toaster
5ca5e493-ead4-459f-efc8-9085703d197a , philips bread toaster
5ca5e493-ead4-459f-efc8-9085703d197a , philips bread toaster
5ca5e493-ead4-459f-efc8-9085703d197a , philips bread toaster
5ca5e493-ead4-459f-efc8-9085703d197a , philips bread toaster
5ca5e493-ead4-459f-efc8-9085703d197a , philips bread toaster
5ca5e493-ead4-459f-efc8-9085703d197a , philips bread toaster
5ca5e493-ead4-459f-efc8-9085703d197a , philips bread toaster
5ca5e493-ead4-459f-efc8-9085703d197a , containers
5ca5e493-ead4-459f-efc8-9085703d197a , containers
5ca5e493-ead4-459f-efc8-9085703d197a , containers
5ca5e493-ead4-459f-efc8-9085703d197a , containers
5ca5e493-ead4-459f-efc8-9085703d197a , containers
5ca5e493-ead4-459f-efc8-9085703d197a , containers
5ca5e493-ead4-459f-efc8-9085703d197a , containers
5ca5e493-ead4-459f-efc8-9085703d197a , containers
5ca5e493-ead4-459f-efc8-9085703d197a , containers
5ca5e493-ead4-459f-efc8-9085703d197a , containers
5ca5e493-ead4-459f-efc8-9085703d197a , containers
5ca5e493-ead4-459f-efc8-9085703d197a , containers
5ca5e493-ead4-459f-efc8-9085703d197a , containers
5ca5e493-ead4-459f-efc8-9085703d197a , containers
5ca5e493-ead4-459f-efc8-9085703d197a , containers
5ca5e493-ead4-459f-efc8-9085703d197a , containers
05cafdee-69de-4491-d879-90c82b6013f1 , watch strap
05cafdee-69de-4491-d879-90c82b6013f1 , watch strap
05cafdee-69de-4491-d879-90c82b6013f1 , watch strap
5cd32745-3444-49d1-86d1-6b0b2998cc33 , led light
5cd32745-3444-49d1-86d1-6b0b2998cc33 , led light
5cd32745-3444-49d1-86d1-6b0b2998cc33 , keyboard
6ae7e2ef-7dab-4d51-8962-aa4b0696bf02 , gloves
6cf56396-6916-43bd-c684-ddca85658724 , emergency led
6cf56396-6916-43bd-c684-ddca85658724 , emergency led
6cf56396-6916-43bd-c684-ddca85658724 , emergency led
07a1794d-cf8e-410f-b4fb-8ba579460674 , Leather Money Clip Wallet
7a384803-a4fe-489e-8825-669b861ae969 , watches for men
7a384803-a4fe-489e-8825-669b861ae969 , watches for men
7a384803-a4fe-489e-8825-669b861ae969 , gym set
7a384803-a4fe-489e-8825-669b861ae969 , gym set
7a384803-a4fe-489e-8825-669b861ae969 , gym set
7a384803-a4fe-489e-8825-669b861ae969 , gym set
7a384803-a4fe-489e-8825-669b861ae969 , gym set
7a384803-a4fe-489e-8825-669b861ae969 , watches for men
7a384803-a4fe-489e-8825-669b861ae969 , watches for men
7a384803-a4fe-489e-8825-669b861ae969 , watches for men
7bf1c1a3-1e0c-4ab9-aff4-5a03ea03b05e , toys
7bf1c1a3-1e0c-4ab9-aff4-5a03ea03b05e , toys
7bf1c1a3-1e0c-4ab9-aff4-5a03ea03b05e , toys
7daa603c-e741-4ee9-b994-daa04876d8e7 , hd video grapfic card
7e969193-8985-4071-ced4-69be1a2c943a , everready led 7w
7e969193-8985-4071-ced4-69be1a2c943a , everready led 7w
7e969193-8985-4071-ced4-69be1a2c943a , everready led 7w
7e969193-8985-4071-ced4-69be1a2c943a , everready led 7w
7e969193-8985-4071-ced4-69be1a2c943a , everready led
7ee41a6c-0a70-4b83-f945-385761835b00 , sandwich
7ee41a6c-0a70-4b83-f945-385761835b00 , sandwich
7ee41a6c-0a70-4b83-f945-385761835b00 , sandwich
7ee41a6c-0a70-4b83-f945-385761835b00 , sandwich
7ee41a6c-0a70-4b83-f945-385761835b00 , sandwich
7ee41a6c-0a70-4b83-f945-385761835b00 , tie and pocket square
7ee41a6c-0a70-4b83-f945-385761835b00 , tie and pocket square
7ee41a6c-0a70-4b83-f945-385761835b00 , ties
7ee41a6c-0a70-4b83-f945-385761835b00 , ties
7fda21a5-c432-4d95-f93d-6275b68bb396 , men trunks
7fda21a5-c432-4d95-f93d-6275b68bb396 , men trunks
7fda21a5-c432-4d95-f93d-6275b68bb396 , men trunks
7fda21a5-c432-4d95-f93d-6275b68bb396 , 8 gb pen drive
7fda21a5-c432-4d95-f93d-6275b68bb396 , 16 gb pen drive
7fda21a5-c432-4d95-f93d-6275b68bb396 , pack of trunks
7fda21a5-c432-4d95-f93d-6275b68bb396 , pack of trunks
7fda21a5-c432-4d95-f93d-6275b68bb396 , pack of trunks
7fda21a5-c432-4d95-f93d-6275b68bb396 , pack of trunks
7fda21a5-c432-4d95-f93d-6275b68bb396 , pack of trunks
7fda21a5-c432-4d95-f93d-6275b68bb396 , pack of trunks
7fda21a5-c432-4d95-f93d-6275b68bb396 , pack of trunks
7fda21a5-c432-4d95-f93d-6275b68bb396 , pack of trunks
7fda21a5-c432-4d95-f93d-6275b68bb396 , pack of trunks
7fda21a5-c432-4d95-f93d-6275b68bb396 , pack of trunks
7fda21a5-c432-4d95-f93d-6275b68bb396 , mens boxers trunks underwear
08a90f6f-84b2-4829-b212-6a503ffad2ac , samsung galaxy s3 neo
08a90f6f-84b2-4829-b212-6a503ffad2ac , samsung galaxy s3 neo
08a90f6f-84b2-4829-b212-6a503ffad2ac , samsung galaxy s3 neo
08a90f6f-84b2-4829-b212-6a503ffad2ac , samsung galaxy s3 neo
08a90f6f-84b2-4829-b212-6a503ffad2ac , samsung galaxy s3 neo
08a90f6f-84b2-4829-b212-6a503ffad2ac , samsung galaxy s3 neo
08a90f6f-84b2-4829-b212-6a503ffad2ac , samsung galaxy s3 neo
08a90f6f-84b2-4829-b212-6a503ffad2ac , samsung galaxy s3 neo
08a90f6f-84b2-4829-b212-6a503ffad2ac , samsung galaxy s3 neo screen
8bbcbbd8-0dd2-4c6a-b5a2-0b31dcf557c0 , beats headphones
8bbcbbd8-0dd2-4c6a-b5a2-0b31dcf557c0 , beats headphones
8bbcbbd8-0dd2-4c6a-b5a2-0b31dcf557c0 , beats headphones
8bbcbbd8-0dd2-4c6a-b5a2-0b31dcf557c0 , beats headphones
8bbcbbd8-0dd2-4c6a-b5a2-0b31dcf557c0 , beats headphones
8bbcbbd8-0dd2-4c6a-b5a2-0b31dcf557c0 , beats headphones
8bbcbbd8-0dd2-4c6a-b5a2-0b31dcf557c0 , beats headphones
8bbcbbd8-0dd2-4c6a-b5a2-0b31dcf557c0 , beats headphones
8bbcbbd8-0dd2-4c6a-b5a2-0b31dcf557c0 , beats headphones
8bbcbbd8-0dd2-4c6a-b5a2-0b31dcf557c0 , beats headphones
8bbcbbd8-0dd2-4c6a-b5a2-0b31dcf557c0 , beats headphones
8bbcbbd8-0dd2-4c6a-b5a2-0b31dcf557c0 , beats headphones
8d928537-f57e-423d-eb26-5d1accaa4dd7 , watch
8d928537-f57e-423d-eb26-5d1accaa4dd7 , watch
8d928537-f57e-423d-eb26-5d1accaa4dd7 , watch
8d928537-f57e-423d-eb26-5d1accaa4dd7 , watch
8d928537-f57e-423d-eb26-5d1accaa4dd7 , watch
8d928537-f57e-423d-eb26-5d1accaa4dd7 , watch
8e2d19d7-12e1-48ab-a78f-b6e32aef1763 , hoodies
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , formal shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , formal shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , formal shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , formal shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , sports shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , sports shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , sports shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , sports shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , sports shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , sports shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , sports shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , sports shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , sports shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , sports shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , sports shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , sports shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , sports shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , sports shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , sports shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , sports shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , sports shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , sports shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , sports shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , sports shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , running shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , running shoe
8e841f9b-8d5c-4fad-bb1a-a7c3cbed9d30 , running shoe
08e24705-15ee-4cb9-8bf5-76b94e77b230 , Wrist Wraps
8f9affe7-37ed-4ee7-a969-5332a7ec0a52 , bong
8f9affe7-37ed-4ee7-a969-5332a7ec0a52 , bong
8f9affe7-37ed-4ee7-a969-5332a7ec0a52 , bong
8fef16b0-75d5-49da-8aa9-68e121161da9 , powerbank
8fef16b0-75d5-49da-8aa9-68e121161da9 , powerbank
9b98a9be-bb63-4310-87d5-592a66ae602a , leggings
9b98a9be-bb63-4310-87d5-592a66ae602a , leggings
9b98a9be-bb63-4310-87d5-592a66ae602a , jeggings
9f58795f-c673-49ca-adc5-f88e3ed2e143 , sparx sandals for men
9f58795f-c673-49ca-adc5-f88e3ed2e143 , sparx sandals for men
9f58795f-c673-49ca-adc5-f88e3ed2e143 , sparx sandals for men
9f58795f-c673-49ca-adc5-f88e3ed2e143 , sparx sandals for men
9f58795f-c673-49ca-adc5-f88e3ed2e143 , sparx sandals for men
9f58795f-c673-49ca-adc5-f88e3ed2e143 , sparx sandals for men
9f58795f-c673-49ca-adc5-f88e3ed2e143 , sparx sandals for men
9f58795f-c673-49ca-adc5-f88e3ed2e143 , sparx sandals for men
9f58795f-c673-49ca-adc5-f88e3ed2e143 , sparx sandals for men
9f58795f-c673-49ca-adc5-f88e3ed2e143 , sparx sandals for men
25dde496-a467-435b-f8b3-5ed91b4ecb1c , heater
25dde496-a467-435b-f8b3-5ed91b4ecb1c , heater
25dde496-a467-435b-f8b3-5ed91b4ecb1c , heater
31da20a1-b18f-4e7f-8e62-9cfd7ef9383f , XOLO A600 white cover
31da20a1-b18f-4e7f-8e62-9cfd7ef9383f , XOLO A600 white cover
41bcffb5-3f89-43e9-ff88-25f254ed9257 , facial kit
41bcffb5-3f89-43e9-ff88-25f254ed9257 , facial kit
41bcffb5-3f89-43e9-ff88-25f254ed9257 , facial kit
41bcffb5-3f89-43e9-ff88-25f254ed9257 , facial kit
41bcffb5-3f89-43e9-ff88-25f254ed9257 , facial kit
49c6d01e-43da-4da5-aff0-e3a70897adc2 , duffle bags
49c6d01e-43da-4da5-aff0-e3a70897adc2 , duffle bags
051f0a61-da0a-46cc-fb9c-94003c19b5ab , a116 black cover and screen guard
051f0a61-da0a-46cc-fb9c-94003c19b5ab , a116 black cover and screen guard
54b1867c-b9da-4943-f6bc-ea569e9ce4d8 , women solid white kurta
54b1867c-b9da-4943-f6bc-ea569e9ce4d8 , women solid white kurta
54b1867c-b9da-4943-f6bc-ea569e9ce4d8 , women solid white kurta
54b1867c-b9da-4943-f6bc-ea569e9ce4d8 , women solid white kurta
54b1867c-b9da-4943-f6bc-ea569e9ce4d8 , women solid white kurta
57c84ee6-4e1d-4dcc-a929-63204c7d6c79 , solar street light
57c84ee6-4e1d-4dcc-a929-63204c7d6c79 , solar street light
57c84ee6-4e1d-4dcc-a929-63204c7d6c79 , solar street light
57c84ee6-4e1d-4dcc-a929-63204c7d6c79 , solar light
57c84ee6-4e1d-4dcc-a929-63204c7d6c79 , solar light
57c84ee6-4e1d-4dcc-a929-63204c7d6c79 , solar light
57c84ee6-4e1d-4dcc-a929-63204c7d6c79 , solar light
57c84ee6-4e1d-4dcc-a929-63204c7d6c79 , solar light
57c84ee6-4e1d-4dcc-a929-63204c7d6c79 , solar light
57c84ee6-4e1d-4dcc-a929-63204c7d6c79 , solar light
57c84ee6-4e1d-4dcc-a929-63204c7d6c79 , solar light
57c84ee6-4e1d-4dcc-a929-63204c7d6c79 , solar light
57c84ee6-4e1d-4dcc-a929-63204c7d6c79 , 3.7 volt rechargable battery
57c84ee6-4e1d-4dcc-a929-63204c7d6c79 , 3.7 volt rechargable battery
57c84ee6-4e1d-4dcc-a929-63204c7d6c79 , 3.7 volt rechargable battery
60bc16d3-3173-4673-b6ee-e137e6225854 , PHILIPS DSP 30U 5.1 SPEAKER SYSTEM
60bc16d3-3173-4673-b6ee-e137e6225854 , PHILIPS DSP 30U 5.1 SPEAKER SYSTEM
60bc16d3-3173-4673-b6ee-e137e6225854 , stainless steel water bottle
60bc16d3-3173-4673-b6ee-e137e6225854 , stainless steel water bottle
60bc16d3-3173-4673-b6ee-e137e6225854 , stainless steel water bottle
60bc16d3-3173-4673-b6ee-e137e6225854 , stainless steel water bottle
61e8fd72-ae1f-4818-d13a-ee8125c0925d , quadcopter
61e8fd72-ae1f-4818-d13a-ee8125c0925d , quadcopter
61e8fd72-ae1f-4818-d13a-ee8125c0925d , drone
61e8fd72-ae1f-4818-d13a-ee8125c0925d , drone
61e8fd72-ae1f-4818-d13a-ee8125c0925d , quadcopter
62f3b09c-36e7-43f2-abb2-7aaceb00de3c , earphones
62f3b09c-36e7-43f2-abb2-7aaceb00de3c , earphones
62f3b09c-36e7-43f2-abb2-7aaceb00de3c , earphones
62f3b09c-36e7-43f2-abb2-7aaceb00de3c , earphones
62f3b09c-36e7-43f2-abb2-7aaceb00de3c , sony earphones
62f3b09c-36e7-43f2-abb2-7aaceb00de3c , sony earphones
62f3b09c-36e7-43f2-abb2-7aaceb00de3c , sony earphones
62f3b09c-36e7-43f2-abb2-7aaceb00de3c , xbox gaming console for pc
62f3b09c-36e7-43f2-abb2-7aaceb00de3c , xbox gaming console for pc
62f3b09c-36e7-43f2-abb2-7aaceb00de3c , xbox gaming console for pc
72bd0d73-b62f-4360-dd56-0f4344e498f8 , adidas socks-pack of 3
72bd0d73-b62f-4360-dd56-0f4344e498f8 , jockey briefs
72bd0d73-b62f-4360-dd56-0f4344e498f8 , briefs for men
72bd0d73-b62f-4360-dd56-0f4344e498f8 , briefs for men
76f17c2d-7cbd-4cc1-bc04-ff164abfa3d3 , Vibrandz Creation Iflashdrive 16gb For Iphone And Ipad (External Drive)
78ee6f31-806e-416c-9bb6-6118e1b08cf6 , car charger
83e9d7a9-2213-4ab7-8043-5e4daa066542 , sexual wellness
83e9d7a9-2213-4ab7-8043-5e4daa066542 , sexual wellness
83e9d7a9-2213-4ab7-8043-5e4daa066542 , sexual wellness
83e9d7a9-2213-4ab7-8043-5e4daa066542 , lamborghini
83f9d42d-7004-43eb-efec-82bddbfdf556 , hp laptop
85d0a675-3288-4495-9a95-bf7e1e98deaa , hand tools
85d0a675-3288-4495-9a95-bf7e1e98deaa , hand tools
85d0a675-3288-4495-9a95-bf7e1e98deaa , hand tools
89c7b43e-1ef1-4ee5-8add-1771c7e9debf , moong dal
89c7b43e-1ef1-4ee5-8add-1771c7e9debf , moong dal
89c7b43e-1ef1-4ee5-8add-1771c7e9debf , moong
89c7b43e-1ef1-4ee5-8add-1771c7e9debf , namkeen
89c7b43e-1ef1-4ee5-8add-1771c7e9debf , bikano
89d0c6d0-2b05-4db4-accb-4a363653149c , mercury aviator sunglass
89d0c6d0-2b05-4db4-accb-4a363653149c , mercury aviator sunglass
89d0c6d0-2b05-4db4-accb-4a363653149c , mercury aviator sunglass
89d0c6d0-2b05-4db4-accb-4a363653149c , mercury aviator sunglass
89d0c6d0-2b05-4db4-accb-4a363653149c , mercury aviator sunglass
89d0c6d0-2b05-4db4-accb-4a363653149c , mercury aviator sunglass
89d0c6d0-2b05-4db4-accb-4a363653149c , mercury aviator sunglass
89d0c6d0-2b05-4db4-accb-4a363653149c , mercury blue aviator sunglass
89d0c6d0-2b05-4db4-accb-4a363653149c , mercury blue aviator sunglass
89d0c6d0-2b05-4db4-accb-4a363653149c , mercury blue aviator sunglass
89d0c6d0-2b05-4db4-accb-4a363653149c , mercury blue aviator sunglass
89d0c6d0-2b05-4db4-accb-4a363653149c , mercury blue aviator sunglass
89d0c6d0-2b05-4db4-accb-4a363653149c , mercury blue aviator sunglass
89d0c6d0-2b05-4db4-accb-4a363653149c , mercury blue aviator sunglass
89d0c6d0-2b05-4db4-accb-4a363653149c , mercury blue aviator sunglass
89d0c6d0-2b05-4db4-accb-4a363653149c , mercury blue aviator sunglass
90f670c1-ca0a-4c37-a8c5-20f31854e8be , lpg hose
99ebc94e-756b-47ee-bd78-305a4e72bb50 , jbl csx 696
297e02e3-a40d-4e0c-82aa-ffba6de9a550 , MCR SLEEPER
297e02e3-a40d-4e0c-82aa-ffba6de9a550 , MCR SLEEPER FOR MENS
316db6b1-d28f-4c31-9776-5cb2533ec1a8 , king bandana
567bbba3-dbd4-4f7c-b634-820bbd23cbbd , SAMSUNG CHARGER
567bbba3-dbd4-4f7c-b634-820bbd23cbbd , SAMSUNG CHARGER
567bbba3-dbd4-4f7c-b634-820bbd23cbbd , SAMSUNG CHARGER
567bbba3-dbd4-4f7c-b634-820bbd23cbbd , SAMSUNG CHARGER
567bbba3-dbd4-4f7c-b634-820bbd23cbbd , SAMSUNG CHARGER
726c98da-bac0-432e-d6c4-595da9daffd8 , dlink 4g lte wilreless router cdma
728ab6a6-8835-4a04-a732-6c2c901f8317 , dell 3531
728ab6a6-8835-4a04-a732-6c2c901f8317 , dell 3531
765c5626-e4f4-44a6-ed14-b39d94f8f8ac , apple 4s back cover
765c5626-e4f4-44a6-ed14-b39d94f8f8ac , apple 4s back cover black
765c5626-e4f4-44a6-ed14-b39d94f8f8ac , apple 4s back cover black
765c5626-e4f4-44a6-ed14-b39d94f8f8ac , apple 4s back cover black
765c5626-e4f4-44a6-ed14-b39d94f8f8ac , apple 4s back cover black
775f1159-e310-42b6-d3b0-5ea3fb959568 , case cover for xperia zr
775f1159-e310-42b6-d3b0-5ea3fb959568 , printed case cover for xperia zr
775f1159-e310-42b6-d3b0-5ea3fb959568 , printed case cover for xperia zr
775f1159-e310-42b6-d3b0-5ea3fb959568 , printed back case for xperia zr
775f1159-e310-42b6-d3b0-5ea3fb959568 , printed back case for xperia zr
775f1159-e310-42b6-d3b0-5ea3fb959568 , printed back case for xperia zr
775f1159-e310-42b6-d3b0-5ea3fb959568 , printed back case for xperia zr
775f1159-e310-42b6-d3b0-5ea3fb959568 , printed back case for xperia zr
775f1159-e310-42b6-d3b0-5ea3fb959568 , printed back case for xperia zr
775f1159-e310-42b6-d3b0-5ea3fb959568 , printed back case for xperia zr
775f1159-e310-42b6-d3b0-5ea3fb959568 , printed back case for xperia zr
775f1159-e310-42b6-d3b0-5ea3fb959568 , printed back case for xperia zr
775f1159-e310-42b6-d3b0-5ea3fb959568 , printed back case for xperia zr
775f1159-e310-42b6-d3b0-5ea3fb959568 , printed back case for xperia zr
775f1159-e310-42b6-d3b0-5ea3fb959568 , printed back case for xperia zr
775f1159-e310-42b6-d3b0-5ea3fb959568 , printed back case for xperia zr
775f1159-e310-42b6-d3b0-5ea3fb959568 , printed backcase for xperia L
775f1159-e310-42b6-d3b0-5ea3fb959568 , printed backcase for xperia L
775f1159-e310-42b6-d3b0-5ea3fb959568 , printed backcase for xperia L
775f1159-e310-42b6-d3b0-5ea3fb959568 , printed backcase for xperia L
775f1159-e310-42b6-d3b0-5ea3fb959568 , printed backcase for xperia zr
775f1159-e310-42b6-d3b0-5ea3fb959568 , printed backcase for xperia zr
775f1159-e310-42b6-d3b0-5ea3fb959568 , printed backcase for xperia zr
800e6c4c-8557-4ecd-c3d8-78738479690f , prices of slimming capsule
800e6c4c-8557-4ecd-c3d8-78738479690f , prices of slimming capsule
800e6c4c-8557-4ecd-c3d8-78738479690f , prices of slimming capsule
800e6c4c-8557-4ecd-c3d8-78738479690f , prices of slimming capsule
800e6c4c-8557-4ecd-c3d8-78738479690f , prices of slimming capsule
800e6c4c-8557-4ecd-c3d8-78738479690f , prices of slimming capsule
800e6c4c-8557-4ecd-c3d8-78738479690f , prices of slimming capsule
800e6c4c-8557-4ecd-c3d8-78738479690f , prices of slimming capsule
800e6c4c-8557-4ecd-c3d8-78738479690f , prices of slimming capsule
800e6c4c-8557-4ecd-c3d8-78738479690f , prices of slimming capsule
969b4a9e-72ae-48b7-974f-8d59342616e1 , washing machine trolley
969b4a9e-72ae-48b7-974f-8d59342616e1 , washing machine trolley
969b4a9e-72ae-48b7-974f-8d59342616e1 , washing machine trolley
969b4a9e-72ae-48b7-974f-8d59342616e1 , washing machine trolley
969b4a9e-72ae-48b7-974f-8d59342616e1 , washing machine trolley
969b4a9e-72ae-48b7-974f-8d59342616e1 , washing machine trolley
1444a2ba-6ccd-4dc1-8797-2d29cc460cb5 , mattress king size
2015ac6f-5084-4be5-eb75-c41ec6bc036b , samsung micro sd card
2015ac6f-5084-4be5-eb75-c41ec6bc036b , samsung micro sd card
2015ac6f-5084-4be5-eb75-c41ec6bc036b , samsung micro sd card
2015ac6f-5084-4be5-eb75-c41ec6bc036b , samsung micro sd card
2015ac6f-5084-4be5-eb75-c41ec6bc036b , samsung micro sd card
3613df92-ca46-4f1e-c940-20b6e7447b79 , otg
3613df92-ca46-4f1e-c940-20b6e7447b79 , otg
3613df92-ca46-4f1e-c940-20b6e7447b79 , otg
3613df92-ca46-4f1e-c940-20b6e7447b79 , otg
3613df92-ca46-4f1e-c940-20b6e7447b79 , otg
3613df92-ca46-4f1e-c940-20b6e7447b79 , otg
3613df92-ca46-4f1e-c940-20b6e7447b79 , otg
3613df92-ca46-4f1e-c940-20b6e7447b79 , otg
3613df92-ca46-4f1e-c940-20b6e7447b79 , otg
3613df92-ca46-4f1e-c940-20b6e7447b79 , otg
3613df92-ca46-4f1e-c940-20b6e7447b79 , otg
3613df92-ca46-4f1e-c940-20b6e7447b79 , otg
3613df92-ca46-4f1e-c940-20b6e7447b79 , otg
3613df92-ca46-4f1e-c940-20b6e7447b79 , otg
3613df92-ca46-4f1e-c940-20b6e7447b79 , otg
3613df92-ca46-4f1e-c940-20b6e7447b79 , otg
6878f1ad-012f-43b9-8c47-001cbbbcd182 , six pack bulk 2 kg
6878f1ad-012f-43b9-8c47-001cbbbcd182 , six pack bulk 2 kg
6878f1ad-012f-43b9-8c47-001cbbbcd182 , six pack bulk 2 kg
6878f1ad-012f-43b9-8c47-001cbbbcd182 , six pack bulk 2 kg
7317fac2-7d58-4d86-e737-f121ab195d95 , gripe water
7436c9be-6372-42d4-ecef-eef7d4df193f , trimmer for men
7436c9be-6372-42d4-ecef-eef7d4df193f , trimmer for men
7436c9be-6372-42d4-ecef-eef7d4df193f , trimmer for men
7436c9be-6372-42d4-ecef-eef7d4df193f , trimmer for men
7436c9be-6372-42d4-ecef-eef7d4df193f , trimmer for men
7436c9be-6372-42d4-ecef-eef7d4df193f , trimmer for men
7436c9be-6372-42d4-ecef-eef7d4df193f , trimmer for men
7436c9be-6372-42d4-ecef-eef7d4df193f , trimmer for men
7436c9be-6372-42d4-ecef-eef7d4df193f , hair dryers
7436c9be-6372-42d4-ecef-eef7d4df193f , hair dryers
7436c9be-6372-42d4-ecef-eef7d4df193f , hair dryers
7436c9be-6372-42d4-ecef-eef7d4df193f , hair dryers
7436c9be-6372-42d4-ecef-eef7d4df193f , hair dryers
7436c9be-6372-42d4-ecef-eef7d4df193f , hair dryers
8119c370-1bb3-422f-bf26-8d92a65529bc , toe shoes
8119c370-1bb3-422f-bf26-8d92a65529bc , toe shoes
8119c370-1bb3-422f-bf26-8d92a65529bc , toe shoes
8119c370-1bb3-422f-bf26-8d92a65529bc , toe shoes
8450ce48-a704-437d-e0fe-ba5eaeba04f0 , saving foam with gillet vector
8450ce48-a704-437d-e0fe-ba5eaeba04f0 , saving foam with gillet vector
8450ce48-a704-437d-e0fe-ba5eaeba04f0 , saving foam with gillet vector
8450ce48-a704-437d-e0fe-ba5eaeba04f0 , saving foam with gillet vector
08651da6-84c0-42e8-c61d-909047201474 , nokia 1100
08651da6-84c0-42e8-c61d-909047201474 , nokia 1100 phone
08651da6-84c0-42e8-c61d-909047201474 , nokia 1100 phone
08893fc3-9aa2-428e-8c0b-e81ca15df3d5 , samsung h5500
08893fc3-9aa2-428e-8c0b-e81ca15df3d5 , samsung h5500
08893fc3-9aa2-428e-8c0b-e81ca15df3d5 , samsung h5500
08893fc3-9aa2-428e-8c0b-e81ca15df3d5 , samsung h5500
08893fc3-9aa2-428e-8c0b-e81ca15df3d5 , samsung h5500
08893fc3-9aa2-428e-8c0b-e81ca15df3d5 , samsung h5500
9627c790-74b3-43ee-a78e-0c989ebf7701 , nokia x2
9627c790-74b3-43ee-a78e-0c989ebf7701 , nokia 2690 mobile phone
9627c790-74b3-43ee-a78e-0c989ebf7701 , nokia 2690 mobile phone
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , LED CLEANING KIT
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , led cleaning kit
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , led cleaning kit
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , led cleaning kit
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , led cleaning kit
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , led cleaning kit
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , led cleaning kit
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , led cleaning kit
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , led cleaning kit
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , led cleaning kit
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , led cleaning kit
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , led cleaning kit
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , led cleaning kit
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , led cleaning kit
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , led cleaning kit
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , led cleaning kit
9832d7e1-ebf6-4c38-9cb7-582a7b30c25d , led cleaning kit
25640cd1-2975-4960-c32d-509c643f21b6 , 32gb micro sd card
25640cd1-2975-4960-c32d-509c643f21b6 , 32gb micro sd card
25640cd1-2975-4960-c32d-509c643f21b6 , 32gb micro sd card
25640cd1-2975-4960-c32d-509c643f21b6 , 32gb micro sd card
25640cd1-2975-4960-c32d-509c643f21b6 , 32gb micro sd card
25640cd1-2975-4960-c32d-509c643f21b6 , footwear for men
34177ac5-5f9d-4e79-e1f9-a387c7f2f1ab , laptop external hard disk case
34177ac5-5f9d-4e79-e1f9-a387c7f2f1ab , laptop external hard disk case
49339bb9-4298-48c9-f6b6-f267d39b4add , bags
49339bb9-4298-48c9-f6b6-f267d39b4add , bags
49339bb9-4298-48c9-f6b6-f267d39b4add , bags
49339bb9-4298-48c9-f6b6-f267d39b4add , bags
49339bb9-4298-48c9-f6b6-f267d39b4add , bags
49339bb9-4298-48c9-f6b6-f267d39b4add , bags
49339bb9-4298-48c9-f6b6-f267d39b4add , bags
49339bb9-4298-48c9-f6b6-f267d39b4add , bags
71440f1a-9cc2-48b7-be7f-0af81b8e94f6 , samsung dishwasher
71440f1a-9cc2-48b7-be7f-0af81b8e94f6 , samsung dishwasher
82746c7f-630a-4800-e550-5815ef6e864c , spice mi600 flip cover
82746c7f-630a-4800-e550-5815ef6e864c , spice mi600 flip cover
82746c7f-630a-4800-e550-5815ef6e864c , spice mobile mi600 flip cover
82936fba-e0dc-4ccc-f77a-843a24709e04 , LENEVO A6000 4G
82936fba-e0dc-4ccc-f77a-843a24709e04 , LENEVO A6000 4G MOBILE
82936fba-e0dc-4ccc-f77a-843a24709e04 , LENEVO A6000 4G MOBILE
82936fba-e0dc-4ccc-f77a-843a24709e04 , LENEVO A6000 4G MOBILE
82936fba-e0dc-4ccc-f77a-843a24709e04 , LENEVO A6000 4G MOBILE
82936fba-e0dc-4ccc-f77a-843a24709e04 , LENEVO A6000 4G MOBILE
82936fba-e0dc-4ccc-f77a-843a24709e04 , LENEVO A6000 4G MOBILE
82936fba-e0dc-4ccc-f77a-843a24709e04 , LENEVO A6000 4G MOBILE
82936fba-e0dc-4ccc-f77a-843a24709e04 , LENEVO A6000 4G MOBILE
82936fba-e0dc-4ccc-f77a-843a24709e04 , LENEVO A6000 4G MOBILE
82936fba-e0dc-4ccc-f77a-843a24709e04 , LENEVO A6000 4G MOBILE
82936fba-e0dc-4ccc-f77a-843a24709e04 , LENEVO A6000 4G MOBILE
82936fba-e0dc-4ccc-f77a-843a24709e04 , WATCHES
82936fba-e0dc-4ccc-f77a-843a24709e04 , WATCHES
82936fba-e0dc-4ccc-f77a-843a24709e04 , WATCHES
82936fba-e0dc-4ccc-f77a-843a24709e04 , platinum rings for men
82936fba-e0dc-4ccc-f77a-843a24709e04 , platinum rings for men
82936fba-e0dc-4ccc-f77a-843a24709e04 , platinum rings for men
82936fba-e0dc-4ccc-f77a-843a24709e04 , platinum rings for men
82936fba-e0dc-4ccc-f77a-843a24709e04 , mobile
82936fba-e0dc-4ccc-f77a-843a24709e04 , LENEVO A6000 4G MOBILE
89319dba-7afd-4570-bef3-31dc32ca51a4 , usb card reader
89319dba-7afd-4570-bef3-31dc32ca51a4 , usb card reader
89319dba-7afd-4570-bef3-31dc32ca51a4 , usb card reader
89319dba-7afd-4570-bef3-31dc32ca51a4 , usb card reader
89319dba-7afd-4570-bef3-31dc32ca51a4 , usb card reader
89319dba-7afd-4570-bef3-31dc32ca51a4 , usb card reader
89319dba-7afd-4570-bef3-31dc32ca51a4 , usb card reader
89319dba-7afd-4570-bef3-31dc32ca51a4 , usb card reader
89319dba-7afd-4570-bef3-31dc32ca51a4 , usb card reader
89319dba-7afd-4570-bef3-31dc32ca51a4 , usb card reader
95958ebb-a5ea-4346-cfa1-3fcfcf14c5aa , lenovo g50-70
95958ebb-a5ea-4346-cfa1-3fcfcf14c5aa , lenovo g50-70
95958ebb-a5ea-4346-cfa1-3fcfcf14c5aa , lenovo g50-70
118646dc-ad6d-43a6-9968-23758d9ba124 , Puma
118646dc-ad6d-43a6-9968-23758d9ba124 , Puma
282049d2-a362-4f9f-8c59-3ea73266c38e , headsets
282049d2-a362-4f9f-8c59-3ea73266c38e , headsets
282049d2-a362-4f9f-8c59-3ea73266c38e , headsets
282049d2-a362-4f9f-8c59-3ea73266c38e , headsets
282049d2-a362-4f9f-8c59-3ea73266c38e , headsets
282049d2-a362-4f9f-8c59-3ea73266c38e , headphones
282049d2-a362-4f9f-8c59-3ea73266c38e , headphones
282049d2-a362-4f9f-8c59-3ea73266c38e , wireless headphones
282049d2-a362-4f9f-8c59-3ea73266c38e , wireless headphones
282049d2-a362-4f9f-8c59-3ea73266c38e , wireless headphones
282049d2-a362-4f9f-8c59-3ea73266c38e , wireless headphones
282049d2-a362-4f9f-8c59-3ea73266c38e , wireless headphones
282049d2-a362-4f9f-8c59-3ea73266c38e , wireless headphones
282049d2-a362-4f9f-8c59-3ea73266c38e , wireless headphones
282049d2-a362-4f9f-8c59-3ea73266c38e , wireless headphones
282049d2-a362-4f9f-8c59-3ea73266c38e , wireless headphones
436245b3-76d8-4380-b59d-51211094b2cf , heater
436245b3-76d8-4380-b59d-51211094b2cf , branded kitchen utensils
567450c4-2602-4643-b701-9dc91a75e3a2 , kadai
567450c4-2602-4643-b701-9dc91a75e3a2 , kadai
567450c4-2602-4643-b701-9dc91a75e3a2 , stainless steel kadai
567450c4-2602-4643-b701-9dc91a75e3a2 , stainless steel kadai
567450c4-2602-4643-b701-9dc91a75e3a2 , stainless steel kadai
567450c4-2602-4643-b701-9dc91a75e3a2 , stainless steel kadai
567450c4-2602-4643-b701-9dc91a75e3a2 , kadai
567450c4-2602-4643-b701-9dc91a75e3a2 , stain steel kadai
567450c4-2602-4643-b701-9dc91a75e3a2 , stainless steel kadai 1 ltr
66734614-7325-438b-962a-a5faadeb7841 , leather jacket
66734614-7325-438b-962a-a5faadeb7841 , leather jacket
82674636-7d89-42f2-e5b3-5d97ff79b11f , EPSON L210
82674636-7d89-42f2-e5b3-5d97ff79b11f , epson l210
82674636-7d89-42f2-e5b3-5d97ff79b11f , epson l210
83618338-70a0-4512-c763-0307fe5acef0 , woman jacket
83618338-70a0-4512-c763-0307fe5acef0 , woman jacket
83618338-70a0-4512-c763-0307fe5acef0 , woman jacket
83618338-70a0-4512-c763-0307fe5acef0 , man jacket
83618338-70a0-4512-c763-0307fe5acef0 , man jacket
97941946-62ca-48fc-b893-b88eca866876 , helmet bike
97941946-62ca-48fc-b893-b88eca866876 , helmet bike
a0e69813-3dcf-4e2e-ffa0-b2c026aa1fa1 , mach 3
a0e69813-3dcf-4e2e-ffa0-b2c026aa1fa1 , mach 3
a0e69813-3dcf-4e2e-ffa0-b2c026aa1fa1 , mach 3
a0e69813-3dcf-4e2e-ffa0-b2c026aa1fa1 , mach 3
a0e69813-3dcf-4e2e-ffa0-b2c026aa1fa1 , mach 3
a3a9a3d8-3a04-4c0e-93ce-fc99807e6af8 , skirts mini
a3a9a3d8-3a04-4c0e-93ce-fc99807e6af8 , skirts mini
a9cfe6f6-333e-4c0c-984b-ca26840db68a , led bulb
a9cfe6f6-333e-4c0c-984b-ca26840db68a , led bulb
a9cfe6f6-333e-4c0c-984b-ca26840db68a , led bulb
a9cfe6f6-333e-4c0c-984b-ca26840db68a , led bulb
a9cfe6f6-333e-4c0c-984b-ca26840db68a , led bulb
a9cfe6f6-333e-4c0c-984b-ca26840db68a , bvpl
a9d3b40b-e34f-4293-872f-0a06923582c7 , xeaomi mi powerbank
a9d3b40b-e34f-4293-872f-0a06923582c7 , xeaomi mi powerbank
a9d3b40b-e34f-4293-872f-0a06923582c7 , xeaomi mi powerbank
a9d3b40b-e34f-4293-872f-0a06923582c7 , xeaomi mi powerbank
a9e67c55-bb81-4548-c540-062fa8c3b4ed , ipad
a9e67c55-bb81-4548-c540-062fa8c3b4ed , ipad
a9e67c55-bb81-4548-c540-062fa8c3b4ed , ipad
a9e67c55-bb81-4548-c540-062fa8c3b4ed , ipad
a9e67c55-bb81-4548-c540-062fa8c3b4ed , ipad
a9e67c55-bb81-4548-c540-062fa8c3b4ed , ipad
a9e67c55-bb81-4548-c540-062fa8c3b4ed , ipad
a59ad0a1-22f4-4215-8e5d-9a958f31e6cf , muscle tech
a59ad0a1-22f4-4215-8e5d-9a958f31e6cf , muscle tech
a1634c0e-e915-498c-963d-4cab23717651 , rebook sokes
a418051c-0f19-47d4-de46-7288572c012e , lg lb5510
a418051c-0f19-47d4-de46-7288572c012e , lg lb5510
a418051c-0f19-47d4-de46-7288572c012e , lg led lb5510
a452089f-a57e-48c0-e3d2-036505f03ec9 , monkey cap
a452089f-a57e-48c0-e3d2-036505f03ec9 , monkey cap
a452089f-a57e-48c0-e3d2-036505f03ec9 , monkey cap
aa3a8c22-e036-4d5d-8a5c-1dcfddf2d3ee , belt for men
aa3a8c22-e036-4d5d-8a5c-1dcfddf2d3ee , belt for men
aa3a8c22-e036-4d5d-8a5c-1dcfddf2d3ee , men purse
af29a728-f82e-46ad-e6a6-02b57fdd9d9b , footwears
af33e18b-8a34-43c7-a661-04783b3339a1 , sports shoes for men
af33e18b-8a34-43c7-a661-04783b3339a1 , sports shoes for men
aff8c7d3-0943-4ff5-d788-3c3d5fdf2fd7 , replica watches
aff8c7d3-0943-4ff5-d788-3c3d5fdf2fd7 , sony led tv 55 inches
b3ff2b22-694f-4b0d-fb12-045b77ee2f2c , quantum health analyzer
b3ff2b22-694f-4b0d-fb12-045b77ee2f2c , quantum health analyzer
b3ff2b22-694f-4b0d-fb12-045b77ee2f2c , quantum health analyzer
b3ff2b22-694f-4b0d-fb12-045b77ee2f2c , quantum health analyzer
b3ff2b22-694f-4b0d-fb12-045b77ee2f2c , quantum health analyzer
b3ff2b22-694f-4b0d-fb12-045b77ee2f2c , quantum health analyzer
b8c70417-ee38-4535-a076-9b85e3bc0d46 , gas cooktops
b8c70417-ee38-4535-a076-9b85e3bc0d46 , gas cooktops
b8c70417-ee38-4535-a076-9b85e3bc0d46 , gas cooktops
b8c70417-ee38-4535-a076-9b85e3bc0d46 , gas cooktops
b4276f74-5e50-4262-816e-ec7056181d07 , blank dvd
b4276f74-5e50-4262-816e-ec7056181d07 , blank dvd
b4276f74-5e50-4262-816e-ec7056181d07 , blank dvd
b4276f74-5e50-4262-816e-ec7056181d07 , blank dvd
b4276f74-5e50-4262-816e-ec7056181d07 , blank dvd
ba8febfe-5587-4e2c-c089-44adbdd35ffb , nicer dicer plus chopper
ba8febfe-5587-4e2c-c089-44adbdd35ffb , nicer dicer plus chopper
ba8febfe-5587-4e2c-c089-44adbdd35ffb , nicer dicer plus chopper
ba8febfe-5587-4e2c-c089-44adbdd35ffb , nicer dicer plus chopper
ba8febfe-5587-4e2c-c089-44adbdd35ffb , nicer dicer plus chopper
ba8febfe-5587-4e2c-c089-44adbdd35ffb , nicer dicer plus chopper
ba8febfe-5587-4e2c-c089-44adbdd35ffb , nicer dicer plus chopper
ba8febfe-5587-4e2c-c089-44adbdd35ffb , nicer dicer plus chopper
ba8febfe-5587-4e2c-c089-44adbdd35ffb , nicer dicer plus
ba8febfe-5587-4e2c-c089-44adbdd35ffb , nicer dicer plus
ba8febfe-5587-4e2c-c089-44adbdd35ffb , nicer dicer plus
ba8febfe-5587-4e2c-c089-44adbdd35ffb , nicer dicer plus
ba8febfe-5587-4e2c-c089-44adbdd35ffb , nicer dicer plus
ba8febfe-5587-4e2c-c089-44adbdd35ffb , nicer dicer plus
ba8febfe-5587-4e2c-c089-44adbdd35ffb , nicer dicer plus
ba8febfe-5587-4e2c-c089-44adbdd35ffb , nicer dicer plus
bd43875e-b1a9-490d-ee24-8f695ea96555 , sport shoes
bd43875e-b1a9-490d-ee24-8f695ea96555 , sport shoes
bd43875e-b1a9-490d-ee24-8f695ea96555 , sport shoes
bd43875e-b1a9-490d-ee24-8f695ea96555 , sport shoes
bd43875e-b1a9-490d-ee24-8f695ea96555 , sport shoes
bd43875e-b1a9-490d-ee24-8f695ea96555 , sport shoes
bd43875e-b1a9-490d-ee24-8f695ea96555 , sport shoes
bd43875e-b1a9-490d-ee24-8f695ea96555 , sport shoes
c2b21d06-3ddb-4059-8efd-15af0da06bbd , MI Power bank
c006c5ab-9a87-4b0e-8a31-bb05d5ca7ffd , studio 9
c006c5ab-9a87-4b0e-8a31-bb05d5ca7ffd , studio 9
c006c5ab-9a87-4b0e-8a31-bb05d5ca7ffd , studio 9
c006c5ab-9a87-4b0e-8a31-bb05d5ca7ffd , studio 9
c68ebe44-7f90-42b2-81c8-db340ed869e5 , scissors
c68ebe44-7f90-42b2-81c8-db340ed869e5 , scissors
c68ebe44-7f90-42b2-81c8-db340ed869e5 , scissors
c68ebe44-7f90-42b2-81c8-db340ed869e5 , scissors
c68ebe44-7f90-42b2-81c8-db340ed869e5 , scissors
c68ebe44-7f90-42b2-81c8-db340ed869e5 , scissors
c232eb9d-66e0-42db-94e6-2d7e401012c0 , 20kg rubber weight plates
c232eb9d-66e0-42db-94e6-2d7e401012c0 , 20kg rubber weight plates
c232eb9d-66e0-42db-94e6-2d7e401012c0 , 20kg rubber weight plates
c232eb9d-66e0-42db-94e6-2d7e401012c0 , 20kg rubber weight plates
c232eb9d-66e0-42db-94e6-2d7e401012c0 , 20kg rubber weight plates
c232eb9d-66e0-42db-94e6-2d7e401012c0 , 20kg rubber weight plates
c232eb9d-66e0-42db-94e6-2d7e401012c0 , 20kg rubber weight plates
c232eb9d-66e0-42db-94e6-2d7e401012c0 , 10kg spare rubber plates
c232eb9d-66e0-42db-94e6-2d7e401012c0 , 10kg spare rubber plates
c232eb9d-66e0-42db-94e6-2d7e401012c0 , 10kg spare rubber plates
c692bbd7-5003-422d-d9ff-57fd73173433 , moto e
c756f78b-4089-4aa6-cb6a-8d6ec1d44fe3 , revolver lighter
c756f78b-4089-4aa6-cb6a-8d6ec1d44fe3 , revolver lighter
c756f78b-4089-4aa6-cb6a-8d6ec1d44fe3 , revolver lighter
c38074f6-2576-4238-b541-f36cd8b6447f , ausus zenphone 5 cases
c38074f6-2576-4238-b541-f36cd8b6447f , ausus zenphone 5 cases
c38074f6-2576-4238-b541-f36cd8b6447f , ausus zenphone 5 cases
c38074f6-2576-4238-b541-f36cd8b6447f , ausus zenphone 5 cases
c38074f6-2576-4238-b541-f36cd8b6447f , ausus zenphone 5 cases
cb06c9d1-e83a-44de-d88c-b095a387a748 , beats by dr.dre monster
cb06c9d1-e83a-44de-d88c-b095a387a748 , beats by dr.dre monster
cb06c9d1-e83a-44de-d88c-b095a387a748 , beats by dr.dre monster
cb06c9d1-e83a-44de-d88c-b095a387a748 , beats by dr.dre monster
cb06c9d1-e83a-44de-d88c-b095a387a748 , Beats by Dr.Dre Pro
cc4b3c92-d6ca-482a-9e67-2bf20254af22 , htc one
cc4b3c92-d6ca-482a-9e67-2bf20254af22 , imported mobile
cc4b3c92-d6ca-482a-9e67-2bf20254af22 , imported mobile
cc4b3c92-d6ca-482a-9e67-2bf20254af22 , imported mobile
cc4b3c92-d6ca-482a-9e67-2bf20254af22 , imported mobile
cc4b3c92-d6ca-482a-9e67-2bf20254af22 , imported mobile
cc4b3c92-d6ca-482a-9e67-2bf20254af22 , imported mobile
ccb4cf8f-ff5c-4197-c0cb-e045c24ed680 , off road helmets
ccb4cf8f-ff5c-4197-c0cb-e045c24ed680 , off road helmets
ccb4cf8f-ff5c-4197-c0cb-e045c24ed680 , off road helmets
ccb4cf8f-ff5c-4197-c0cb-e045c24ed680 , off road helmets
ccb4cf8f-ff5c-4197-c0cb-e045c24ed680 , off road helmets
ccb4cf8f-ff5c-4197-c0cb-e045c24ed680 , off road helmets
ccb4cf8f-ff5c-4197-c0cb-e045c24ed680 , off road helmets
ccb4cf8f-ff5c-4197-c0cb-e045c24ed680 , off road helmets
cf113da4-d402-420a-900a-e2c7f25c1238 , puma kevler
cf113da4-d402-420a-900a-e2c7f25c1238 , syska led bulbs
cf113da4-d402-420a-900a-e2c7f25c1238 , syska led bulbs
cf113da4-d402-420a-900a-e2c7f25c1238 , syska led bulbs
cf113da4-d402-420a-900a-e2c7f25c1238 , syska led bulbs
cf113da4-d402-420a-900a-e2c7f25c1238 , syska led bulbs
d5fd6d65-3af2-4bda-881c-0b654dfe94b0 , folding headphones
d5fd6d65-3af2-4bda-881c-0b654dfe94b0 , folding headphones
d6a63920-e270-4957-b41e-2aea5652671e , portable usb mp3 player
d6b1841f-0bca-45c7-89e5-70ed2e2d1005 , puma sandals for men
d6b1841f-0bca-45c7-89e5-70ed2e2d1005 , puma sandals for men
d6b1841f-0bca-45c7-89e5-70ed2e2d1005 , puma sandals for men
d8be3d81-45e7-46f4-b51b-44af88fa869d , basket ball
d16c5fbb-1123-4f26-ddbc-6212f2f3d7db , kalash wall stickers
d16c5fbb-1123-4f26-ddbc-6212f2f3d7db , kalash wall stickers
d16c5fbb-1123-4f26-ddbc-6212f2f3d7db , kalash wall stickers
d89f8fde-1c89-412d-f258-c4bdf9f65f51 , command and conquer
d900ec5f-bd71-4e2b-84d0-6a2105050923 , minoxidil
d900ec5f-bd71-4e2b-84d0-6a2105050923 , minoxidil 5
d900ec5f-bd71-4e2b-84d0-6a2105050923 , livon hair gain
d900ec5f-bd71-4e2b-84d0-6a2105050923 , livon hair gain
d900ec5f-bd71-4e2b-84d0-6a2105050923 , uvb comb
d900ec5f-bd71-4e2b-84d0-6a2105050923 , uvb comb
d900ec5f-bd71-4e2b-84d0-6a2105050923 , uvb comb
d900ec5f-bd71-4e2b-84d0-6a2105050923 , uvb comb
dba91160-dec4-454c-f34a-c29d6d95c459 , DVD PLATERS
dba91160-dec4-454c-f34a-c29d6d95c459 , DVD PLAYERS
dba91160-dec4-454c-f34a-c29d6d95c459 , DVD PLAYERS
dba91160-dec4-454c-f34a-c29d6d95c459 , DVD PLAYERS
dba91160-dec4-454c-f34a-c29d6d95c459 , IPOD
dba91160-dec4-454c-f34a-c29d6d95c459 , IPOD
dba91160-dec4-454c-f34a-c29d6d95c459 , IPOD WITH FM TUNER
dba91160-dec4-454c-f34a-c29d6d95c459 , IPOD WITH FM TUNER
dba91160-dec4-454c-f34a-c29d6d95c459 , IPOD WITH FM TUNER
dba91160-dec4-454c-f34a-c29d6d95c459 , IPOD WITH FM TUNER
dba91160-dec4-454c-f34a-c29d6d95c459 , IPOD WITH FM TUNER
dba91160-dec4-454c-f34a-c29d6d95c459 , IPOD WITH FM TUNER
dba91160-dec4-454c-f34a-c29d6d95c459 , IPOD
dba91160-dec4-454c-f34a-c29d6d95c459 , IPOD
dba91160-dec4-454c-f34a-c29d6d95c459 , IPOD
dba91160-dec4-454c-f34a-c29d6d95c459 , IPOD
dba91160-dec4-454c-f34a-c29d6d95c459 , IPAD
e7740613-ca66-4b2f-eb70-8263882c9f44 , titanium s8
e7740613-ca66-4b2f-eb70-8263882c9f44 , titanium s8
eb187e6f-79cb-4cc4-d358-8324172ca353 , burberry
eb187e6f-79cb-4cc4-d358-8324172ca353 , burberry
eb187e6f-79cb-4cc4-d358-8324172ca353 , burberrywallets
eb187e6f-79cb-4cc4-d358-8324172ca353 , burberrywallets
eb187e6f-79cb-4cc4-d358-8324172ca353 , burberrywallets
eb187e6f-79cb-4cc4-d358-8324172ca353 , burberrywallets
eb187e6f-79cb-4cc4-d358-8324172ca353 , burberrywallets
eb187e6f-79cb-4cc4-d358-8324172ca353 , burberrywallets
eb187e6f-79cb-4cc4-d358-8324172ca353 , burberrywallets
eb187e6f-79cb-4cc4-d358-8324172ca353 , burberrywallets
eb187e6f-79cb-4cc4-d358-8324172ca353 , burberrywallets
eb187e6f-79cb-4cc4-d358-8324172ca353 , burberrywallets
eb187e6f-79cb-4cc4-d358-8324172ca353 , burberrywallets
eb187e6f-79cb-4cc4-d358-8324172ca353 , burberrywallets
eb187e6f-79cb-4cc4-d358-8324172ca353 , burberrywallets
eb187e6f-79cb-4cc4-d358-8324172ca353 , burberrywallets
eb187e6f-79cb-4cc4-d358-8324172ca353 , burberrywallets
eb187e6f-79cb-4cc4-d358-8324172ca353 , burberrywallets
eb187e6f-79cb-4cc4-d358-8324172ca353 , burberrywallets
ebeb5119-c79c-4364-f3f3-3759c7197e48 , Celkon Win 400
ebeb5119-c79c-4364-f3f3-3759c7197e48 , Celkon Win 400
f7ae4f26-fa87-413f-b3ab-70751f6d3c9b , power bank
f7ae4f26-fa87-413f-b3ab-70751f6d3c9b , power bank
f7ae4f26-fa87-413f-b3ab-70751f6d3c9b , power bank
f7ae4f26-fa87-413f-b3ab-70751f6d3c9b , power bank
f7ae4f26-fa87-413f-b3ab-70751f6d3c9b , power bank
f7ae4f26-fa87-413f-b3ab-70751f6d3c9b , power bank
f7ae4f26-fa87-413f-b3ab-70751f6d3c9b , power bank
f10b73c2-1b67-4cf7-cb7d-87f3737049cf , cotton handkerchiefs for men
f10b73c2-1b67-4cf7-cb7d-87f3737049cf , cotton handkerchiefs for men
f10b73c2-1b67-4cf7-cb7d-87f3737049cf , cotton handkerchiefs for men
f10b73c2-1b67-4cf7-cb7d-87f3737049cf , cotton handkerchiefs for men
f10b73c2-1b67-4cf7-cb7d-87f3737049cf , cotton handkerchiefs for men
f10b73c2-1b67-4cf7-cb7d-87f3737049cf , cotton handkerchiefs for men
f10b73c2-1b67-4cf7-cb7d-87f3737049cf , cotton handkerchiefs for men
f10b73c2-1b67-4cf7-cb7d-87f3737049cf , cotton handkerchiefs for men
f51ecda7-c865-4c40-a924-4afa3fc7a266 , Yu Yureka
f51ecda7-c865-4c40-a924-4afa3fc7a266 , Yu Yureka
f51ecda7-c865-4c40-a924-4afa3fc7a266 , Yu Yureka mobiel
f61fd513-a40b-4b36-978a-209aff41331c , hard disk
f4982793-8c21-4175-eacd-f4424078fdc3 , otg
f4982793-8c21-4175-eacd-f4424078fdc3 , otg
fc820e73-b746-4a2a-a14b-6b4bb696337d , bop bags
fc820e73-b746-4a2a-a14b-6b4bb696337d , bop bags
tempered glass
fd0a98b5-3ec6-4860-bbc7-2e044d80c413 , tempered glass
ff13b076-14e3-4030-9db9-0df9d86c1af2 , Magnetic Charging Dock For Xperia Xperia Z2
"""
def min_edit_dist(s1, s2):
    """Compute the Levenshtein edit distance between two strings.

    Classic dynamic-programming table with unit cost for insertion,
    deletion and substitution.

    Args:
        s1: first string.
        s2: second string.

    Returns:
        Minimum number of single-character edits turning s1 into s2.
    """
    m = len(s1) + 1
    n = len(s2) + 1
    tbl = {}
    # Base cases: distance to/from the empty prefix.
    for i in range(m):
        tbl[i, 0] = i
    for j in range(n):
        tbl[0, j] = j
    for i in range(1, m):
        for j in range(1, n):
            cost = 0 if s1[i - 1] == s2[j - 1] else 1
            tbl[i, j] = min(tbl[i, j - 1] + 1,       # insertion
                            tbl[i - 1, j] + 1,       # deletion
                            tbl[i - 1, j - 1] + cost)  # substitute / match
    # Index the final cell explicitly instead of relying on the loop
    # variables leaking out of the for-loops (the original's tbl[i, j]).
    return tbl[m - 1, n - 1]
# Read "session_id , query" lines, group the queries by session, then report
# consecutive distinct queries within a session whose edit distance is <= 2
# (likely typo corrections or reformulations).
d = {}
with open("all_search.txt") as text:
    for line in text:
        try:
            # Split only on the first comma so queries that themselves
            # contain commas are not silently dropped (the original's bare
            # except swallowed them).
            key, val = line.strip("\n").split(",", 1)
        except ValueError:
            # Skip malformed lines with no "id , query" structure.
            continue
        d.setdefault(key, []).append(val.lower())
for v in d.values():
    for i in range(len(v) - 1):
        if v[i] != v[i + 1] and min_edit_dist(v[i], v[i + 1]) <= 2:
            # Parenthesized form prints identically under Python 2 and 3.
            print(v[i] + " > " + v[i + 1])
"""
output:-
dvd platers > dvd players
ipod > ipad
bookmy show > book my show
leggings > jeggings
woman jacket > man jacket
minoxidil > minoxidil 5
printed backcase for xperia l > printed backcase for xperia zr
8 gb pen drive > 16 gb pen drive
"""
| e2cb8fd399e7f027699627338b30d29ebc921946 | [
"Python"
] | 1 | Python | snehasishmohanta/finding_the_query_whose_edit_distance_is_less-than_2 | 848c8908bd8b2149b356bf710d88ca446f47ff36 | b4e4b4434999ef0534659d30894ced84295de88e |
refs/heads/main | <file_sep>using UnityEngine;
public class Seal : MonoBehaviour
{
    // The object this seal keeps hidden; it is revealed again as soon
    // as the seal itself is disabled (e.g. burned away by the player).
    public GameObject target;

    private void Start()
    {
        // While the seal is active, the target stays out of the scene.
        target.SetActive(false);
    }

    private void OnDisable()
    {
        if (target == null)
        {
            return;
        }
        target.SetActive(true);
    }
}
<file_sep>using UnityEngine;
using TMPro;
public class CandleManager : MonoBehaviour
{
    #region Variables
    public TMP_Text candleText;    // HUD label showing the current count
    private AudioManager audioManager;
    private int candleCounter = 0; // candles currently held
    #endregion

    #region Methods
    private void Start()
    {
        // Render the starting count as soon as the game begins.
        UpdateCandleText();
        audioManager = AudioManager.GetInstance;
    }

    /// <summary>
    /// Changes the candle count by <paramref name="value"/> (negative to
    /// spend one) and refreshes the HUD. Gaining candles plays a pick-up sound.
    /// </summary>
    public void AddCandle(int value)
    {
        if (value > 0)
        {
            audioManager.PlaySFX(AudioManager.SFX.PickUp);
        }
        candleCounter += value;
        UpdateCandleText();
    }

    /// <summary>
    /// Spends one candle to burn a seal: deactivates <paramref name="other"/>
    /// and decrements the count. Does nothing when no candles are held.
    /// </summary>
    public void BurnSeal(GameObject other)
    {
        if (candleCounter <= 0)
        {
            return;
        }
        other.SetActive(false);
        AddCandle(-1);
    }

    /// <summary>Pushes the current count into the HUD label.</summary>
    private void UpdateCandleText()
    {
        candleText.text = "Candles: " + candleCounter;
    }
    #endregion
}
<file_sep>using UnityEngine;
public class SavesManager : MonoBehaviour
{
    /// <summary>Persists the index of the active level to PlayerPrefs.</summary>
    public void SetLevel(int activeLevel)
    {
        PlayerPrefs.SetInt("activeLevel", activeLevel);
        Debug.Log("El nivel activo es: " + activeLevel);
    }

    /// <summary>
    /// Stores <paramref name="pos"/> as the checkpoint. If a checkpoint is
    /// already saved and matches this position, nothing is written.
    /// </summary>
    public void SetCheckpoint(Transform pos)
    {
        bool hasSavedCheckpoint =
            PlayerPrefs.HasKey("checkpointX") && PlayerPrefs.HasKey("checkpointY");
        if (hasSavedCheckpoint)
        {
            Vector3 savedCheckpoint = new Vector3(
                PlayerPrefs.GetFloat("checkpointX"),
                PlayerPrefs.GetFloat("checkpointY"),
                0);
            // Same spot as last time: skip the redundant write.
            if (savedCheckpoint == pos.position)
            {
                return;
            }
        }
        SetCoordinates(pos.position);
    }

    /// <summary>Remembers the scene to resume from, wiping stale saves first.</summary>
    public void SetScene(string activeScene)
    {
        DeleteSaves(); // previous checkpoint/level data belongs to the old scene
        PlayerPrefs.SetString("scene", activeScene);
    }

    /// <summary>Removes every save-related PlayerPrefs key.</summary>
    public void DeleteSaves()
    {
        PlayerPrefs.DeleteKey("activeLevel");
        PlayerPrefs.DeleteKey("scene");
        PlayerPrefs.DeleteKey("checkpointX");
        PlayerPrefs.DeleteKey("checkpointY");
        Debug.Log("PlayerPrefs deleted");
    }

    /// <summary>Writes the checkpoint coordinates and flushes PlayerPrefs.</summary>
    private void SetCoordinates(Vector3 checkpoint)
    {
        PlayerPrefs.SetFloat("checkpointX", checkpoint.x);
        PlayerPrefs.SetFloat("checkpointY", checkpoint.y);
        PlayerPrefs.Save();
        Debug.Log("New checkpoint in: " + checkpoint);
    }
}
<file_sep>using UnityEngine;
using UnityEngine.Events;
[RequireComponent(typeof(PlayerMovement))]
public class Possessable : MonoBehaviour
{
    // NOTE: this could not be done with OnPlayerEnter alone, because the
    // ghost is deactivated for a moment during possession — hence the
    // explicit "isOnRange" flag.
    #region Variables
    #region Possess
    public UnityEvent onPossess;          // fired whenever possession toggles
    private bool isGhostOnRange = false;  // ghost close enough to possess this
    private bool isPossessing = false;    // currently possessed by the ghost
    #endregion
    #region Setup
    private CinemachineController myCinemachine; // camera controller (may be null)
    private PlayerMovement movement;      // movement script enabled while possessed
    private KeyCode actionButton;         // possess/release key (same as jump)
    private float gravitiScale;           // original gravity, restored on release
    private Rigidbody2D rb2D;
    #endregion
    #endregion
    #region Methods
    private void Start()
    {
        movement = GetComponent<PlayerMovement>();
        rb2D = GetComponent<Rigidbody2D>();
        gravitiScale = rb2D.gravityScale;
        actionButton = KeybindingsManager.GetInstance.GetJumpButton;
        myCinemachine = GameManager.GetInstance.cinemachineController;
        // Release possession automatically when the player dies.
        GameManager.GetInstance.onDeath += OnDeath;
    }
    private void Update()
    {
        // Toggle possession while possessed or while the ghost is in range.
        if ((isPossessing || isGhostOnRange) && Input.GetKeyDown(actionButton))
        {
            onPossess.Invoke();
        }
    }
    /// <summary>
    /// Toggles whether the ghost is in range (called on trigger enter/exit).
    /// </summary>
    public void GhostOnRange()
    {
        isGhostOnRange = !isGhostOnRange;
    }
    /// <summary>
    /// Death handler: if this object was possessed, fire the possess event
    /// to release it; otherwise do nothing.
    /// </summary>
    private void OnDeath(float value)
    {
        if (isPossessing)
        {
            onPossess.Invoke();
        }
    }
    /// <summary>
    /// Toggles possession of this object: enables/disables its movement,
    /// freezes rotation and removes gravity while controlled.
    /// </summary>
    public void Possessed()
    {
        movement.enabled = !movement.enabled;
        isPossessing = !isPossessing;
        rb2D.freezeRotation = movement.enabled; // no spinning while controlled
        // Remove gravity while possessed; restore it on release.
        if (movement.enabled)
        {
            rb2D.gravityScale = 0;
            ChangeFollow(this.transform); // camera follows the possessed object
        }
        else rb2D.gravityScale = gravitiScale;
    }
    /// <summary>
    /// After releasing possession, moves the ghost to this object's last
    /// position and points the camera back at it.
    /// </summary>
    public void MoveGhost(Transform ghost)
    {
        // Only relocate once possession has actually ended.
        if (!isPossessing)
        {
            ghost.position = transform.position;
            ChangeFollow(ghost);
        }
    }
    /// <summary>
    /// Forwards the new follow target to the CinemachineController, if any.
    /// </summary>
    void ChangeFollow(Transform objective)
    {
        if (myCinemachine != null) myCinemachine.ChangeFollow(objective);
    }
    #endregion
}
<file_sep>using UnityEngine;
using UnityEngine.Events;
public class Lever : MonoBehaviour
{
    #region Variables
    public UnityEvent onActivation; // fired once when the lever is pulled
    public Sprite leverOff;         // sprite shown after activation
    private SpriteRenderer spriteRenderer;
    private KeyCode actionButton;
    #endregion

    #region Methods
    private void Start()
    {
        actionButton = KeybindingsManager.GetInstance.GetJumpButton;
        spriteRenderer = GetComponent<SpriteRenderer>();
    }

    /// <summary>
    /// One-shot activation: swaps to the "off" sprite, disables this script
    /// so the lever cannot be pulled again, then fires the event.
    /// </summary>
    public void Activate()
    {
        if (!Input.GetKeyDown(actionButton))
        {
            return;
        }
        spriteRenderer.sprite = leverOff;
        enabled = false;
        onActivation?.Invoke();
    }
    #endregion
}
<file_sep>using UnityEngine;
using System.Collections.Generic;
[RequireComponent(typeof(Rigidbody2D))]
public class PlayerJump : MonoBehaviour
{
    // Jumping with variable height (hold to rise up to jumpTimer seconds),
    // a short input buffer and coyote time; also drives the walk/jump
    // animations and footstep audio.
    #region Enum
    // Indexes into the `animations` string array below.
    enum Animations
    {
        Idle,
        Walking,
        Jumping_Rising,
        Jumping_Falling
    }
    #endregion
    #region Variables
    #region Setup
    public float walkingTreshold = 0.05f; // min |velocity| considered "moving"
    public Animator animator;
    public float jumpForce;               // impulse applied on jump (AddForce)
    // Animation state names, indexed by the Animations enum.
    // NOTE(review): only referenced by the commented-out Play calls below.
    private string[] animations = { "Idle", "Walking", "Jumping_Rising", "Jumping_Falling" };
    private AudioManager audioManager;
    private KeyCode jumpButton;
    Queue<KeyCode> inputBuffer;           // buffered jump presses, expire after 0.1s
    private Rigidbody2D rb2D;
    #endregion
    #region VariableJump
    private bool releaseJump = false;     // set when the rise should be cut short
    private bool isOnGround = false;
    private bool startTimer = false;      // true while rising (gravity disabled)
    private float jumpTimer = 0.2f;       // max rise time, in seconds
    private float gravityScale;           // original gravity, restored after the jump
    private float timer;                  // countdown copy of jumpTimer
    #endregion
    #region CoyoteTime
    public float coyoteFrames = 3f;       // frames after leaving ground where jumping still works
    public float coyoteTimer;             // frames since last grounded
    private Vector3 raycastOffset = new Vector3(0.18f, 0, 0); // side probes for the ground check
    private float raycastLenght = 0.7f;
    #endregion
    #endregion
    #region Methods
    private void Awake()
    {
        inputBuffer = new Queue<KeyCode>();
        rb2D = GetComponent<Rigidbody2D>();
        gravityScale = rb2D.gravityScale; // remember gravity to restore later
        timer = jumpTimer;
    }
    private void Start()
    {
        jumpButton = KeybindingsManager.GetInstance.GetJumpButton;
        audioManager = AudioManager.GetInstance;
    }
    private void Update()
    {
        ManageAnimations();
        if (isGroundColliding())
        {
            coyoteTimer = 0; // grounded: reset the coyote window
            isOnGround = true;
        }
        else
        {
            coyoteTimer += 1; // airborne: count frames since leaving ground
            isOnGround = false;
        }
        if (Input.GetKeyDown(jumpButton))
        {
            inputBuffer.Enqueue(jumpButton); // buffer the press
            Invoke("RemoveAction", 0.1f); // buffered press expires after 0.1s
        }
        // Start a jump if grounded (or within the coyote window) and a
        // buffered press is waiting.
        if ((isOnGround || coyoteTimer < coyoteFrames) && inputBuffer.Count > 0)
        {
            if (inputBuffer.Peek() == jumpButton)
            {
                // peeks into buffer to check for jumpButton
                inputBuffer.Clear(); // avoid a double jump on the same frame
                Jump();
            }
        }
        // Releasing the button early cuts the rise (variable jump height).
        if (Input.GetKeyUp(jumpButton))
        {
            releaseJump = true;
        }
        if (startTimer)
        {
            // Rising phase timeout: force the jump to end after jumpTimer.
            timer -= Time.deltaTime;
            if (timer <= 0)
            {
                releaseJump = true;
            }
        }
        if (releaseJump)
        {
            StopJump();
        }
    }
    void ManageAnimations()
    {
        // Not tested in depth, so there may be edge cases, but it mostly works.
        if (isOnGround && Mathf.Abs(rb2D.velocity.y) <= walkingTreshold)
        {
            // The second condition guards against a box or platform under the
            // player mid-jump, so the animation does not look odd.
            if (Mathf.Abs(rb2D.velocity.x) >= walkingTreshold)
            {
                animator.Play("Walking");
                audioManager.WalkingSFX(AudioManager.CharacterSFX.Walking);
            }
            else animator.Play("Idle");
        }
        else
        {
            // Airborne: rising while the jump timer runs, falling otherwise.
            if (startTimer) animator.Play("Jumping_Rising");
            else animator.Play("Jumping_Falling");
        }
    }
    private bool isGroundColliding()
    {
        // Three downward raycasts (center, right, left) so standing on an
        // edge still counts as grounded; hits on "Floor" or "Box" qualify.
        RaycastHit2D rayHit1 = Physics2D.Raycast(transform.position, Vector3.down, raycastLenght);
        RaycastHit2D rayHit2 = Physics2D.Raycast(transform.position + raycastOffset, Vector3.down, raycastLenght);
        RaycastHit2D rayHit3 = Physics2D.Raycast(transform.position - raycastOffset, Vector3.down, raycastLenght);
        bool isCollidingCenter = rayHit1.collider && (rayHit1.collider.CompareTag("Floor") || rayHit1.collider.CompareTag("Box"));
        bool isCollidingRight = rayHit2.collider && (rayHit2.collider.gameObject.CompareTag("Floor") || rayHit2.collider.CompareTag("Box"));
        bool isCollidingLeft = rayHit3.collider && (rayHit3.collider.gameObject.CompareTag("Floor") || rayHit3.collider.CompareTag("Box"));
        // Editor-only visualization of the three probes.
        Debug.DrawRay(transform.position, Vector3.down * raycastLenght, Color.red);
        Debug.DrawRay(transform.position + raycastOffset, Vector3.down * raycastLenght, Color.red);
        Debug.DrawRay(transform.position - raycastOffset, Vector3.down * raycastLenght, Color.red);
        return isCollidingCenter || isCollidingLeft || isCollidingRight;
    }
    private void Jump()
    {
        // Begin the rising phase: zero gravity and velocity, then apply the
        // upward impulse; startTimer turns the variable-height countdown on.
        isOnGround = false;
        audioManager.PlayCharSFX(AudioManager.CharacterSFX.Jump);
        rb2D.gravityScale = 0;
        rb2D.velocity = Vector2.zero;
        rb2D.AddForce(Vector2.up * jumpForce);
        startTimer = true;
        // animator.Play(animations[(int)Animations.Jumping_Rising]);
    }
    private void StopJump()
    {
        // End the rising phase: restore gravity and reset the jump state.
        rb2D.gravityScale = gravityScale;
        releaseJump = false;
        timer = jumpTimer;
        startTimer = false;
        // animator.Play(animations[(int)Animations.Jumping_Falling]);
    }
    private void RemoveAction()
    {
        // Expire the oldest buffered press (scheduled via Invoke above).
        if (inputBuffer.Count > 0) inputBuffer.Dequeue();
    }
    private void OnDisable()
    {
        // Leave the rigidbody/animator in a sane state when switching away.
        rb2D.gravityScale = gravityScale;
        if (animator.isActiveAndEnabled) animator.Play("Idle");
    }
    #endregion
}<file_sep># project-phantom
A short platform/puzzle game
<file_sep>using UnityEngine;
using UnityEngine.SceneManagement;
public class MainMenu : MonoBehaviour
{
    private AudioManager audioManager;

    private void Start()
    {
        audioManager = AudioManager.GetInstance;
        // Fade the menu music in only on the actual "MainMenu" scene —
        // presumably this script is reused elsewhere (confirm with callers).
        string sceneName = SceneManager.GetActiveScene().name;
        if (sceneName == "MainMenu")
        {
            audioManager.FadeMusic(AudioManager.BackgroundMusic.MainMenuMusic);
        }
    }

    /// <summary>Button hook: toggles the pause state via the GameManager.</summary>
    public void PauseMenu()
    {
        GameManager.GetInstance.PauseGame();
    }

    /// <summary>Button hook: closes the application.</summary>
    public void QuitGame()
    {
        Application.Quit();
    }
}
<file_sep>using UnityEngine;
public class Portal : MonoBehaviour
{
    // Cached audio singleton used to start/stop the portal sound.
    private AudioManager audioManager;

    private void Start()
    {
        audioManager = AudioManager.GetInstance;
    }

    /// <summary>Plays the portal sound when the player gets close.</summary>
    public void AproachingPortal() => audioManager.PlaySFX(AudioManager.SFX.Portal);

    /// <summary>Fades the portal sound out when the player moves away.</summary>
    public void LeavingPortal() => audioManager.FadeOutSFX();
}
<file_sep>using UnityEngine;
using UnityEngine.SceneManagement;
using TMPro;
public class GameManager : MonoBehaviour
{
    // Scene-wide singleton: pause toggling (event + additive menu scene),
    // player death broadcasting, dialogue display and the interaction prompt.
    #region Variables
    #region Pause
    public delegate void OnGamePaused(bool paused);
    public event OnGamePaused onGamePaused; // subscribers react to pause/resume
    private bool gamePaused = false;
    private KeyCode pauseButton;
    #endregion
    #region Prompt
    public Vector3 vectorOff = new Vector3(0, 1, 0); // prompt offset above target
    public GameObject prompt;                        // "press key" indicator
    #endregion
    #region Death
    public delegate void OnDeath(float duration);
    public GameObject deathTransition;      // black overlay shown while dying
    public event OnDeath onDeath;           // subscribers run their death logic
    public float duration = 1.2f;           // length of the death sequence
    public float deathAnimExtension = 0.3f; // extra overlay time to hide camera snaps
    private AudioManager audioManager;
    #endregion
    #region Dialogues
    [Header("Dialogues")]
    public GameObject dialogueBox;
    public TMP_Text dialogueText;
    public Dialogue[] dialogues; // dialogue triggers wired up in Start
    public TMP_Text keyText;
    #endregion
    public CinemachineController cinemachineController;
    public Transform checkpoint;
    private static GameManager instance; // singleton backing field
    #endregion
    #region Methods
    void Awake()
    {
        // Classic Unity singleton: keep the first instance, destroy extras.
        if (instance != null && instance != this)
        {
            Destroy(this.gameObject);
        }
        else
        {
            instance = this;
        }
    }
    void Start()
    {
        pauseButton = KeybindingsManager.GetInstance.GetPauseButton;
        audioManager = AudioManager.GetInstance;
        // Subscribe to every dialogue trigger in the scene.
        if (dialogues != null)
        {
            foreach (Dialogue dialogue in dialogues)
            {
                dialogue.showDialogue += ShowDialogue;
            }
        }
    }
    /// <summary>
    /// Dialogue handler: shows the given text and, when a key asset is
    /// provided, a "Press X" hint; clears the hint otherwise.
    /// </summary>
    public void ShowDialogue(TextAsset text, TextAsset key)
    {
        dialogueText.text = text.text;
        if (key != null) keyText.text = "Press " + key.text;
        else keyText.text = "";
        dialogueBox.SetActive(true);
    }
    private void Update()
    {
        // Global pause hotkey.
        if (Input.GetKeyDown(pauseButton))
        {
            PauseGame();
        }
    }
    /// <summary>
    /// Toggles the pause flag, loads/unloads the pause-menu scene and
    /// notifies all onGamePaused subscribers.
    /// </summary>
    public void PauseGame()
    {
        gamePaused = !gamePaused;
        if (onGamePaused != null)
        {
            ManageMenu();
            onGamePaused(gamePaused);
        }
    }
    /// <summary>
    /// Broadcasts the player's death to all subscribers, plays the death
    /// sound and shows a brief black-screen transition.
    /// </summary>
    public void PlayerDeath()
    {
        if (onDeath != null)
        {
            onDeath(duration);
        }
        audioManager.PlayCharSFX(AudioManager.CharacterSFX.Death);
        DeathScreen(); // start the overlay
        Invoke("DeathScreen", duration + deathAnimExtension); // hide it again, slightly late to mask camera glitches
    }
    /// <summary>
    /// Toggles the black overlay covering the screen during the death
    /// sequence — simple, but it works.
    /// </summary>
    private void DeathScreen()
    {
        deathTransition.SetActive(!deathTransition.activeInHierarchy);
    }
    /// <summary>
    /// Toggles the interaction prompt and parks it just above the given
    /// target. Lives here for lack of a better home.
    /// </summary>
    public void MovePrompt(Transform objective)
    {
        prompt.transform.position = objective.transform.position + vectorOff;
        prompt.SetActive(!prompt.activeInHierarchy);
    }
    /// <summary>
    /// Loads the "GameMenu" scene additively, or unloads it if already open.
    /// </summary>
    void ManageMenu()
    {
        if (SceneManager.GetSceneByName("GameMenu").isLoaded)
        {
            SceneManager.UnloadSceneAsync("GameMenu");
        }
        else
        {
            SceneManager.LoadSceneAsync("GameMenu", LoadSceneMode.Additive);
        }
    }
    void OnDestroy()
    {
        // NOTE(review): `instance != this` is true for destroyed duplicates,
        // so this clears the static only in that case — confirm intent.
        if (instance != this)
        {
            instance = null;
        }
    }
    // Global accessor for the singleton instance.
    public static GameManager GetInstance
    {
        get { return instance; }
    }
    #endregion
}
<file_sep>using UnityEngine;
using System.Collections;
[RequireComponent(typeof(Rigidbody2D))]
public class PlayerMovement : MonoBehaviour
{
    // Shared movement for the human (horizontal only, gravity applies) and
    // the ghost (free 2D flight); also handles pause freezing and the small
    // death animation for playable characters.
    #region Variables
    #region Setup
    public SpriteRenderer spriteRenderer; // sprite flipped to face movement
    public float movementSpeed;
    public bool isPlayable; // participates in the death sequence
    public bool isGhost;    // ghost flies; human walks with gravity
    private AudioManager audioManager;
    private CapsuleCollider2D col2D; // only fetched for playable characters
    private Rigidbody2D rb2D;
    #endregion
    #region Movement
    private bool stopMovement = false; // true while paused or dying
    private Vector2 playerMomentum;    // velocity saved across a pause
    private float hMovement;
    private float vMovement;           // vertical input, ghost only
    #endregion
    #endregion
    #region Methods
    private void Awake()
    {
        rb2D = GetComponent<Rigidbody2D>();
        spriteRenderer = spriteRenderer.GetComponent<SpriteRenderer>();
        if (isPlayable)
        {
            col2D = GetComponent<CapsuleCollider2D>();
        }
    }
    private void Start()
    {
        audioManager = AudioManager.GetInstance;
        // React to global pause and death events.
        GameManager.GetInstance.onGamePaused += PauseResume;
        GameManager.GetInstance.onDeath += OnDeath;
    }
    private void Update()
    {
        if (stopMovement) return; // frozen (paused or dying)
        hMovement = Input.GetAxisRaw("Horizontal");
        FlipSprite();
        if (isGhost)
        {
            // Only the ghost can move vertically.
            vMovement = Input.GetAxisRaw("Vertical");
            if (hMovement != 0 || vMovement != 0)
            {
                audioManager.WalkingSFX(AudioManager.CharacterSFX.GhostFloating);
            }
        }
    }
    private void FixedUpdate()
    {
        if (stopMovement) return;
        // Separate paths so the human never gets vertical input velocity.
        if (isGhost)
        {
            GhostMovement();
        }
        else
        {
            HumanMovement();
        }
    }
    /// <summary>
    /// Death handler: runs the little death animation, but only for the
    /// currently active playable character.
    /// </summary>
    public void OnDeath(float duration)
    {
        if (isPlayable && this.enabled)
        {
            StartCoroutine(PlayerDeath(duration));
        }
    }
    /// <summary>
    /// Freezes the character, plays its death effect for `duration`
    /// seconds, then restores it (AnimationSetup toggles both ways).
    /// </summary>
    IEnumerator PlayerDeath(float duration)
    {
        AnimationSetup();
        yield return new WaitForSeconds(duration);
        AnimationSetup();
    }
    /// <summary>
    /// Toggles the death-animation state: freezes/unfreezes movement and
    /// the collider, fades the ghost or pops the human upward.
    /// </summary>
    void AnimationSetup()
    {
        stopMovement = !stopMovement;
        col2D.enabled = !col2D.enabled;
        if (stopMovement)
        {
            rb2D.constraints = RigidbodyConstraints2D.FreezePositionX | RigidbodyConstraints2D.FreezeRotation;
            // Per-character death effect (small enough to keep inline).
            if (isGhost) spriteRenderer.color = new Color(1, 1, 1, 0.2f); // fade out
            else rb2D.AddForce(Vector2.up * 600); // launch the human upward
        }
        else
        {
            rb2D.velocity = Vector3.zero;
            rb2D.constraints = RigidbodyConstraints2D.FreezeRotation;
            if (isGhost) spriteRenderer.color = Color.white; // undo the fade
        }
    }
    void FlipSprite()
    {
        // Face the sprite toward the current horizontal direction.
        if (hMovement < 0)
        {
            spriteRenderer.flipX = true;
        }
        else if (hMovement > 0)
        {
            spriteRenderer.flipX = false;
        }
    }
    void GhostMovement()
    {
        // Free flight: both axes scaled by movementSpeed.
        rb2D.velocity = new Vector2(hMovement, vMovement) * movementSpeed;
    }
    void HumanMovement()
    {
        // Walk: horizontal input only; keep whatever vertical velocity
        // gravity/jumping produced.
        rb2D.velocity = new Vector2(hMovement * movementSpeed, rb2D.velocity.y);
    }
    /// <summary>
    /// Pause handler: stores momentum and makes the body static on pause;
    /// restores both on resume.
    /// </summary>
    public void PauseResume(bool gamePaused)
    {
        if (gamePaused)
        {
            playerMomentum = rb2D.velocity;
            rb2D.bodyType = RigidbodyType2D.Static;
        }
        else
        {
            rb2D.bodyType = RigidbodyType2D.Dynamic;
            rb2D.velocity = playerMomentum;
        }
        stopMovement = gamePaused;
    }
    /// <summary>
    /// Kills the ghost's velocity; used when switching characters so it
    /// does not drift while inactive.
    /// </summary>
    void StopGhost()
    {
        rb2D.velocity = Vector2.zero;
    }
    private void OnDisable()
    {
        // Drop ghost momentum when control switches to the human.
        if (isGhost)
        {
            StopGhost();
        }
    }
    private void OnDestroy()
    {
        // Unsubscribe to avoid dangling delegates on the GameManager.
        GameManager.GetInstance.onGamePaused -= PauseResume;
        GameManager.GetInstance.onDeath -= OnDeath;
    }
    #endregion
}
<file_sep>using UnityEngine;
using System.Collections.Generic;
public class Dialogue : MonoBehaviour
{
public delegate void ShowDialogue(TextAsset text, TextAsset key);
public ShowDialogue showDialogue;
public ShowDialogue showKey;
public TextAsset key;
public TextAsset text;
public void ActivateDialogue()
{
showDialogue(text, key);
}
}
<file_sep>using UnityEngine;
public class Curtains : MonoBehaviour
{
#region Variables
public bool curtainState = false;
public Animator transition;
#endregion
#region Methods
/// <summary>
/// triggers transition
/// </summary>
public void OnGhostTrigger()
{
curtainState = !curtainState;
transition.SetBool("isGhostIn", curtainState);
}
#endregion
}
<file_sep>using UnityEngine;
using UnityEngine.Events;
using System.Collections;
public class Tutorial : MonoBehaviour
{
private int currentStep = 0;
public GameObject[] disableOnTutorial;
public UnityEvent[] tutorialEvents;
private KeyCode[] keyCode = new KeyCode[2]; // 0 = change char , 1 = jump button
private void Start()
{
tutorialEvents[0]?.Invoke();
keyCode[0] = KeybindingsManager.GetInstance.GetChangeCharactersButton;
keyCode[1] = KeybindingsManager.GetInstance.GetJumpButton;
}
public void PlayEvent(int step)
{
currentStep = step;
tutorialEvents[step]?.Invoke();
}
public void DisableOnTutorial()
{
foreach (GameObject disable in disableOnTutorial)
{
disable.SetActive(false);
}
}
public void WaitForKeyPress(int value)
{
KeyCode key = keyCode[value];
StartCoroutine(WaitingForKeyPress(key));
}
IEnumerator WaitingForKeyPress(KeyCode key)
{
while (!Input.GetKeyDown(key))
{
yield return null;
}
PlayEvent(currentStep + 1);
}
}
<file_sep>using UnityEngine;
public class GhostLimit : MonoBehaviour
{
// seguro se puede hacer un poco mas prolijo pero a estas alturas ya ni ganas 😂
public bool isHorizontal;
public float displacement = 0.2f;
public Vector3 vMovement = new Vector3(0, 0.2f, 0);
public Vector3 hMovement = new Vector3(0.2f, 0, 0);
public void Limit(Transform other)
{
if (isHorizontal)
{
if (other.position.y < transform.position.y)
{
other.position = other.position - vMovement;
}
else other.position = other.position + vMovement;
}
else
{
if (other.position.x < transform.position.x)
{
other.position = other.position - hMovement;
}
else other.position = other.position + hMovement;
}
}
}
<file_sep>using UnityEngine;
public class HorizontalMovement : MonoBehaviour
{
#region Variables
public int direction = 1;
public float movementSpeed = 5f;
private AudioManager audioManager;
private bool stopMovement = false;
#endregion
#region Methods
private void Start()
{
audioManager = AudioManager.GetInstance;
GameManager.GetInstance.onGamePaused += PauseResume;
}
// aca antes era con un pingpong pero al usar Time.time daba muchos problemas
// asi que en los ultimos dias lo cambie a como esta ahora, por eso esta un poco desprolijo
private void Update()
{
if (stopMovement) return;
transform.position += direction * Vector3.right * Time.deltaTime * movementSpeed;
}
public void ChangeDirection()
{
direction *= -1;
}
private void PauseResume(bool gamePaused)
{
stopMovement = gamePaused;
}
public void ApproachingZombie()
{
audioManager.PlaySFX(AudioManager.SFX.Zombie);
}
public void LeavingZombie()
{
audioManager.FadeOutSFX();
}
private void OnDestroy()
{
GameManager.GetInstance.onGamePaused -= PauseResume;
}
#endregion
}
<file_sep>using UnityEngine;
using Cinemachine;
public class CinemachineController : MonoBehaviour
{
public CinemachineVirtualCamera myCinemachine;
public void ChangeFollow(Transform objective)
{
myCinemachine.m_Follow = objective;
}
}
<file_sep>using UnityEngine;
using UnityEngine.Audio;
using System.Collections;
public class AudioManager : MonoBehaviour
{
#region Enum
public enum BackgroundMusic
{
HumanMusic,
GhostMusic,
MainMenuMusic,
GameOver
}
public enum CharacterSFX
{
Jump,
Walking,
Death,
GhostFloating
}
public enum SFX
{
BridgeStart,
BridgeStop,
Zombie,
PickUp,
Portal
}
#endregion
#region Variables
#region Audio
public AudioSource characterSFXSource;
public AudioClip[] CharacterSFXClips;
public AudioSource musicSource;
public AudioClip[] musicClips;
public AudioSource SFXSource;
public AudioClip[] SFXClips;
public AudioMixer mixer;
float volumeRange = 0.1f;
float pitchRange = 0.2f;
float initialVolume;
#endregion
private static AudioManager instance;
#endregion
#region Methods
private void Awake()
{
if (instance != null && instance != this)
{
Destroy(this.gameObject);
}
else
{
instance = this;
}
DontDestroyOnLoad(gameObject);
}
private void Start()
{
// when starting it will awlays play this music
// because you will always start with the human player
mixer.GetFloat("SFXVolume", out initialVolume);
}
/// <summary>
/// checks from a list to play music from
/// </summary>
public void PlayMusic(BackgroundMusic musicClip)
{
switch (musicClip)
{
case BackgroundMusic.HumanMusic:
musicSource.clip = musicClips[(int)BackgroundMusic.HumanMusic];
break;
case BackgroundMusic.GhostMusic:
musicSource.clip = musicClips[(int)BackgroundMusic.GhostMusic];
break;
case BackgroundMusic.MainMenuMusic:
musicSource.clip = musicClips[(int)BackgroundMusic.MainMenuMusic];
break;
case BackgroundMusic.GameOver:
musicSource.clip = musicClips[(int)BackgroundMusic.GameOver];
break;
}
musicSource.Play();
}
/// <summary>
/// fades out current music and plays the next one
/// </summary>
public void FadeMusic(BackgroundMusic musicClip)
{
StopAllCoroutines(); // stops fade in/out, it helps when spamming this
StartCoroutine(FadeOut(musicClip));
}
/// <summary>
/// smoothly lowers music
/// </summary>
IEnumerator FadeOut(BackgroundMusic musicClip)
{
float duration = 0.01f;
float lowerStep = 2f;
int lowestVolume = -34;
float musicVolume;
mixer.GetFloat("MusicVolume", out musicVolume);
while (musicVolume > lowestVolume)
{
mixer.SetFloat("MusicVolume", musicVolume -= lowerStep);
yield return new WaitForSeconds(duration);
}
PlayMusic(musicClip);
StartCoroutine(FadeIn());
}
/// <summary>
/// smoothly turns music up
/// </summary>
IEnumerator FadeIn()
{
float duration = 0.05f;
float upStep = 1f;
int highestVolume = -28;
float musicVolume;
mixer.GetFloat("MusicVolume", out musicVolume);
while (musicVolume < highestVolume)
{
mixer.SetFloat("MusicVolume", musicVolume += upStep);
yield return new WaitForSeconds(duration);
}
}
/// <summary>
/// plays a sfx
/// </summary>
public void PlayCharSFX(CharacterSFX sfxClip)
{
switch (sfxClip)
{
case CharacterSFX.Jump:
characterSFXSource.clip = CharacterSFXClips[(int)CharacterSFX.Jump];
break;
case CharacterSFX.Death:
characterSFXSource.clip = CharacterSFXClips[(int)CharacterSFX.Death];
break;
}
float startingVolume = 0.5f;
float startingPitch = 0.9f;
RandomizeSound(startingVolume, startingPitch);
}
/// <summary>
/// checks if this is source is not playing
/// and plays the walking sound
/// </summary>
public void WalkingSFX(CharacterSFX sfxClip)
{
if (!characterSFXSource.isPlaying)
{
float startingVolume = 0.4f;
float startingPitch = 0.9f;
switch (sfxClip)
{
case CharacterSFX.Walking:
characterSFXSource.clip = CharacterSFXClips[(int)CharacterSFX.Walking];
break;
case CharacterSFX.GhostFloating:
characterSFXSource.clip = CharacterSFXClips[(int)CharacterSFX.GhostFloating];
break;
}
RandomizeSound(startingVolume, startingPitch);
}
}
/// <summary>
/// Plays sounds on SFX Source
/// </summary>
public void PlaySFX(SFX SFXClip)
{
switch (SFXClip)
{
case SFX.BridgeStart:
SFXSource.clip = SFXClips[(int)SFX.BridgeStart];
break;
case SFX.BridgeStop:
SFXSource.clip = SFXClips[(int)SFX.BridgeStop];
break;
case SFX.Zombie:
SFXSource.clip = SFXClips[(int)SFX.Zombie];
float zombieVol = -10; // zombie sfx es re fuerte, aca lo bajamos un poco
mixer.SetFloat("SFXVolume", zombieVol);
SFXSource.loop = true;
break;
case SFX.PickUp:
SFXSource.clip = SFXClips[(int)SFX.PickUp];
break;
case SFX.Portal:
SFXSource.clip = SFXClips[(int)SFX.Portal];
float portalVol = -15; // portal sfx es re fuerte, aca lo bajamos un poco
mixer.SetFloat("SFXVolume", portalVol);
SFXSource.loop = true;
// esta repetido con lo de arriba pero por poco tiempo se queda asi
break;
}
SFXSource.Play();
}
/// <summary>
/// Fade out for sfx
/// </summary>
public void FadeOutSFX()
{
StopAllCoroutines();
StartCoroutine(FadingOutSFX());
}
/// <summary>
/// coroutine for fadeoutsfx
/// and stops looping
/// </summary>
IEnumerator FadingOutSFX()
{
float duration = 0.01f;
float lowerStep = 2f;
int lowestVolume = -30;
float sfxVolume;
mixer.GetFloat("SFXVolume", out sfxVolume);
while (sfxVolume > lowestVolume)
{
mixer.SetFloat("SFXVolume", sfxVolume -= lowerStep);
yield return new WaitForSeconds(duration);
}
//resets sfx parameters
SFXSource.loop = false;
mixer.SetFloat("SFXVolume", initialVolume);
SFXSource.Pause();
}
/// <summary>
/// varies volume and pitch of a sound to make it
/// less monotonous
/// </summary>
void RandomizeSound(float volume, float pitch)
{
characterSFXSource.volume = GetRandom(volume, volumeRange);
characterSFXSource.pitch = GetRandom(pitch, pitchRange);
characterSFXSource.Play();
}
/// <summary>
/// Random.Range a little smaller to make it easier to read
/// </summary>
float GetRandom(float value, float range)
{
return Random.Range(value - range, value + range);
}
public static AudioManager GetInstance
{
get { return instance; }
}
#endregion
}
<file_sep>using UnityEngine;
public class MoveObject : MonoBehaviour
{
#region Variables
public bool isMoving = false;
public Transform objective;
public Transform finalPos;
public bool playMovSound;
public int speed = 5;
AudioManager audioManager;
#endregion
#region Methods
private void Start()
{
audioManager = AudioManager.GetInstance;
}
private void Update()
{
if (isMoving)
{
MoveObjective();
}
}
/// <summary>
/// changes isMoving state
/// </summary>
public void IsMoving()
{
isMoving = !isMoving;
if (playMovSound) audioManager.PlaySFX(AudioManager.SFX.BridgeStart);
}
/// <summary>
/// moves objective to final pos
/// </summary>
public void MoveObjective()
{
Vector3 vector3 = Vector3.MoveTowards(objective.position, finalPos.position, speed * Time.deltaTime);
objective.position = vector3;
// stops excecuting if objective is on place
if (objective.position == finalPos.position)
{
isMoving = false;
if (playMovSound) audioManager.PlaySFX(AudioManager.SFX.BridgeStop);
}
}
/// <summary>
/// instantly moves objective to final position
/// </summary>
public void InstantMove()
{
objective.position = finalPos.position;
}
public void ForceStopSFX()
{
audioManager.SFXSource.Stop();
}
#endregion
}
<file_sep>using UnityEngine;
using UnityEngine.Events;
public class OnPlayerEnter : MonoBehaviour
{
#region Variables
public UnityEvent onPlayerInside;
public UnityEvent onPlayerEnter;
public UnityEvent onPlayerExit;
public UnityEvent onGhostEnter;
public UnityEvent onGhostExit;
private bool isPlayerInside = false;
#endregion
#region Methods
public void PlayerInside()
{
isPlayerInside = !isPlayerInside;
}
private void Update()
{
if (isPlayerInside)
{
onPlayerInside?.Invoke();
}
}
private void OnTriggerEnter2D(Collider2D other)
{
if (IsHuman(other.gameObject))
{
onPlayerEnter?.Invoke();
}
else if (IsGhost(other.gameObject))
{
onGhostEnter?.Invoke();
}
}
private void OnTriggerExit2D(Collider2D other)
{
if (IsHuman(other.gameObject))
{
onPlayerExit?.Invoke();
}
else if (IsGhost(other.gameObject))
{
onGhostExit?.Invoke();
}
}
private void OnCollisionEnter2D(Collision2D other)
{
if (IsHuman(other.gameObject))
{
onPlayerEnter?.Invoke();
}
}
private void OnCollisionExit2D(Collision2D other)
{
if (IsHuman(other.gameObject))
{
onPlayerExit?.Invoke();
}
}
private bool IsHuman(GameObject other)
{
return other.CompareTag("HumanPlayer");
}
private bool IsGhost(GameObject other)
{
return other.CompareTag("GhostPlayer");
}
#endregion
}
<file_sep>using UnityEngine;
public class KeybindingsManager : MonoBehaviour
{
// manages keybindings
#region Variables
#region KeyCodes
private KeyCode changeCharactersButton = KeyCode.LeftShift;
private KeyCode pauseButton = KeyCode.Escape;
private KeyCode jumpButton = KeyCode.Space;
#endregion
private static KeybindingsManager instance;
#endregion
#region Methods
void Awake()
{
if (instance != null && instance != this)
{
Destroy(this.gameObject);
}
else
{
instance = this;
}
}
void OnDestroy()
{
if (instance == this)
{
instance = null;
}
}
// set & get
public static KeybindingsManager GetInstance
{
get { return instance; }
}
public KeyCode GetJumpButton
{
get { return jumpButton; }
}
public KeyCode GetChangeCharactersButton
{
get { return changeCharactersButton; }
}
public KeyCode GetPauseButton
{
get { return pauseButton; }
}
#endregion
}<file_sep>using UnityEngine;
using UnityEngine.SceneManagement;
public class SceneManagement : MonoBehaviour
{
private AudioManager audioManager;
/// <summary>
/// loads scene
/// </summary>
private void Start()
{
audioManager = AudioManager.GetInstance;
}
public void LoadScene(string value)
{
if (value == "GameOver") audioManager.FadeMusic(AudioManager.BackgroundMusic.GameOver); // estas ultimas cosas son tan shady pero bue 😎
SceneManager.LoadScene(value);
}
/// <summary>
/// checks if player has a save
/// if not starts from level 0
/// </summary>
public void PlayGame()
{
string value = "level_000"; // default : first level
if (PlayerPrefs.HasKey("scene"))
{
value = PlayerPrefs.GetString("scene");
}
audioManager.FadeMusic(AudioManager.BackgroundMusic.HumanMusic);
LoadScene(value);
}
}
<file_sep>using UnityEngine;
public class CheckpointManager : MonoBehaviour
{
private Transform checkpoint;
#region Methods
private void Start()
{
checkpoint = GameManager.GetInstance.checkpoint;
if (PlayerPrefs.HasKey("checkpointX") && PlayerPrefs.HasKey("checkpointY"))
{
// sets initial position for checkpoint if player has prefs
Vector3 initialPosition;
initialPosition = new Vector3(PlayerPrefs.GetFloat("checkpointX"), PlayerPrefs.GetFloat("checkpointY"), checkpoint.position.z);
checkpoint.position = initialPosition;
}
}
public void MoveCheckpoint(Transform destination)
{
// moves checkpoint
checkpoint.position = destination.position;
}
#endregion
}
<file_sep>using UnityEngine;
using UnityEngine.Events;
public class OnZombieEnter : MonoBehaviour
{
// lo hice en un script aparte al OnPlayerEnter para separarlo
// pero se puede hacer en ese script tambien
public UnityEvent onZombieEnter;
private void OnTriggerEnter2D(Collider2D other)
{
if (other.gameObject.CompareTag("Zombie"))
{
onZombieEnter?.Invoke();
}
}
}
<file_sep>using UnityEngine;
using System.Collections;
public class PlayerManager : MonoBehaviour
{
#region Enumerators
public enum Players
{
HumanPlayer = 0,
GhostPlayer = 1
}
#endregion
#region Variables
#region Setup
public bool onTutorial = false;
private CinemachineController myCinemachine;
private KeyCode changeCharactersButton;
private KeyCode actionButton;
#endregion
#region PlayerSetup
public PlayerMovement[] players = new PlayerMovement[2];
public Transform[] playersPosition = new Transform[2];
public SpriteRenderer ghostSprite;
public Collider2D ghostHitbox;
public float followSpeed = 5f;
public PlayerJump humanJump;
public GameObject ghost; // for possessing objects
private Vector2 followHumanOffset = new Vector2(1, 0.5f);
private bool isCurrentHuman = true;
#endregion
private AudioManager audioManager;
private Transform checkpoint;
#endregion
#region Methods
private void Start()
{
changeCharactersButton = KeybindingsManager.GetInstance.GetChangeCharactersButton;
actionButton = KeybindingsManager.GetInstance.GetJumpButton;
myCinemachine = GameManager.GetInstance.cinemachineController;
checkpoint = GameManager.GetInstance.checkpoint;
audioManager = AudioManager.GetInstance;
if (!onTutorial) MovePlayers(checkpoint); // sets initial position
GameManager.GetInstance.onDeath += OnDeath;
}
private void Update()
{
if (Input.GetKeyDown(changeCharactersButton))
{
ChangePlayerControl();
}
}
private void LateUpdate()
{
if (isCurrentHuman)
{
FollowHumanPlayer();
}
}
public void MovePlayers(Transform destination)
{
// if you go through a door while possessing this makes the ghost move too
// because it was inactive while possessing
if (!ghost.activeSelf)
{
DisableGhost();
}
// moves players back to destination (usually checkpoint)
foreach (Transform player in playersPosition)
{
player.position = destination.position;
}
}
/// <summary>
/// resets player after a while
/// on hit
/// </summary>
public void OnDeath(float duration)
{
StartCoroutine(DeathTransition(duration));
}
/// <summary>
/// corrutine for OnDeath();
/// </summary>
IEnumerator DeathTransition(float duration)
{
yield return new WaitForSeconds(duration);
MovePlayers(checkpoint); // moves player to checkpoint
}
public void DisableGhost()
{
// activates or desactivates ghost
// for possessing (most of the time)
ghost.SetActive(!ghost.activeSelf);
}
public void ChangePlayerControl()
{
// stops/starts movement script in player
// and changes isCurrentHuman
// checks if ghost is active in case you are
// possessing
if (ghost.activeInHierarchy)
{
foreach (PlayerMovement player in players)
{
player.enabled = !player.enabled; // stops movement script
// changes follow in cinemachine if cinemachine is avalible
if (player.enabled && (myCinemachine != null))
{
myCinemachine.ChangeFollow(player.transform);
}
}
humanJump.enabled = !humanJump.enabled; // stops jumping script
isCurrentHuman = !isCurrentHuman;
ChangeMusic();
// enables/disables hitbox in case ghost was inside
// a box (or something else) when changing controll to it
ghostHitbox.enabled = !isCurrentHuman;
}
}
void ChangeMusic()
{
if (isCurrentHuman) audioManager.FadeMusic(AudioManager.BackgroundMusic.HumanMusic);
else audioManager.FadeMusic(AudioManager.BackgroundMusic.GhostMusic);
}
void FollowHumanPlayer()
{
/* makes ghost follow player with a little delay */
Transform humanPosition = playersPosition[(int)Players.HumanPlayer];
Transform ghostPosition = playersPosition[(int)Players.GhostPlayer];
float t = Time.deltaTime * followSpeed;
int relativeDirection = 1; // no supe que otro nombre ponerle pero creo que este sirve
if (humanPosition.position.x >= ghostPosition.position.x)
{
relativeDirection *= -1;
ghostSprite.flipX = false;
}
else ghostSprite.flipX = true;
float xLerp = Mathf.Lerp(ghostPosition.position.x, humanPosition.position.x + (relativeDirection * followHumanOffset.x), t);
float yLerp = Mathf.Lerp(ghostPosition.position.y, humanPosition.position.y + followHumanOffset.y, t);
ghostPosition.position = new Vector3(xLerp, yLerp, ghostPosition.position.z);
}
private void OnDestroy()
{
GameManager.GetInstance.onDeath -= OnDeath;
}
#endregion
}
<file_sep>using UnityEngine;
using System.Collections;
public class LevelManager : MonoBehaviour
{
#region Variables
public CameraManager cameraManager;
public GameObject[] levelList;
#endregion
#region Methods
private void Start()
{
int initialLevel = 0;
if (PlayerPrefs.HasKey("activeLevel"))
{
initialLevel = PlayerPrefs.GetInt("activeLevel");
cameraManager.SetCamera(levelList[initialLevel].transform); // move camera to level pos
}
LoadLevel(initialLevel);
}
public void LoadLevel(int nextLevel)
{
// loads level
levelList[nextLevel].SetActive(true);
}
public void UnloadLevel()
{
// unloads previous level
// esto funciona mas generico, si una puerta secreta
// te hace avanzar al final del juego podrias usarlo
// a diferencia de como lo teniamos antes que
// solo descargaba el activeLevel - 1
int previousLevel = PlayerPrefs.GetInt("activeLevel");
StartCoroutine(UnloadingLevel(previousLevel));
}
IEnumerator UnloadingLevel(int previousLevel)
{
// waits a while before unloading previous level
yield return new WaitForSeconds(3f);
levelList[previousLevel].SetActive(false);
}
#endregion
}
<file_sep>using UnityEngine;
using System.Collections;
public class CameraManager : MonoBehaviour
{
#region Variables
public int transitionSpeed = 20;
public Transform cameraPos;
#endregion
#region Methods
public void SetCamera(Transform destination)
{
// instantly moves camera
cameraPos.position = SetCameraPosition(destination.position);
}
public void MoveCamera(Transform nextPosition)
{
// smoothly moves camera
StartCoroutine(MovingCamera(nextPosition));
}
IEnumerator MovingCamera(Transform nextPosition)
{
// creates objective for camera
Vector3 destination = SetCameraPosition(nextPosition.position);
while (IsCameraOnPlace(destination))
{
// moves camera if its position is different from destination
cameraPos.position = Vector3.MoveTowards(cameraPos.position, destination, transitionSpeed * Time.deltaTime);
yield return null;
}
}
Vector3 SetCameraPosition(Vector3 position)
{
return new Vector3(position.x, position.y, cameraPos.position.z);
}
private bool IsCameraOnPlace(Vector3 destination)
{
// false while camera is moving
return cameraPos.position.x != destination.x || cameraPos.position.y != destination.y;
}
#endregion
}
<file_sep>using UnityEngine;
using System.Collections;
[RequireComponent(typeof(BoxCollider2D))]
public class Platforms : MonoBehaviour
{
#region Variables
public PlayerMovement playerMovement; // temp fix
public float duration = 0.3f;
private bool isPlayerOn = false;
private BoxCollider2D col2d;
#endregion
#region Methods
private void Awake()
{
col2d = GetComponent<BoxCollider2D>();
}
private void Update()
{
// last part is a temp fix may fix later
if (isPlayerOn && Input.GetKeyDown(KeyCode.S) && playerMovement.isActiveAndEnabled)
{
StartCoroutine(DisableCollider());
}
}
public void PlayerIsOn()
{
isPlayerOn = !isPlayerOn;
}
/// <summary>
/// disables collider to jump down
/// </summary>
IEnumerator DisableCollider()
{
col2d.enabled = false;
yield return new WaitForSeconds(0.3f);
col2d.enabled = true;
}
#endregion
}
| 61f12b1daedc2b0b07a6fd4ccdc3d598d58a6023 | [
"Markdown",
"C#"
] | 28 | C# | rmaha14/unity-project-phantom | c57611385594728aa95d20400049f49ff0ff86d8 | e4d66017262fb41379f0ac7e27f80c6b1f9830fe |
refs/heads/master | <file_sep>package com.company;
public class EqualsAndHCode {
public static void equalsAndHC() {
Object object = new Object();
int hCode;
hCode = object.hashCode();
System.out.println(hCode);
}
}
<file_sep>package com.company;
public class ExceptionTest {
public static void test() throws Exception {
try {
throw new Exception("A");
} finally {
throw new Exception("B");
}
}
public static void ExceptionTest(String[] args) {
try {
ExceptionTest.test();
} catch (Exception e) {
System.out.println(e.getMessage());
}
}
}
| 9df1c17903c339cf94fe535207bafda9903282d2 | [
"Java"
] | 2 | Java | IvanIschenko/StudyProjectForMyStudents | 1e2c3f1d33a688985269abec6735bba6fd62ea66 | 109a2d3807531f5c2a460294e929ab49c2d395b9 |
refs/heads/main | <file_sep>import { render } from 'react-dom'
import { configureStore, getDefaultMiddleware } from '@reduxjs/toolkit'
import { createEpicMiddleware } from 'redux-observable'
import { Provider } from 'react-redux'
import suggestionSlice from './suggestionSlice'
import rootEpics from './suggestionEpic'
import App from './App'
const epicMiddleware = createEpicMiddleware()
const store = configureStore({
reducer: {
suggestions: suggestionSlice
},
middleware: (getDefaultMiddleware) =>
getDefaultMiddleware().concat(epicMiddleware)
})
epicMiddleware.run(rootEpics)
const Root = () => (
<Provider store={store}>
<App />
</Provider>
)
render(<Root />, document.getElementById('root'))
<file_sep>import { useEffect, useRef, useState } from 'react'
import { useDispatch } from 'react-redux'
export default function useSearch(keyword) {
const [search, setSearch] = useState(keyword)
const firstRender = useRef(true)
const dispatch = useDispatch()
useEffect(() => {
if (firstRender.current) {
firstRender.current = false
return
}
dispatch({
type: 'suggestion/search/request',
payload: search
})
}, [search, dispatch])
return [search, setSearch]
}
| 010cd9aab1b7d7e99158058cb7f61378dbe15a42 | [
"JavaScript"
] | 2 | JavaScript | tuandungbrse/redux-observable-search | b2580679b9a08283b159dfac1d64facb828884d6 | dbd28f8398b9635b4deb5b4c42b27848834a4976 |
refs/heads/main | <file_sep># LibraryApp
This app is a training app from "learn.codewithchris.com".
Remade it myself for pracicing Swift.
<file_sep>//
// BookContentView.swift
// LibraryApp
//
// Created by JH on 2021-08-07.
//
import SwiftUI
struct BookContentView: View {
@EnvironmentObject var model: BookModel
@State private var page = 0
var book: Book
var body: some View {
TabView(selection: $page) {
ForEach(book.content.indices) { index in
VStack(alignment: .center) {
Text(book.content[index])
.tag(index)
Spacer()
Text("\(page + 1)")
}
}
.padding()
}
.tabViewStyle(PageTabViewStyle())
.indexViewStyle(PageIndexViewStyle(backgroundDisplayMode: .never))
.onChange(of: page, perform: { value in
model.updatePage(forId: book.id, page: page)
})
.onAppear {
//page = book.currentPage
DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) {
page = book.currentPage
}
}
}
}
struct BookContentView_Previews: PreviewProvider {
static var previews: some View {
BookContentView(book: Book())
.environmentObject(BookModel())
}
}
<file_sep>//
// BookListView.swift
// LibraryApp
//
// Created by JH on 2021-08-07.
//
import SwiftUI
struct BookListView: View {
@EnvironmentObject var model: BookModel
var body: some View {
NavigationView {
ScrollView {
LazyVStack(alignment: .leading, spacing: 30) {
ForEach(model.books) { book in
NavigationLink(destination: BookDetailView(book: book)) {
BookPreview(book: book)
.padding([.leading, .trailing], 20)
}
// The NavigationLink is a known workaround for an issue identified in iOS 14.5 where the navigation link pops the view as per these forum threads:
// https://developer.apple.com/forums/thread/677333
// https://forums.swift.org/t/14-5-beta3-navigationlink-unexpected-pop/45279
NavigationLink(destination: EmptyView()) {
EmptyView()
}
}
}
.padding(.top)
}
.navigationTitle("My Library")
}
}
}
struct BookListView_Previews: PreviewProvider {
static var previews: some View {
BookListView()
.environmentObject(BookModel())
}
}
<file_sep>//
// LibraryApp.swift
// LibraryApp
//
// Created by JH on 2021-08-07.
//
import SwiftUI
@main
struct LibraryApp: App {
var body: some Scene {
WindowGroup {
BookListView()
.environmentObject(BookModel())
}
}
}
<file_sep>//
// BookModel.swift
// LibraryApp
//
// Created by JH on 2021-08-07.
//
import Foundation
class BookModel : ObservableObject {
@Published var books = [Book]()
init() {
self.books = getLocalJson()
}
/// Retrieve all record data form a local JSON file with name `fileName` and extension `fileExtension`..
func getLocalJson(_ fileName: String = "Data", fileExtension: String = "json") -> [Book] {
var books = [Book]()
// Get link to data file
let url = Bundle.main.url(forResource: fileName, withExtension: fileExtension)
guard url != nil else {
print("Could not retrieve category data: \(fileName).\(fileExtension) not found.")
return books
}
do {
// Decode the data and return it
let data = try Data(contentsOf: url!)
books = try JSONDecoder().decode([Book].self, from: data)
return books
} catch {
print("Error retrieving category data: \(error.localizedDescription)")
}
return books
}
/// Update the specified book's rating. Does nothing if `forId` is invalid.
func updateRating(forId: Int, rating: Int) {
if let index = books.firstIndex(where: { $0.id == forId }) {
books[index].rating = rating
}
}
/// Update the specified book's favourite status. Does nothing if `forId` is invalid.
func updateFavourite(forId: Int) {
if let index = books.firstIndex(where: { $0.id == forId }) {
books[index].isFavourite.toggle()
}
}
/// Update the specified book's current page. Does nothing if `forId` is invalid.
func updatePage(forId: Int, page: Int) {
if let index = books.firstIndex(where: { $0.id == forId }) {
books[index].currentPage = page
}
}
}
<file_sep>//
// BookDetailView.swift
// LibraryApp
//
// Created by JH on 2021-08-07.
//
import SwiftUI
struct BookDetailView: View {
@EnvironmentObject var model: BookModel
@State private var rating = 2
var book: Book
var body: some View {
VStack(spacing: 20) {
NavigationLink(destination: BookContentView(book: book)) {
VStack {
Text("Read Now!")
.font(.title)
.accentColor(.black)
Image("cover\(book.id)")
.resizable()
.scaledToFit()
}
}
.padding()
Text("Mark for later!")
.font(.headline)
Button(action: { model.updateFavourite(forId: book.id) }) {
Image(systemName: book.isFavourite ? "star.fill" : "star")
.resizable()
.frame(width: 28, height: 28)
.foregroundColor(.yellow)
}
Text("Rate \(book.title)")
.font(.headline)
Picker("Rate this book!", selection: $rating) {
ForEach(1..<6) { index in
Text("\(index)")
.tag(index)
}
}
.pickerStyle(SegmentedPickerStyle())
.padding([.leading, .trailing, .bottom], 60)
.onChange(of: rating, perform: { value in
model.updateRating(forId: book.id, rating: rating)
})
}
.onAppear { rating = book.rating }
.navigationBarTitle("\(book.title)")
}
}
struct BookDetailView_Previews: PreviewProvider {
static var previews: some View {
BookDetailView(book:Book())
.environmentObject(BookModel())
}
}
| ebc3f7c95a4407bef7cebc51f32cac0f6d6a827a | [
"Markdown",
"Swift"
] | 6 | Markdown | jc1155/LibraryApp | 1a192c8d882c83bc8830b21caabde702a6e93d25 | b64b5ea61c22d87330e65a1bdc5d7da1cb018b63 |
refs/heads/master | <repo_name>kushagrasharma/cell-slide-analysis<file_sep>/get_puncta_stats.py
#@String directory
from ij import IJ, WindowManager
from ij.plugin import Duplicator, Thresholder
from loci.plugins import BF
import os
def get_image_paths():
files = os.listdir(directory)
files = [f.strip() for f in files]
files = [f for f in files if f[-4:]=='.oib']
return files
def run_analysis(img_name):
path = directory + img_name
results_dir = directory + "Results/"
if not os.path.exists(results_dir):
os.makedirs(results_dir)
img = BF.openImagePlus(path)[0]
imgDup = img.duplicate()
#imgDup = img.Duplicator()
imgDup.show()
IJ.setAutoThreshold(imgDup, "Default dark no-reset")
# IJ.run(imgDup, "Threshold...", "")
# IJ.run(imgDup, "Convert to Mask", "method=Default background=Dark calculate black")
# IJ.run(imgDup, run("Close-", "stack"))
IJ.run(imgDup, "Set Measurements...", "area mean min display decimal=3")
IJ.run("Analyze Particles...", "size=2-Infinity circularity=0.15-1.00 display clear stack")
IJ.saveAs("Results", results_dir + img_name + "_results.csv")
IJ.selectWindow("Results")
IJ.run("Close")
img.close()
imgDup.close()
if __name__ in ['__builtin__','__main__']:
if directory[-1] != '/':
directory += '/'
imagePaths = get_image_paths()
for path in imagePaths:
run_analysis(path)
IJ.run("Quit")
<file_sep>/README.md
USAGE:
fiji --ij2 --run YOURPATH/cell-slide-analysis/get_puncta_stats.py 'directory="/PATH/TO/IMAGES/IMAGEDIR"'
| 7c50838dbdb2913d5d7b4ebb40f7a0430675467f | [
"Markdown",
"Python"
] | 2 | Python | kushagrasharma/cell-slide-analysis | 9d2c183c247e60b0a3b9f313e192541a82f5e2e3 | 5ec3182a3f74d3422e3a129947f672f004986d39 |
refs/heads/master | <file_sep>class ArticlesController < ApplicationController
before_filter :paginate_articles, :only => :index
load_resource :find_by => :slug
load_and_authorize_resource
def index
end
def show
end
def new
end
def edit
end
def create
@article.author = current_user
if @article.save
flash[:success] = 'Successfully published an article.'
redirect_to @article
else
render 'new'
end
end
def update
params[:article][:category_ids] ||= []
if @article.update_attributes(article_params)
flash[:success] = 'Article has been updated!'
redirect_to @article
else
render :action => 'edit'
end
end
def destroy
@article.destroy
flash[:notice] = 'Successfully destroyed article'
redirect_to articles_path
end
private
def paginate_articles
query = if user_signed_in? && (current_user.role?(:admin) ||
current_user.role?(:author))
Article.all
else
Article.where(:draft => false)
end
@articles = query.paginate(:page => params[:page])
end
def article_params
params.require(:article).permit(:title, :draft, :content, { :category_ids => []}, :slug, :thumbnail, :opengraph_image, :summary)
end
end
<file_sep># Controller for static pages plus the site-wide Atom feed and the
# combined iCalendar download.
class StaticPagesController < ApplicationController
  # Landing page: latest published articles plus up to three events.
  # Prefers upcoming events and falls back to finished ones.
  def home
    @articles = Article.where(:draft => false).limit(3)
    if Event.upcoming.include_subevents.count > 0
      @events = Event.upcoming.include_subevents.limit(3)
      @events_type = "Upcoming Events"
    elsif Event.finished.include_subevents.count > 0
      @events = Event.finished.include_subevents.limit(3)
      @events_type = "Previous Events"
    else
      @events = []
      @events_type = "No Events Yet"
    end
  end

  def contact
  end

  # Atom feed combining top-level events and published articles, newest
  # first. The .rss format permanently redirects to the .atom URL.
  def feed
    @title = "Developers Connect Philippines"
    @entries =
      (Event.include_subevents.all + Article.where(draft: false).all)
      .sort_by(&:updated_at).reverse
    # Feed-level timestamp: newest entry, or "now" when there are none.
    @updated = unless @entries.empty?
                 @entries.first.updated_at
               else
                 DateTime.now
               end
    respond_to do |format|
      format.atom { render :layout => false }
      format.rss { redirect_to feed_path(:format => :atom), :status => :moved_permanently }
    end
  end

  # iCalendar (.ics) download containing every event.
  def calendar
    cal = Icalendar::Calendar.new
    cal.timezone { |tz| tz.tzid = "Asia/Manila" }
    cal.x_wr_calname = "DevCon Calendar of Events"
    Event.all.each do |event_temp|
      url = event_url(event_temp)
      cal.event do |e|
        e.dtstart = event_temp.start_at.strftime("%Y%m%dT%H%M00")
        e.dtend = event_temp.end_at.strftime("%Y%m%dT%H%M00")
        e.dtstamp = event_temp.updated_at.strftime("%Y%m%dT%H%M00")
        # NOTE(review): this line contains an anonymisation placeholder
        # (<EMAIL>) from the source dump and is not valid Ruby as-is;
        # the original presumably built an "<id>@host"-style UID.
        # Confirm against upstream history before shipping.
        e.uid = "#{event_temp.<EMAIL>"
        e.summary = event_temp.name
        e.description = (event_temp.summary || "") + " " + url
        e.ip_class = 'PUBLIC'
        e.url = url
      end
    end
    send_data cal.to_ical, filename: "calendar.ics", type: 'text/calendar', x_sendfile: true
  end
end
<file_sep># CRUD controller for Event resources (slug lookup) plus map and .ics
# endpoints.
class EventsController < ApplicationController
  load_resource :find_by => :slug
  # NOTE(review): :destroy is excluded from authorization here, which
  # means any visitor can delete an event. This looks unintentional —
  # confirm and, if so, remove :destroy from the :except list.
  load_and_authorize_resource :except => [:index, :previous, :ics, :map, :destroy]

  # Index: currently-running events, a short tail of finished events,
  # and paginated upcoming events.
  def index
    @current_events = Event.current.include_subevents
    @previous_events = Event.finished.include_subevents.limit(5)
    @previous_event_count = Event.finished.include_subevents.count
    @events = Event.upcoming.include_subevents.paginate(:page => params[:page])
  end

  # Archive of finished events grouped by month.
  def previous
    @previous_events = Event.previous_by_month
  end

  def new
  end

  def create
    if @event.save
      redirect_to event_path(@event), :notice => "Event successfully created"
    else
      render :new
    end
  end

  # Event detail page; builds Google Maps markers for every venue of
  # the event and its subevents.
  def show
    @hash = Gmaps4rails.build_markers(@event.venues) do |venue, marker|
      marker.lat venue.latitude
      marker.lng venue.longitude
      marker.infowindow render_to_string(:partial => "venues/infowindow", :layout => false, :locals => { :venue => venue })
      marker.json({ :id => venue.slug })
    end
  end

  def edit
  end

  def update
    if @event.update_attributes(event_params)
      redirect_to event_path(@event), :notice => "Event successfully updated"
    else
      render :edit
    end
  end

  # Single-event iCalendar download.
  def ics
    send_data @event.icalendar(event_url(@event)), filename: "#{@event.name}.ics", type: 'text/calendar', x_sendfile: true
  end

  # Layout-less map of the most recent event per venue (PostgreSQL
  # DISTINCT ON). Future events get a highlighted marker icon.
  def map
    @events = Event.select("DISTINCT ON (venue_id) *").where("venue_id IS NOT NULL").order("venue_id, end_at DESC").includes(:venue)
    @hash = Gmaps4rails.build_markers(@events) do |event, marker|
      if event.end_at.future?
        marker.picture({
          :url => "//mt.google.com/vt/icon?psize=30&font=fonts/arialuni_t.ttf&color=ff304C13&name=icons/spotlight/spotlight-waypoint-a.png&ax=43&ay=48&text=%E2%80%A2",
          :width => 22,
          :height => 40
        })
      end
      marker.lat event.venue.latitude
      marker.lng event.venue.longitude
      marker.infowindow render_to_string(:partial => "venues/infowindow_recent", :layout => false, :locals => { :event => event })
      marker.json({ :id => event.venue.slug })
    end
    render :map, :layout => false
  end

  def destroy
    @event.destroy
    redirect_to events_path, :notice => "Event successfully deleted"
  end

  private

  # Strong-parameters whitelist. The original listed :description twice;
  # the duplicate has been removed.
  def event_params
    params.require(:event).permit(:description, :devcon_role, :end_at, :logo, :cover_photo,
      :name, :parent_id, :slug, :start_at, :venue_id, :summary, :schedule, :rules,
      :registration, :event_type, :target_attendees, :actual_attendees,
      :remove_logo, :remove_cover_photo)
  end
end
<file_sep># == Schema Information
#
# Table name: events
#
# id :integer not null, primary key
# name :string(255)
# slug :string(255)
# description :text
# venue_id :integer
# start_at :datetime
# end_at :datetime
# parent_id :integer
# logo :string(255)
# devcon_role :string(255)
# created_at :datetime not null
# updated_at :datetime not null
# summary :text
# schedule :text
# rules :text
# registration :text
# published :boolean default(FALSE)
# event_type :string(255)
# target_attendees :integer
# actual_attendees :integer
# cover_photo :string(255)
#
# Event model: a DevCon event, optionally nested under a parent event
# (subevents) and held at a venue.
class Event < ActiveRecord::Base
  # Pulled in for truncate/strip_tags used by #google_calendar_url.
  include ActionView::Helpers
  belongs_to :venue
  # Subevents share this event's id as their parent_id, ordered by start.
  has_many :subevents, -> { order(:start_at) }, :class_name => "Event", :foreign_key => :parent_id
  belongs_to :parent, :class_name => "Event"
  has_many :event_partners
  has_many :entities, :through => :event_partners
  include SluggedResource
  include Icalendar

  mount_uploader :logo, ThumbnailUploader
  mount_uploader :cover_photo, CoverPhotoUploader

  # Time-window scopes relative to the database clock.
  scope :upcoming, -> { where("start_at > current_timestamp").order("start_at") }
  scope :current, -> { where("start_at <= current_timestamp and end_at >= current_timestamp").order("start_at") }
  scope :finished, -> { where("end_at < current_timestamp").order("end_at DESC") }
  # Top-level events only, with subevents eager-loaded.
  scope :include_subevents, -> { where(:parent_id => nil).includes(:subevents) }

  # All distinct venues across this event and its subevents (nils removed).
  def venues
    ([venue] + subevents.map { |e| e.venue }).compact.uniq
  end

  # Address of this event's venue, falling back to the first subevent
  # venue; nil when no venue exists anywhere in the tree.
  def effective_address
    if venue.nil?
      unless venues.empty?
        venues[0].address
      else
        nil
      end
    else
      venue.address
    end
  end

  # Finished top-level events grouped under "Month Year" labels.
  def self.previous_by_month
    finished.include_subevents.group_by do |event|
      event.start_at.strftime("%B %Y")
    end
  end

  # "Add to Google Calendar" template URL for this event. Description is
  # decoded, stripped of HTML, and truncated to fit the query string.
  def google_calendar_url
    start_time = self.start_at.utc.strftime("%Y%m%dT%H%M00Z")
    end_time = self.end_at.utc.strftime("%Y%m%dT%H%M00Z")
    values = {
      action: 'TEMPLATE',
      text: self.name,
      dates: "#{start_time}/#{end_time}",
      details: truncate(strip_tags(HTMLEntities.new.decode(self.description)), length: 200),
      location: self.effective_address,
      trp: true,
      sprop: 'website:http://devcon.ph'
    }
    'http://google.com/calendar/event?' + values.to_query
  end

  # Renders a single-event iCalendar string with a display alarm one day
  # before the event. event_url is the canonical URL for the VEVENT.
  def icalendar(event_url)
    event_temp = self
    cal = Calendar.new
    cal.event do |e|
      e.dtstart = event_temp.start_at.strftime("%Y%m%dT%H%M00")
      e.dtend = event_temp.end_at.strftime("%Y%m%dT%H%M00")
      e.summary = event_temp.name
      e.description = event_temp.summary
      e.ip_class = 'PRIVATE'
      e.url = event_url
      e.alarm do |a|
        a.action = 'DISPLAY'
        a.summary = 'Event notification'
        # -P1DT0H0M0S = trigger one day before dtstart.
        a.trigger = '-P1DT0H0M0S'
      end
    end
    cal.to_ical
  end
end
<file_sep># Migration: creates the presentations table — a talk given at an
# event by a speaker, with optional slide/video links.
class CreatePresentations < ActiveRecord::Migration
  def change
    create_table :presentations do |t|
      t.string :name
      t.string :slug
      t.references :event
      t.references :speaker
      t.string :slide_link
      t.string :video_link
      t.text :description

      t.timestamps
    end
    # Slugs are URL identifiers, so they must be unique; FK columns are
    # indexed for lookups.
    add_index :presentations, :slug, :unique => true
    add_index :presentations, :event_id
    add_index :presentations, :speaker_id
  end
end
<file_sep># == Schema Information
#
# Table name: tags
#
# id :integer not null, primary key
# name :string(255)
# description :text
# created_at :datetime not null
# updated_at :datetime not null
#
require 'spec_helper'

# Model spec for Tag: attribute surface and validations.
describe Tag, :type => :model do
  before do
    @tag = Fabricate(:tag)
  end

  subject { @tag }

  it { is_expected.to respond_to(:name) }
  it { is_expected.to respond_to(:description) }
  it { is_expected.to be_valid }
  # Name is required; description is optional.
  it { is_expected.to validate_presence_of(:name) }
  it { is_expected.not_to validate_presence_of(:description) }
end
<file_sep># Migration: creates the venues table. Latitude/longitude get 12
# decimal places for map-pin precision.
class CreateVenues < ActiveRecord::Migration
  def change
    create_table :venues do |t|
      t.string :name
      t.string :slug
      t.text :address
      t.decimal :latitude, :precision => 15, :scale => 12
      t.decimal :longitude, :precision => 15, :scale => 12
      t.text :description

      t.timestamps
    end
    # Slugs are URL identifiers, so they must be unique.
    add_index :venues, :slug, :unique => true
  end
end
<file_sep># Fabricator for Article fixtures; titles are sequenced so each
# fabricated article gets a distinct title.
Fabricator(:article) do
  title { sequence(:title) { |i| "This is a title #{i}" } }
  content 'This is the content'
  author { Fabricate(:author) }
end
<file_sep># Atom feed builder: renders the combined Article + Event feed prepared
# by StaticPagesController#feed.
atom_feed :language => 'en-US' do |feed|
  feed.title @title
  feed.updated @updated

  @entries.each do |item|
    # Entries without a timestamp cannot be rendered into the feed.
    next if item.updated_at.blank?
    if item.kind_of?(Article)
      feed.entry( item, :url => article_url(item) ) do |entry|
        entry.title "#{item.class.to_s} | #{item.title}"
        entry.summary item.summary
        # the strftime is needed to work with Google Reader.
        entry.updated(item.updated_at.strftime("%Y-%m-%dT%H:%M:%SZ"))
      end
    else
      # Non-Article entries are Events, which use #name instead of #title.
      feed.entry( item, :url => event_url(item) ) do |entry|
        entry.title "#{item.class.to_s} | #{item.name}"
        entry.summary item.summary
        # the strftime is needed to work with Google Reader.
        entry.updated(item.updated_at.strftime("%Y-%m-%dT%H:%M:%SZ"))
      end
    end
  end
end
<file_sep>require 'spec_helper'

# Controller spec for VenuesController CRUD; all requests run as an
# admin user.
describe VenuesController, :type => :controller do
  before do
    sign_in(Fabricate(:admin))
  end

  # Minimal attributes that satisfy Venue validations.
  let :valid_attributes do
    { :name => "<NAME>", :latitude => 0, :longitude => 0 }
  end

  describe "GET 'index'" do
    it "returns http success" do
      get 'index'
      expect(response).to be_success
    end
  end

  describe "GET 'new'" do
    it "returns http success" do
      get 'new'
      expect(response).to be_success
    end
  end

  describe "POST 'create'" do
    it "should allow creation of venues" do
      post 'create', :venue => valid_attributes
      expect(response).to redirect_to(Venue.last)
    end

    # A nil latitude fails validation; the form re-renders (200).
    it "should handle errors on creation of venues" do
      post 'create', :venue => valid_attributes.merge(:latitude => nil)
      expect(response).to be_success
    end
  end

  describe "update" do
    before do
      @venue = Fabricate(:venue)
    end

    describe "GET 'edit'" do
      it "returns http success" do
        get 'edit', :id => @venue.slug
        expect(response).to be_success
      end
    end

    describe "PUT 'update'" do
      it "should allow updating of venues" do
        put :update, :id => @venue.slug, :venue => valid_attributes
        expect(response).to redirect_to(Venue.last)
      end

      it "should handle errors on updating of venues" do
        put :update, :id => @venue.slug, :venue => valid_attributes.merge(:latitude => nil)
        expect(response).to be_success
      end
    end
  end
end
<file_sep>require 'spec_helper'

# Feature spec for OmniAuth (Facebook) sign-in flows using a mocked
# OmniAuth response.
feature 'Omniauth' do
  context "facebook login" do
    background do
      @user = Fabricate(:user)
      omniauth_mock('facebook', @user.email, @user.name)
    end

    scenario "sign in a user" do
      omniauth_signin
      expect(page).to have_content(I18n.t "devise.omniauth_callbacks.success", :kind => 'facebook')
    end

    # Existing user but no stored provider record: signing in should
    # still succeed and link the provider.
    scenario "sign in a user with provider that doesn't exist" do
      new_user = Fabricate(:user, :email => "<EMAIL>", :name => "sample")
      omniauth_mock('facebook', new_user.email, new_user.name)
      omniauth_signin
      expect(page).to have_content(I18n.t "devise.omniauth_callbacks.success", :kind => 'facebook')
    end

    # Unknown identity: registrations are closed, so the visitor is
    # shown the "not open yet" message instead of a new account.
    scenario "register a new user" do
      omniauth_mock('facebook', '', '')
      omniauth_signin
      expect(page).to have_content('Registrations are not open yet, but please check back soon')
    end
  end
end
<file_sep>#
# Table name: users
#
# id :integer not null, primary key
# email :string(255) default(""), not null
# encrypted_password :string(255) default(""), not null
# reset_password_token :string(255)
# reset_password_sent_at :datetime
# remember_created_at :datetime
# sign_in_count :integer default(0)
# current_sign_in_at :datetime
# last_sign_in_at :datetime
# current_sign_in_ip :string(255)
# last_sign_in_ip :string(255)
# created_at :datetime not null
# updated_at :datetime not null
# name :string(255)
# roles_mask :integer
#
require 'spec_helper'

# Model spec for User: attribute surface, Devise/password validations,
# email format rules, and article associations.
describe User, :type => :model do
  before do
    @user = Fabricate(:user)
  end

  subject { @user }

  it { is_expected.to respond_to(:email) }
  it { is_expected.to respond_to(:password) }
  it { is_expected.to respond_to(:password_confirmation) }
  it { is_expected.to respond_to(:encrypted_password) }
  it { is_expected.to respond_to(:reset_password_token) }
  it { is_expected.to respond_to(:reset_password_sent_at) }
  it { is_expected.to respond_to(:remember_me) }
  it { is_expected.to respond_to(:remember_created_at) }
  it { is_expected.to respond_to(:sign_in_count) }
  it { is_expected.to respond_to(:current_sign_in_at) }
  it { is_expected.to respond_to(:last_sign_in_at) }
  it { is_expected.to respond_to(:current_sign_in_ip) }
  it { is_expected.to respond_to(:last_sign_in_ip) }
  it { is_expected.to respond_to(:name) }
  it { is_expected.to respond_to(:articles) }
  it { is_expected.to respond_to(:roles) }
  it { is_expected.to be_valid }
  it { is_expected.to validate_presence_of(:email) }
  # Email format: reject malformed addresses, accept common valid forms.
  # (Literal addresses below carry <EMAIL> anonymisation placeholders
  # from the source dump.)
  it { is_expected.not_to allow_value('user<EMAIL>').for(:email) }
  it { is_expected.not_to allow_value('user_at_foo.org').for(:email) }
  it { is_expected.not_to allow_value('example.<EMAIL>.').for(:email) }
  it { is_expected.to allow_value('<EMAIL>').for(:email) }
  it { is_expected.to allow_value('<EMAIL>').for(:email) }
  it { is_expected.to allow_value('<EMAIL>').for(:email) }
  it { is_expected.to allow_value('<EMAIL>').for(:email) }
  it { is_expected.to validate_uniqueness_of(:email).case_insensitive }

  describe 'when password is not present' do
    before { @user.password = @user.password_confirmation = ' ' }
    it { is_expected.not_to be_valid }
  end

  describe 'when password does not match confirmation' do
    before { @user.password_confirmation = '<PASSWORD>' }
    it { is_expected.not_to be_valid }
  end

  describe 'when a password is too short' do
    before { @user.password = @user.password_confirmation = 'a' * 5 }
    it { is_expected.to be_invalid }
  end

  describe 'article associations' do
    before { @user.save }
    let!(:older_articles) do
      Fabricate(:article, :author => @user, :created_at => 1.day.ago)
    end
    let!(:newer_articles) do
      Fabricate(:article, :author => @user, :created_at => 1.hour.ago)
    end

    # Articles are ordered newest-first.
    it 'should have the right articles in the right order' do
      expect(@user.articles).to eq([newer_articles, older_articles])
    end

    # Destroying a user cascades to their articles.
    it 'should destroy associated articles' do
      articles = @user.articles
      @user.destroy
      articles.each do |article|
        expect(Article.find_by_id(article.id)).to be_nil
      end
    end
  end
end
<file_sep># Rake task: seeds the database with five fake users, each authoring
# five fake articles (development convenience only).
# NOTE(review): the <EMAIL>/<PASSWORD> tokens below are anonymisation
# placeholders from the source dump, not valid Ruby — restore the
# original Faker-generated email and password literals before running.
namespace :db do
  desc 'Fill database with sample data'
  task :populate => :environment do
    5.times do |n|
      name = Faker::Name.name
      email = "<EMAIL>"
      password = '<PASSWORD>'
      User.create!(:name => name,
                   :email => email,
                   :password => <PASSWORD>,
                   :password_confirmation => <PASSWORD>)
    end
    users = User.all
    # Five rounds of one article per user.
    5.times do
      users.each do |user|
        title = Faker::Company.catch_phrase
        content = Faker::Lorem.sentence(20)
        user.articles.create!(:title => title, :content => content)
      end
    end
  end
end<file_sep># == Schema Information
#
# Table name: events
#
# id :integer not null, primary key
# name :string(255)
# slug :string(255)
# description :text
# venue_id :integer
# start_at :datetime
# end_at :datetime
# parent_id :integer
# logo :string(255)
# devcon_role :string(255)
# created_at :datetime not null
# updated_at :datetime not null
# summary :text
# schedule :text
# rules :text
# registration :text
# published :boolean default(FALSE)
# event_type :string(255)
# target_attendees :integer
# actual_attendees :integer
# cover_photo :string(255)
#
require 'spec_helper'

# Model spec for Event: venue aggregation across subevents and
# smoke tests for the calendar helpers.
describe Event, :type => :model do
  before(:each) do
    @parent_event = Fabricate(:event)
    @event = Fabricate(:event, parent: @parent_event)
  end

  describe "#venues" do
    it "should return venues" do
      venues = @event.venues
      expect(venues).not_to eq([])
    end

    it "should return venue of subevents as well" do
      venues = @parent_event.venues
      expect(venues).to include(@event.venue)
    end

    # A venue shared by multiple subevents must appear only once.
    it "should return only one instance of the venue" do
      @venue = Fabricate(:venue)
      Fabricate(:event, venue: @venue, parent: @parent_event)
      @event.venue = @venue
      @event.save
      expect(@parent_event.venues.count).to eq 2
    end
  end

  # Smoke tests: the URL/iCal builders must not raise.
  describe "#google_calendar_url" do
    it "should not throw errors" do
      @event.google_calendar_url
    end
  end

  describe "#icalendar" do
    it "should not throw errors" do
      @event.icalendar("http://example.com/events/test")
    end
  end
end
<file_sep># Migration: adds event categorisation and attendee-count columns.
class AddOtherDetailsToEvent < ActiveRecord::Migration
  def change
    add_column :events, :event_type, :string
    add_column :events, :target_attendees, :integer
    add_column :events, :actual_attendees, :integer
  end
end
<file_sep># Base controller: global error pages, CanCan access-denied handling,
# and automatic strong-parameters filtering.
class ApplicationController < ActionController::Base
  protect_from_forgery

  # In production-like environments, rescue to friendly error pages
  # instead of raw stack traces.
  unless Rails.application.config.consider_all_requests_local
    rescue_from Exception, with: :render_500
    rescue_from ActionController::RoutingError, with: :render_404
    rescue_from ActionController::UnknownController, with: :render_404
    rescue_from ActiveRecord::RecordNotFound, with: :render_404
  end

  # Unauthorized actions bounce to the home page with a flash error.
  rescue_from CanCan::AccessDenied do |exception|
    flash[:error] = 'Access denied'
    redirect_to root_path
  end

  # If the controller defines <resource>_params (strong parameters),
  # filter the incoming params hash through it before actions run.
  before_filter do
    resource = controller_name.singularize.to_sym
    method = "#{resource}_params"
    params[resource] &&= send(method) if respond_to?(method, true)
  end

  private

  # Renders the 404 page, remembering the missing path for the view.
  def render_404(exception)
    @not_found_path = exception.message
    render_error_page 404
  end

  # Logs the exception and backtrace, then renders the 500 page.
  def render_500(exception)
    @error = exception
    logger.error @error.message
    @error.backtrace.each { |e| logger.error e }
    render_error_page 500
  end

  # HTML requests get the styled error template; other formats get an
  # empty response with the right status code.
  def render_error_page(error)
    respond_to do |format|
      format.html { render template: "static_pages/error_#{error}", layout: 'layouts/application', status: error }
      format.all { render nothing: true, status: error}
    end
  end
end
<file_sep># Migration: creates the entities table (partner schools, companies,
# and communities). 'type' is a plain category column — STI is disabled
# in the Entity model.
class CreateEntities < ActiveRecord::Migration
  def change
    create_table :entities do |t|
      t.string :name
      t.string :slug
      t.text :description
      t.text :blurb
      t.string :logo
      t.string :type
      t.string :level

      t.timestamps
    end
    # Slugs are URL identifiers; type/level are common filter columns.
    add_index :entities, :slug, :unique => true
    add_index :entities, :type
    add_index :entities, :level
  end
end
<file_sep># Set the host name for URL creation
# SitemapGenerator configuration: static pages plus every article and
# event, with higher priority on the listing pages.
SitemapGenerator::Sitemap.default_host = "http://devcon.ph"

SitemapGenerator::Sitemap.create do
  # Static pages.
  add faq_path
  add contact_path
  add about_path
  add fact_sheet_path
  add officers_path
  add chapters_path
  add events_main_path
  add events_request_path
  add notifications_path
  add get_involved_path
  add volunteer_path
  add sponsor_path

  # Articles: listing page plus each article with its last-modified date.
  add articles_path, :priority => 0.7
  Article.all.each do |article|
    add article_path(article), :lastmod => article.updated_at
  end

  # Events: listings refresh daily; individual events carry lastmod.
  add events_path, :priority => 0.7, :changefreq => "daily"
  add previous_events_path, :priority => 0.7, :changefreq => "daily"
  Event.all.each do |event|
    add event_path(event), :lastmod => event.updated_at
  end
end
<file_sep>require 'yaml'
begin
env_yaml = YAML.load_file(File.dirname(__FILE__) + '/../config/env.yml')
if env_hash = env_yaml[ENV['RACK_ENV'] || ENV['RAILS_ENV'] || 'development']
env_hash.each_pair do |k,v|
ENV[k] = v.to_s
end
end
rescue StandardError => e
end
<file_sep># Migration: creates the events table. Events may nest via parent_id
# and belong to a venue.
class CreateEvents < ActiveRecord::Migration
  def change
    create_table :events do |t|
      t.string :name
      t.string :slug
      t.text :description
      t.references :venue
      t.datetime :start_at
      t.datetime :end_at
      # NOTE: the original migration declared `t.text :description` a
      # second time here; the duplicate column definition was removed.
      t.integer :parent_id
      t.string :logo
      t.string :devcon_role

      t.timestamps
    end
    # Slugs are URL identifiers; venue/parent FKs are indexed.
    add_index :events, :slug, :unique => true
    add_index :events, :venue_id
    add_index :events, :parent_id
  end
end
<file_sep>
* [](https://travis-ci.org/devcon-ph/devcon)
* [](https://codeclimate.com/github/devcon-ph/devcon)
* [](https://gemnasium.com/devcon-ph/devcon)
* [](https://coveralls.io/r/devcon-ph/devcon)
# DevCon
This is a work-in-progress revamp for the website of [Developers Connect Philippines](http://devcon.ph)
## Developer Notes
This is a Ruby 2.1 + Rails 4.2 + PostgreSQL app so running a local copy should look something like:
git clone git://github.com/devcon-ph/devcon.git
cd devcon
bundle install
cp config/database.yml.example config/database.yml
vim config/database.yml
rake db:create
rake db:migrate
bundle exec guard
Guard will run both the Unicorn server at port 3000 and the specs.
This app uses [better\_errors](https://github.com/charliesome/better_errors) for debugging. Don't forget to set the `TRUSTED_IP` environment variable if you're not testing on your local machine.
This app also uses [rails\_panel](https://github.com/dejan/rails_panel) so you don't need to look at the development log while developing, and [rack-livereload](https://github.com/johnbintz/rack-livereload) so you don't need to reload when you make a change to the views or assets.
## Creating a user
To create an admin user, open the console (`rails c`) and enter the following:
User.create email: "test<EMAIL>", password: "<PASSWORD>", password_confirmation: "<PASSWORD>", roles: ["admin", "moderator", "author"]
## Plans
All future enhancements are logged at https://github.com/devcon-ph/devcon/issues. Anyone may fork this project and provide pull requests related to those enhancements.
## Deployment
This app has only one main branch, the `master` branch.
When deploying a new build, test it first in staging (http://beta.devcon.ph) using `bundle exec cap staging deploy`.
Once verified, you can now deploy it to the production site (http://devcon.ph) using `bundle exec cap production deploy`.
As of this writing, only [Bry](https://github.com/bryanbibat/) and [Terence](https://github.com/terenceponce) have access rights to deploy to the server.
<file_sep>require 'spec_helper'

# Feature spec for tag pages, exercised per role: anonymous visitors,
# regular users, authors (create/edit), and admins (destroy).
describe "Tags pages", :type => :feature do
  subject { page }

  describe 'for anonymous users' do
    describe 'in the index page' do
      before { visit tags_path }
      it { is_expected.to have_page_title 'Tags' }
      it { is_expected.to have_page_heading 'Tags' }
    end
  end

  describe 'for signed-in users' do
    # Regular users can browse but see no management links and are
    # denied access to new/edit.
    describe 'as a regular user' do
      before do
        @user = Fabricate(:user)
        visit new_user_session_path
        capybara_signin(@user)
      end

      describe 'in the index page' do
        before { visit tags_path }
        it { is_expected.to have_page_title 'Tags' }
        it { is_expected.to have_page_heading 'Tags' }
        it { is_expected.not_to have_link 'New Tag' }
        it { is_expected.not_to have_link 'Edit' }
        it { is_expected.not_to have_link 'Destroy' }
      end

      describe 'in the show page' do
        before do
          @tag = Fabricate(:tag)
          visit tag_path(@tag)
        end
        it { is_expected.to have_page_title @tag.name }
        it { is_expected.to have_page_heading @tag.name }
        it { is_expected.to have_link 'Back to tags' }
        it { is_expected.not_to have_link 'Edit' }
        it { is_expected.not_to have_link 'Destroy' }
      end

      describe 'in the new page' do
        before { visit new_tag_path }
        it { is_expected.to have_error_message 'Access denied' }
        it { is_expected.to have_page_title '' }
      end

      describe 'in the edit page' do
        before do
          @tag = Fabricate(:tag)
          visit edit_tag_path(@tag)
        end
        it { is_expected.to have_error_message 'Access denied' }
        it { is_expected.to have_page_title '' }
      end
    end

    # Authors can create and edit tags, but not destroy them.
    describe 'as an author' do
      before do
        @author = Fabricate(:author)
        visit new_user_session_path
        capybara_signin(@author)
      end

      describe 'in the index page' do
        before { visit tags_path }
        it { is_expected.to have_page_title 'Tags' }
        it { is_expected.to have_page_heading 'Tags' }
        it { is_expected.to have_link 'New Tag' }

        describe 'with a list of tags' do
          before do
            @tag = Fabricate(:tag)
            visit tags_path
          end
          it { is_expected.to have_link @tag.name }
          it { is_expected.to have_link 'Edit' }
          it { is_expected.not_to have_link 'Destroy' }
        end
      end

      describe 'in the show page' do
        before do
          @tag = Fabricate(:tag)
          visit tag_path(@tag)
        end
        it { is_expected.to have_page_title @tag.name }
        it { is_expected.to have_page_heading @tag.name }
        it { is_expected.to have_link 'Back to tags' }
        it { is_expected.to have_link 'Edit' }
        it { is_expected.not_to have_link 'Destroy' }
      end

      describe 'in the new page' do
        before { visit new_tag_path }
        it { is_expected.to have_page_title 'New tag' }
        it { is_expected.to have_page_heading 'New tag' }
        it { is_expected.to have_link 'Back to tags' }

        # Submitting the blank form must not create a tag.
        describe 'with invalid information' do
          it 'should not create a tag' do
            expect { click_button 'Create' }.to_not change(Tag, :count)
          end

          describe 'on error messages' do
            before { click_button 'Create' }
            it { is_expected.to have_error_message 'error' }
          end
        end

        describe 'with valid information' do
          before do
            fill_in 'Name', :with => 'Foobar'
            fill_in 'Description', :with => 'Lolol'
          end

          it 'should create a tag' do
            expect { click_button 'Create' }.to change(Tag, :count)
          end

          describe 'on success messages' do
            before { click_button 'Create' }
            it { is_expected.to have_success_message 'created' }
          end
        end
      end

      describe 'in the edit page' do
        before do
          @tag = Fabricate(:tag)
          visit edit_tag_path(@tag)
        end
        it { is_expected.to have_page_title 'Edit tag' }
        it { is_expected.to have_page_heading 'Edit tag' }
        it { is_expected.to have_link 'Back to tags' }

        # Blanking the name fails validation and shows errors.
        describe 'with invalid information' do
          before do
            fill_in 'Name', :with => ' '
            click_button 'Update'
          end
          it { is_expected.to have_error_message 'error' }
        end

        describe 'with valid information' do
          before do
            fill_in 'Name', :with => 'Barfoo'
            click_button 'Update'
          end
          it { is_expected.to have_success_message 'updated' }
        end
      end
    end

    # Admins may destroy tags from both the index and show pages.
    describe 'as an admin' do
      before do
        @user = Fabricate(:admin)
        visit new_user_session_path
        capybara_signin(@user)
      end

      describe 'in the index page' do
        before do
          @tag = Fabricate(:tag)
          visit tags_path
          click_link 'Destroy'
        end

        describe 'on success delete' do
          it { is_expected.to have_success_message 'destroyed' }
        end
      end

      describe 'in the show page' do
        before do
          @tag = Fabricate(:tag)
          visit tag_path(@tag)
          click_link 'Destroy'
        end

        describe 'on success delete' do
          it { is_expected.to have_success_message 'destroyed' }
        end
      end
    end
  end
end
<file_sep>require 'spec_helper'

# Feature spec for sign-in/sign-out flows and a basic authorization
# check against another user's settings page.
describe 'Authentication', :type => :feature do
  subject { page }

  describe 'signin page' do
    before { visit new_user_session_path }
    it { is_expected.to have_page_title 'Sign in' }
    it { is_expected.to have_page_heading 'Sign in' }
  end

  describe 'signin' do
    before { visit new_user_session_path }
    it { is_expected.to have_page_title 'Sign in' }
    it { is_expected.to have_page_heading 'Sign in' }

    describe 'with invalid information' do
      before { click_button 'Sign in' }
      it { is_expected.to have_error_message 'Invalid' }

      # Flash errors must not persist across page loads.
      describe 'after visiting another page' do
        before { click_link 'Home' }
        it { is_expected.not_to have_selector 'div.alert.alert-error' }
      end
    end

    describe 'with valid information' do
      before do
        @user = Fabricate(:user)
        capybara_signin(@user)
      end
      it { is_expected.to have_page_title '' }
      it { is_expected.to have_link 'Profile', :href => user_path(@user) }
      it { is_expected.to have_link 'Settings', :href => settings_path }
      it { is_expected.to have_link 'Sign out', :href => logout_path }
      it { is_expected.not_to have_link 'Sign in', :href => login_path }

      describe 'followed by signout' do
        before { click_link 'Sign out' }
        it { is_expected.to have_link 'Sign in' }
      end
    end
  end

  describe 'authorization' do
    # A signed-in user must not reach another user's settings page.
    describe 'as wrong user' do
      before do
        @user = Fabricate(:user)
        @wrong_user = Fabricate(:user, :email => '<EMAIL>')
        visit new_user_session_path
        capybara_signin(@user)
      end

      describe 'when visiting Settings page' do
        before { visit edit_user_registration_path(@wrong_user) }
        it { is_expected.not_to have_page_title 'Account settings' }
      end
    end
  end
end
<file_sep>require 'spec_helper'
describe EventPartnersController, :type => :controller do
before do
sign_in(Fabricate(:admin))
@event = Fabricate(:event)
@entity = Fabricate(:entity)
end
describe "GET 'new'" do
it "returns http success" do
get 'new', :event_id => @event.slug
expect(response).to be_success
end
end
describe "POST 'create'" do
it "should allow creation of entities" do
post 'create', :event_id => @event.slug, :event_partner => { :entity_id => @entity.id, :role => "test" }
expect(response).to redirect_to(@event)
end
it "should handle errors on creation of entities" do
post 'create', :event_id => @event.slug, :event_partner => { :entity_id => nil }
expect(response).to be_success
end
end
describe "update" do
before do
@event_partner = Fabricate(:event_partner, :entity => @entity, :event => @event)
end
describe "GET 'edit'" do
it "returns http success" do
get 'edit', :event_id => @event.slug, :id => @event_partner.id
expect(response).to be_success
end
end
describe "PUT 'update'" do
it "should allow updating of entities" do
put :update, :event_id => @event.slug, :id => @event_partner.id, :event_partner => { :entity_id => @entity.id, :role => "test" }
expect(response).to redirect_to(@event)
end
it "should handle errors on updating of entitys" do
put :update, :event_id => @event.slug, :id => @event_partner.id, :event_partner => { :entity_id => nil }
expect(response).to be_success
end
end
end
end
<file_sep># == Schema Information
#
# Table name: entities
#
# id :integer not null, primary key
# name :string(255)
# slug :string(255)
# description :text
# blurb :text
# logo :string(255)
# type :string(255)
# level :string(255)
# created_at :datetime not null
# updated_at :datetime not null
# formal_name :string(255)
# short_name :string(255)
# display :boolean default(FALSE)
#
require 'spec_helper'

# Model spec for Entity: validations, associations, and the disabled
# single-table-inheritance column.
describe Entity, :type => :model do
  before { @entity = Fabricate.build(:entity) }

  subject { @entity }

  it { is_expected.to validate_presence_of(:type) }
  it { is_expected.to validate_inclusion_of(:type).in_array(["school", "company", "community"]) }
  it { is_expected.to validate_inclusion_of(:level).in_array(["cool", "awesome"]) }
  it { is_expected.to have_many(:event_partners) }
  it { is_expected.to have_many(:events).through(:event_partners) }

  describe "#inheritance_column" do
    # STI must be disabled so 'type' can hold the entity category.
    it "must disable STI to use 'type' as a column name" do
      expect(Entity.inheritance_column).to eq("disabled")
    end
  end
end
<file_sep># == Schema Information
#
# Table name: entities
#
# id :integer not null, primary key
# name :string(255)
# slug :string(255)
# description :text
# blurb :text
# logo :string(255)
# type :string(255)
# level :string(255)
# created_at :datetime not null
# updated_at :datetime not null
# formal_name :string(255)
# short_name :string(255)
# display :boolean default(FALSE)
#
# Entity: a partner organisation (school, company, or community),
# optionally carrying a sponsorship level.
class Entity < ActiveRecord::Base
  include SluggedResource
  has_many :event_partners
  has_many :events, :through => :event_partners

  # Allowed organisation categories and sponsorship levels.
  TYPES = %w{ school company community }
  LEVELS = %w{ cool awesome }

  validates_presence_of :type

  # Disable single-table inheritance so 'type' can be a plain column.
  def self.inheritance_column
    "disabled"
  end

  validates_inclusion_of :type, :in => TYPES
  # Level is optional, but when present it must be a known value.
  validates_inclusion_of :level, :in => LEVELS, :allow_blank => true

  mount_uploader :logo, LogoUploader
end
<file_sep># Fabricator for Entity fixtures; uses a valid type ("school") and
# level ("cool") so built records pass validations.
Fabricator(:entity) do
  name "MyString"
  slug "MyString"
  description "MyText"
  blurb "MyText"
  logo "MyString"
  type "school"
  level "cool"
end
<file_sep># CRUD controller for Tag resources; CanCan loads and authorizes the
# resource for every action.
class TagsController < ApplicationController
  before_filter :paginate_tags, :only => :index

  load_and_authorize_resource

  def index
  end

  def show
  end

  def new
  end

  def edit
  end

  # Persists a new tag; on validation failure the form is re-rendered.
  def create
    unless @tag.save
      render 'new'
      return
    end
    flash[:success] = 'Successfully created a tag!'
    redirect_to @tag
  end

  # Applies the whitelisted attributes to an existing tag.
  def update
    if @tag.update_attributes(tag_params)
      flash[:success] = 'Successfully updated tag!'
      redirect_to @tag
    else
      render 'edit'
    end
  end

  # Removes the tag and returns to the index.
  def destroy
    @tag.destroy
    flash[:success] = 'Successfully destroyed tag!'
    redirect_to tags_path
  end

  private

  # Paginated list for the index action.
  def paginate_tags
    @tags = Tag.paginate(:page => params[:page])
  end

  # Strong-parameters whitelist for tag attributes.
  def tag_params
    params.require(:tag).permit(:description, :name)
  end
end
<file_sep>require 'spec_helper'

# Controller spec for StaticPagesController: home-page event selection
# logic, the Atom/RSS feed, and the iCalendar download.
describe StaticPagesController, :type => :controller do
  describe "GET 'index'" do
    it "returns http success" do
      get 'home'
      expect(response).to be_success
    end

    # No events at all: the placeholder heading is used.
    it "checks for events" do
      get 'home'
      expect(assigns(:events_type)).to eq("No Events Yet")
    end

    # Only finished events exist: fall back to "Previous Events".
    it "shows previous events" do
      prev = Fabricate(:event, :start_at => 5.hours.ago, :end_at => 4.hours.ago)
      get 'home'
      expect(assigns(:events_type)).to eq("Previous Events")
      expect(assigns(:events)).to eq([prev])
    end

    # Upcoming events take precedence over finished ones.
    it "shows upcoming events" do
      Fabricate(:event, :start_at => 5.hours.ago, :end_at => 4.hours.ago)
      upcoming = Fabricate(:event, :start_at => 4.hours.from_now, :end_at => 5.hours.from_now)
      get 'home'
      expect(assigns(:events_type)).to eq("Upcoming Events")
      expect(assigns(:events)).to eq([upcoming])
    end
  end

  describe "GET 'feed'" do
    it "returns http success" do
      get 'feed', :format => :atom
      expect(response).to be_success
    end

    # .rss permanently redirects to the .atom URL.
    it "returns http redirect for rss" do
      get 'feed', :format => :rss
      expect(response).to be_redirect
    end

    it "is successful when there are events" do
      Fabricate(:event)
      get 'feed', :format => :atom
      expect(response).to be_success
    end

    it "is successful when there are articles" do
      Fabricate(:article)
      get 'feed', :format => :atom
      expect(response).to be_success
    end
  end

  describe "GET 'calendar'" do
    it "returns http success" do
      get 'calendar'
      expect(response).to be_success
    end

    it "is successful when there are events" do
      Fabricate(:event)
      get 'calendar'
      expect(response).to be_success
    end
  end
end
<file_sep>require 'spec_helper'

# Feature spec for user profile pages and the (currently disabled)
# sign-up flow. The commented-out examples below date from when open
# registration existed; they are kept for when sign-ups reopen.
describe 'User', :type => :feature do
  subject { page }

  describe 'profile page' do
    before do
      @user = Fabricate(:user)
      visit user_path(@user)
    end

    # The heading shows the name when present, falling back to email.
    describe 'when a user has a name' do
      it { is_expected.to have_page_heading @user.name }
      it { is_expected.not_to have_page_heading @user.email }
    end

    describe 'when a user does not have a name' do
      before do
        @user = Fabricate(:user, :name => ' ')
        visit user_path(@user)
      end
      it { is_expected.to have_page_heading @user.email }
    end

    # Author profiles list their articles and article count.
    describe 'as an author' do
      before do
        @author = Fabricate(:author)
        @article = Fabricate(:article, :author => @author)
        visit user_path(@author)
      end
      it { is_expected.to have_link @article.title }
      it { is_expected.to have_content @author.articles.count }
    end
  end

  #describe 'signup page' do
    #before { visit new_user_session_path }
    #it { should_not have_link 'Sign up' }
  #end

  # Registration is closed: the sign-up route redirects home.
  describe 'signing up' do
    before { visit new_user_registration_path }

    it 'should redirect to root' do
      expect(current_path).to eq(root_path)
    end

    #describe 'with invalid information' do
      #it 'should not create a user' do
        #expect { click_button 'Sign up' }.not_to change(User, :count)
      #end
    #end

    #describe 'with valid information' do
      #before do
        #fill_in 'Email', :with => '<EMAIL>'
        #fill_in 'Password', :with => '<PASSWORD>'
        #fill_in 'Password confirmation', :with => '<PASSWORD>'
      #end

      #it 'should create a user' do
        #expect { click_button 'Sign up' }.to change(User, :count).by(1)
      #end

      #describe 'after saving the user' do
        #before { click_button 'Sign up' }
        #it { should have_notice_message 'Welcome!' }
        #it { should have_link 'Profile' }
        #it { should have_link 'Settings' }
        #it { should have_link 'Sign out' }
        #it { should_not have_link 'Sign up' }
        #it { should_not have_link 'Sign in' }
      #end
    #end
  end
end
<file_sep>class CreateEventPartners < ActiveRecord::Migration
def change
create_table :event_partners do |t|
t.references :event
t.references :entity
t.string :role
t.text :custom_blurb
t.timestamps
end
add_index :event_partners, :event_id
add_index :event_partners, :entity_id
end
end
<file_sep># == Schema Information
#
# Table name: articles
#
# id :integer not null, primary key
# title :string(255)
# content :text
# author_id :integer
# created_at :datetime not null
# updated_at :datetime not null
# slug :string(255)
# summary :text
# thumbnail :string(255)
# published :boolean default(FALSE)
# published_at :datetime
#
require 'spec_helper'
# Model specs for Article: attribute surface, associations, and validations.
describe Article, :type => :model do
  before do
    @author = Fabricate(:author)
    @article = @author.articles.build(:title => 'Hello World', :content => 'Lorem Ipsum')
  end

  subject { @article }

  it { is_expected.to respond_to(:title) }
  it { is_expected.to respond_to(:content) }
  it { is_expected.to respond_to(:author_id) }

  describe '#author' do
    subject { super().author }
    it { should == @author }
  end

  it { is_expected.to respond_to(:comments) }
  it { is_expected.to respond_to(:categories) }
  it { is_expected.to respond_to(:tags) }

  it { is_expected.to be_valid }
  it { is_expected.to validate_presence_of(:author_id) }
  it { is_expected.to validate_presence_of(:content) }

  # A whitespace-only title must fail validation.
  describe 'with blank title' do
    before { @article.title = ' ' }
    it { is_expected.not_to be_valid }
  end
end
<file_sep>require 'spec_helper'
# Model specs for OAuth Authentication records and their provider/uid lookup.
describe Authentication, :type => :model do
  before do
    @provider = Fabricate(:authentication)
  end

  subject { @provider }

  it { is_expected.to be_valid }
  it { is_expected.to respond_to(:provider) }
  it { is_expected.to respond_to(:uid) }
  it { is_expected.to belong_to(:user) }

  describe "#find_by_provider_and_uid" do
    context "user has already registered with OAuth" do
      before do
        @user = Fabricate(:user)
        @authentication = Fabricate(:authentication, user: @user)
        @provider = @user.authentications.first.provider
        @uid = @user.authentications.first.uid
      end

      it "should return a user when provided the proper arguments" do
        expect(Authentication.find_by_provider_and_uid(@provider, @uid)).to eq(@authentication)
      end

      it "should return an authentication record that belongs to the proper user" do
        authentication = Authentication.find_by_provider_and_uid(@provider, @uid)
        expect(authentication.user).to eq(@user)
      end
    end

    context "with invalid provider and uid" do
      it "should not return anything" do
        expect(Authentication.find_by_provider_and_uid("foo", "bar")).to eq(nil)
      end
    end
  end
end
<file_sep>require 'carrierwave/test/matchers'
# Specs for the CKEditor picture uploader: thumbnail dimensions, file
# permissions, and stored format. Uses CarrierWave's test matchers.
describe CkeditorPictureUploader do
  include CarrierWave::Test::Matchers

  before do
    @picture = Ckeditor::Picture.new
    # Processing is disabled globally in tests; enable it for this uploader.
    CkeditorPictureUploader.enable_processing = true
    @uploader = CkeditorPictureUploader.new(@picture, :data)
    File.open(File.join(Rails.root, "app/assets/images/logo.png")) do |f|
      @uploader.store!(f)
    end
  end

  after do
    CkeditorPictureUploader.enable_processing = false
    @uploader.remove!
  end

  context 'the thumb version' do
    # The description previously claimed "exactly 64 by 64 pixels" while the
    # assertion checked 118x100; the description now matches the assertion.
    it "should scale down a landscape image to exactly 118 by 100 pixels" do
      expect(@uploader.thumb).to have_dimensions(118, 100)
    end
  end

  it "should make the image readable only to all and not executable" do
    expect(@uploader).to have_permissions(0644)
  end

  it "should be the correct format" do
    expect(@uploader).to be_format('png')
  end
end
<file_sep>class VenuesController < ApplicationController
load_resource :find_by => :slug
load_and_authorize_resource :except => [:show]
def index
@venues = Venue.order(:name)
end
def new
end
def create
if @venue.save
redirect_to venue_path(@venue), :notice => "Venue successfully created"
else
render :new
end
end
def edit
end
def update
if @venue.update_attributes(venue_params)
redirect_to venue_path(@venue), :notice => "Venue successfully updated"
else
render :edit
end
end
def show
end
private
def venue_params
params.require(:venue).permit(:address, :description, :latitude, :longitude, :name, :slug, :directions)
end
end
<file_sep>require 'spec_helper'
# Feature specs for public (anonymous) event pages: show and map views.
describe 'Events pages', :type => :feature do
  subject { page }

  describe 'for anonymous users' do
    describe 'in the show page' do
      before do
        @event = Fabricate(:event)
        @event.save
        visit event_path(@event)
      end

      it { is_expected.to have_page_title @event.name }
      # Anonymous visitors must not see admin links.
      it { is_expected.not_to have_link 'Edit' }
    end

    describe 'in the map page' do
      before do
        @event = Fabricate(:event)
        @event.save
        visit map_events_path(@event)
      end

      it { is_expected.to have_page_title "Events Map" }
    end
  end
end
<file_sep>Rails.application.routes.draw do
mount Ckeditor::Engine => '/ckeditor'
devise_for :users, :controllers => { :registrations => "registrations", omniauth_callbacks: "omniauth_callbacks" }
resources :users, :only => [:show]
devise_scope :user do
get 'login', :to => 'devise/sessions#new'
delete 'logout', :to => 'devise/sessions#destroy'
get 'register', :to => 'registrations#new'
get 'settings', :to => 'devise/registrations#edit'
end
resources :articles do
end
resources :events do
collection do
get :previous
get :map
end
member do
get :ics
end
resources :event_partners, :except => [:index, :show]
end
resources :partners, :only => [:index]
resources :venues
resources :entities
resources :categories
resources :tags
get '/about', :to => 'static_pages#about', :as => 'about'
get '/about/fact-sheet', :to => 'static_pages#fact_sheet', :as => 'fact_sheet'
get '/about/officers', :to => 'static_pages#officers', :as => 'officers'
get '/about/regional-chapters', :to => 'static_pages#chapters', :as => 'chapters'
get '/devcon-events', :to => 'static_pages#events', :as => 'events_main'
get '/devcon-events/request', :to => 'static_pages#request_event', :as => 'events_request'
get '/keep-in-touch', :to => 'static_pages#keep_in_touch', :as => 'notifications'
get '/contact', :to => 'static_pages#contact'
get '/get-involved', :to => 'static_pages#get_involved', :as => 'get_involved'
get '/get-involved/volunteer', :to => 'static_pages#volunteer', :as => 'volunteer'
get '/get-involved/sponsor', :to => 'static_pages#sponsor', :as => 'sponsor'
get '/faq', :to => 'static_pages#faq', :as => 'faq'
get '/feed' => 'static_pages#feed',
:as => :feed,
:defaults => { :format => 'atom' }
get '/calendar.ics' => 'static_pages#calendar',
:as => :calendar
root :to => 'static_pages#home'
unless Rails.application.config.consider_all_requests_local
get '*not_found', to: 'static_pages#error_404'
end
end
# == Route Map
#
# Prefix Verb URI Pattern Controller#Action
# ckeditor /ckeditor Ckeditor::Engine
# new_user_session GET /users/sign_in(.:format) devise/sessions#new
# user_session POST /users/sign_in(.:format) devise/sessions#create
# destroy_user_session DELETE /users/sign_out(.:format) devise/sessions#destroy
# user_omniauth_authorize GET|POST /users/auth/:provider(.:format) omniauth_callbacks#passthru {:provider=>/facebook/}
# user_omniauth_callback GET|POST /users/auth/:action/callback(.:format) omniauth_callbacks#(?-mix:facebook)
# user_password POST /users/password(.:format) devise/passwords#create
# new_user_password GET /users/password/new(.:format) devise/passwords#new
# edit_user_password GET /users/password/edit(.:format) devise/passwords#edit
# PATCH /users/password(.:format) devise/passwords#update
# PUT /users/password(.:format) devise/passwords#update
# cancel_user_registration GET /users/cancel(.:format) registrations#cancel
# user_registration POST /users(.:format) registrations#create
# new_user_registration GET /users/sign_up(.:format) registrations#new
# edit_user_registration GET /users/edit(.:format) registrations#edit
# PATCH /users(.:format) registrations#update
# PUT /users(.:format) registrations#update
# DELETE /users(.:format) registrations#destroy
# user GET /users/:id(.:format) users#show
# login GET /login(.:format) devise/sessions#new
# logout DELETE /logout(.:format) devise/sessions#destroy
# register GET /register(.:format) registrations#new
# settings GET /settings(.:format) devise/registrations#edit
# articles GET /articles(.:format) articles#index
# POST /articles(.:format) articles#create
# new_article GET /articles/new(.:format) articles#new
# edit_article GET /articles/:id/edit(.:format) articles#edit
# article GET /articles/:id(.:format) articles#show
# PATCH /articles/:id(.:format) articles#update
# PUT /articles/:id(.:format) articles#update
# DELETE /articles/:id(.:format) articles#destroy
# previous_events GET /events/previous(.:format) events#previous
# map_events GET /events/map(.:format) events#map
# ics_event GET /events/:id/ics(.:format) events#ics
# event_event_partners POST /events/:event_id/event_partners(.:format) event_partners#create
# new_event_event_partner GET /events/:event_id/event_partners/new(.:format) event_partners#new
# edit_event_event_partner GET /events/:event_id/event_partners/:id/edit(.:format) event_partners#edit
# event_event_partner PATCH /events/:event_id/event_partners/:id(.:format) event_partners#update
# PUT /events/:event_id/event_partners/:id(.:format) event_partners#update
# DELETE /events/:event_id/event_partners/:id(.:format) event_partners#destroy
# events GET /events(.:format) events#index
# POST /events(.:format) events#create
# new_event GET /events/new(.:format) events#new
# edit_event GET /events/:id/edit(.:format) events#edit
# event GET /events/:id(.:format) events#show
# PATCH /events/:id(.:format) events#update
# PUT /events/:id(.:format) events#update
# DELETE /events/:id(.:format) events#destroy
# partners GET /partners(.:format) partners#index
# venues GET /venues(.:format) venues#index
# POST /venues(.:format) venues#create
# new_venue GET /venues/new(.:format) venues#new
# edit_venue GET /venues/:id/edit(.:format) venues#edit
# venue GET /venues/:id(.:format) venues#show
# PATCH /venues/:id(.:format) venues#update
# PUT /venues/:id(.:format) venues#update
# DELETE /venues/:id(.:format) venues#destroy
# entities GET /entities(.:format) entities#index
# POST /entities(.:format) entities#create
# new_entity GET /entities/new(.:format) entities#new
# edit_entity GET /entities/:id/edit(.:format) entities#edit
# entity GET /entities/:id(.:format) entities#show
# PATCH /entities/:id(.:format) entities#update
# PUT /entities/:id(.:format) entities#update
# DELETE /entities/:id(.:format) entities#destroy
# categories GET /categories(.:format) categories#index
# POST /categories(.:format) categories#create
# new_category GET /categories/new(.:format) categories#new
# edit_category GET /categories/:id/edit(.:format) categories#edit
# category GET /categories/:id(.:format) categories#show
# PATCH /categories/:id(.:format) categories#update
# PUT /categories/:id(.:format) categories#update
# DELETE /categories/:id(.:format) categories#destroy
# tags GET /tags(.:format) tags#index
# POST /tags(.:format) tags#create
# new_tag GET /tags/new(.:format) tags#new
# edit_tag GET /tags/:id/edit(.:format) tags#edit
# tag GET /tags/:id(.:format) tags#show
# PATCH /tags/:id(.:format) tags#update
# PUT /tags/:id(.:format) tags#update
# DELETE /tags/:id(.:format) tags#destroy
# about GET /about(.:format) static_pages#about
# fact_sheet GET /about/fact-sheet(.:format) static_pages#fact_sheet
# officers GET /about/officers(.:format) static_pages#officers
# chapters GET /about/regional-chapters(.:format) static_pages#chapters
# events_main GET /devcon-events(.:format) static_pages#events
# events_request GET /devcon-events/request(.:format) static_pages#request_event
# notifications GET /keep-in-touch(.:format) static_pages#keep_in_touch
# contact GET /contact(.:format) static_pages#contact
# get_involved GET /get-involved(.:format) static_pages#get_involved
# volunteer GET /get-involved/volunteer(.:format) static_pages#volunteer
# speaker GET /get-involved/speaker(.:format) static_pages#speaker
# sponsor GET /get-involved/sponsor(.:format) static_pages#sponsor
# faq GET /faq(.:format) static_pages#faq
# feed GET /feed(.:format) static_pages#feed {:format=>"atom"}
# calendar GET /calendar.ics(.:format) static_pages#calendar
# root GET / static_pages#home
#
# Routes for Ckeditor::Engine:
# pictures GET /pictures(.:format) ckeditor/pictures#index
# POST /pictures(.:format) ckeditor/pictures#create
# picture DELETE /pictures/:id(.:format) ckeditor/pictures#destroy
# attachment_files GET /attachment_files(.:format) ckeditor/attachment_files#index
# POST /attachment_files(.:format) ckeditor/attachment_files#create
# attachment_file DELETE /attachment_files/:id(.:format) ckeditor/attachment_files#destroy
#
<file_sep>class CreateResourcePeople < ActiveRecord::Migration
def change
create_table :resource_people do |t|
t.string :name
t.string :slug
t.text :description
t.string :main_title
t.string :thumbnail
t.timestamps
end
add_index :resource_people, :slug
end
end
<file_sep>Fabricator(:venue) do
  # Sequenced attributes keep each fabricated venue unique.
  name { sequence(:name) { |n| "Venue-#{n + 1}" } }
  slug { sequence(:slug) { |n| "venue-#{n + 1}" } }
  address { sequence(:address) { |n| "Address-#{n + 1}" } }
  latitude "9.99"
  longitude "9.99"
  description { sequence(:description) { |n| "Description-#{n + 1}" } }
end
<file_sep>require 'spec_helper'
# Helper specs: flash-level CSS mapping and page-title composition.
describe 'ApplicationHelper', :type => :helper do
  describe '::flash_class' do
    it 'should return proper CSS classes' do
      expect(flash_class('notice')).to eq('alert alert-info')
      expect(flash_class('success')).to eq('alert alert-success')
      expect(flash_class('error')).to eq('alert alert-danger')
      expect(flash_class('alert')).to eq('alert alert-danger')
    end
  end

  describe 'full_title' do
    it 'should include the page name' do
      expect(full_title('foo')).to match(/^foo/)
    end

    it 'should include the base name' do
      expect(full_title('foo')).to match(/Developers Connect/)
    end

    # The home page title is just the base name, with no separator.
    it 'should not include a dash for the home page' do
      expect(full_title('')).not_to match(/\-/)
    end
  end
end
<file_sep>Fabricator(:event) do
name { sequence(:name) { |n| "Event-#{n + 1}" } }
slug { sequence(:slug) { |n| "event-#{n + 1}" } }
description { sequence(:description) { |n| "Best event #{n + 1}" } }
venue
start_at Time.now
end_at 5.hours.from_now
summary { sequence(:summary) { |n| "Good event #{n + 1}" } }
end
<file_sep>class UsersController < ApplicationController
def show
@user = User.find(params[:id])
@articles = @user.articles.paginate(:page => params[:page])
end
end<file_sep>module ApplicationHelper
def flash_class(level)
case level
when "notice" then 'alert alert-info'
when "success" then 'alert alert-success'
when "error" then 'alert alert-danger'
when "alert" then 'alert alert-danger'
end
end
def logo
image_tag('devcon_logo.png', :alt => 'Developers Connect')
end
def full_title(page_title)
base_title = ENV['title']
if page_title.empty?
base_title
else
"#{page_title} | #{base_title}"
end
end
def generate_description(text)
text ||= ""
text.tr("\n"," ").gsub(/\s+/, " ").strip
end
def thumbnail_url(obj)
thumbnail = (obj.has_attribute?(:thumbnail) ?
obj.thumbnail : obj.logo)
thumbnail.path == nil ? opengraph_thumb_url : thumbnail.opengraph.url
end
def opengraph_thumb_url
path_to_image("opengraph_default_thumbnail.png")
end
def article_opengraph_url(article)
if article.opengraph_image.nil?
[opengraph_thumb_url, { :width => 200, :height => 200 }]
else
[article.opengraph_image.opengraph.url, { :width => 1200, :height => 630 }]
end
end
def article_opengraph_image_src(article)
if article.opengraph_image.nil?
opengraph_thumb_url
else
article.opengraph_image.opengraph.url
end
end
def text_field_datetime(datetime)
l(datetime.nil? ? Time.zone.now : datetime)
end
end
<file_sep>Fabricator(:tag) do
  name "Best Tag"
  description "This is the best tag ever"
  # Each fabricated tag gets one associated article with a unique title.
  articles(:count => 1) { |attrs, i| Fabricate(:article, title: "tagged#{i}") }
end
<file_sep>source 'https://rubygems.org'
# Core framework and front-end.
gem 'rails', '~> 4.2.0'
gem 'nokogiri', '~> 1.7.1'
gem 'bootstrap-sass', '~> 3.3.3'
# Authentication, forms, pagination, rich-text editing.
gem 'devise', '~> 4.1.1'
gem 'simple_form', '~> 3.2.0'
gem 'will_paginate', '~> 3.1.0'
gem 'bootstrap-will_paginate', '0.0.10'
gem 'ckeditor', '~> 4.1.2'
gem 'meta-tags', :require => 'meta_tags'
gem 'underscore-rails'
gem 'gmaps4rails', '~> 2.1.2'
# Authorization and slugs.
gem 'cancan', '~> 1.6.9'
gem 'babosa'
# File uploads and images.
gem 'carrierwave', :github => 'carrierwaveuploader/carrierwave'
gem 'mini_magick'
gem 'htmlentities'
gem 'icalendar'
#gem 'omniauth-twitter'
gem 'omniauth-facebook'
gem 'coveralls', '~> 0.8.1', require: false
gem 'pg', '~> 0.18.0'
gem 'jquery-rails'
gem 'jquery-ui-rails', '5.0.5'
gem 'sitemap_generator'

group :development do
  # Deployment.
  gem 'capistrano', '~> 3.4.0'
  gem 'capistrano-rails', '~> 1.1'
  gem 'capistrano-bundler'
  # Debugging and tooling.
  gem 'better_errors'
  gem 'binding_of_caller'
  gem 'meta_request'
  gem 'annotate', '~> 2.6.1'
  gem 'guard'
  gem 'guard-bundler'
  gem 'guard-rspec'
  gem 'guard-spork'
  gem 'guard-livereload'
  gem 'guard-unicorn'
  gem 'unicorn'
  gem 'spring'
end

group :development, :test do
  gem 'rspec-rails', '~> 3.4.0'
  gem 'pry-byebug'
  gem 'fabrication', '~> 2.14.0'
  gem 'faker'
  gem 'database_cleaner', '~> 1.5.0'
  gem 'capybara', '~> 2.5.0'
  gem 'launchy'
  gem 'spork', '~> 1.0.0rc4'
  gem 'simplecov', :require => false
  # Platform-specific notification/file-watcher backends, loaded lazily.
  gem 'growl', require: false
  gem 'libnotify', require: false
  gem 'rb-inotify', :require => false
  gem 'rb-fsevent', :require => false
  gem 'shoulda-matchers'
  gem 'quiet_assets'
  gem 'rack-livereload'
  gem 'parallel_tests'
end

# Asset pipeline.
gem 'sassc-rails'
gem 'coffee-rails', '~> 4.1.0'
gem 'uglifier', '~> 3.0.0'
<file_sep>module SluggedResource
def self.included(base)
base.class_eval do
validates_presence_of :name, :slug
validates_uniqueness_of :slug
before_validation :generate_slug
end
end
def generate_slug
self.slug ||= name.to_slug.normalize.to_s
end
def to_param
slug
end
end
<file_sep>class AddAdditionalDataToEvent < ActiveRecord::Migration
def change
add_column :events, :summary, :text
add_column :events, :schedule, :text
add_column :events, :rules, :text
add_column :events, :registration, :text
end
end
<file_sep>require 'spec_helper'
# Feature specs for every static page: page titles, shared layout links,
# and the custom 404/500 error pages.
describe 'Static pages', :type => :feature do
  subject { page }

  # Shared example: each static page sets the expected <title>.
  shared_examples_for 'all static pages' do
    it { is_expected.to have_page_title page_title }
  end

  describe 'Home page' do
    before { visit root_path }
    let(:heading) { 'Developers Connect' }
    let(:page_title) { '' }
    it_should_behave_like 'all static pages'
    it { is_expected.not_to have_page_title 'Home -' }
  end

  describe 'Contact page' do
    before { visit contact_path }
    let(:heading) { 'Contact' }
    let(:page_title) { 'Contact' }
    it_should_behave_like 'all static pages'
  end

  it 'should have the right links on the layout' do
    visit root_path
    click_link 'Contact'
    expect(page).to have_page_title 'Contact'
    click_link 'Home'
    expect(page).to have_page_title ''
    click_link 'Sign in'
    expect(page).to have_page_title 'Sign in'
    click_link 'Articles'
    expect(page).to have_page_title 'Articles'
    click_link 'Upcoming Events'
    expect(page).to have_page_title 'Events'
    click_link 'About'
    expect(page).to have_page_title 'About DevCon'
  end

  describe 'About page' do
    before { visit about_path }
    let(:heading) { 'About DevCon' }
    let(:page_title) { 'About DevCon' }
    it_should_behave_like 'all static pages'
  end

  describe 'Officers page' do
    before { visit officers_path }
    let(:heading) { 'DevCon Officers' }
    let(:page_title) { 'DevCon Officers' }
    it_should_behave_like 'all static pages'
  end

  describe 'Regional Chapters page' do
    before { visit chapters_path }
    let(:heading) { 'Regional Chapters' }
    let(:page_title) { 'Regional Chapters' }
    it_should_behave_like 'all static pages'
  end

  describe 'About DevCon Events page' do
    before { visit events_main_path }
    let(:heading) { 'DevCon Events' }
    let(:page_title) { 'About DevCon Events' }
    it_should_behave_like 'all static pages'
  end

  describe 'Request Event page' do
    before { visit events_request_path }
    let(:heading) { 'Request an Event' }
    let(:page_title) { 'Request an Event' }
    it_should_behave_like 'all static pages'
  end

  describe 'Get Involved page' do
    before { visit get_involved_path }
    let(:heading) { 'Get Involved' }
    let(:page_title) { 'Get Involved' }
    it_should_behave_like 'all static pages'
  end

  describe 'Volunteer page' do
    before { visit volunteer_path }
    let(:heading) { 'Become a Volunteer' }
    let(:page_title) { 'Become a Volunteer' }
    it_should_behave_like 'all static pages'
  end

  describe 'Sponsor page' do
    before { visit sponsor_path }
    let(:heading) { 'Sponsor Events' }
    let(:page_title) { 'Sponsor Events' }
    it_should_behave_like 'all static pages'
  end

  describe 'Keep in Touch page' do
    before { visit notifications_path }
    let(:heading) { 'Keep in Touch' }
    let(:page_title) { 'Keep in Touch' }
    it_should_behave_like 'all static pages'
  end

  # Unknown URLs hit the catch-all route.
  describe '404 routing' do
    before { visit '/test' }
    let(:heading) { 'Error 404' }
    let(:page_title) { 'Error 404' }
    it_should_behave_like 'all static pages'
  end

  # A missing slug (RecordNotFound) also renders the 404 page.
  describe '404 RecordNotFound' do
    before { visit '/events/meh' }
    let(:heading) { 'Error 404' }
    let(:page_title) { 'Error 404' }
    it_should_behave_like 'all static pages'
  end

  describe '500' do
    before do
      expect(Event).to receive(:upcoming).and_raise('test exception')
      visit root_path
    end
    let(:heading) { 'Error 500' }
    let(:page_title) { 'Error 500' }
    it_should_behave_like 'all static pages'
  end
end
<file_sep>require 'spec_helper'
# Controller specs for the non-CRUD event actions.
describe EventsController, :type => :controller do
  describe "GET 'previous'" do
    it "returns http success" do
      get 'previous'
      expect(response).to be_success
    end
  end

  describe "GET 'event ics'" do
    # The iCal export looks events up by slug.
    it "returns http success" do
      event = Fabricate(:event)
      event.save
      get 'ics', :id => event.slug
      expect(response).to be_success
    end
  end
end
<file_sep>class ChangeParticipantToResource < ActiveRecord::Migration
def change
remove_index :participants, :entity_id
rename_column :participants, :entity_id, :resource_person_id
add_index :participants, :resource_person_id
add_column :participants, :custom_description, :text
add_column :participants, :custom_title, :string
end
end
<file_sep>Fabricator(:user) do
  email { sequence(:email) { |i| "<EMAIL>" } }
  name { sequence(:name) { |i| "Example User-#{i}" } }
  password '<PASSWORD>'
end

# Role-specific variants derived from the base user fabricator.
Fabricator(:admin, :from => :user) do
  roles ['admin']
end

Fabricator(:author, :from => :user) do
  roles ['author']
end
<file_sep>require 'spec_helper'
# Feature specs for the article pages, covering three permission levels:
# anonymous visitors, signed-in regular users, and authors (CRUD + drafts).
describe 'Articles pages', :type => :feature do
  subject { page }

  describe 'for anonymous users' do
    describe 'in the index page' do
      before { visit articles_path }
      it { is_expected.to have_page_title 'Articles' }
      it { is_expected.to have_page_heading 'Articles' }
      it { is_expected.not_to have_link 'New article' }
      it { is_expected.not_to have_link 'Edit' }
      it { is_expected.not_to have_link 'Destroy' }
    end

    describe 'in the show page' do
      before do
        @article = Fabricate(:article)
        @article.categories << Fabricate(:category)
        @article.save
        visit article_path(@article)
      end
      it { is_expected.to have_page_title @article.title }
      it { is_expected.to have_page_heading @article.title }
      it { is_expected.to have_link @article.categories.first.name }
      it { is_expected.to have_link 'Back to articles' }
      it { is_expected.not_to have_link 'Edit' }
      it { is_expected.not_to have_link 'Destroy' }
    end

    # Anonymous access to new/edit is denied and bounced to root.
    describe 'in the new page' do
      before { visit new_article_path }
      it { is_expected.to have_error_message 'Access denied' }
      it { is_expected.to have_page_title '' }
      it { is_expected.to have_url root_url }
    end

    describe 'in the edit page' do
      before do
        @article = Fabricate(:article)
        visit edit_article_path(@article)
      end
      it { is_expected.to have_error_message 'Access denied' }
      it { is_expected.to have_page_title '' }
      it { is_expected.to have_url root_url }
    end

    # Drafts are hidden from anonymous visitors entirely.
    describe 'draft articles' do
      describe 'in index' do
        before do
          @article = Fabricate(:article, :title => 'Draft article', :draft => true)
          visit articles_path
        end
        it { is_expected.not_to have_link 'Draft article' }
      end

      describe 'in the show page' do
        before do
          @article = Fabricate(:article, :draft => true)
          @article.categories << Fabricate(:category)
          @article.save
          visit article_path(@article)
        end
        it { is_expected.to have_error_message 'Access denied' }
        it { is_expected.to have_url root_url }
      end
    end
  end

  describe 'for signed-in users' do
    # Regular users can read but never write.
    describe 'as a regular user' do
      before do
        @user = Fabricate(:user)
        visit new_user_session_path
        capybara_signin(@user)
      end

      describe 'in the index page' do
        before { visit articles_path }
        it { is_expected.to have_page_title 'Articles' }
        it { is_expected.to have_page_heading 'Articles' }
        it { is_expected.not_to have_link 'New article' }
        it { is_expected.not_to have_link 'Edit' }
        it { is_expected.not_to have_link 'Destroy' }
      end

      describe 'in the show page' do
        before do
          @article = Fabricate(:article)
          visit article_path(@article)
        end
        it { is_expected.to have_page_title @article.title }
        it { is_expected.to have_page_heading @article.title }
        it { is_expected.to have_link 'Back to articles' }
        it { is_expected.not_to have_link 'Edit' }
        it { is_expected.not_to have_link 'Destroy' }
      end

      describe 'in the new page' do
        before { visit new_article_path }
        it { is_expected.to have_error_message 'Access denied' }
        it { is_expected.to have_page_title '' }
        it { is_expected.to have_url root_url }
      end

      describe 'in the edit page' do
        before do
          @article = Fabricate(:article)
          visit edit_article_path(@article)
        end
        it { is_expected.to have_error_message 'Access denied' }
        it { is_expected.to have_page_title '' }
        it { is_expected.to have_url root_url }
      end
    end

    # Authors get full CRUD on their own articles only.
    describe 'as an author' do
      before do
        @author = Fabricate(:author)
        visit new_user_session_path
        capybara_signin(@author)
      end

      describe 'in the index page' do
        before { visit articles_path }
        it { is_expected.to have_page_title 'Articles' }
        it { is_expected.to have_page_heading 'Articles' }
        it { is_expected.to have_link 'New article' }

        describe 'when author has an article in the list' do
          before do
            @article = Fabricate(:article, :author => @author)
            visit articles_path
          end
          it { is_expected.to have_link 'Edit', :href => edit_article_path(@article) }
          it { is_expected.to have_delete_link 'Destroy', article_path(@article) }
        end
      end

      describe 'in the show page' do
        before do
          @article = Fabricate(:article, :author => @author)
          # binding.pry
          visit article_path(@article)
          # save_and_open_page
        end
        it { is_expected.to have_page_title @article.title }
        it { is_expected.to have_page_heading @article.title }
        it { is_expected.to have_link 'Back to articles' }

        describe 'when author is the one who made the article' do
          it { is_expected.to have_link 'Edit', :href => edit_article_path(@article) }
          it { is_expected.to have_delete_link 'Destroy', article_path(@article) }
        end

        describe 'when author is not the one who made the article' do
          before do
            @other_author = Fabricate(:author)
            click_link 'Sign out'
            click_link 'Sign in'
            capybara_signin(@other_author)
            visit article_path(@article)
          end
          it { is_expected.not_to have_link 'Edit', :href => edit_article_path(@article) }
          it { is_expected.not_to have_delete_link 'Destroy', article_path(@article) }
        end
      end

      describe 'on creating articles' do
        before do
          @category = Fabricate(:category)
          visit new_article_path
        end
        it { is_expected.to have_page_title 'New article' }
        it { is_expected.to have_page_heading 'New article' }
        it { is_expected.to have_link 'Back to articles' }

        describe 'with invalid information' do
          it 'should not create an article' do
            expect { click_button 'Create' }.to_not change(Article, :count)
          end

          describe 'on error messages' do
            before { click_button 'Create' }
            it { is_expected.to have_error_message 'errors' }
          end
        end

        describe 'with valid information' do
          before do
            fill_in 'Title', :with => '<NAME>'
            fill_in 'Content', :with => 'Lorem Ipsum'
            check @category.name
          end

          it 'should create an article' do
            expect { click_button 'Create' }.to change(Article, :count).by(1)
          end

          describe 'on success messages' do
            before { click_button 'Create' }
            it { is_expected.to have_success_message 'published' }
          end
        end
      end

      describe 'in the edit page' do
        before do
          @category = Fabricate(:category)
          @article = Fabricate(:article, :author => @author)
          visit edit_article_path(@article)
        end
        it { is_expected.to have_page_title 'Edit article' }
        it { is_expected.to have_page_heading 'Edit article' }
        it { is_expected.to have_link 'Back to articles' }

        describe 'with invalid information' do
          before do
            fill_in 'Title', :with => ' '
            fill_in 'Content', :with => ' '
            click_button 'Update'
          end
          it { is_expected.to have_error_message 'errors' }
        end

        describe 'with valid information' do
          before do
            fill_in 'Content', :with => 'foobar'
            uncheck @category.name
            click_button 'Update'
          end
          it { is_expected.to have_success_message 'updated' }
        end
      end

      describe 'on destroying articles' do
        before do
          @article = Fabricate(:article, :author => @author)
          visit article_path(@article)
        end

        it 'should destroy the article' do
          expect { click_link "Destroy" }.to change(Article, :count).by(-1)
        end

        it 'should have a notice message' do
          click_link "Destroy"
          expect(page).to have_notice_message 'destroyed'
        end
      end

      # Authors can see drafts in the index and open them directly.
      describe 'draft articles' do
        describe 'in index' do
          before do
            @article = Fabricate(:article, :title => 'Draft article', :draft => true)
            visit articles_path
          end
          it { is_expected.to have_link 'Draft article' }
        end

        describe 'in the show page' do
          before do
            @article = Fabricate(:article, :draft => true)
            @article.categories << Fabricate(:category)
            @article.save
            visit article_path(@article)
          end
          it { is_expected.not_to have_error_message 'Access denied' }
          it { is_expected.not_to have_url root_url }
        end
      end
    end
  end
end
<file_sep>Fabricator(:event_partner) do
  # Scaffold-generated defaults; the associations use their own fabricators.
  event
  entity
  role "MyString"
  custom_blurb "MyText"
end
<file_sep>class AddAdditionalFieldsToArticle < ActiveRecord::Migration
def change
add_column :articles, :slug, :string
add_column :articles, :summary, :text
add_column :articles, :thumbnail, :string
add_index :articles, :slug, :unique => :true
end
end
<file_sep>class EntitiesController < ApplicationController
load_resource :find_by => :slug
load_and_authorize_resource
def index
@entities = Entity.all
end
def new
end
def create
@entity = Entity.create(entity_params)
if @entity.save
redirect_to entity_path(@entity), :notice => "Entity successfully created"
else
render :new
end
end
def edit
end
def update
if @entity.update_attributes(entity_params)
redirect_to entity_path(@entity), :notice => "Entity successfully updated"
else
render :edit
end
end
def show
@entity = Entity.find_by_slug!(params[:id])
end
private
def entity_params
params.require(:entity).permit(:blurb, :description, :level, :logo, :name, :slug, :type, :formal_name, :short_name
)
end
end
<file_sep>class CategoriesController < ApplicationController
before_filter :paginate_categories, :only => :index
load_resource :find_by => :slug
load_and_authorize_resource
def index
end
def show
end
def new
end
def edit
end
def create
if @category.save
flash[:success] = 'Successfully created a category!'
redirect_to @category
else
render 'new'
end
end
def update
if @category.update_attributes(category_params)
flash[:success] = 'Successfully updated category!'
redirect_to @category
else
render :action => 'edit'
end
end
def destroy
@category.destroy
flash[:notice] = 'Successfully destroyed category!'
redirect_to categories_path
end
private
def paginate_categories
@categories = Category.paginate(:page => params[:page])
end
def category_params
params.require(:category).permit(:description, :name, :slug)
end
end
<file_sep>include ApplicationHelper
def capybara_signin(user)
fill_in 'Email', :with => user.email
fill_in 'Password', :with => <PASSWORD>
click_button 'Sign in'
end
def omniauth_mock(provider, email, name)
OmniAuth.config.add_mock(provider, {
:uid => '123545',
:provider => provider,
:info => { :email => email, :name => name }
})
end
def omniauth_signin
visit root_path
click_link 'Sign in'
find(:xpath, "//a/img[contains(@alt,'Fblogin')]/..").click
end
RSpec::Matchers.define :have_error_message do |message|
match do |page|
page.has_selector? 'div.alert.alert-danger', :text => message
end
end
RSpec::Matchers.define :have_success_message do |message|
match do |page|
page.has_selector? 'div.alert.alert-success', :text => message
end
end
RSpec::Matchers.define :have_notice_message do |message|
match do |page|
page.has_selector? 'div.alert.alert-info', :text => message
end
end
RSpec::Matchers.define :have_page_title do |title|
match do |page|
page.has_title? full_title(title)
end
end
RSpec::Matchers.define :have_page_heading do |heading|
match do |page|
page.has_selector? 'h1', :text => heading
end
end
RSpec::Matchers.define :have_url do |url|
match do |page|
page.current_url == url
end
end
RSpec::Matchers.define :have_delete_link do |text, link|
match do |page|
page.has_selector?("a[data-method=delete][href=\"#{link}\"]", :text => text)
end
end
<file_sep># == Schema Information
#
# Table name: event_partners
#
# id :integer not null, primary key
# event_id :integer
# entity_id :integer
# role :string(255)
# custom_blurb :text
# created_at :datetime not null
# updated_at :datetime not null
#
class EventPartner < ActiveRecord::Base
validates_presence_of :entity_id, :event_id, :role
validates_uniqueness_of :entity_id, :scope => :event_id
belongs_to :event
belongs_to :entity
end
<file_sep>class RemoveUnneededTables < ActiveRecord::Migration
def up
drop_table :comments
drop_table :resource_people
drop_table :presentations
drop_table :speakers
drop_table :participants
end
end
<file_sep>require 'carrierwave/test/matchers'
describe CkeditorAttachmentFileUploader do
include CarrierWave::Test::Matchers
before do
@file = Ckeditor::AttachmentFile.new
CkeditorAttachmentFileUploader.enable_processing = true
@uploader = CkeditorAttachmentFileUploader.new(@file, :data)
File.open(File.join(Rails.root, "public/DevConFAQ.pdf")) do |f|
@uploader.store!(f)
end
end
after do
CkeditorAttachmentFileUploader.enable_processing = false
@uploader.remove!
end
it "should make the file readable only to all and not executable" do
expect(@uploader).to have_permissions(0644)
end
it "should be the correct format" do
expect(@uploader).to be_format('pdf')
end
end
<file_sep>CarrierWave.configure do |config|
if Rails.env.production?
config.asset_host = "https://assets.devcon.ph"
end
end
<file_sep>class AddPublishedAndDisplayFlags < ActiveRecord::Migration
def change
add_column :articles, :published, :boolean, :default => false
add_column :articles, :published_at, :datetime
add_index :articles, :published
add_column :events, :published, :boolean, :default => false
add_index :events, :published
add_column :resource_people, :display, :boolean, :default => false
add_index :resource_people, :display
add_column :entities, :display, :boolean, :default => false
add_index :entities, :display
add_column :venues, :display, :boolean, :default => false
add_index :venues, :display
add_column :presentations, :display, :boolean, :default => false
add_index :presentations, :display
end
end
<file_sep>Fabricator(:category) do
name { sequence(:category) { |i| "Best Category #{i}" } }
description "This is the best category evar! Nevar forget."
end
<file_sep># == Schema Information
#
# Table name: articles
#
# id :integer not null, primary key
# title :string(255)
# content :text
# author_id :integer
# created_at :datetime not null
# updated_at :datetime not null
# slug :string(255)
# summary :text
# thumbnail :string(255)
# published :boolean default(FALSE)
# published_at :datetime
#
include ActionView::Helpers::SanitizeHelper
class Article < ActiveRecord::Base
belongs_to :author, :class_name => 'User', :foreign_key => 'author_id'
has_and_belongs_to_many :categories
has_and_belongs_to_many :tags
has_many :comments, :as => :commentable
validates :title, :presence => true
validates :content, :presence => true
validates :author_id, :presence => true
validates_presence_of :slug
validates_uniqueness_of :slug
default_scope -> { order('articles.created_at DESC') }
before_validation :generate_slug, :generate_summary
mount_uploader :thumbnail, ThumbnailUploader
mount_uploader :opengraph_image, OpengraphUploader
def generate_slug
self.slug ||= title.to_slug.normalize.to_s
end
def generate_summary
if self.summary.blank?
self.summary = strip_tags(content)
end
end
def to_param
slug
end
end
<file_sep>require 'spec_helper'
describe ArticlesController, :type => :controller do
let :valid_attributes do
{ :title => "Test title", :content => "Test Content" }
end
describe "GET 'index'" do
it "returns http success" do
get 'index'
expect(response).to be_success
end
end
describe "#create" do
before do
sign_in(Fabricate(:author))
end
it "should allow creation of article" do
post :create,
:article => valid_attributes
expect(response).to redirect_to(Article.last)
end
it "should not allow saving of categories" do
category1 = Fabricate(:category)
category2 = Fabricate(:category)
post :create,
:article => valid_attributes.merge({:category_ids => [category1.id, category2.id] })
expect(assigns(:article).category_ids.size).to eq(2)
end
it "should not allow editing of author_id" do
post :create,
:article => valid_attributes.merge({:author_id => 42 })
expect(assigns(:article).author_id).not_to eq(42)
end
end
describe "#update" do
before do
@author = Fabricate(:author)
sign_in(@author)
@article = Fabricate(:article, :author => @author)
end
it "should allow creation of article" do
put :update,
:id => @article.slug,
:article => valid_attributes
expect(response).to redirect_to(Article.last)
end
it "should not allow saving of categories" do
category1 = Fabricate(:category)
category2 = Fabricate(:category)
put :update,
:id => @article.slug,
:article => valid_attributes.merge({:category_ids => [category1.id, category2.id] })
expect(assigns(:article).category_ids.size).to eq(2)
end
it "should not allow editing of author_id" do
put :update,
:id => @article.slug,
:article => valid_attributes.merge({:author_id => 42 })
expect(assigns(:article).author_id).not_to eq(42)
end
end
end
<file_sep>APP_ROOT = File.expand_path(File.dirname(File.dirname(File.dirname(__FILE__))))
ENV['BUNDLE_GEMFILE'] = File.expand_path('../../Gemfile', File.dirname(__FILE__))
require 'bundler/setup'
worker_processes 1
working_directory APP_ROOT
preload_app true
timeout 30
listen APP_ROOT + "/tmp/sockets/unicorn.sock", :backlog => 64
pid APP_ROOT + "/tmp/pids/unicorn.pid"
stderr_path APP_ROOT + "/log/unicorn.stderr.log"
stdout_path APP_ROOT + "/log/unicorn.stdout.log"
before_fork do |server, worker|
defined?(ActiveRecord::Base) && ActiveRecord::Base.connection.disconnect!
old_pid = APP_ROOT + '/tmp/pids/unicorn.pid.oldbin'
if File.exists?(old_pid) && server.pid != old_pid
begin
Process.kill("QUIT", File.read(old_pid).to_i)
rescue Errno::ENOENT, Errno::ESRCH
puts "Old master alerady dead"
end
end
end
after_fork do |server, worker|
defined?(ActiveRecord::Base) && ActiveRecord::Base.establish_connection
end
<file_sep>//
//= require jquery
//= require jquery_ujs
//= require jquery-ui
//= require jquery-ui-sliderAccess
//= require jquery-ui-timepicker-addon
//= require underscore
//= require gmaps/google
//= require ckeditor/init
//= require bootstrap
//= require forms
//= require static_pages
//= require omniauth_callbacks
<file_sep>class EventPartnersController < ApplicationController
before_filter :load_parent
def new
@event_partner = @event.event_partners.build
end
def create
@event_partner = @event.event_partners.build(event_partner_params)
if @event_partner.save
redirect_to event_path(@event), :notice => "Event Partner successfully added"
else
render :new
end
end
def edit
@event_partner = @event.event_partners.find(params[:id])
end
def update
@event_partner = @event.event_partners.find(params[:id])
if @event_partner.update_attributes(event_partner_params)
redirect_to event_path(@event), :notice => "Event Partner successfully updated"
else
render :edit
end
end
private
def load_parent
@event = Event.find_by_slug!(params[:event_id])
authorize! :edit, @event
end
def event_partner_params
params.require(:event_partner).permit(:custom_blurb, :entity_id, :role)
end
end
<file_sep>class AddOpengraphImageToArticle < ActiveRecord::Migration
def change
add_column :articles, :opengraph_image, :string
end
end
<file_sep># == Schema Information
#
# Table name: users
#
# id :integer not null, primary key
# email :string(255) default(""), not null
# encrypted_password :string(255) default(""), not null
# reset_password_token :string(255)
# reset_password_sent_at :datetime
# remember_created_at :datetime
# sign_in_count :integer default(0)
# current_sign_in_at :datetime
# last_sign_in_at :datetime
# current_sign_in_ip :string(255)
# last_sign_in_ip :string(255)
# created_at :datetime not null
# updated_at :datetime not null
# name :string(255)
# roles_mask :integer
#
class User < ActiveRecord::Base
# Include default devise modules. Others available are:
# :token_authenticatable, :encryptable, :confirmable, :lockable, :timeoutable and :omniauthable
devise :database_authenticatable, :registerable,
:recoverable, :rememberable, :trackable, :validatable, :omniauthable
has_many :articles, :foreign_key => 'author_id', :dependent => :destroy
has_many :authentications, :dependent => :destroy
validates :email, :presence => true, :email => true
ROLES = %w[admin moderator author]
def roles=(roles)
self.roles_mask = (roles & ROLES).map { |r| 2**ROLES.index(r) }.sum
end
def roles
ROLES.reject { |r| ((roles_mask || 0) & 2**ROLES.index(r)).zero? }
end
def role?(role)
roles.include?(role.to_s)
end
def password_required?
(authentications.empty? || !password.blank) && super
end
def self.from_omniauth(auth)
user = where(email: auth.info.email).first
unless user.nil?
provider = Authentication.where(provider: auth.provider, uid: auth.uid, user_id: user).first
if provider.nil?
user.authentications.create!(provider: auth.provider, uid: auth.uid)
user.name = auth.info.name
end
end
user
end
end
<file_sep>require 'spec_helper'
describe "Categories pages", :type => :feature do
subject { page }
describe 'for anonymous users' do
describe 'in the index page' do
before { visit categories_path }
it { is_expected.to have_page_title 'Categories' }
it { is_expected.to have_page_heading 'Categories' }
end
end
describe 'for signed-in users' do
describe 'as a regular user' do
before do
@user = Fabricate(:user)
visit new_user_session_path
capybara_signin(@user)
end
describe 'in the index page' do
before { visit categories_path }
it { is_expected.to have_page_title 'Categories' }
it { is_expected.to have_page_heading 'Categories' }
it { is_expected.not_to have_link 'New Category' }
it { is_expected.not_to have_link 'Edit' }
it { is_expected.not_to have_link 'Destroy' }
end
describe 'in the show page' do
before do
@category = Fabricate(:category, articles: [ Fabricate(:article), Fabricate(:article) ])
visit category_path(@category)
end
it { is_expected.to have_page_title @category.name }
it { is_expected.to have_page_heading @category.name }
it { is_expected.to have_content @category.description }
it { is_expected.not_to have_link 'Edit' }
it { is_expected.not_to have_link 'Destroy' }
it { is_expected.to have_selector('h2', :text => "Articles under #{@category.name}") }
it "should have links for all articles" do
@category.articles.each do |article|
expect(page).to have_link(article.title, :href => article_path(article))
end
end
end
describe 'in the new page' do
before { visit new_category_path }
it { is_expected.to have_error_message 'Access denied' }
it { is_expected.to have_page_title '' }
end
describe 'in the edit page' do
before do
@category = Fabricate(:category)
visit edit_category_path(@category)
end
it { is_expected.to have_error_message 'Access denied' }
it { is_expected.to have_page_title '' }
end
end
describe 'as an author' do
before do
@author = Fabricate(:author)
visit new_user_session_path
capybara_signin(@author)
end
describe 'in the index page' do
before { visit categories_path }
it { is_expected.to have_page_title 'Categories' }
it { is_expected.to have_page_heading 'Categories' }
it { is_expected.to have_link 'New category' }
describe 'with a list of categories' do
before do
@category = Fabricate(:category)
visit categories_path
end
it { is_expected.to have_link @category.name }
it { is_expected.to have_link 'Edit' }
it { is_expected.not_to have_link 'Destroy' }
end
end
describe 'in the show page' do
before do
@category = Fabricate(:category)
visit category_path(@category)
end
it { is_expected.to have_page_title @category.name }
it { is_expected.to have_page_heading @category.name }
it { is_expected.to have_link 'Back to categories' }
it { is_expected.to have_link 'Edit' }
it { is_expected.not_to have_link 'Destroy' }
end
describe 'in the new page' do
before { visit new_category_path }
it { is_expected.to have_page_title 'New category' }
it { is_expected.to have_page_heading 'New category' }
it { is_expected.to have_link 'Back to categories' }
describe 'with invalid information' do
it 'should not create a category' do
expect { click_button 'Create' }.to_not change(Category, :count)
end
describe 'on error messages' do
before { click_button 'Create' }
it { is_expected.to have_error_message 'error' }
end
end
describe 'with valid information' do
before do
fill_in 'Name', :with => 'Foobar'
fill_in 'Description', :with => 'A foo walks into a bar'
end
it 'should create a category' do
expect { click_button 'Create' }.to change(Category, :count).by(1)
end
describe 'on success messages' do
before { click_button 'Create' }
it { is_expected.to have_success_message 'created' }
end
end
end
describe 'in the edit page' do
before do
@category = Fabricate(:category)
visit edit_category_path(@category)
end
it { is_expected.to have_page_title 'Edit category' }
it { is_expected.to have_page_heading 'Edit category' }
it { is_expected.to have_link 'Back to categories' }
describe 'with invalid information' do
before do
fill_in 'Name', :with => ' '
click_button 'Update'
end
it { is_expected.to have_error_message 'error' }
end
describe 'with valid information' do
before do
fill_in 'Name', :with => 'barfoo'
click_button 'Update'
end
it { is_expected.to have_success_message 'updated' }
end
end
end
describe 'as an admin' do
before do
@author = Fabricate(:admin)
visit new_user_session_path
capybara_signin(@author)
end
describe 'on destroying categories' do
before do
@category = Fabricate(:category)
visit category_path(@category)
end
it 'should destroy the category' do
expect { click_link "Destroy" }.to change(Category, :count).by(-1)
end
describe 'on notice messages' do
it 'should have an error message' do
click_link "Destroy"
expect(page).to have_notice_message 'destroyed'
end
end
end
end
end
end
<file_sep>require 'spec_helper'
describe EntitiesController, :type => :controller do
before do
sign_in(Fabricate(:admin))
end
let :valid_attributes do
{ :name => "<NAME>", :type => "school" }
end
describe "GET 'index'" do
it "returns http success" do
get 'index'
expect(response).to be_success
end
end
describe "GET 'show'" do
it "returns http success" do
get 'show', :id => Fabricate(:entity).slug
expect(response).to be_success
end
end
describe "GET 'new'" do
it "returns http success" do
get 'new'
expect(response).to be_success
end
end
describe "POST 'create'" do
it "should allow creation of entities" do
post 'create', :entity => valid_attributes
expect(response).to redirect_to(Entity.last)
end
it "should handle errors on creation of entities" do
post 'create', :entity => valid_attributes.merge(:type => nil)
expect(response).to be_success
end
end
describe "update" do
before do
@entity = Fabricate(:entity)
end
describe "GET 'edit'" do
it "returns http success" do
get 'edit', :id => @entity.slug
expect(response).to be_success
end
end
describe "PUT 'update'" do
it "should allow updating of entities" do
put :update, :id => @entity.slug, :entity => valid_attributes
expect(response).to redirect_to(Entity.last)
end
it "should handle errors on updating of entitys" do
put :update, :id => @entity.slug, :entity => valid_attributes.merge(:type => nil)
expect(response).to be_success
end
end
end
end
<file_sep>Fabricator(:authentication) do
user
provider { sequence(:provider) { |n| "provider-#{n + 1}" } }
uid { sequence(:uid) { |n| "#{n + 1}" } }
end
<file_sep>class AddFormalNameToEntity < ActiveRecord::Migration
def change
add_column :entities, :formal_name, :string
end
end
<file_sep>class PartnersController < ApplicationController
def index
@awesome_partners = Entity.where(:level => "awesome")
@cool_partners = Entity.where(:level => "cool")
@schools = Entity.where("level NOT IN ['awesome', 'cool'] and type = 'school'")
@companies = Entity.where("level NOT IN ['awesome', 'cool'] and type = 'company'")
@user_groups = Entity.where("level NOT IN ['awesome', 'cool'] and type = 'user-group'")
end
end
<file_sep>class OmniauthCallbacksController < Devise::OmniauthCallbacksController
def all
omniauth = request.env["omniauth.auth"]
user = User.from_omniauth(omniauth)
unless user.nil?
flash[:notice] = I18n.t "devise.omniauth_callbacks.success", :kind => omniauth['provider']
sign_in_and_redirect user
else
flash[:alert] = "Registrations are not open yet, but please check back soon"
redirect_to root_path
end
end
# todo add other provider
alias_method :facebook, :all
end
<file_sep>require 'spec_helper'
describe 'Authentication', :type => :request do
subject { page }
describe 'authorization' do
describe 'for non-signed in users' do
before do
@author = Fabricate(:author)
end
describe 'in the Articles controller' do
describe 'when submitting to the create action' do
before { post articles_path }
specify { expect(response).to redirect_to(root_path) }
end
describe 'when submitting to the update action' do
before do
@article = Fabricate(:article, :author => @author)
put article_path(@article)
end
specify { expect(response).to redirect_to(root_path) }
end
describe 'when submitting to the destroy action' do
before do
@article = Fabricate(:article, :author => @author)
delete article_path(@article)
end
specify { expect(response).to redirect_to(root_path) }
end
end
describe 'in the Categories controller' do
describe 'when submitting to the create action' do
before { post categories_path }
specify { expect(response).to redirect_to(root_path) }
end
describe 'when submitting to the update action' do
before do
@category = Fabricate(:category)
put category_path(@category)
end
specify { expect(response).to redirect_to(root_path) }
end
describe 'when submitting to the destroy action' do
before do
@category = Fabricate(:category)
delete category_path(@category)
end
specify { expect(response).to redirect_to(root_path) }
end
end
end
end
end
| 5ca5dd139e6bfcdbaa8568b3aceac6653e915d48 | [
"Markdown",
"JavaScript",
"Ruby"
] | 77 | Ruby | devcon-ph/devcon | b5f6c434a27b3b863bcbd0eb3ffd77da5719f8c5 | 0dc10297c48a260ca7495b4521f54356bc0f0c8c |
refs/heads/master | <file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class PowerUpController : MonoBehaviour
{
    // Power-ups currently affecting this ship.
    public List<PowerUp> powerups;

    void Update()
    {
        PowerUpTimer();
    }

    // Ticks down every active power-up and removes the ones whose time ran out.
    public void PowerUpTimer()
    {
        // Collect expired power-ups first; removing from the list while
        // iterating over it would invalidate the traversal.
        List<PowerUp> expired = new List<PowerUp>();
        foreach (PowerUp active in powerups)
        {
            active.timer -= Time.deltaTime;
            if (active.timer <= 0)
            {
                expired.Add(active);
            }
        }
        foreach (PowerUp done in expired)
        {
            Remove(done);
        }
    }

    // Registers a power-up and lets it modify this ship.
    public void Apply(PowerUp powerup)
    {
        powerups.Add(powerup);
        powerup.OnApplyPowerup(gameObject);
    }

    // Reverts a power-up's effect and forgets about it.
    public void Remove(PowerUp powerup)
    {
        powerup.OnRemovePowerup(gameObject);
        powerups.Remove(powerup);
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.SceneManagement;
public class MainMenu : MonoBehaviour
{
    // Root panel holding the Play/Options/Quit buttons.
    public GameObject StartMenu;
    // Panel shown when the player opens the options screen.
    public GameObject OptionsMenu;

    // Play button handler: loads the overworld scene to begin the game.
    public void Play()
    {
        SceneManager.LoadScene("Overworld");
    }

    // Options button handler: swaps the start panel out for the options panel.
    public void Options()
    {
        OptionsMenu.SetActive(true);
        StartMenu.SetActive(false);
    }

    // Quit button handler: exits the application (no effect inside the editor).
    public void Quit()
    {
        Application.Quit();
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.SceneManagement;
public class GameManager : MonoBehaviour
{
    // Singleton instance; survives scene loads via DontDestroyOnLoad.
    public static GameManager instance;

    public ShipData playerData;
    public GameObject[] enemies;
    public ShipData[] enemyData;

    [Header("Level Gen")]
    public List<GameObject> Rooms;
    public GameObject[,] Grid;
    public int columns;
    public int rows;

    [Header("AI Spawning")]
    public GameObject[] AIPersonalities;
    public GameObject[] Waypoints;
    public GameObject[] PlayerSpawnPoints;
    public GameObject[] PowerUps;

    void Start()
    {
        // Cache every enemy in the scene and its ShipData component.
        // BUG FIX: the ShipData fill loop used to live in Awake, which runs
        // BEFORE Start assigns 'enemies' — so enemyData was allocated here but
        // its elements were never populated. The fill now happens right after
        // the arrays are set up.
        enemies = GameObject.FindGameObjectsWithTag("Enemy");
        enemyData = new ShipData[enemies.Length];
        for (int i = 0; i < enemies.Length; i++)
        {
            enemyData[i] = enemies[i].GetComponent<ShipData>();
        }
    }

    void Awake()
    {
        // Standard singleton: keep the first instance, discard any duplicate.
        if (instance == null)
        {
            instance = this;
            DontDestroyOnLoad(gameObject);
        }
        else
        {
            Destroy(gameObject);
        }
    }

    // NOTE(review): this is not a Unity message and is never invoked
    // automatically from anywhere visible here — confirm whether it should be
    // wired to SceneManager.sceneLoaded or removed.
    void OnSceneLoad()
    {
        playerData = GameObject.FindWithTag("Player").GetComponent<ShipData>();
    }

    void Update()
    {
        // Player death ends the run: tear down the ship and show Game Over.
        // Null guard added so a missing player reference does not throw every
        // frame.
        if (playerData != null && playerData.health <= 0)
        {
            Destroy(playerData.gameObject);
            SceneManager.LoadScene("Game Over");
        }
    }

    // Refreshes the cached player spawn points found in the current level.
    public void GetSpawners()
    {
        PlayerSpawnPoints = GameObject.FindGameObjectsWithTag("Player Spawner");
    }

    // Refreshes the cached AI patrol waypoints found in the current level.
    public void GetWaypoints()
    {
        Waypoints = GameObject.FindGameObjectsWithTag("Waypoint");
    }

    // Teleports the player to a randomly chosen spawn point.
    public void SpawnPlayer()
    {
        playerData.tf.position = PlayerSpawnPoints[Random.Range(0, PlayerSpawnPoints.Length)].transform.position;
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class PassiveAIController : AIController
{
    // Timid personality: wanders between waypoints and runs away as soon as
    // it spots the player; calms down again once it is far enough away.
    void Update()
    {
        if (currentState == AIStates.Idle)
        {
            Idle();
            // Linger briefly, then resume the patrol route.
            if (Time.time > stateStartTime + 1.5f)
            {
                ChangeState(AIStates.Patrol);
            }
        }
        else if (currentState == AIStates.Patrol)
        {
            Patrol();
            // Spotted the player: run.
            if (data.seesPlayer)
            {
                ChangeState(AIStates.Flee);
            }
        }
        else if (currentState == AIStates.Flee)
        {
            Flee(playerData.tf);
            // Far enough away to feel safe again.
            if (Vector3.Distance(data.tf.position, playerData.tf.position) > 150f)
            {
                ChangeState(AIStates.Idle);
            }
        }
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class HealthPowerup : PowerUp
{
    // Instantly restores the target ship to full health when applied.
    public override void OnApplyPowerup(GameObject target)
    {
        ShipData ship = target.GetComponent<ShipData>();
        ship.health = ship.maxHealth;
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using System;
public class LevelGenerator : MonoBehaviour
{
    public GameManager GM;

    // World-space edge length of one room prefab.
    private const int roomSize = 60;

    // When true, seed from today's calendar date so every player gets the
    // same "map of the day".
    public bool isMapOfTheDay;

    // Collapses a DateTime into a single int usable as an RNG seed by summing
    // all of its components.
    public int DateToInt(DateTime currentDate)
    {
        return currentDate.Year + currentDate.Month + currentDate.Day
            + currentDate.Hour + currentDate.Minute + currentDate.Second
            + currentDate.Millisecond;
    }

    void Awake()
    {
        // NOTE(review): Random.seed is obsolete in newer Unity versions
        // (Random.InitState is the replacement) — kept as-is pending
        // confirmation of the project's Unity version.
        if (isMapOfTheDay)
        {
            // Date only: the time-of-day components are zero, so the seed is
            // stable for the whole day.
            UnityEngine.Random.seed = DateToInt(DateTime.Now.Date);
        }
        else
        {
            UnityEngine.Random.seed = DateToInt(DateTime.Now);
        }
        GenerateLevel();
    }

    // Builds a columns x rows grid of random room prefabs, opens interior
    // doors, then wires up waypoints/spawners and places the player.
    void GenerateLevel()
    {
        GM.Grid = new GameObject[GM.columns, GM.rows];
        for (int col = 0; col < GM.columns; col++)
        {
            for (int row = 0; row < GM.rows; row++)
            {
                // Instantiate a random room prefab, position it on the grid,
                // give it a readable name, and parent it under this generator.
                GM.Grid[col, row] = Instantiate(GM.Rooms[UnityEngine.Random.Range(0, GM.Rooms.Count)]);
                GM.Grid[col, row].transform.position = new Vector3(col * roomSize, 0, row * roomSize);
                GM.Grid[col, row].name = "Room: [" + col + "," + row + "]";
                GM.Grid[col, row].GetComponent<Transform>().parent = this.transform;

                // Open every door that leads to a neighbouring room; doors on
                // the outer edge of the grid stay closed.
                RoomScript roomScript = GM.Grid[col, row].GetComponent<RoomScript>();
                if (row != GM.rows - 1)
                {
                    roomScript.northDoor.SetActive(false);
                }
                if (row != 0)
                {
                    roomScript.southDoor.SetActive(false);
                }
                if (col != GM.columns - 1)
                {
                    roomScript.eastDoor.SetActive(false);
                }
                if (col != 0)
                {
                    roomScript.westDoor.SetActive(false);
                }
            }
        }

        // BUG FIX: these calls used to sit inside the outer column loop, so
        // the scene was re-scanned and the player re-spawned once per column.
        // They only make sense once, after the whole grid exists.
        GM.GetWaypoints();
        GM.GetSpawners();
        GM.SpawnPlayer();
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class PowerupSpawner : MonoBehaviour
{
    public GameManager GM;
    // Delay, in seconds, before a replacement power-up appears after pickup.
    public float spawnTimeSec;

    void Start()
    {
        // Cache the game manager and immediately stock this spawner with a
        // random power-up prefab.
        GM = GameObject.FindWithTag("GameController").GetComponent<GameManager>();
        SpawnRandomPowerup();
    }

    // Waits out the respawn delay, then spawns a new random power-up.
    // Started by pickups via StartCoroutine("SpawnPowerupTimer") — the method
    // name is load-bearing because of the string-based invocation.
    IEnumerator SpawnPowerupTimer()
    {
        yield return new WaitForSeconds(spawnTimeSec);
        SpawnRandomPowerup();
    }

    // Instantiates one random power-up prefab as a child of this spawner.
    // (Also removes the duplication between Start and the coroutine; the
    // unused private field 'index' from the original was dropped.)
    private void SpawnRandomPowerup()
    {
        Instantiate(GM.PowerUps[Random.Range(0, GM.PowerUps.Length)], gameObject.transform);
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class AIPeronsality : MonoBehaviour
{
    // Which behaviour controller this ship receives at startup.
    public AIPersonality Personality;
    public GameObject Ship;
    public Renderer shipColor;

    // Member order must not change: Unity serializes the enum as its int value.
    public enum AIPersonality
    {
        Adaptive, Agressive, Passive, BountyHunter
    }

    void Start()
    {
        Ship = this.gameObject;
        // Attach the controller component matching the configured personality.
        switch (Personality)
        {
            case AIPersonality.Adaptive:
                Ship.AddComponent<AdaptiveAIController>();
                break;
            case AIPersonality.Agressive:
                Ship.AddComponent<AgressiveAIController>();
                break;
            case AIPersonality.Passive:
                Ship.AddComponent<PassiveAIController>();
                break;
            case AIPersonality.BountyHunter:
                Ship.AddComponent<BountyShipAIController>();
                break;
        }
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class AdaptiveAIController : AIController
{
    // Balanced AI personality: hunts the player while healthy, flees and
    // repairs once below half health, then returns to patrolling.
    //
    // NOTE(review): within each case the transition checks are independent
    // if-statements, so several ChangeState calls can fire in a single frame
    // and the LAST one wins — the ordering below is load-bearing. Do not
    // convert to else-if without rechecking the intended priorities.
    void Update()
    {
        //Switch states based on current state
        switch (currentState)
        {
            case AIStates.Idle:
                //Run Idle
                Idle();
                //Switch to patrolling after idling for 1.5 seconds
                if (Time.time > stateStartTime + 1.5f)
                {
                    ChangeState(AIStates.Patrol);
                }
                break;
            case AIStates.Patrol:
                //Run Patrol
                Patrol();
                //Transitions: engage if healthy, run if already hurt
                if (data.seesPlayer == true && data.health > data.maxHealth / 2)
                {
                    ChangeState(AIStates.Chase);
                }
                if (data.seesPlayer == true && data.health < data.maxHealth / 2)
                {
                    ChangeState(AIStates.Flee);
                }
                break;
            case AIStates.Chase:
                //Do Chase
                Chase();
                //Lost sight of the player: stand down
                if (data.seesPlayer == false)
                {
                    ChangeState(AIStates.Idle);
                }
                //Within firing range: open fire (overrides the Idle transition)
                if (Vector3.Distance(data.tf.position, playerData.tf.position) <= 50f)
                {
                    ChangeState(AIStates.Shoot);
                }
                //Badly hurt: disengage (highest priority, overrides the above)
                if (data.health < data.maxHealth / 2)
                {
                    ChangeState(AIStates.Flee);
                }
                break;
            case AIStates.Shoot:
                //Do Shoot
                Shoot();
                //Player slipped out of range but is still visible: give chase
                if (Vector3.Distance(data.tf.position, playerData.tf.position) > 50f && data.seesPlayer == true)
                {
                    ChangeState(AIStates.Chase);
                }
                //Badly hurt: disengage (overrides the chase transition)
                if (data.health < data.maxHealth / 2)
                {
                    ChangeState(AIStates.Flee);
                }
                break;
            case AIStates.Flee:
                //Do Flee
                Flee(playerData.tf);
                //Safe distance reached: stop and patch up
                if (Vector3.Distance(data.tf.position, playerData.tf.position) > 200f)
                {
                    ChangeState(AIStates.Repair);
                }
                break;
            case AIStates.Repair:
                //Do Repair
                Repair();
                //Fully repaired: back to the patrol route.
                //NOTE(review): exact float equality — assumes Repair clamps
                //health to exactly maxHealth; confirm in the base class.
                if (data.health == data.maxHealth)
                {
                    ChangeState(AIStates.Patrol);
                }
                break;
        }
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class RoomScript : MonoBehaviour
{
    public GameManager GM;
    // Door objects; deactivated by the level generator for interior walls.
    public GameObject northDoor;
    public GameObject southDoor;
    public GameObject eastDoor;
    public GameObject westDoor;
    // Where this room's AI ship is instantiated.
    public Transform AISpawner;
    public Transform PowerupSpawnerTransform;
    public GameObject PowerUpSpawner;
    // Patrol waypoints handed to the AI spawned in this room.
    public Transform[] Waypoints;

    void Awake()
    {
        // Locate the game manager, then populate this room with one AI ship.
        GM = GameObject.FindWithTag("GameController").GetComponent<GameManager>();
        SpawnAI();
    }

    // Spawns a random AI personality at this room's spawn point and hands it
    // the room's patrol waypoints.
    public void SpawnAI()
    {
        int pick = Random.Range(0, GM.AIPersonalities.Length);
        GameObject spawned = Instantiate(GM.AIPersonalities[pick], AISpawner);
        spawned.GetComponent<ShipData>().waypoints = Waypoints;
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class ShipData : MonoBehaviour
{
    [Header("Components")]
    public Transform tf;
    public ShipMover mover;
    // Bullet prefab instantiated when this ship shoots.
    public GameObject bullet;
    // Muzzle object whose position bullets spawn from.
    public GameObject bulletInst;
    // Reference to the player's ShipData; used by enemies to credit
    // score/bounty on death.
    public ShipData playerData;

    [Header("Ship Variables")]
    public float moveSpeed;
    public float rotateSpeed;
    public float health;
    public float maxHealth;

    [Header("Player Variables")]
    public float score;
    public float bounty;

    [Header("Bullet Variables")]
    public float bulletSpeed;
    public float bulletDmg;
    // Seconds between shots.
    public float shootCooldown;
    public bool canShoot;

    [Header("AI Variables")]
    public Transform[] waypoints;
    public int waypointIndex;
    public bool seesPlayer;
    // Score awarded to the player when this enemy dies.
    public float pointValue;
    // Bounty added to the player when this enemy dies.
    public float bountyValue;

    void Awake()
    {
        playerData = GameObject.FindWithTag("Player").GetComponent<ShipData>();
    }

    void Start()
    {
        canShoot = true;
    }

    void Update()
    {
        // Enemy death: destroy the ship and reward the player.
        // CompareTag replaces 'this.gameObject.tag == "Enemy"' — it is the
        // Unity-recommended form and avoids allocating the tag string every
        // frame.
        if (health <= 0 && CompareTag("Enemy"))
        {
            Destroy(this.gameObject);
            playerData.score += pointValue;
            playerData.bounty += bountyValue;
        }
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class BountyHunterSpawner : MonoBehaviour
{
    public ShipData playerData;
    // Bounty-hunter ship prefab.
    public GameObject BountyShip;
    // Latch: ensures only one hunter is ever spawned.
    public bool hasSpawned;

    void Update()
    {
        // Once the player's bounty reaches the threshold, dispatch a single
        // bounty hunter.
        bool bountyHighEnough = playerData.bounty >= 4;
        if (bountyHighEnough && !hasSpawned)
        {
            SpawnBountyHunter();
            hasSpawned = true;
        }
    }

    void SpawnBountyHunter()
    {
        Instantiate(BountyShip, this.gameObject.transform);
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class Pickup : MonoBehaviour
{
    // The spawner that produced this pickup; subclasses use it to schedule a
    // replacement after collection.
    public PowerupSpawner PUSpawn;

    void Start()
    {
        // Cache the owning spawner from somewhere up the parent hierarchy.
        PUSpawn = gameObject.GetComponentInParent<PowerupSpawner>();
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
[System.Serializable]
public class ShootPowerup : PowerUp
{
    // Cooldown that was in effect before this power-up applied; restored on
    // expiry.
    public float oldCooldown;
    // Faster cooldown granted while the power-up is active.
    public float newCooldown;

    public override void OnApplyPowerup(GameObject target)
    {
        ShipData ship = target.GetComponent<ShipData>();
        // Remember the current cooldown so OnRemovePowerup can restore it.
        oldCooldown = ship.shootCooldown;
        ship.shootCooldown = newCooldown;
    }

    public override void OnRemovePowerup(GameObject target)
    {
        ShipData ship = target.GetComponent<ShipData>();
        ship.shootCooldown = oldCooldown;
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class CameraScript : MonoBehaviour
{
    // NOTE(review): 'camera' shadows the deprecated Component.camera
    // property; the field name is kept for scene-serialization compatibility.
    public Transform camera;
    public Transform player;

    void Update()
    {
        // Follow the player on the XZ plane from a fixed height of 50 units.
        Vector3 target = player.position;
        camera.position = new Vector3(target.x, 50, target.z);
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class BountyShipAIController : AIController
{
    // Relentless hunter: after a short startup delay it chases the player
    // indefinitely, firing whenever it closes within 50 units.
    void Update()
    {
        if (currentState == AIStates.Idle)
        {
            Idle();
            // Two-second grace period before the hunt begins.
            if (Time.time > stateStartTime + 2f)
            {
                ChangeState(AIStates.Chase);
            }
        }
        else if (currentState == AIStates.Chase)
        {
            Chase();
            float range = Vector3.Distance(data.tf.position, playerData.tf.position);
            if (range <= 50f)
            {
                ChangeState(AIStates.Shoot);
            }
        }
        else if (currentState == AIStates.Shoot)
        {
            Shoot();
            float range = Vector3.Distance(data.tf.position, playerData.tf.position);
            if (range > 50f)
            {
                ChangeState(AIStates.Chase);
            }
        }
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class HealthPickUp : Pickup
{
    // The healing effect granted when a ship collects this pickup.
    public HealthPowerup powerup;

    void OnTriggerEnter(Collider other)
    {
        // Only objects carrying a PowerUpController can collect power-ups.
        PowerUpController PUC = other.GetComponent<PowerUpController>();
        if (PUC != null)
        {
            // BUG FIX: the original fetched the controller and destroyed the
            // pickup without ever applying the effect — the 'powerup' field
            // was unused. Apply the heal before disappearing, mirroring how
            // PowerUpController.Apply is meant to be driven.
            PUC.Apply(powerup);
            Destroy(gameObject);
            // Ask the owning spawner to produce a replacement after its delay.
            PUSpawn.StartCoroutine("SpawnPowerupTimer");
        }
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Movement/rotation/shooting primitives for a ship. Both the player's
// InputController and the AI controllers drive ships through this class.
public class ShipMover : MonoBehaviour
{
    // Shared per-ship stats and references (speed, cooldowns, bullet prefab).
    public ShipData data;
    private CharacterController CharacterController;
    // Start is called before the first frame update
    void Start()
    {
        CharacterController = GetComponent<CharacterController>();
    }
    // Moves the ship along the given direction (Y is discarded to stay on
    // the play plane), scaled by the ship's moveSpeed.
    // NOTE(review): no Time.deltaTime here, so movement distance is per
    // Update call and therefore frame-rate dependent — confirm intended.
    public void SimpleMove(Vector3 directionToMove)
    {
        //Move the character
        CharacterController.Move(new Vector3(directionToMove.x, 0, directionToMove.z) * data.moveSpeed);
    }
    // Rotates the ship around Y. Callers should pass only a direction
    // sign/magnitude: rotateSpeed and Time.deltaTime are applied HERE.
    public void Rotate(float direction)
    {
        //Rotate the character
        data.tf.Rotate(new Vector3(0, direction * data.rotateSpeed * Time.deltaTime, 0));
    }
    // Turns the ship toward the given world-space direction vector, limited
    // by rotateSpeed degrees per second, constrained to yaw only.
    public void RotateTowards(Vector3 lookVector)
    {
        //Find Vector to target
        Vector3 vectorToTarget = lookVector;
        // Find quaternion to look down that vector
        Quaternion targetQuaternion = Quaternion.LookRotation(vectorToTarget, data.tf.up);
        // Constrain to yaw by zeroing the quaternion's x/z components.
        // NOTE(review): editing quaternion components directly leaves it
        // unnormalized; the conventional approach is to zero the look
        // vector's Y before LookRotation — confirm this behaves as wanted.
        targetQuaternion.x = 0;
        targetQuaternion.z = 0;
        // set the rotation
        data.tf.rotation = Quaternion.RotateTowards(data.tf.rotation, targetQuaternion, data.rotateSpeed * Time.deltaTime);
    }
    // Fires one bullet forward, then enforces the ship's shoot cooldown by
    // clearing canShoot until the cooldown elapses.
    public IEnumerator Shoot()
    {
        // Block further shots until the cooldown below completes.
        data.canShoot = false;
        //Instantiate the bullet
        GameObject newBullet = Instantiate(data.bullet, data.bulletInst.transform.position, data.tf.rotation);
        // Launch the bullet along the ship's facing direction.
        newBullet.GetComponent<Rigidbody>().AddForce(data.tf.transform.forward * data.bulletSpeed);
        // Bullets that hit nothing are cleaned up after 3 seconds.
        Destroy(newBullet, 3.0f);
        // Wait out the ship's configured cooldown before re-enabling fire.
        yield return new WaitForSeconds(data.shootCooldown);
        data.canShoot = true;
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
//This script will enable the AI to see the player
//This script will enable the AI to see the player
// Periodically sweeps a view cone (radius + angle) for objects on
// objectMask, rejecting any hidden behind ObstacleMask geometry, and sets
// data.seesPlayer when the player is among the visible objects.
public class AISight : MonoBehaviour
{
    [Header("Components")]
    public ShipData data;
    public Transform tf;
    [Header("Variables")]
    public float radius;
    [Range(0,360)]
    public float angle;
    public LayerMask objectMask;
    public LayerMask ObstacleMask;
    public List<Transform> visibleObjects = new List<Transform>();

    void Awake()
    {
        data = GetComponent<ShipData>();
        tf = GetComponent<Transform>();
        // Method-group call instead of the string-based overload so the
        // coroutine reference is checked at compile time.
        StartCoroutine(FindObjectsDelay(.2f));
    }

    // Re-scans for visible objects every `delay` seconds, forever. Scanning
    // on a timer instead of every frame keeps the physics queries cheap.
    IEnumerator FindObjectsDelay(float delay)
    {
        while (true)
        {
            yield return new WaitForSeconds(delay);
            FindVisibleObjects();
        }
    }

    // Rebuilds visibleObjects: everything within radius, inside the view
    // cone, and not occluded by an obstacle. Also updates data.seesPlayer.
    void FindVisibleObjects()
    {
        visibleObjects.Clear();
        // Use the cached tf consistently (previously mixed with transform).
        Collider[] objectsInViewRadius = Physics.OverlapSphere(tf.position, radius, objectMask);
        for (int i = 0; i < objectsInViewRadius.Length; i++)
        {
            Transform objects = objectsInViewRadius[i].transform;
            Vector3 dirToObjects = (objects.position - tf.position).normalized;
            // Inside the half-angle on either side of forward?
            if (Vector3.Angle(tf.forward, dirToObjects) < angle / 2)
            {
                float dstToObjects = Vector3.Distance(tf.position, objects.position);
                // Visible only if no obstacle sits between us and it.
                if (!Physics.Raycast(tf.position, dirToObjects, dstToObjects, ObstacleMask))
                {
                    visibleObjects.Add(objects);
                }
            }
        }
        // Guard against the player not existing (e.g. destroyed); the old
        // code dereferenced FindWithTag(...).transform unconditionally and
        // would throw a NullReferenceException here.
        GameObject player = GameObject.FindWithTag("Player");
        data.seesPlayer = player != null && visibleObjects.Contains(player.transform);
    }

    // Converts a yaw angle (degrees) into a unit direction vector on the
    // XZ plane; when angleIsGlobal is false the angle is relative to this
    // object's current heading. Used by the scene-view editor gizmo.
    public Vector3 DirfromAngle(float angleInDegrees, bool angleIsGlobal)
    {
        if (!angleIsGlobal)
        {
            //angle of degrees
            angleInDegrees += transform.eulerAngles.y;
            //(note unity always has 0 on top and clockwise till sin (90-X) = cos (X) )
        }
        return new Vector3(Mathf.Sin(angleInDegrees * Mathf.Deg2Rad), 0, Mathf.Cos(angleInDegrees * Mathf.Deg2Rad));
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEditor;
[CustomEditor(typeof(AISight))]
// Scene-view gizmo for AISight: draws the detection circle, the two edges
// of the view cone, and a red line to each currently-visible object.
public class AISightEditor : Editor
{
    void OnSceneGUI()
    {
        AISight fow = (AISight)target;
        Handles.color = Color.white;
        Handles.DrawWireArc(fow.transform.position, Vector3.up, Vector3.forward, 360, fow.radius);
        Vector3 viewAngleA = fow.DirfromAngle(-fow.angle / 2, false);
        Vector3 viewAngleB = fow.DirfromAngle(fow.angle / 2, false);
        // Scale the cone edges by the sight RADIUS so they reach the arc.
        // The previous code multiplied by fow.angle, which gave the lines
        // an arbitrary length unrelated to the detection range.
        Handles.DrawLine(fow.transform.position, fow.transform.position + viewAngleA * fow.radius);
        Handles.DrawLine(fow.transform.position, fow.transform.position + viewAngleB * fow.radius);
        Handles.color = Color.red;
        foreach (Transform visibleobjects in fow.visibleObjects)
        {
            Handles.DrawLine(fow.transform.position, visibleobjects.position);
        }
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Base AI controller: a two-level finite state machine. The outer states
// (AIStates) decide intent (idle/patrol/chase/flee/shoot/repair); the
// inner avoidance states (AvoidStates) steer around obstacles while
// seeking or fleeing. Subclasses (e.g. BountyShipAIController) drive the
// outer FSM from their Update.
public class AIController : MonoBehaviour
{
    [Header("Components")]
    public ShipData data;
    public ShipData playerData;
    [Header("Variables")]
    public float stateStartTime;
    public AIStates currentState;
    [Header("Object Avoidance")]
    public AvoidStates currentAvoidState;
    // How far ahead the forward "feeler" raycast probes for obstacles.
    public float feelerDistance = 25f;
    // Seconds to keep moving straight after turning clear of an obstacle.
    public float avoidMoveTime = 4;
    public float startAvoidTime;

    // States for FSM
    public enum AIStates
    {
        Idle, Patrol, Chase, Flee, Shoot, Repair
    }
    public enum AvoidStates
    {
        None, Turn, Move
    }

    void Start()
    {
        data = GetComponent<ShipData>();
        playerData = GameObject.FindWithTag("Player").GetComponent<ShipData>();
    }

    // Enters a new outer state and stamps the transition time so states
    // can measure how long they have been active.
    public void ChangeState(AIStates newState)
    {
        stateStartTime = Time.time;
        currentState = newState;
    }

    // Enters a new avoidance sub-state, stamping its start time.
    public void ChangeAvoidState(AvoidStates newState)
    {
        startAvoidTime = Time.time;
        currentAvoidState = newState;
    }

    public void Idle()
    {
        //Not much to do here
    }

    // Moves the AI toward a target, steering around obstacles.
    public void Seek(Transform seekTarget)
    {
        MoveWithAvoidance((seekTarget.position - data.tf.position).normalized);
    }

    // Moves the AI directly away from a target, steering around obstacles.
    public void Flee(Transform fleeTarget)
    {
        // Heading is the negated vector to the target. It is deliberately
        // not normalized: RotateTowards only uses its direction.
        MoveWithAvoidance(data.tf.position - fleeTarget.position);
    }

    // Shared steering core previously duplicated verbatim in Seek and Flee.
    // None: head toward desiredHeading until blocked. Turn: rotate in place
    // until the forward feeler is clear. Move: go straight for a while
    // (avoidMoveTime) before resuming normal steering.
    private void MoveWithAvoidance(Vector3 desiredHeading)
    {
        switch (currentAvoidState)
        {
            case AvoidStates.None:
                data.mover.RotateTowards(desiredHeading);
                data.mover.SimpleMove(data.tf.forward);
                if (isBlocked())
                {
                    ChangeAvoidState(AvoidStates.Turn);
                }
                break;
            case AvoidStates.Turn:
                // Rotate clockwise until the path ahead is clear.
                data.mover.Rotate(1);
                if (!isBlocked())
                {
                    ChangeAvoidState(AvoidStates.Move);
                }
                break;
            case AvoidStates.Move:
                data.mover.SimpleMove(data.tf.forward);
                if (isBlocked())
                {
                    ChangeAvoidState(AvoidStates.Turn);
                }
                // After moving clear for long enough, resume seeking.
                if (Time.time > startAvoidTime + avoidMoveTime)
                {
                    ChangeAvoidState(AvoidStates.None);
                }
                break;
        }
    }

    // Seeks the current waypoint; on arrival advances to the next one,
    // wrapping back to the first at the end of the list.
    // NOTE(review): the 0.3-unit arrival threshold is tight for a
    // CharacterController-driven ship — confirm waypoints are reachable.
    public void Patrol()
    {
        Seek(data.waypoints[data.waypointIndex]);
        if (Vector3.Distance(data.tf.position, data.waypoints[data.waypointIndex].position) <= 0.3f)
        {
            data.waypointIndex++;
        }
        if (data.waypointIndex >= data.waypoints.Length)
        {
            data.waypointIndex = 0;
        }
    }

    // True when anything sits within feelerDistance straight ahead.
    public bool isBlocked()
    {
        if (Physics.Raycast(data.tf.position, data.tf.forward, feelerDistance))
        {
            return true;
        }
        return false;
    }

    public void Chase()
    {
        Seek(playerData.tf);
    }

    // Faces the player and fires whenever the shoot cooldown allows.
    public void Shoot()
    {
        Vector3 targetVector = (playerData.tf.position - data.tf.position).normalized;
        data.mover.RotateTowards(targetVector);
        if (data.canShoot == true)
        {
            data.mover.StartCoroutine("Shoot");
        }
    }

    // Restores full health once 60 seconds have passed in this state.
    // NOTE(review): after the 60s mark this re-assigns health every frame
    // and never leaves the state on its own — confirm intended.
    public void Repair()
    {
        if (Time.time >= stateStartTime + 60f)
        {
            data.health = data.maxHealth;
        }
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
[System.Serializable]
// Power-up that grants a flat move-speed bonus while active and takes the
// same amount back when it expires.
public class SpeedPowerUp : PowerUp
{
    // Amount added to the ship's moveSpeed for the power-up's duration.
    public float boostSpeed;

    // Adds the speed bonus to the target ship.
    public override void OnApplyPowerup(GameObject target)
    {
        ShipData shipData = target.GetComponent<ShipData>();
        shipData.moveSpeed = shipData.moveSpeed + boostSpeed;
    }

    // Removes exactly the bonus that was added, restoring the prior speed.
    public override void OnRemovePowerup(GameObject target)
    {
        ShipData shipData = target.GetComponent<ShipData>();
        shipData.moveSpeed = shipData.moveSpeed - boostSpeed;
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.SceneManagement;
// While the player sits inside this planet's trigger volume, pressing E
// loads the "Planet" scene (landing on the planet).
public class Planet : MonoBehaviour
{
    void OnTriggerStay(Collider other)
    {
        // CompareTag instead of `other.tag == "Player"`: same result, but
        // it avoids the string allocation and errors on undefined tags.
        if (other.CompareTag("Player") && Input.GetKey(KeyCode.E))
        {
            SceneManager.LoadScene("Planet");
        }
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Reads player keyboard input each frame and forwards it to the ship's
// mover: WASD for movement/rotation, Space to shoot.
public class InputController : MonoBehaviour
{
    public ShipData data;

    private void Awake()
    {
        data = GameObject.FindWithTag("Player").GetComponent<ShipData>();
    }

    // Update is called once per frame
    void Update()
    {
        //Start the game without moving
        Vector3 directionToMove = Vector3.zero;
        //Press W and call the ship mover forward
        if (Input.GetKey(KeyCode.W))
        {
            directionToMove += data.tf.forward;
        }
        //Press S and call the ship mover backwards
        if (Input.GetKey(KeyCode.S))
        {
            directionToMove -= data.tf.forward;
        }
        // ShipMover.Rotate already multiplies by rotateSpeed * Time.deltaTime
        // internally (and the AI controllers pass +/-1 accordingly), so only
        // the direction sign belongs here. The previous code passed
        // rotateSpeed * Time.deltaTime as well, applying both factors twice
        // and making turn rate frame-rate dependent (speed^2 * dt^2).
        if (Input.GetKey(KeyCode.A))
        {
            data.mover.Rotate(-1f);
        }
        if (Input.GetKey(KeyCode.D))
        {
            data.mover.Rotate(1f);
        }
        if (Input.GetKey(KeyCode.Space) && data.canShoot == true)
        {
            data.mover.StartCoroutine("Shoot");
        }
        // After inputs, tell mover to move the character
        data.mover.SimpleMove(directionToMove);
    }
}
| e5a623791e42fd25666df43f3a465d5013da2e45 | [
"C#"
] | 24 | C# | wilmccla/GPE205-Game | 634b757f122da444a64191a31a7b40d991a131b7 | 1272e09a8143daa9a69e99622b6d256ab8c47599 |
refs/heads/master | <repo_name>GeraldineCh/FacebookLogin<file_sep>/README.md
# Facebook LogIn
## Lección 42 - AJAX y API calls
<file_sep>/js/components/dashboard.js
'use strict';
const Dashboard = () => {
const dashboard = $('<div class="dashboard">Dashboard Component</div>');
if(state.user) {
const name = $('<h1>'+state.user.name+'</h1>');
const email = $('<p>'+state.user.email+'</p>');
dashboard.append(name);
dashboard.append(email);
const logout = $('<button>Salir</button>');
logout.on('click', _=> {
FB.logout(response => {
state.user = null;
state.doRender();
});
});
dashboard.append(logout);
}
return dashboard;
} | 238767271785260edf0b92f7ccfda776017dbaeb | [
"Markdown",
"JavaScript"
] | 2 | Markdown | GeraldineCh/FacebookLogin | a367be79ff9e44d2b28d43adb1ae5d0f801379d6 | e0424b228e74a7f14fc4c29f390a0e12c12e454b |
refs/heads/master | <file_sep>#ifndef _CPU_H
#define _CPU_H
#include <stdbool.h>
#include <stdint.h>
#include <SDL2/SDL.h>
typedef struct {
uint16_t opcode; //an opcode is two bytes
uint8_t memory[4096]; //has 4K memory
uint8_t V[16]; //general purpose regsters, VF doubles as a a carry flag
uint16_t I; //index register
uint16_t pc; //program counter
uint8_t gfx[64][32]; //pixels of screen
uint8_t delay_timer;
uint8_t sound_timer;
uint16_t stack[16];
uint16_t sp;
uint8_t keys[16]; //hex keypad
unsigned char key;
bool draw_flag;
} chip8;
//0x000-0x1FF - Chip 8 interpreter (contains font set in emu)
//0x050-0x0A0 - Used for the built in 4x5 pixel font set (0-F)
//0x200-0xFFF - Program ROM and work RAM
void initialise_cpu(chip8 * cpu);
bool load_rom(chip8 * cpu, const char *name);
void emulate_cycle(chip8 * cpu);
void clear_screen(chip8 *cpu);
void update_timers(chip8 * cpu);
#endif<file_sep>#include "dbg.h"
#include "cpu.h"
#include "opcodes.h"
#include <string.h>
#include <stdlib.h>
#include <time.h>
#include <SDL2_mixer/SDL_mixer.h>
Mix_Music * beep_sound = NULL;
Mix_Chunk * beep_scratch = NULL;
unsigned char font_set[80] =
{
0xF0, 0x90, 0x90, 0x90, 0xF0, // 0
0x20, 0x60, 0x20, 0x20, 0x70, // 1
0xF0, 0x10, 0xF0, 0x80, 0xF0, // 2
0xF0, 0x10, 0xF0, 0x10, 0xF0, // 3
0x90, 0x90, 0xF0, 0x10, 0x10, // 4
0xF0, 0x80, 0xF0, 0x10, 0xF0, // 5
0xF0, 0x80, 0xF0, 0x90, 0xF0, // 6
0xF0, 0x10, 0x20, 0x40, 0x40, // 7
0xF0, 0x90, 0xF0, 0x90, 0xF0, // 8
0xF0, 0x90, 0xF0, 0x10, 0xF0, // 9
0xF0, 0x90, 0xF0, 0x90, 0x90, // A
0xE0, 0x90, 0xE0, 0x90, 0xE0, // B
0xF0, 0x80, 0x80, 0x80, 0xF0, // C
0xE0, 0x90, 0x90, 0x90, 0xE0, // D
0xF0, 0x80, 0xF0, 0x80, 0xF0, // E
0xF0, 0x80, 0xF0, 0x80, 0x80 // F
};
void initialise_cpu(chip8 * cpu) {
memset(cpu->memory, 0, 4096); //reset memory
memset(cpu->V, 0, 16); //reset registers
memset(cpu->stack, 0, 16); //reset stack
memset(cpu->keys, 0, 16); //reset keys
//defaults
cpu->I = 0;
cpu->pc = 0x200;
cpu->sp = 0;
cpu->delay_timer = 0;
cpu->sound_timer = 0;
cpu->opcode = 0;
// load fonts
for (int i = 0; i < 80; i++) {
cpu->memory[i] = font_set[i];
}
clear_screen(cpu);
srand(time(NULL)); //reset random seed
if(Mix_OpenAudio(441000, MIX_DEFAULT_FORMAT, 2, 2048) < 0) {
log_err("SDL_mixer error.");
}
beep_sound = Mix_LoadMUS("beep.wav");
if(beep_sound == NULL) {
log_err("Loading of sound failed.");
}
}
bool load_rom(chip8 * cpu, const char *rom_name) {
FILE * file = fopen(rom_name, "rb"); //open file in binary mode
if(!file) {
log_err("File does not exist.");
return false;
}
fseek(file, 0, SEEK_END);
unsigned long buffer_size = ftell(file);
rewind(file);
log_info("Read %lu bytes from %s", buffer_size, rom_name);
char *buffer = (char *) malloc((buffer_size + 1) * sizeof(char)); //allocate memory for buffer
fread(buffer, buffer_size, 1, file);
for(int i = 0; i < buffer_size; i++) {
cpu->memory[512 + i] = buffer[i];
}
return true;
}
void emulate_cycle(chip8 * cpu) {
update_timers(cpu);
//fetch opcode
cpu->opcode = cpu->memory[cpu->pc] << 8 | cpu->memory[cpu->pc + 1];
decode_opcode(cpu);
}
void clear_screen(chip8 * cpu) {
memset(cpu->gfx, 0, 64 * 32);
cpu->draw_flag = true;
}
void update_timers(chip8 * cpu) {
if(cpu->delay_timer > 0) {
--cpu->delay_timer;
}
if(cpu->sound_timer > 0) {
if(cpu->sound_timer == 1) {
printf("BEEP\n");
Mix_PlayMusic(beep_sound, -1);
--cpu->sound_timer;
}
}
}
void handle_input(chip8 * cpu, unsigned char key) {
cpu->key = key;
}
<file_sep>CFLAGS=-Wall -g -I/System/Library/Frameworks/GLUT.framework/Headers -I/System/Library/Frameworks/SDL2_mixer.framework/Headers
LDFLAGS=-framework OpenGL -framework GLUT -framework SDL2_mixer
build: ch8
ch8: ch8.c cpu.c
clean:
rm -f main
rm -f cpu
rm -f ch8
<file_sep># Chip-Emul8
<p>My first little emulator of a Chip-8<p>
<h2>The Idea</h2>
<p>So I first decided to code this Chip-8 emulator around a week ago as I've really wanted to get into emulation and as far as I was aware - writing one is the best way to learn about it. And I can definitely agree. I learnt a lot when going through this and searching the net for info, how to do certain things, using libraries etc and it has all been a great experience. The plan was that this would introduce me to the basics of emulation so I can progress to coding a Gameboy DMG-01 emulator... eventually.</p>
<h2>What I Did</h2>
<p>I started where most people probably would: how to code a chip 8 emulator search in Google. I found the multigesture post linked at the bottom and I got going. I modelled the CPU, added the emulators basic functions and it was all good. I then tweaked around with some of the modelling for example using the <stdint.h> header so I could model the registers as uint8_t rather than unsigned char. These little things just made it a bit easier to understand and keep track of everything. After that I went on to begin decoding the opcodes using the Cowgod's technical reference linked down below (as best as I could) and then took a look at a couple of the other emulators down below as reference on some of the instructions I was unsure about. When I finished writing them for the first time and had fixed the small syntax errors etc. I ran the program and it was fine! I couldn't believe that it actually worked (wasn't what I was expecting but then again there are only 36 instructions). I went on to looking at how I was going to draw the graphics. From what I had looked up I was either going to use SDL2 or GLUT. After playing round with both I decided to go with GLUT - primarily because it ships with OSX but also because it just seemed a bit easier to setup and initialise. To draw the graphics on screen was fairly simple as the Chip-8 has monochrome graphics and a 64 x 32 pixel display. Just iterate through my screen array and draw the pixels. On first run the picture was a mess. So I spent around an hour going back through my opcodes optimising some and correcting others and now that is all done so here we have the Chip-Emul8.</p>
<h2>Plans</h2>
<p>The only thing I have planned for now is to implement sounds, that's it.</p>
<h2>Screenshots</h2>
<h3>Mac</h3>
<img src="Screenshots/Pong-Chip8 Mac.png"/>
<h3>Linux</h3>
<img src="Screenshots/linux-screenshot.png"/>
<h2>How To Use It</h2>
<p>Currently I've only tested this on Mac and I believe you have to download GLUT on other OSs. First off, download the repo and open it up. Navigate to the directory and run</p>
<h3>Mac</h3>
<h4>FYI: If you have updated to sierra, some of the GLUT library has been fully deprecated and so will no longer compile.</h4>
```make build```
<p>After that, to use the emulator type</p>
```./ch8 ./<path to your chip8 rom>```
<p>Controls... I'm not exactly sure as to the controls for all the games but the keys will be one of: 1, 2, 3, 4, q, w, e, r, t, y, a, s, d, f, z, x, c, v. To quit the emulator press esc.</p>
<h3>Windows</h3>
<p>To be done..</p>
<h3>Linux</h3>
<p>Exact same as Mac.</p>
<h5>Feel free to use the code so long as you give credit to me.</h5>
<h3>Here are some useful links:</h3>
<h4>About the CHIP-8:</h4>
<p>https://en.wikipedia.org/wiki/CHIP-8#Virtual_machine_description</p>
<h4>Chip-8 Opcodes:</h4>
<p>http://devernay.free.fr/hacks/chip8/C8TECH10.HTM</p>
<h4>Writing an emulator:</h4>
<p>http://www.multigesture.net/articles/how-to-write-an-emulator-chip-8-interpreter/</p>
<p>http://mattmik.com/files/chip8/mastering/chip8.html</p>
<p>http://emulator101.com/</p>
<p>http://fms.komkon.org/EMUL8/HOWTO.html</p>
<h4>ROMS:</h4>
<p>http://www.zophar.net/</p>
<h4>Reading in files as byte arrays:</h4>
<p>http://www.linuxquestions.org/questions/programming-9/c-howto-read-binary-file-into-buffer-172985/</p>
<h4>Some other good emulators:</h4>
<p>https://github.com/eshyong/Chip-8-Emulator</p>
<p>https://github.com/bryangarza/chip8</p>
<p>https://github.com/prophittcorey/iC8</p>
<file_sep>#include <stdio.h>
#include <stdlib.h>
#include <GLUT/glut.h>
#include "cpu.h"
#include "dbg.h"
chip8 cpu;
void initGL(); //initialise GL
void draw_square(float x_coord, float y_coord); //draw pixel
void render(); //update render
void idle(); //idling function
void handle_key_press(unsigned char key, int x, int y);
void handle_key_release(unsigned char key, int x, int y);
int main(int argc, char const *argv[])
{
const char * game = argv[1];
if(!game) {
log_info("Usage: ./ch8 ./<name of game>");
exit(1);
}
initialise_cpu(&cpu);
load_rom(&cpu, game);
glutInit(&argc, argv); //initialise glut
//initialise display and window
glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE);
glutInitWindowPosition(100, 100);
glutInitWindowSize(640, 320);
glutCreateWindow("Chip-Emul8");
initGL();
//handle key presses and releases
glutKeyboardFunc(handle_key_press);
glutKeyboardUpFunc(handle_key_release);
//draw function
glutDisplayFunc(render);
//idle function (to redraw)
glutIdleFunc(idle);
glutMainLoop();
return 0;
}
void handle_key_press(unsigned char key, int x, int y) {
unsigned char input;
switch(key) {
case 27:
log_info("Exiting emulator...");
exit(0);
case '1':
// only certain key values are passed to the chip
input = 0x0001;
break;
case '2':
input = 0x0002;
break;
case '3':
input = 0x0003;
break;
case '4':
input = 0x000C;
break;
case 'q':
input = 0x0004;
break;
case 'w':
input = 0x0005;
break;
case 'e':
input = 0x0006;
break;
case 'r':
input = 0x000D;
break;
case 'a':
input = 0x0007;
break;
case 's':
input = 0x0008;
break;
case 'd':
input = 0x0009;
break;
case 'f':
input = 0x000E;
break;
case 'z':
input = 0x000A;
break;
case 'x':
input = 0x0000;
break;
case 'c':
input = 0x000B;
break;
case 'v':
input = 0x000F;
break;
default:
break;
}
handle_input(&cpu, input);
}
void handle_key_release(unsigned char key, int x, int y) {
handle_input(&cpu, 0x00FF);
}
void draw_square(float x_coord, float y_coord) {
glBegin(GL_QUADS);
glColor3f(1.0f, 1.0f, 1.0f);
glVertex2f(x_coord, y_coord);
glVertex2f(x_coord + 10, y_coord);
glVertex2f(x_coord + 10, y_coord + 10);
glVertex2f(x_coord, y_coord + 10);
glEnd();
}
void render() {
glClear(GL_COLOR_BUFFER_BIT); //clears screen
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
emulate_cycle(&cpu);
int i, j;
for(i = 0; i < 640; i++) {
for(j = 0; j < 320; j++) {
if(cpu.gfx[i][j] == 1) {
draw_square((float)(i * 10), (float) (j * 10));
}
}
}
glutSwapBuffers(); //smooth animation
}
void idle() {
glutPostRedisplay(); //calls to redraw screen
}
void initGL() {
// sets up GLUT window for 2D drawing
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluOrtho2D(0.0, 640, 320, 0.0);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
// clears screen color
glClearColor(0.f, 0.f, 0.f, 1.f);
}
<file_sep>#ifndef _opcodes_h
#define _opcodes_h
#include <stdint.h>
#include <stdlib.h>
//instructions site: http://devernay.free.fr/hacks/chip8/C8TECH10.HTM#00E0
#define X(x) ((x & 0x0F00) >> 8)
#define Y(x) ((x & 0x00F0) >> 4)
#define N(x) (x & 0x000F)
#define KK(x) (x & 0x00FF)
#define NNN(x) (x & 0x0FFF)
void decode_opcode(chip8 * cpu);
void decode_opcode(chip8 * cpu) {
switch(cpu->opcode & 0xF000) {
case 0x0000: //0nnn
switch(cpu->opcode) {
case 0x00E0: //00E0: CLS - clear the display
clear_screen(cpu);
cpu->pc += 2;
break;
case 0x00EE: //00EE: RET - return from a subroutine, sets PC = stack[sp] then sp--
cpu->sp -= 1;
cpu->pc = cpu->stack[cpu->sp];
cpu->pc += 2;
break;
default:
log_err("Unknown opcode: %X", cpu->opcode);
cpu->pc += 2;
break;
}
break;
case 0x1000: //1nnn: JP addr - jump to location nnn
cpu->pc = NNN(cpu->opcode);
break;
case 0x2000: //2nnn: CALL addr - call subroutine at nnn
//add error checking to prevent stck overflow
cpu->stack[cpu->sp] = cpu->pc;
cpu->sp += 1;
cpu->pc = NNN(cpu->opcode);
break;
case 0x3000: //3xkk: SE Vx, byte - skip next instruction if Vx == kk
cpu->pc += cpu->V[X(cpu->opcode)] == KK(cpu->opcode) ? 4 : 2; //Ternary truth test, read X ? Y : Z as "if X then Y else Z".
break;
case 0x4000: //4XKK: SNE Vx, byte - skip next instruction if Vx != kk
cpu->pc += cpu->V[X(cpu->opcode)] != KK(cpu->opcode) ? 4 : 2;
break;
case 0x5000: //5XY0: SE Vx, Vy - skips next instrution if Vx == Vy
cpu->pc += cpu->V[X(cpu->opcode)] == cpu->V[Y(cpu->opcode)] ? 4 : 2;
break;
case 0x6000: //6XKK: LD Vx, byte - set V == KK
cpu->V[X(cpu->opcode)] = KK(cpu->opcode);
cpu->pc += 2;
break;
case 0x7000: //7XKK: ADD Vx, byte - set Vx = Vx + kk
cpu->V[X(cpu->opcode)] += KK(cpu->opcode);
cpu->pc += 2;
break;
case 0x8000:
switch(cpu->opcode & 0x000F) {
case 0x0000: //8XY0: LD Vx, Vy - set Vx = Vy
cpu->V[X(cpu->opcode)] = cpu->V[Y(cpu->opcode)];
cpu->pc += 2;
break;
case 0x0001: //8XY1: OR Vx, Vy - set Vx = Vx Or Vy
cpu->V[X(cpu->opcode)] |= cpu->V[Y(cpu->opcode)];
cpu->pc += 2;
break;
case 0x0002: //8XY2: AND Vx, Vy - set Vx = Vx AND Vy
cpu->V[X(cpu->opcode)] &= cpu->V[Y(cpu->opcode)];
cpu->pc += 2;
break;
case 0x0003: //8XY3: XOR Vx, Vy - set Vx = Vx XOR Vy
cpu->V[X(cpu->opcode)] ^= cpu->V[Y(cpu->opcode)];
cpu->pc += 2;
break;
case 0x0004: //8XY4: ADD Vx, Vy - set Vx = Vx + Vy, set VF = carry
{
int vx = cpu->V[X(cpu->opcode)];
int vy = cpu->V[Y(cpu->opcode)];
int result = vx + vy;
cpu->V[0xF] = result > 255 ? 1 : 0; //VF is doubled as a carry flag register
//only the lower 8 bits are stored ad kept in Vx
cpu->V[X(cpu->opcode)] = result &0xFF;
cpu->pc += 2;
}
break;
case 0x0005: //8XY5: SUB Vx, Vy - set Vx = Vx - Vy, set VF = NOT borrow
{
int vx = cpu->V[X(cpu->opcode)];
int vy = cpu->V[Y(cpu->opcode)];
cpu->V[0xF] = vx < vy ? 0 : 1;
cpu->V[X(cpu->opcode)] = vx -vy;
cpu->pc += 2;
}
break;
case 0x0006: //8XY6: SHR Vx {, Vy} - Vx = Vx SHR 1.
cpu->V[0xF] = cpu->V[X(cpu->opcode)] & 0x01;
cpu->V[X(cpu->opcode)] >>= 1;
cpu->pc += 2;
break;
case 0x0007: //8XY7: SUBN Vx, Vy - set Vx = Vy - Vx, set VF NOT borrow
{
int vx = cpu->V[X(cpu->opcode)];
int vy = cpu->V[Y(cpu->opcode)];
cpu->V[0xF] = vx > vy ? 0 : 1;
cpu->V[X(cpu->opcode)] = vy - vx;
cpu->pc += 2;
}
break;
case 0x000E: //8XYE: SHL Vx {, Vy} - set Vx = Vx SHL 1
cpu->V[0xF] = (cpu->V[X(cpu->opcode)] & 0x80) >> 7;
cpu->V[X(cpu->opcode)] <<= 1;
cpu->pc += 2;
break;
default:
log_err("Unknown opcode: %X", cpu->opcode);
cpu->pc += 2;
}
break;
case 0x9000: //9XY0: SNE Vx,Vy - skip next instruction if Vx != VY
cpu->pc += cpu->V[X(cpu->opcode)] != cpu->V[Y(cpu->opcode)] ? 4 : 2;
break;
case 0xA000: //ANNN: LD I, addr set = NNN
cpu->I = NNN(cpu->opcode);
cpu->pc += 2;
break;
case 0xB000: //BNNN: JP v0, addr - jump to location nnn + v0
cpu->pc = NNN(cpu->opcode) + cpu->V[0x0];
break;
case 0xC000: //CXKK - RNDVx, byte - set Vx = random byte AND KK
cpu->V[X(cpu->opcode)] = (rand() % 256) & KK(cpu->opcode);
cpu->pc += 2;
break;
case 0xD000: //DXYN: DRW Vx, Vy, nibble - display n-byte sprite starting at memory location I at (Vx, Vy), set VF = collision.
{
int height = cpu->opcode &0x000F;
int x_coord = cpu->V[X(cpu->opcode)];
int y_coord = cpu->V[Y(cpu->opcode)];
// because the sprite is represented by hexadecimal numbers
// bitwise operators are necessary to obtain each pixel
int ands[8] = { 128, 64, 32, 16, 8, 4, 2, 1 };
cpu->V[0xF] = 0;
for(int i = 0; i < height; i++) {
for(int j = 0; j < 8; j++) {
if(x_coord + j == 64) {
x_coord = -j;
}
if(y_coord + i == 32) {
y_coord = -i;
}
if(cpu->gfx[x_coord + j][y_coord + i] == 1 && ((cpu->memory[cpu->I + i] & ands[j]) >> (8 - j - 1)) == 1) {
cpu->V[0xF] = 1;
}
cpu->gfx[x_coord + j][y_coord + i] = cpu->gfx[x_coord + j][y_coord + i] ^ ((cpu->memory[cpu->I + i] & ands[j]) >> (8 - j - 1));
}
x_coord = cpu->V[X(cpu->opcode)];
y_coord = cpu->V[Y(cpu->opcode)];
}
cpu->draw_flag = true;
cpu->pc += 2;
}
break;
case 0xE000:
switch(cpu->opcode & 0x00FF) {
case 0x009E: //EX9E: SKP Vx - skip next instruction if key with the value of Vx is pressed.
cpu->pc += cpu->key == cpu->V[X(cpu->opcode)] ? 4 : 2;
break;
case 0x00A1: //EXA1: SKNP Vx - Skip next instruction if key with the value of Vx is not pressed.
cpu->pc += cpu->key != cpu->V[X(cpu->opcode)] ? 4 : 2;
break;
default:
log_err("Unknown opcode %X", cpu->opcode);
exit(1);
}
break;
case 0xF000:
switch(cpu->opcode & 0x00FF) {
case 0x0007: //FX07: LD Vx, DT - Set Vx = delay timer value.
cpu->V[X(cpu->opcode)] = cpu->delay_timer;
cpu->pc += 2;
break;
case 0x000A: //Ld Vx, K - Wait for a key press, store the value of the key in Vx.
{
if(cpu->key == 0) {
return;
} else {
cpu->V[X(cpu->opcode)] = cpu->key;
}
cpu->pc += 2;
}
break;
case 0x0015: //FX15: LD T, Vx - set delay timer = Vx
cpu->delay_timer = cpu->V[X(cpu->opcode)];
cpu->pc += 2;
break;
case 0x0018: //FX18: LD ST, Vx - set sound timer = Vx
cpu->sound_timer = cpu->V[X(cpu->opcode)];
cpu->pc += 2;
break;
case 0x001E: //FX1E: ADD I, Vx - set I = I + Vx
cpu->I += cpu->V[X(cpu->opcode)];
cpu->pc += 2;
break;
case 0x0029: //FX29: LD , Vx - set I = location of sprite for digit Vx.
cpu->I = cpu->V[X(cpu->opcode)] * 5;
cpu->pc += 2;
break;
case 0x0033: //FX33: LD B, Vx - Store BCD representation of Vx in memory locations I, I+1, and I+2.
cpu->memory[cpu->I] = cpu->V[X(cpu->opcode)] / 100; //store the hundreds value of vx in memory location I
cpu->memory[cpu->I + 1] = (cpu->V[X(cpu->opcode)] % 10) % 10; //store the tens value of vx in memory location I + 1
cpu->memory[cpu->I + 2] = cpu->V[X(cpu->opcode)] % 10; //store the units value of vx in memory location I + 2
cpu->pc += 2;
break;
case 0x0055: //FX55: LD [I], Vx - Store registers V0 through Vx in memory starting at location I.
for (int i = 0; i < X(cpu->opcode) + 1; i++) {
cpu->memory[cpu->I + i] = cpu->V[i];
}
cpu->I = cpu->I + X(cpu->opcode) + 1;
cpu->pc += 2;
break;
case 0x0065: //FX6: Ld Vx, [I] - Read registers V0 through Vx from memory starting at location I.
for (int i = 0; i < X(cpu->opcode) + 1; i++) {
cpu->V[i] = cpu->memory[cpu->I + i];
}
cpu->I = cpu->I + X(cpu->opcode) + 1;
cpu->pc += 2;
break;
default:
log_err("Unknown opcode %X", cpu->opcode);
exit(1);
}
break;
default:
log_err("Unknown opcode %X", cpu->opcode);
exit(1);
}
}
#endif | 41889c77c99dcb907d9f5d7622fcb06f5897c8cc | [
"Markdown",
"C",
"Makefile"
] | 6 | C | shnupta/Chip-Emul8 | 71a8f562ebb3dd1040924d53447720f3ee69c5c6 | 06bd9091769b5a4fb247d3a7cec3d284f1061b64 |
refs/heads/main | <file_sep>n = input("Enter a number: ")
factorial = 1
if int(n) >= 1:
for i in range(1, int(n) + 1):
factorial = factorial * i
print("%s! = %s" % (i,factorial ))
<file_sep># assignment
assignment of python
| 7f84979f5417be60ac93e4954c1c36d1a76fab86 | [
"Markdown",
"Python"
] | 2 | Python | Ehsannoor/main.py | 30e5bdcd601b914960a6461c4424ebd957d883f6 | a23cb786290e95ed8c9dd7978216a20bda28a660 |
refs/heads/master | <file_sep>jdbc.driver =com.mysql.jdbc.Driver
jdbc.url = jdbc:mysql://localhost:3306/20190105
jdbc.username = root
jdbc.password = 123<file_sep>package com.gdd.controller;
/**
* @Author: 郭冬
* @Date: 2019/4/12 0012 15:10
* @Description:
*/
public class UserController {
}
<file_sep>package com.gdd.po;
/**
* @Author: 郭冬
* @Date: 2019/1/5 0005 14:37
* @Description: user用户表的pojo对象
*/
public class User {
private int uid;
private String userName;
private String email;
private String passWord;
private String imguser;
private String phone;
private String dis;
public int getUid() {
return uid;
}
public void setUid(int uid) {
this.uid = uid;
}
public String getUserName() {
return userName;
}
public void setUserName(String userName) {
this.userName = userName;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public String getPassWord() {
return passWord;
}
public void setPassWord(String passWord) {
this.passWord = passWord;
}
public String getImguser() {
return imguser;
}
public void setImguser(String imguser) {
this.imguser = imguser;
}
public String getPhone() {
return phone;
}
public void setPhone(String phone) {
this.phone = phone;
}
public String getDis() {
return dis;
}
public void setDis(String dis) {
this.dis = dis;
}
}
<file_sep>package com.gdd.service.Impl;
/**
* @Author: 郭冬
* @Date: 2019/4/12 0012 15:13
* @Description:
*/
public class UserServiceImpl {
}
<file_sep>package com.gdd.mapper;
/**
* @Author: 郭冬
* @Date: 2019/4/12 0012 15:17
* @Description:
*/
public interface UserMapper {
}
| 9c208d41a9f936ac9739a1126390781a696240ad | [
"Java",
"INI"
] | 5 | INI | gdd951023/Graduation-Project | 5b294ff3e16f97f11987e49c84d43559d8e68ec4 | 59c14e90d17634848670b9a4a783b89511247f82 |
refs/heads/master | <repo_name>seung-lab/AnnotationFrameworkClient<file_sep>/docs/guide/state.rst
JSON Neuroglancer State Service
===============================
We store the JSON description of a Neuroglancer state in a simple
database at the JSON Service. This is a convenient way to build states
to distribute to people, or pull states to parse work by individuals.
The JSON Client is at ``client.state``
.. code:: python
client.state
Retrieving a state
^^^^^^^^^^^^^^^^^^
JSON states are found simply by their ID, which you get when uploading a
state. You can download a state with ``get_state_json``.
.. code:: python
example_id = 4845531975188480
example_state = client.state.get_state_json(test_id)
example_state['layers'][0]
Uploading a state
^^^^^^^^^^^^^^^^^
You can also upload states with ``upload_state_json``. If you do this,
the state id is returned by the function. Note that there is no easy way
to query what you uploaded later, so be VERY CAREFUL with this state id
if you wish to see it again.
*Note: If you are working with a Neuroglancer Viewer object or similar,
in order to upload, use viewer.state.to_json() to generate this
representation.*
.. code:: python
example_state['layers'][0]['name'] = 'example_name'
new_id = client.state.upload_state_json(example_state)
.. code:: python
test_state = client.state.get_state_json(new_id)
test_state['layers'][0]['name']
Generating a Neuroglancer URL
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Once you have a state ID, you want to turn it into a well-formatted link.
So you don't have to remember all the endpoints, we can do this from the state client.
.. code:: python
ngl_base = 'neuromancer-seung-import.appspot.com'
client.state.build_neuroglancer_url(new_id, ngl_base)
Note that the neuroglancer base can be found in the info service under ``client.info.viewer_site()``.
<file_sep>/docs/index.rst
Welcome to CAVEclient's documentation!
=====================================================
.. toctree::
:maxdepth: 2
:caption: Contents:
guide/intro
guide/framework
guide/authentication
guide/annotation
guide/chunkedgraph
guide/info
guide/schemas
guide/state
guide/materialization
API
---
.. toctree::
:maxdepth: 2
:caption: API:
api/caveclient
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
<file_sep>/caveclient/emannotationschemas.py
from .base import ClientBase, _api_endpoints, handle_response
from .endpoints import schema_common, schema_api_versions, schema_endpoints_common
from .auth import AuthClient
import requests
server_key = "emas_server_address"
def SchemaClient(server_address=None, auth_client=None, api_version="latest"):
    """Build a client for the EM Annotation Schemas service.

    Resolves the endpoint API version (asking the server when
    ``api_version='latest'``) and returns the matching client class,
    configured with the auth header from ``auth_client``.
    """
    if auth_client is None:
        auth_client = AuthClient()
    header = auth_client.request_header
    resolved_endpoints, resolved_version = _api_endpoints(
        api_version,
        server_key,
        server_address,
        schema_endpoints_common,
        schema_api_versions,
        header,
    )
    client_class = client_mapping[resolved_version]
    return client_class(
        server_address=server_address,
        auth_header=header,
        api_version=resolved_version,
        endpoints=resolved_endpoints,
        server_name=server_key,
    )
class SchemaClientLegacy(ClientBase):
    """Schema-service client covering API versions 1 and 2."""

    def __init__(
        self, server_address, auth_header, api_version, endpoints, server_name
    ):
        super(SchemaClientLegacy, self).__init__(
            server_address, auth_header, api_version, endpoints, server_name
        )

    def get_schemas(self):
        """Get the available schema types

        Returns
        -------
        list
            List of schema types available on the Schema service.
        """
        mapping = self.default_url_mapping
        request_url = self._endpoints["schema"].format_map(mapping)
        return handle_response(self.session.get(request_url))

    def schema_definition(self, schema_type):
        """Get the definition of a specified schema_type

        Parameters
        ----------
        schema_type : str
            Name of a schema_type

        Returns
        -------
        json
            Schema definition
        """
        mapping = self.default_url_mapping
        mapping["schema_type"] = schema_type
        request_url = self._endpoints["schema_definition"].format_map(mapping)
        return handle_response(self.session.get(request_url))
# Maps schema-service API versions to their client implementations;
# "latest" is resolved to a concrete version by _api_endpoints before lookup.
client_mapping = {
    1: SchemaClientLegacy,
    2: SchemaClientLegacy,
    "latest": SchemaClientLegacy,
}
<file_sep>/docs/guide/framework.rst
CAVEclient: One client for all services
=============================================
The CAVE Framework consists of a number of different
services, each with a specific set of tasks that it can perform through
REST endpoints.
The CAVEclient is designed to ease programmatic interaction
with all of the various endpoints.
In addition, most programmatic access requires the use of authentication tokens.
In order to collect a given server, datastack name, and user token together into a coherent package that can be used
on multiple endpoints, the CAVEclient builds
appropriately configured clients for each of the specific services.
Each of the individual services has their own specific documentation as well.
Global and Local Services
~~~~~~~~~~~~~~~~~~~~~~~~~
There are two categories of data in CAVE: Global and local.
Local services are associated with a single so-called "datastack", which refers to a precise collection of imagery and segmentation data that function together.
For example, EM imagery and a specific pychunkedgraph segmentation would be one datastack, while the same EM imagery but an initial static segmentation would be another.
Datastacks are referred to by a short name, for instance ``pinky100_public_flat_v185``.
Global services are those that are potentially shared across multiple different specific datastacks.
These include the info service, which can describe the properties of all available datastacks,
the authentication service, and the state service that hosts neuroglancer states.
Global services are associated with a particular URL (by default ``https://globalv1.daf-apis.com``),
but not a single datastack.
Initializing a CAVEclient
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Assuming that the services are on ``https://globalv1.daf-apis.com``
and authentication tokens are either not being used or set up with
default values (see :doc:`authentication`), a simple CAVEclient that can
only access global services can be initialized:
.. code:: python
from caveclient import CAVEclient
client = CAVEclient()
Just to confirm that this works, let’s see if we can get the EM image
source from the InfoService.
If you get a list of names of datastacks, all is good. If you have not yet set up an
authentication token or you get an authentication error, look at :ref:`new-token`
for information about how to set up your auth token.
.. code:: python
client.info.get_datastacks()
If you have a specific datastack you want to use, you can initialize your CAVEclient with it.
This gives you access to the full range of client functions.
.. code:: python
client = CAVEclient(datastack_name='my_datastack')
Accessing specific clients
~~~~~~~~~~~~~~~~~~~~~~~~~~
Each client can be accessed as a property of the main client. See the documentation at left for the capabilities of each. Assuming your client is named ``client``, the subclients for each service are:
* Authentication Service : ``client.auth``
* AnnotationEngine : ``client.annotation``
* PyChunkedGraph : ``client.chunkedgraph``
* InfoService : ``client.info``
* EM Annotation Schemas : ``client.schemas``
* JSON Neuroglancer State Service : ``client.state``
<file_sep>/caveclient/base.py
import urllib
import requests
import json
import logging
import webbrowser
class AuthException(Exception):
    """Raised when a request is redirected to an authentication flow,
    i.e. no valid API token was configured for the target server."""

    pass
def _raise_for_status(r):
http_error_msg = ""
if isinstance(r.reason, bytes):
# We attempt to decode utf-8 first because some servers
# choose to localize their reason strings. If the string
# isn't utf-8, we fall back to iso-8859-1 for all other
# encodings. (See PR #3538)
try:
reason = r.reason.decode("utf-8")
except UnicodeDecodeError:
reason = r.reason.decode("iso-8859-1")
else:
reason = r.reason
if 400 <= r.status_code < 500:
http_error_msg = "%s Client Error: %s for url: %s content: %s" % (
r.status_code,
reason,
r.url,
r.content,
)
json_data = None
if r.headers.get("content-type") == "application/json":
json_data = r.json()
if r.status_code == 403:
if json_data:
if "error" in json_data.keys():
if json_data["error"] == "missing_tos":
webbrowser.open(json_data["data"]["tos_form_url"])
elif 500 <= r.status_code < 600:
http_error_msg = "%s Server Error: %s for url: %s content:%s" % (
r.status_code,
reason,
r.url,
r.content,
)
if http_error_msg:
raise requests.HTTPError(http_error_msg, response=r)
warning = r.headers.get("Warning")
if warning:
logging.warning(warning)
def handle_response(response, as_json=True):
    """Validate an endpoint response and return its body.

    Raises on HTTP errors or authentication redirects; returns the parsed
    JSON by default, or the raw response object when ``as_json`` is False.
    """
    _raise_for_status(response)
    _check_authorization_redirect(response)
    return response.json() if as_json else response
def _check_authorization_redirect(response):
if len(response.history) == 0:
pass
else:
first_url = response.history[0].url
urlp = urllib.parse.urlparse(first_url)
raise AuthException(
f"""You have not setup a token to access
{first_url}
with the current auth configuration.\n
Read the documentation at
https://caveclient.readthedocs.io/en/latest/guide/authentication.html
or follow instructions under
client.auth.get_new_token() for how to set a valid API token.
after initializing a global client with
client=CAVEclient(server_address="{urlp.scheme +"://"+ urlp.netloc}")"""
)
def _api_versions(server_name, server_address, endpoints_common, auth_header):
"""Asks a server what API versions are available, if possible"""
url_mapping = {server_name: server_address}
url_base = endpoints_common.get("get_api_versions", None)
if url_base is not None:
url = url_base.format_map(url_mapping)
response = requests.get(url, headers=auth_header)
_raise_for_status(response)
return response.json()
else:
return None
def _api_endpoints(
api_version,
server_name,
server_address,
endpoints_common,
endpoint_versions,
auth_header,
):
"Gets the latest client API version"
if api_version == "latest":
try:
avail_vs_server = _api_versions(
server_name, server_address, endpoints_common, auth_header
)
avail_vs_server = set(avail_vs_server)
except:
avail_vs_server = None
avail_vs_client = set(endpoint_versions.keys())
if avail_vs_server is None:
api_version = max(avail_vs_client)
else:
api_version = max(avail_vs_client.intersection(avail_vs_server))
endpoints = endpoints_common.copy()
ep_to_add = endpoint_versions.get(api_version, None)
if ep_to_add is None:
raise ValueError("No corresponding API version")
endpoints.update(ep_to_add)
return endpoints, api_version
class ClientBase(object):
    """Shared plumbing for service clients: HTTP session, auth, endpoints."""

    def __init__(
        self,
        server_address,
        auth_header,
        api_version,
        endpoints,
        server_name,
        verify=True,
    ):
        self._server_address = server_address
        self._default_url_mapping = {server_name: self._server_address}
        self._api_version = api_version
        self._endpoints = endpoints
        self.verify = verify
        self.session = requests.Session()
        self.session.verify = verify
        # Also present the bearer token as a "middle_auth_token" cookie,
        # mirroring the Authorization header's token value.
        bearer = auth_header.get("Authorization", None)
        if bearer is not None:
            token_value = bearer.split(" ")[1]
            self.session.cookies.set_cookie(
                requests.cookies.create_cookie(
                    name="middle_auth_token", value=token_value
                )
            )
        self.session.headers.update(auth_header)

    @property
    def default_url_mapping(self):
        """Mapping of the server-name key to the configured server address."""
        return self._default_url_mapping

    @property
    def server_address(self):
        """Base URL of the service this client talks to."""
        return self._server_address

    @property
    def api_version(self):
        """Endpoint API version this client was resolved against."""
        return self._api_version

    @staticmethod
    def raise_for_status(r):
        """Raises :class:`HTTPError`, if one occurred."""
        _raise_for_status(r)
class ClientBaseWithDataset(ClientBase):
    """ClientBase that additionally carries a dataset name."""

    def __init__(
        self,
        server_address,
        auth_header,
        api_version,
        endpoints,
        server_name,
        dataset_name,
        verify=True,
    ):
        super().__init__(
            server_address,
            auth_header,
            api_version,
            endpoints,
            server_name,
            verify=verify,
        )
        self._dataset_name = dataset_name

    @property
    def dataset_name(self):
        """Name of the dataset this client is bound to."""
        return self._dataset_name
class ClientBaseWithDatastack(ClientBase):
    """ClientBase that additionally carries a datastack name."""

    def __init__(
        self,
        server_address,
        auth_header,
        api_version,
        endpoints,
        server_name,
        datastack_name,
        verify=True,
    ):
        super().__init__(
            server_address,
            auth_header,
            api_version,
            endpoints,
            server_name,
            verify=verify,
        )
        self._datastack_name = datastack_name

    @property
    def datastack_name(self):
        """Name of the datastack this client is bound to."""
        return self._datastack_name
<file_sep>/caveclient/chunkedgraph.py
import numpy as np
import pandas as pd
import requests
import datetime
import time
import json
import pytz
from . import endpoints
from . import infoservice
from .endpoints import (
chunkedgraph_api_versions,
chunkedgraph_endpoints_common,
default_global_server_address,
)
from .base import _api_endpoints, _api_versions, ClientBase, handle_response
from .auth import AuthClient
from typing import Iterable
from urllib.parse import urlencode
import networkx as nx
SERVER_KEY = "cg_server_address"
class CGEncoder(json.JSONEncoder):
    """JSON encoder for chunkedgraph payloads.

    Serializes numpy arrays as nested lists, numpy scalars as plain Python
    numbers, and date/datetime objects as ISO-8601 strings. Generalized from
    the original np.uint64-only handling: any numpy integer or float scalar
    (e.g. np.int64 ids) now serializes instead of raising TypeError.
    """

    def default(self, obj):
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        # np.integer covers every integer width/signedness, including the
        # previously supported np.uint64.
        if isinstance(obj, np.integer):
            return int(obj)
        if isinstance(obj, np.floating):
            return float(obj)
        if isinstance(obj, (datetime.datetime, datetime.date)):
            return obj.isoformat()
        return json.JSONEncoder.default(self, obj)
def package_bounds(bounds):
    """Serialize a 3x2 (x,y,z) x (min,max) bounds array for a query string.

    Produces e.g. ``"1-2_3-4_5-6"``; raises ValueError for any other shape.
    """
    if bounds.shape != (3, 2):
        raise ValueError(
            "Bounds must be a 3x2 matrix (x,y,z) x (min,max) in chunkedgraph resolution voxel units"
        )
    return "_".join("-".join(str(value) for value in axis) for axis in bounds)
def package_timestamp(timestamp):
    """Convert a datetime into a query-parameter dict, or None for None."""
    if timestamp is None:
        return None
    return {"timestamp": time.mktime(timestamp.timetuple())}
def ChunkedGraphClient(
    server_address=None,
    table_name=None,
    auth_client=None,
    api_version="latest",
    timestamp=None,
    verify=True,
):
    """Build a ChunkedGraph client for the resolved endpoint API version.

    Falls back to the default global server address and a default
    AuthClient when those arguments are omitted.
    """
    if server_address is None:
        server_address = default_global_server_address
    if auth_client is None:
        auth_client = AuthClient()
    header = auth_client.request_header
    resolved_endpoints, resolved_version = _api_endpoints(
        api_version,
        SERVER_KEY,
        server_address,
        chunkedgraph_endpoints_common,
        chunkedgraph_api_versions,
        header,
    )
    client_class = client_mapping[resolved_version]
    return client_class(
        server_address,
        header,
        resolved_version,
        resolved_endpoints,
        SERVER_KEY,
        timestamp=timestamp,
        table_name=table_name,
        verify=verify,
    )
class ChunkedGraphClientV1(ClientBase):
    """ChunkedGraph Client for the v1 API"""

    def __init__(
        self,
        server_address,
        auth_header,
        api_version,
        endpoints,
        server_key=SERVER_KEY,
        timestamp=None,
        table_name=None,
        verify=True,
    ):
        super(ChunkedGraphClientV1, self).__init__(
            server_address,
            auth_header,
            api_version,
            endpoints,
            server_key,
            verify=verify,
        )
        self._default_url_mapping["table_id"] = table_name
        self._default_timestamp = timestamp
        self._table_name = table_name
        self._segmentation_info = None

    @property
    def default_url_mapping(self):
        # Return a copy so per-request keys (root_id etc.) added by methods
        # do not mutate the shared default mapping.
        return self._default_url_mapping.copy()

    @property
    def table_name(self):
        return self._table_name

    def _process_timestamp(self, timestamp):
        """Process timestamp with default logic: explicit argument wins,
        then the client-level default, then the current UTC time."""
        if timestamp is None:
            if self._default_timestamp is not None:
                return self._default_timestamp
            else:
                return datetime.datetime.utcnow()
        else:
            return timestamp

    def get_roots(self, supervoxel_ids, timestamp=None, stop_layer=None):
        """Get the root id for a specified supervoxel

        Parameters
        ----------
        supervoxel_ids : np.array(np.uint64)
            Supervoxel ids values
        timestamp : datetime.datetime, optional
            UTC datetime to specify the state of the chunkedgraph at which to query, by default None. If None, uses the current time.
        stop_layer : int or None, optional
            If True, looks up ids only up to a given stop layer. Default is None.

        Returns
        -------
        np.array(np.uint64)
            Root IDs containing each supervoxel.
        """
        endpoint_mapping = self.default_url_mapping
        url = self._endpoints["get_roots"].format_map(endpoint_mapping)
        query_d = package_timestamp(self._process_timestamp(timestamp))
        if stop_layer is not None:
            query_d["stop_layer"] = stop_layer
        data = np.array(supervoxel_ids, dtype=np.uint64).tobytes()
        response = self.session.post(url, data=data, params=query_d)
        handle_response(response, as_json=False)
        return np.frombuffer(response.content, dtype=np.uint64)

    def get_root_id(self, supervoxel_id, timestamp=None, level2=False):
        """Get the root id for a specified supervoxel

        Parameters
        ----------
        supervoxel_id : np.uint64
            Supervoxel id value
        timestamp : datetime.datetime, optional
            UTC datetime to specify the state of the chunkedgraph at which to query, by default None. If None, uses the current time.
        level2 : bool, optional
            If True, stops the lookup at layer 2 rather than the top root.

        Returns
        -------
        np.int64
            Root ID containing the supervoxel.
        """
        endpoint_mapping = self.default_url_mapping
        endpoint_mapping["supervoxel_id"] = supervoxel_id
        url = self._endpoints["handle_root"].format_map(endpoint_mapping)
        query_d = package_timestamp(self._process_timestamp(timestamp))
        if level2:
            query_d["stop_layer"] = 2
        response = self.session.get(url, params=query_d)
        return np.int64(handle_response(response, as_json=True)["root_id"])

    def get_merge_log(self, root_id):
        """Get the merge log (splits and merges) for an object

        Parameters
        ----------
        root_id : np.uint64
            Object root id to look up

        Returns
        -------
        list
            List of merge events in the history of the object.
        """
        endpoint_mapping = self.default_url_mapping
        endpoint_mapping["root_id"] = root_id
        url = self._endpoints["merge_log"].format_map(endpoint_mapping)
        response = self.session.get(url)
        return handle_response(response)

    def get_change_log(self, root_id, filtered=True):
        """Get the change log (splits and merges) for an object

        Parameters
        ----------
        root_id : np.uint64
            Object root id to look up
        filtered : bool, optional
            Whether to request the filtered change log. Default True.

        Returns
        -------
        list
            List of split and merge events in the object history
        """
        endpoint_mapping = self.default_url_mapping
        endpoint_mapping["root_id"] = root_id
        url = self._endpoints["change_log"].format_map(endpoint_mapping)
        params = {"filtered": filtered}
        response = self.session.get(url, params=params)
        return handle_response(response)

    def get_tabular_change_log(self, root_ids, filtered=True):
        """Get a detailed changelog for neurons

        Parameters
        ----------
        root_ids : list of np.uint64
            Object root ids to look up
        filtered : bool, optional
            Whether to request the filtered change log. Default True.

        Returns
        -------
        dict of dataframe
            One DataFrame per (deduplicated) root id, keyed by int root id.
        """
        root_ids = [int(r) for r in np.unique(root_ids)]
        endpoint_mapping = self.default_url_mapping
        endpoint_mapping["root_ids"] = root_ids
        url = self._endpoints["tabular_change_log"].format_map(endpoint_mapping)
        params = {"filtered": filtered}
        data = json.dumps({"root_ids": root_ids}, cls=CGEncoder)
        # NOTE(review): this issues a GET with a request body, which some
        # proxies strip — presumably the server reads it; confirm if issues arise.
        response = self.session.get(url, data=data, params=params)
        res_dict = handle_response(response)
        changelog_dict = {}
        for k in res_dict.keys():
            changelog_dict[int(k)] = pd.DataFrame(json.loads(res_dict[k]))
        return changelog_dict

    def get_leaves(self, root_id, bounds=None, stop_layer: int = None):
        """Get all supervoxels for a root_id

        Parameters
        ----------
        root_id : np.uint64
            Root id to query
        bounds: np.array or None, optional
            If specified, returns supervoxels within a 3x2 numpy array of bounds [[minx,maxx],[miny,maxy],[minz,maxz]]
            If None, finds all supervoxels.
        stop_layer: int, optional
            If specified, returns chunkedgraph nodes at layer =stop_layer
            default will be stop_layer=1 (supervoxels)

        Returns
        -------
        list
            List of supervoxel ids (or nodeids if stop_layer>1)
        """
        endpoint_mapping = self.default_url_mapping
        endpoint_mapping["root_id"] = root_id
        url = self._endpoints["leaves_from_root"].format_map(endpoint_mapping)
        query_d = {}
        if bounds is not None:
            query_d["bounds"] = package_bounds(bounds)
        if stop_layer is not None:
            query_d["stop_layer"] = int(stop_layer)
        response = self.session.get(url, params=query_d)
        return np.int64(handle_response(response)["leaf_ids"])

    def do_merge(self, supervoxels, coords, resolution=(4, 4, 40)):
        """Perform a merge on the chunkeded graph

        Args:
            supervoxels (iterable): a N long list of supervoxels to merge
            coords (np.array): a Nx3 array of coordinates of the supervoxels in units of resolution
            resolution (tuple, optional): what to multiple the coords by to get nm. Defaults to (4,4,40).
        """
        endpoint_mapping = self.default_url_mapping
        url = self._endpoints["do_merge"].format_map(endpoint_mapping)
        data = []
        for svid, coor in zip(supervoxels, coords):
            row = np.concatenate([[svid], np.array(coor) * resolution])
            data.append(row)
        params = {"priority": False}
        response = self.session.post(
            url,
            data=json.dumps(data, cls=CGEncoder),
            params=params,
            headers={"Content-Type": "application/json"},
        )
        handle_response(response)

    def get_children(self, node_id):
        """Get the children of a node in the hierarchy

        Parameters
        ----------
        node_id : np.uint64
            Node id to query

        Returns
        -------
        list
            List of np.int64 ids of child nodes.
        """
        endpoint_mapping = self.default_url_mapping
        endpoint_mapping["root_id"] = node_id
        url = self._endpoints["handle_children"].format_map(endpoint_mapping)
        response = self.session.get(url)
        return np.array(handle_response(response)["children_ids"], dtype=np.int64)

    def get_contact_sites(self, root_id, bounds, calc_partners=False):
        """Get contacts for a root id

        Parameters
        ----------
        root_id : np.uint64
            Object root id
        bounds: np.array
            Bounds within a 3x2 numpy array of bounds [[minx,maxx],[miny,maxy],[minz,maxz]] for which to find contacts. Running this query without bounds is too slow.
        calc_partners : bool, optional
            If True, get partner root ids. By default, False.

        Returns
        -------
        dict
            Dict relating ids to contacts
        """
        endpoint_mapping = self.default_url_mapping
        endpoint_mapping["root_id"] = root_id
        url = self._endpoints["contact_sites"].format_map(endpoint_mapping)
        query_d = {}
        if bounds is not None:
            query_d["bounds"] = package_bounds(bounds)
        query_d["partners"] = calc_partners
        response = self.session.get(url, json=[root_id], params=query_d)
        contact_d = handle_response(response)
        # JSON object keys come back as strings; convert to int root ids.
        return {int(k): v for k, v in contact_d.items()}

    def find_path(self, root_id, src_pt, dst_pt, precision_mode=False):
        """find a path between two locations on a root_id using the supervoxel lvl2 graph.

        Args:
            root_id (np.int64): the root id to search on
            src_pt (np.array): len(3) xyz location of the start location in nm
            dst_pt ([type]): len(3) xyz location of the end location in nm
            precision_mode (bool, optional): Whether to perform the search in precision mode. Defaults to False.

        Returns:
            centroids_list: centroids
            l2_path: l2_path
            failed_l2_ids: failed_l2_ids
        """
        endpoint_mapping = self.default_url_mapping
        endpoint_mapping["root_id"] = root_id
        url = self._endpoints["find_path"].format_map(endpoint_mapping)
        query_d = {}
        query_d["precision_mode"] = precision_mode
        nodes = [[root_id] + src_pt.tolist(), [root_id] + dst_pt.tolist()]
        response = self.session.post(
            url,
            data=json.dumps(nodes, cls=CGEncoder),
            params=query_d,
            headers={"Content-Type": "application/json"},
        )
        resp_d = handle_response(response)
        centroids = np.array(resp_d["centroids_list"])
        failed_l2_ids = np.array(resp_d["failed_l2_ids"], dtype=np.uint64)
        l2_path = np.array(resp_d["l2_path"])
        return centroids, l2_path, failed_l2_ids

    def get_subgraph(self, root_id, bounds):
        """Get subgraph of root id within a bounding box

        Args:
            root_id ([int64]): root (or seg_id/node_id) of chunkedgraph to query
            bounds ([np.array]): 3x2 bounding box (x,y,z)x (min,max) in chunkedgraph coordinates

        Returns:
            tuple: (nodes as np.int64, affinities as np.double, areas as np.int32)
        """
        endpoint_mapping = self.default_url_mapping
        endpoint_mapping["root_id"] = root_id
        url = self._endpoints["get_subgraph"].format_map(endpoint_mapping)
        query_d = {}
        if bounds is not None:
            query_d["bounds"] = package_bounds(bounds)
        response = self.session.get(url, params=query_d)
        rd = handle_response(response)
        return np.int64(rd["nodes"]), np.double(rd["affinities"]), np.int32(rd["areas"])

    def level2_chunk_graph(self, root_id):
        """Get graph of level 2 chunks, the smallest agglomeration level above supervoxels.

        Parameters
        ----------
        root_id : int
            Root id of object

        Returns
        -------
        edge_list : list
            Edge array of level 2 ids
        """
        endpoint_mapping = self.default_url_mapping
        endpoint_mapping["root_id"] = root_id
        url = self._endpoints["lvl2_graph"].format_map(endpoint_mapping)
        r = handle_response(self.session.get(url))
        return r["edge_graph"]

    def remesh_level2_chunks(self, chunk_ids):
        """Submit specific level 2 chunks to be remeshed in case of a problem.

        Parameters
        ----------
        chunk_ids : list
            List of level 2 chunk ids.
        """
        endpoint_mapping = self.default_url_mapping
        url = self._endpoints["remesh_level2_chunks"].format_map(endpoint_mapping)
        data = {"new_lvl2_ids": [int(x) for x in chunk_ids]}
        r = self.session.post(url, json=data)
        r.raise_for_status()

    def get_operation_details(self, operation_ids: Iterable[int]):
        """get the details of a list of operations

        Args:
            operation_ids (Iterable[int]): list of operation IDs

        Returns:
            dict: a dict of dicts of operation info, keys are operation ids,
            values are a dictionary of operation info for the operation
        """
        if isinstance(operation_ids, np.ndarray):
            operation_ids = operation_ids.tolist()
        endpoint_mapping = self.default_url_mapping
        url = self._endpoints["operation_details"].format_map(endpoint_mapping)
        query_d = {"operation_ids": operation_ids}
        query_str = urlencode(query_d)
        url = url + "?" + query_str
        r = self.session.get(url)
        r.raise_for_status()
        return r.json()

    def get_lineage_graph(
        self, root_id, timestamp_past=None, timestamp_future=None, as_nx_graph=False
    ):
        """Returns the lineage graph for a root id, optionally cut off in the past or the future.

        Parameters
        ----------
        root_id : int
            Object root id
        timestamp_past : datetime.datetime or None, optional
            Cutoff for the lineage graph backwards in time. By default, None.
        timestamp_future : datetime.datetime or None, optional
            Cutoff for the lineage graph going forwards in time. By default, None.
        as_nx_graph: bool
            if True, a networkx graph is returned

        Returns
        -------
        dict
            Dictionary describing the lineage graph and operations for the root id.
        """
        endpoint_mapping = self.default_url_mapping
        endpoint_mapping["root_id"] = root_id
        data = {}
        if timestamp_past is not None:
            data["timestamp_past"] = time.mktime(timestamp_past.timetuple())
        if timestamp_future is not None:
            data["timestamp_future"] = time.mktime(timestamp_future.timetuple())
        url = self._endpoints["handle_lineage_graph"].format_map(endpoint_mapping)
        r = handle_response(self.session.get(url, params=data))
        if as_nx_graph:
            return nx.node_link_graph(r)
        else:
            return r

    def get_latest_roots(self, root_id, timestamp_future=None):
        """Returns root ids that are the latest successors of a given root id.

        Parameters
        ----------
        root_id : int
            Object root id
        timestamp_future : datetime.datetime or None, optional
            Cutoff for the search going forwards in time. By default, None.

        Returns
        -------
        np.ndarray
            1d array with all latest successors
        """
        lineage_graph = self.get_lineage_graph(
            root_id,
            timestamp_past=self.get_root_timestamps([root_id])[0],
            timestamp_future=timestamp_future,
            as_nx_graph=True,
        )
        # Nodes with no outgoing edges have never been split/merged again,
        # so they are the current (latest) ids.
        out_degree_dict = dict(lineage_graph.out_degree)
        nodes = np.array(list(out_degree_dict.keys()))
        out_degrees = np.array(list(out_degree_dict.values()))
        return nodes[out_degrees == 0]

    def get_original_roots(self, root_id, timestamp_past=None):
        """Returns root ids that are the original predecessors of a given root id.

        Parameters
        ----------
        root_id : int
            Object root id
        timestamp_past : datetime.datetime or None, optional
            Cutoff for the search going backwards in time. By default, None.

        Returns
        -------
        np.ndarray
            1d array with all original predecessors
        """
        lineage_graph = self.get_lineage_graph(
            root_id,
            timestamp_past=timestamp_past,
            timestamp_future=self.get_root_timestamps([root_id])[0],
            as_nx_graph=True,
        )
        # Nodes with no incoming edges have no predecessors, i.e. they are
        # the original roots.
        in_degree_dict = dict(lineage_graph.in_degree)
        nodes = np.array(list(in_degree_dict.keys()))
        in_degrees = np.array(list(in_degree_dict.values()))
        return nodes[in_degrees == 0]

    def is_latest_roots(self, root_ids, timestamp=None):
        """Check whether these root_ids are still a root at this timestamp

        Parameters
        ----------
        root_ids ([type]): root ids to check
        timestamp (datetime.datetime, optional): timestamp to check whether these IDs are valid root_ids. Defaults to None (assumes now).

        Returns:
            np.array[bool]: boolean array of whether these are valid root_ids
        """
        endpoint_mapping = self.default_url_mapping
        url = self._endpoints["is_latest_roots"].format_map(endpoint_mapping)
        if timestamp is None:
            timestamp = self._default_timestamp
        if timestamp is not None:
            query_d = {"timestamp": time.mktime(timestamp.timetuple())}
        else:
            query_d = None
        data = {"node_ids": root_ids}
        r = handle_response(
            self.session.post(url, data=json.dumps(data, cls=CGEncoder), params=query_d)
        )
        # `np.bool` was deprecated and removed in NumPy >= 1.24; use the
        # builtin bool dtype instead.
        return np.array(r["is_latest"], dtype=bool)

    def get_root_timestamps(self, root_ids):
        """Retrieves timestamps when roots where created.

        Parameters
        ----------
        root_ids: Iterable,
            Iterable of seed root ids.

        Returns
        -------
        np.ndarray
            Array of timezone-aware (UTC) datetime.datetime creation times,
            one per input root id.
        """
        endpoint_mapping = self.default_url_mapping
        url = self._endpoints["root_timestamps"].format_map(endpoint_mapping)
        data = {"node_ids": root_ids}
        r = handle_response(
            self.session.post(url, data=json.dumps(data, cls=CGEncoder))
        )
        return np.array(
            [
                pytz.UTC.localize(datetime.datetime.fromtimestamp(ts))
                for ts in r["timestamp"]
            ]
        )

    def get_past_ids(self, root_ids, timestamp_past=None, timestamp_future=None):
        """For a set of root ids, get the list of ids at a past or future time point that could contain parts of the same object.

        Parameters
        ----------
        root_ids: Iterable,
            Iterable of seed root ids.
        timestamp_past : datetime.datetime or None, optional
            Time of a point in the past for which to look up root ids. Default is None.
        timestamp_future : datetime.datetime or None, optional
            Time of a point in the future for which to look up root ids. Not implemented on the server currently. Default is None.

        Returns
        -------
        dict
            Dict with keys `future_id_map` and `past_id_map`. Each is a dict whose keys are the supplied root ids and whose values
            are the list of related root ids at the past/future time stamp.
        """
        endpoint_mapping = self.default_url_mapping
        params = {}
        if timestamp_past is not None:
            params["timestamp_past"] = time.mktime(timestamp_past.timetuple())
        if timestamp_future is not None:
            params["timestamp_future"] = time.mktime(timestamp_future.timetuple())
        data = {"root_ids": np.array(root_ids, dtype=np.uint64)}
        url = self._endpoints["past_id_mapping"].format_map(endpoint_mapping)
        # NOTE(review): GET with a request body — presumably read by the
        # server, but some proxies drop GET bodies; confirm if issues arise.
        r = handle_response(
            self.session.get(url, data=json.dumps(data, cls=CGEncoder), params=params)
        )
        # Convert id keys as strings to ints
        past_keys = list(r["past_id_map"].keys())
        for k in past_keys:
            dat = r["past_id_map"].pop(k)
            r["past_id_map"][int(k)] = dat
        fut_keys = list(r["future_id_map"].keys())
        for k in fut_keys:
            dat = r["future_id_map"].pop(k)
            r["future_id_map"][int(k)] = dat
        return r

    def get_delta_roots(
        self,
        timestamp_past: datetime.datetime,
        timestamp_future: datetime.datetime = None,
    ):
        """get the list of roots that have changed between timetamp_past and timestamp_future

        Args:
            timestamp_past (datetime.datetime): past timepoint to query
            timestamp_future (datetime.datetime, optional): future timepoint to query. Defaults to the current UTC time.

        Returns:
            old_roots (np.ndarray): roots that have expired in that interval
            new_roots (np.ndarray): roots that are new in that interval
        """
        # BUGFIX: the default used to be `datetime.datetime.utcnow()` in the
        # signature, which is evaluated once at class-definition (import)
        # time — so long-running processes queried against a stale "now".
        # Resolve the default at call time instead.
        if timestamp_future is None:
            timestamp_future = datetime.datetime.utcnow()
        endpoint_mapping = self.default_url_mapping
        params = {
            "timestamp_past": time.mktime(timestamp_past.timetuple()),
            "timestamp_future": time.mktime(timestamp_future.timetuple()),
        }
        url = self._endpoints["delta_roots"].format_map(endpoint_mapping)
        r = handle_response(self.session.get(url, params=params))
        return np.array(r["old_roots"]), np.array(r["new_roots"])

    @property
    def cloudvolume_path(self):
        return self._endpoints["cloudvolume_path"].format_map(self.default_url_mapping)

    @property
    def segmentation_info(self):
        """Complete segmentation metadata (fetched once, then cached)."""
        if self._segmentation_info is None:
            url = self._endpoints["info"].format_map(self.default_url_mapping)
            response = self.session.get(url)
            self._segmentation_info = handle_response(response)
        return self._segmentation_info

    @property
    def base_resolution(self):
        """MIP 0 resolution for voxels assumed by the ChunkedGraph

        Returns
        -------
        list
            3-long list of x/y/z voxel dimensions in nm
        """
        return self.segmentation_info["scales"][0].get("resolution")
# Maps chunkedgraph API versions to their client implementations; "latest"
# is resolved to a concrete version by _api_endpoints before lookup.
client_mapping = {
    1: ChunkedGraphClientV1,
    "latest": ChunkedGraphClientV1,
}
<file_sep>/docs/guide/schemas.rst
EMAnnotationSchemas
===================
The EMAnnotationSchemas client lets one look up the available schemas
and how they are defined. This is mostly used for programmatic
interactions between services, but can be useful when looking up schema
definitions for new tables.
Get the list of schemas
^^^^^^^^^^^^^^^^^^^^^^^
One can get the list of all available schemas with the ``get_schemas`` method.
Currently, new schemas have to be generated on the server side, although
we aim to have a generic set available to use.
.. code:: python
client.schema.get_schemas()
View a specific schema
^^^^^^^^^^^^^^^^^^^^^^
The details of each schema can be viewed with the ``schema_definition``
method, formatted as per JSONSchema.
.. code:: python
example_schema = client.schema.schema_definition('microns_func_coreg')
example_schema
This is mostly useful for programmatic interaction between services at
the moment, but can also be used to inspect the expected form of an
annotation by digging into the format.
.. code:: python
example_schema['definitions']['FunctionalCoregistration']
<file_sep>/docs/guide/intro.rst
Getting Started
===============
CAVEclient is a package for simplifying interactions with the web services associated with CAVE (Connectome Annotation Versioning Engine), which includes:
- `pychunkedgraph <https://www.github.com/seung-lab/pychunkedgraph>`_ (For tracking dynamic segmentations)
- `NeuroglancerJsonServer <https://www.github.com/seung-lab/NeuroglancerJsonServer>`_ (For posting/getting neuroglancer json states)
- `AnnotationFrameworkInfoService <https://www.github.com/seung-lab/AnnotationFrameworkInfoService>`_ (For storing datastack metadata information)
- `EmAnnotationSchemas <https://www.github.com/seung-lab/EmAnnotationSchemas>`_ (For storing an extensible set of schemas for annotating EM data)
- `AnnotationEngine <https://www.github.com/seung-lab/AnnotationEngine>`_ (For storing annotations on EM data)
Installation
~~~~~~~~~~~~
The CAVEclient can be installed with pip:
.. code-block:: bash
$ pip install caveclient
Assumptions
~~~~~~~~~~~
The code is set up to work flexibly with any deployment of these services, but you need to specify the server_address if that address is not
https://globalv1.daf-apis.com/ for each client when initializing it.
Similarly, the clients can query the info service for metadata to simplify the interaction with a datastack, but you have to specify a datastack name.
<file_sep>/docs/guide/authentication.rst
Authentication Service
======================
Authentication tokens are generally needed for programmatic access to
our services. The AuthClient handles storing and
loading your token or tokens and inserting it into requests in other
clients.
We can access the auth client from ``client.auth``. Once you have saved
a token, you probably won’t interact with this client very often,
however it has some convenient features for saving new tokens the first
time. Let’s see if you have a token already. Probably not.
.. code:: python
client = CAVEclient()
auth = client.auth
print(f"My current token is: {auth.token}")
.. _new-token:
Getting a new token
^^^^^^^^^^^^^^^^^^^
To get a new token, you will need to manually acquire it. For convenience,
the function ``get_new_token()`` provides instructions for how to get and
save the token.
By default, the token is saved to
``~/.cloudvolume/secrets/cave-secret.json`` as a string under
the key ``token``. This makes it compatible by default with
`Cloudvolume <https://github.com/seung-lab/cloud-volume>`_ projects, which
can come in handy. The following steps will save a token to the default
location.
.. code:: python
auth.get_new_token()
.. code:: python
new_token = '<KEY>' #This is the text you see after you visit the website.
auth.save_token(token=new_token)
print(f"My token is now: {auth.token}")
Note that requesting a new token will invalidate your previous token on the
same project. If you want to use the same token across different computers,
you will need to share the same token information.
Loading saved tokens
^^^^^^^^^^^^^^^^^^^^
Try opening ``~/.cloudvolume/secrets/cave-secret.json`` to see
what we just created.
If we had wanted to use a different file or a different json key, we
could have specified that in auth.save_token.
Because we used the default values, this token is used automatically
when we initialize a new CAVEclient. If we wanted to use a different
token file, token key, or even directly specify a token we could do so
here.
.. code:: python
client = CAVEclient(datastack_name)
print(f"Now my basic token is: {client.auth.token}")
client_direct = CAVEclient(datastack_name, auth_token='<PASSWORD>')
print(f"A directly specified token is: {client_direct.auth.token}")
If you use a CAVEclient, the AuthClient and its token will be
automatically applied to any other services without further use.<file_sep>/caveclient/jsonservice.py
from .base import ClientBase, _api_versions, _api_endpoints, handle_response
from .auth import AuthClient
from .endpoints import (
jsonservice_common,
jsonservice_api_versions,
default_global_server_address,
)
import requests
import json
import re
server_key = "json_server_address"
def JSONService(
    server_address=None,
    auth_client=None,
    api_version="latest",
    ngl_url=None,
):
    """Client factory to interface with the JSON state service.

    Parameters
    ----------
    server_address : str, optional
        URL to the JSON state server.
        If None, set to the default global server address.
        By default None.
    auth_client : An Auth client, optional
        An auth client with a token for the same global server, by default None
    api_version : int or 'latest', optional
        Which endpoint API version to use or 'latest'. By default, 'latest' tries to ask
        the server for which versions are available, if such functionality exists, or if not
        it defaults to the latest version for which there is a client. By default 'latest'
    ngl_url : str or None, optional
        Default neuroglancer deployment URL. Only used for V1 and later.
    """
    if server_address is None:
        server_address = default_global_server_address
    if auth_client is None:
        auth_client = AuthClient()
    header = auth_client.request_header
    # Resolve the concrete API version and its endpoint templates.
    resolved_endpoints, resolved_version = _api_endpoints(
        api_version,
        server_key,
        server_address,
        jsonservice_common,
        jsonservice_api_versions,
        header,
    )
    client_cls = client_mapping[resolved_version]
    return client_cls(
        server_address=server_address,
        auth_header=header,
        api_version=resolved_version,
        endpoints=resolved_endpoints,
        server_name=server_key,
        ngl_url=ngl_url,
    )
class JSONServiceV1(ClientBase):
    """Client for the Neuroglancer JSON state service (API v1).

    Uploads and downloads Neuroglancer JSON states and builds viewer URLs
    that point at stored states.
    """

    def __init__(
        self, server_address, auth_header, api_version, endpoints, server_name, ngl_url
    ):
        super(JSONServiceV1, self).__init__(
            server_address, auth_header, api_version, endpoints, server_name
        )
        # Default Neuroglancer deployment URL; may be None if not configured.
        self._ngl_url = ngl_url

    @property
    def state_service_endpoint(self):
        """Endpoint URL for posting JSON state"""
        url_mapping = self.default_url_mapping
        return self._endpoints["upload_state"].format_map(url_mapping)

    @property
    def ngl_url(self):
        """Base URL of the default Neuroglancer deployment, or None."""
        return self._ngl_url

    @ngl_url.setter
    def ngl_url(self, new_ngl_url):
        self._ngl_url = new_ngl_url

    def get_state_json(self, state_id):
        """Download a Neuroglancer JSON state

        Parameters
        ----------
        state_id : int
            ID of a JSON state uploaded to the state service.

        Returns
        -------
        dict
            JSON specifying a Neuroglancer state.
        """
        url_mapping = self.default_url_mapping
        url_mapping["state_id"] = state_id
        url = self._endpoints["get_state"].format_map(url_mapping)
        response = self.session.get(url)
        # Validate the HTTP status, then parse the body ourselves.
        handle_response(response, as_json=False)
        return json.loads(response.content)

    def upload_state_json(self, json_state, state_id=None, timestamp=None):
        """Upload a Neuroglancer JSON state

        Parameters
        ----------
        json_state : dict
            JSON-formatted Neuroglancer state
        state_id : int, optional
            ID of a JSON state uploaded to the state service.
            Using a state_id is an admin feature.
        timestamp : time.time, optional
            Timestamp for json state date. Requires state_id.
            (Currently accepted but not sent by this implementation.)

        Returns
        -------
        int
            state_id of the uploaded JSON state
        """
        url_mapping = self.default_url_mapping
        if state_id is None:
            url = self._endpoints["upload_state"].format_map(url_mapping)
        else:
            # Admin path: write to (or overwrite) a specific state id.
            url_mapping["state_id"] = state_id
            url = self._endpoints["upload_state_w_id"].format_map(url_mapping)
        response = self.session.post(url, data=json.dumps(json_state))
        handle_response(response, as_json=False)
        # The service replies with a URL ending in the new state id.
        # Use a raw string: the old pattern ".*\/(\d+)" relied on invalid
        # string escapes (\/ and \d), which raise SyntaxWarning on modern Python.
        response_re = re.search(r".*/(\d+)", str(response.content))
        return int(response_re.groups()[0])

    def build_neuroglancer_url(self, state_id, ngl_url=None):
        """Build a URL for a Neuroglancer deployment that will automatically retrieve specified state.

        If the datastack is specified, this is prepopulated from the info file field "viewer_site".
        If no ngl_url is specified in either the function or the client, only the JSON state url is returned.

        Parameters
        ----------
        state_id : int
            State id to retrieve
        ngl_url : str
            Base url of a neuroglancer deployment. If None, defaults to the value for the datastack or the client.
            If no value is found, only the URL to the JSON state is returned.

        Returns
        -------
        str
            The full URL requested
        """
        if ngl_url is None:
            ngl_url = self.ngl_url
        if ngl_url is None:
            # No deployment configured: return just the JSON state URL.
            ngl_url = ""
            parameter_text = ""
        elif ngl_url[-1] == "/":
            parameter_text = "?json_url="
        else:
            parameter_text = "/?json_url="
        url_mapping = self.default_url_mapping
        url_mapping["state_id"] = state_id
        get_state_url = self._endpoints["get_state"].format_map(url_mapping)
        url = ngl_url + parameter_text + get_state_url
        return url
# Maps JSON state service API version -> client implementation class.
client_mapping = {
    1: JSONServiceV1,
}
<file_sep>/changelog.md
# Changelog
## [Unreleased]
### Added
- **JSONStateService**: Neuroglancer URL can be specified for the client under the property `ngl_url`.
For a FrameworkClient with a datastack name, the value is set using the `viewer_site` field from the info client.
### Changed
- **JSONStateService**: In `build_neuroglancer_url`, if `ngl_url` is None the url will be pulled from the default client value.
  If the default value is None, only the URL to the JSON file will be returned.
## [2.0.1] - 2020-10-20
### Fixed
- **AuthClient** : Token creation and setting is more robust. Directories are created if not previously present.
## [2.0.0]
### Added
- First release of the unified FrameworkClient and system-wide authentication.<file_sep>/docs/api/caveclient.rst
caveclient package
=================================
Submodules
----------
caveclient.annotationengine module
-------------------------------------------------
.. automodule:: caveclient.annotationengine
:members:
:undoc-members:
:show-inheritance:
caveclient.auth module
-------------------------------------
.. automodule:: caveclient.auth
:members:
:undoc-members:
:show-inheritance:
caveclient.base module
-------------------------------------
.. automodule:: caveclient.base
:members:
:undoc-members:
:show-inheritance:
caveclient.chunkedgraph module
---------------------------------------------
.. automodule:: caveclient.chunkedgraph
:members:
:undoc-members:
:show-inheritance:
caveclient.emannotationschemas module
----------------------------------------------------
.. automodule:: caveclient.emannotationschemas
:members:
:undoc-members:
:show-inheritance:
caveclient.endpoints module
------------------------------------------
.. automodule:: caveclient.endpoints
:members:
:undoc-members:
:show-inheritance:
caveclient.format\_utils module
----------------------------------------------
.. automodule:: caveclient.format_utils
:members:
:undoc-members:
:show-inheritance:
caveclient.frameworkclient module
------------------------------------------------
.. automodule:: caveclient.frameworkclient
:members:
:undoc-members:
:show-inheritance:
caveclient.infoservice module
--------------------------------------------
.. automodule:: caveclient.infoservice
:members:
:undoc-members:
:show-inheritance:
caveclient.jsonservice module
--------------------------------------------
.. automodule:: caveclient.jsonservice
:members:
:undoc-members:
:show-inheritance:
caveclient.materializationengine module
------------------------------------------------------
.. automodule:: caveclient.materializationengine
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: caveclient
:members:
:undoc-members:
:show-inheritance:
<file_sep>/tests/conftest.py
import pytest
import responses
from caveclient import CAVEclient
import os
from caveclient import endpoints
TEST_GLOBAL_SERVER = os.environ.get("TEST_SERVER", "https://test.cave.com")
TEST_LOCAL_SERVER = os.environ.get("TEST_LOCAL_SERVER", "https://local.cave.com")
TEST_DATASTACK = os.environ.get("TEST_DATASTACK", "test_stack")
# Canned info-service record returned for the mocked test datastack.
test_info = {
    "viewer_site": "http://neuromancer-seung-import.appspot.com/",
    "aligned_volume": {
        "name": "test_volume",
        # NOTE(review): TEST_LOCAL_SERVER already includes the "https://"
        # scheme, so this f-string yields "precomputed://https://https://..." —
        # confirm whether the doubled scheme is intentional for this fixture.
        "image_source": f"precomputed://https://{TEST_LOCAL_SERVER}/test-em/v1",
        "id": 1,
        "description": "This is a test only dataset.",
    },
    "synapse_table": "test_synapse_table",
    "description": "This is the first test datastack. ",
    "local_server": TEST_LOCAL_SERVER,
    # NOTE(review): same doubled-scheme concern as image_source above.
    "segmentation_source": f"graphene://https://{TEST_LOCAL_SERVER}/segmentation/table/test_v1",
    "soma_table": "test_soma",
    "analysis_database": None,
}
@pytest.fixture()
@responses.activate
def myclient():
    """Fixture yielding a CAVEclient backed by a mocked info-service endpoint."""
    info_url = endpoints.infoservice_endpoints_v2["datastack_info"].format_map(
        {"i_server_address": TEST_GLOBAL_SERVER, "datastack_name": TEST_DATASTACK}
    )
    # Serve the canned datastack record for the client's startup lookup.
    responses.add(responses.GET, info_url, json=test_info, status=200)
    return CAVEclient(TEST_DATASTACK, server_address=TEST_GLOBAL_SERVER)
<file_sep>/synapse_example.py
import pandas as pd
import os
import numpy as np
import time
from caveclient import annotationengine as ae
HOME = os.path.expanduser("~")


def load_synapses(path=HOME + "/Downloads/pinky100_final.df", scaling=(1, 1, 1)):
    """Load a synapse table from CSV and format the point columns for upload.

    Parameters
    ----------
    path : str, optional
        CSV file with presyn_{x,y,z}, centroid_{x,y,z}, postsyn_{x,y,z}
        coordinate columns and a "size" column.
    scaling : tuple of 3 numbers, optional
        Divisor applied to each xyz coordinate (e.g. voxel resolution).

    Returns
    -------
    pandas.DataFrame
        Columns "pre_pt.position", "ctr_pt.position", "post_pt.position"
        (each an integer xyz array) and "size". Rows with a missing
        x-coordinate in any of the three points are dropped.
    """
    scaling = np.array(list(scaling))
    df = pd.read_csv(path)
    # Drop rows where any of the x-coordinates is NaN.
    locs = np.array(df[["presyn_x", "centroid_x", "postsyn_x"]])
    mask = ~np.any(np.isnan(locs), axis=1)
    df = df[mask]
    # np.int was removed in NumPy 1.24; the builtin int is the exact
    # equivalent of the old alias.
    df["pre_pt.position"] = list(
        (np.array(df[["presyn_x", "presyn_y", "presyn_z"]]) / scaling).astype(int)
    )
    df["ctr_pt.position"] = list(
        (np.array(df[["centroid_x", "centroid_y", "centroid_z"]]) / scaling).astype(
            int
        )
    )
    df["post_pt.position"] = list(
        (np.array(df[["postsyn_x", "postsyn_y", "postsyn_z"]]) / scaling).astype(int)
    )
    df = df[["pre_pt.position", "ctr_pt.position", "post_pt.position", "size"]]
    return df
def insert_synapses(syn_df, datastack_name="pinky100", annotation_type="synapse"):
    """Bulk-import a formatted synapse dataframe via the annotation service."""
    annotation_client = ae.AnnotationClient(datastack_name=datastack_name)
    annotation_client.bulk_import_df(annotation_type, syn_df)
if __name__ == "__main__":
    # Load the synapse CSV, then push it to the annotation service,
    # timing each stage.
    print("LOADING synapses")
    start = time.time()
    syn_df = load_synapses()
    print("Time for loading: %.2fmin" % ((time.time() - start) / 60))
    start = time.time()
    insert_synapses(syn_df)
    print("Time for inserting: %.2fmin" % ((time.time() - start) / 60))
<file_sep>/caveclient/frameworkclient.py
from .annotationengine import AnnotationClient
from .auth import AuthClient, default_token_file
from .chunkedgraph import ChunkedGraphClient
from .emannotationschemas import SchemaClient
from .infoservice import InfoServiceClient
from .jsonservice import JSONService
from .materializationengine import MaterializationClient
from .l2cache import L2CacheClient
from .endpoints import default_global_server_address
class GlobalClientError(Exception):
    """Raised when datastack-local functionality is requested from a global-only client."""
    pass
class CAVEclient(object):
    """Factory producing the appropriate framework client.

    Returns a CAVEclientFull when a datastack name is provided (and
    ``global_only`` is False); otherwise returns a CAVEclientGlobal.
    """

    def __new__(
        cls,
        datastack_name=None,
        server_address=None,
        auth_token_file=None,
        auth_token_key=None,
        auth_token=None,
        global_only=False,
    ):
        shared_kwargs = dict(
            server_address=server_address,
            auth_token_file=auth_token_file,
            auth_token_key=auth_token_key,
            auth_token=auth_token,
        )
        if datastack_name is None or global_only:
            return CAVEclientGlobal(**shared_kwargs)
        return CAVEclientFull(datastack_name=datastack_name, **shared_kwargs)
class CAVEclientGlobal(object):
    """A manager for all clients sharing common datastack and authentication information.

    This client wraps all the other clients and keeps track of the things that need to be
    consistent across them. To instantiate a client:

    .. code:: python

        client = CAVEclient(datastack_name='my_datastack',
                            server_address='www.myserver.com',
                            auth_token_file='~/.mysecrets/secrets.json')

    Then

    * client.info is an InfoService client (see infoservice.InfoServiceClient)
    * client.auth handles authentication
    * client.state is a neuroglancer state client (see jsonservice.JSONService)
    * client.schema is an EM Annotation Schemas client (see emannotationschemas.SchemaClient)

    All subclients are loaded lazily and share the same datastack name, server address,
    and auth tokens (where used).

    Parameters
    ----------
    server_address : str or None
        URL of the framework server. If None, chooses the default server
        www.dynamicannotationframework.com. Optional, defaults to None.
    auth_token_file : str or None
        Path to a json file containing the auth token. If None, uses the default
        location. See Auth client documentation. Optional, defaults to None.
    auth_token_key : str
        Dictionary key for the token in the JSON file.
        Optional, default is 'token'.
    auth_token : str or None
        Direct entry of an auth token. If None, uses the file arguments to find
        the token. Optional, default is None.
    """

    def __init__(
        self,
        server_address=None,
        auth_token_file=None,
        auth_token_key=None,
        auth_token=None,
    ):
        if server_address is None:
            server_address = default_global_server_address
        self._server_address = server_address
        # Auth settings are kept in one dict (matching AuthClient kwargs) so
        # change_auth() can rebuild every service consistently.
        self._auth_config = {}
        self.change_auth(
            auth_token_file=auth_token_file,
            auth_token_key=auth_token_key,
            auth_token=auth_token,
        )

    def change_auth(self, auth_token_file=None, auth_token_key=None, auth_token=None):
        """Change the authentication token and reset services.

        Parameters
        ----------
        auth_token_file : str, optional
            New auth token json file path, by default None, which defaults to the existing state.
        auth_token_key : str, optional
            New dictionary key under which the token is stored in the json file, by default None,
            which defaults to the existing state.
        auth_token : str, optional
            Direct entry of a new token, by default None.
        """
        # BUG FIX: previous settings are stored under "token_file"/"token_key"
        # (see the dict below), but the fallback lookups used
        # "auth_token_file"/"auth_token_key", which always returned None and
        # silently discarded the existing configuration.
        if auth_token_file is None:
            auth_token_file = self._auth_config.get("token_file", None)
        if auth_token_key is None:
            auth_token_key = self._auth_config.get("token_key", None)
        self._auth_config = {
            "token_file": auth_token_file,
            "token_key": auth_token_key,
            "token": auth_token,
            "server_address": self._server_address,
        }
        self._reset_services()

    def _reset_services(self):
        # Drop cached service clients; they are rebuilt lazily on next access.
        self._auth = None
        self._info = None
        self._state = None
        self._schema = None

    @property
    def server_address(self):
        """URL of the global framework server."""
        return self._server_address

    @property
    def auth(self):
        """AuthClient built from the stored auth configuration (lazy)."""
        if self._auth is None:
            self._auth = AuthClient(**self._auth_config)
        return self._auth

    @property
    def info(self) -> InfoServiceClient:
        """InfoService client for this server/datastack (lazy)."""
        if self._info is None:
            self._info = InfoServiceClient(
                server_address=self.server_address,
                datastack_name=self.datastack_name,
                auth_client=self.auth,
            )
        return self._info

    @property
    def state(self):
        """Neuroglancer JSON state client (lazy)."""
        if self._state is None:
            self._state = JSONService(
                server_address=self.server_address, auth_client=self.auth
            )
        return self._state

    @property
    def schema(self):
        """EM Annotation Schemas client (lazy)."""
        if self._schema is None:
            self._schema = SchemaClient(
                server_address=self.server_address, auth_client=self.auth
            )
        return self._schema

    def _no_local_functionality(self):
        # Datastack-bound services are only available on CAVEclientFull.
        raise GlobalClientError(
            "Client in global-only mode because no datastack was set."
        )

    @property
    def annotation(self):
        self._no_local_functionality()

    @property
    def chunkedgraph(self):
        self._no_local_functionality()

    @property
    def datastack_name(self):
        # Global clients are not bound to any datastack.
        return None
class CAVEclientFull(CAVEclientGlobal):
    """A manager for all clients sharing common datastack and authentication information.
    This client wraps all the other clients and keeps track of the things that need to be consistent across them.
    To instantiate a client:
    .. code:: python
        client = CAVEclient(datastack_name='my_datastack',
                            server_address='www.myserver.com',
                            auth_token_file='~/.mysecrets/secrets.json')
    Then
    * client.info is an InfoService client (see infoservice.InfoServiceClient)
    * client.state is a neuroglancer state client (see jsonservice.JSONService)
    * client.schema is an EM Annotation Schemas client (see emannotationschemas.SchemaClient)
    * client.chunkedgraph is a Chunkedgraph client (see chunkedgraph.ChunkedGraphClient)
    * client.annotation is an Annotation DB client (see annotationengine.AnnotationClient)
    All subclients are loaded lazily and share the same datastack name, server address, and auth tokens where used.
    Parameters
    ----------
    datastack_name : str, optional
        Datastack name for the services. Almost all services need this and will not work if it is not passed.
    server_address : str or None
        URL of the framework server. If None, chooses the default server www.dynamicannotationframework.com.
        Optional, defaults to None.
    auth_token_file : str or None
        Path to a json file containing the auth token. If None, uses the default location. See Auth client documentation.
        Optional, defaults to None.
    auth_token_key : str
        Dictionary key for the token in the the JSON file.
        Optional, default is 'token'.
    auth_token : str or None
        Direct entry of an auth token. If None, uses the file arguments to find the token.
        Optional, default is None.
    """
    def __init__(
        self,
        datastack_name=None,
        server_address=None,
        auth_token_file=default_token_file,
        auth_token_key="token",
        auth_token=None,
    ):
        super(CAVEclientFull, self).__init__(
            server_address=server_address,
            auth_token_file=auth_token_file,
            auth_token_key=auth_token_key,
            auth_token=auth_token,
        )
        self._datastack_name = datastack_name
        # Lazily-built datastack-local service clients.
        self._chunkedgraph = None
        self._annotation = None
        self._materialize = None
        self._l2cache = None
        # These info lookups contact the info service during construction.
        self.local_server = self.info.local_server()
        av_info = self.info.get_aligned_volume_info()
        self._aligned_volume_name = av_info["name"]
    def _reset_services(self):
        # Extends the global reset to also drop the datastack-local clients.
        self._auth = None
        self._info = None
        self._state = None
        self._schema = None
        self._chunkedgraph = None
        self._annotation = None
        self._materialize = None
        self._l2cache = None
    @property
    def datastack_name(self):
        """Name of the datastack this client is bound to."""
        return self._datastack_name
    @property
    def chunkedgraph(self):
        """ChunkedGraph client for the datastack's segmentation table (lazy)."""
        if self._chunkedgraph is None:
            # The table name is the last path component of the graphene source.
            seg_source = self.info.segmentation_source()
            table_name = seg_source.split("/")[-1]
            self._chunkedgraph = ChunkedGraphClient(
                table_name=table_name,
                server_address=self.local_server,
                auth_client=self.auth,
            )
        return self._chunkedgraph
    @property
    def annotation(self):
        """Annotation DB client bound to the datastack's aligned volume (lazy)."""
        if self._annotation is None:
            self._annotation = AnnotationClient(
                server_address=self.local_server,
                aligned_volume_name=self._aligned_volume_name,
                auth_client=self.auth,
            )
        return self._annotation
    @property
    def materialize(self):
        """Materialization client wired to this datastack's chunkedgraph (lazy)."""
        if self._materialize is None:
            self._materialize = MaterializationClient(
                server_address=self.local_server,
                auth_client=self.auth,
                datastack_name=self._datastack_name,
                cg_client=self.chunkedgraph,
                synapse_table=self.info.get_datastack_info().get("synapse_table", None),
            )
        return self._materialize
    @property
    def state(self):
        """JSON state client; seeds ngl_url from the datastack's viewer_site (lazy)."""
        if self._state is None:
            self._state = JSONService(
                server_address=self.server_address,
                auth_client=self.auth,
                ngl_url=self.info.viewer_site(),
            )
        return self._state
    @property
    def l2cache(self):
        """Level-2 cache client for the datastack's segmentation table (lazy)."""
        if self._l2cache is None:
            seg_source = self.info.segmentation_source()
            table_name = seg_source.split("/")[-1]
            self._l2cache = L2CacheClient(
                server_address=self.local_server,
                auth_client=self.auth,
                table_name=table_name,
            )
        return self._l2cache
<file_sep>/caveclient/l2cache.py
from .base import ClientBase, _api_endpoints, handle_response
from .endpoints import (
l2cache_common,
l2cache_api_versions,
l2cache_endpoints_common,
l2cache_endpoints_v1,
)
from .auth import AuthClient
import requests
import json
import warnings
server_key = "l2cache_server_address"
def L2CacheClient(
    server_address=None, table_name=None, auth_client=None, api_version="latest"
):
    """Factory for a level-2 cache client bound to a chunkedgraph table."""
    if auth_client is None:
        auth_client = AuthClient()
    header = auth_client.request_header
    # Resolve the concrete API version and its endpoint templates.
    resolved_endpoints, resolved_version = _api_endpoints(
        api_version,
        server_key,
        server_address,
        l2cache_endpoints_common,
        l2cache_api_versions,
        header,
    )
    client_cls = client_mapping[resolved_version]
    return client_cls(
        server_address=server_address,
        auth_header=header,
        api_version=resolved_version,
        endpoints=resolved_endpoints,
        server_name=server_key,
        table_name=table_name,
    )
class L2CacheClientLegacy(ClientBase):
    """Client for the (experimental) level-2 chunk cache service."""

    def __init__(
        self,
        server_address,
        auth_header,
        api_version,
        endpoints,
        server_name,
        table_name=None,
    ):
        super(L2CacheClientLegacy, self).__init__(
            server_address, auth_header, api_version, endpoints, server_name
        )
        warnings.warn("L2Cache is in an experimental stage", UserWarning)
        # Every endpoint of this service is scoped to one chunkedgraph table.
        self._default_url_mapping["table_id"] = table_name

    @property
    def default_url_mapping(self):
        # Return a copy so callers can add per-request keys without
        # mutating the shared mapping.
        return self._default_url_mapping.copy()

    def get_l2data(self, l2_ids, attributes=None):
        """Gets the data for L2 ids

        Parameters
        ----------
        l2_ids : list of int
            Level-2 ids to look up.
        attributes : list of str, optional
            Attribute names to restrict the query to; all attributes if None.

        Returns
        -------
        dict
            keys are l2 ids, values are data
        """
        params = {"int64_as_str": False}
        if attributes is not None:
            params["attribute_names"] = ",".join(attributes)
        mapping = self.default_url_mapping
        url = self._endpoints["l2cache_data"].format_map(mapping)
        response = self.session.post(
            url,
            data=json.dumps({"l2_ids": l2_ids}),
            params=params,
        )
        return handle_response(response)

    def cache_metadata(self):
        """Retrieves the meta data for the cache

        Returns
        -------
        dict
            keys are attribute names, values are datatypes
        """
        mapping = self.default_url_mapping
        url = self._endpoints["l2cache_meta"].format_map(mapping)
        return handle_response(self.session.get(url))
# Maps L2 cache API version -> implementing class ("latest" aliases v1).
client_mapping = {
    1: L2CacheClientLegacy,
    "latest": L2CacheClientLegacy,
}
<file_sep>/docs/guide/annotation.rst
AnnotationEngine
================
The AnnotationClient is used to interact with the AnnotationEngine
service to create tables from existing schema, upload new data, and
download existing annotations. Note that annotations in the
AnnotationEngine are not linked to any particular segmentation, and thus
do not include any root ids. An annotation client is accessed with
``client.annotation``.
Getting existing tables
^^^^^^^^^^^^^^^^^^^^^^^
A list of the existing tables for the datastack can be found at with
``get_tables``.
.. code:: python
all_tables = client.annotation.get_tables()
all_tables[0]
Each table has three main properties that can be useful to know:
* ``table_name`` : The table name, used to refer to it when uploading or downloading annotations. This is also passed through to the table in the Materialized database.
* ``schema_name`` : The name of the table’s schema from EMAnnotationSchemas (see below).
* ``max_annotation_id`` : An upper limit on the number of annotations already contained in the table.
Downloading annotations
^^^^^^^^^^^^^^^^^^^^^^^
You can download the JSON representation of a data point through the
``get_annotation`` method. This can be useful if you need to look up
information on unmaterialized data, or to see what a properly templated
annotation looks like.
.. code:: python
table_name = all_tables[0]['table_name'] # 'ais_analysis_soma'
annotation_id = 100
client.annotation.get_annotation(annotation_id=annotation_id, table_name=table_name)
Create a new table
^^^^^^^^^^^^^^^^^^
One can create a new table with a specified schema with the
``create_table`` method:
.. code:: python
client.annotation.create_table(table_name='test_table',
schema_name='microns_func_coreg')
New data can be generated as a dict or list of dicts following the
schema and uploaded with ``post_annotation``. For example, a
``microns_func_coreg`` point needs to have: \* ``type`` set to
``microns_func_coreg`` \* ``pt`` set to a dict with ``position`` as a
key and the xyz location as a value. \* ``func_id`` set to an integer.
The following code would create a new annotation and then upload it to the service. Note that you get back the annotation id(s) of what you uploaded.
.. code:: python
new_data = {'type': 'microns_func_coreg',
'pt': {'position': [1,2,3]},
'func_id': 0}
client.annotation.post_annotation(table_name='test_table', data=[new_data])
There are methods to simplify annotation uploads if you have a pandas dataframe
whose structure mirrors the structure of the annotation schema you want to upload
.. code:: python
import pandas as pd
    df = pd.DataFrame([{'id':0,
                        'type': 'microns_func_coreg',
                        'pt_position': [1,2,3],
                        'func_id': 0},
                       {'id':1,
                        'type': 'microns_func_coreg',
                        'pt_position': [3,2,1],
                        'func_id': 2}])
client.annotation.post_annotation_df('test_table', df)
Note that here I specified the IDs of my annotations, which you can do,
but then its up to you to assure that the IDs don't collide with other IDs.
If you leave them blank then the service will assign the IDs for you.
There is a similar method for updating
:func:`caveclient.annotationengine.AnnotationClientV2.update_annotation_df`
<file_sep>/caveclient/infoservice.py
from .base import (
ClientBaseWithDataset,
ClientBaseWithDatastack,
_api_versions,
_api_endpoints,
handle_response,
)
from .auth import AuthClient
from .endpoints import (
infoservice_common,
infoservice_api_versions,
default_global_server_address,
)
from .format_utils import (
output_map_raw,
output_map_precomputed,
output_map_graphene,
format_raw,
)
import requests
from warnings import warn
SERVER_KEY = "i_server_address"
def InfoServiceClient(
    server_address=None,
    datastack_name=None,
    auth_client=None,
    api_version="latest",
    verify=True,
):
    """Factory for an InfoService client at the appropriate API version."""
    if server_address is None:
        server_address = default_global_server_address
    if auth_client is None:
        auth_client = AuthClient()
    header = auth_client.request_header
    # Resolve the concrete API version and its endpoint templates.
    resolved_endpoints, resolved_version = _api_endpoints(
        api_version,
        SERVER_KEY,
        server_address,
        infoservice_common,
        infoservice_api_versions,
        header,
    )
    client_cls = client_mapping[resolved_version]
    return client_cls(
        server_address,
        header,
        resolved_version,
        resolved_endpoints,
        SERVER_KEY,
        datastack_name,
        verify=verify,
    )
class InfoServiceClientV2(ClientBaseWithDatastack):
    """Client for the InfoService (API v2): datastack and aligned-volume metadata.

    Info records are cached per datastack in ``self.info_cache``; use
    ``use_stored=False`` or ``refresh_stored_data`` to re-query the server.
    """
    def __init__(
        self,
        server_address,
        auth_header,
        api_version,
        endpoints,
        server_name,
        datastack_name,
        verify=True,
    ):
        super(InfoServiceClientV2, self).__init__(
            server_address,
            auth_header,
            api_version,
            endpoints,
            server_name,
            datastack_name,
            verify=verify,
        )
        # Per-datastack cache of info records, filled by get_datastack_info.
        self.info_cache = dict()
        if datastack_name is not None:
            # Eagerly fetch the info record (performs a network request) so
            # the aligned-volume identifiers are available immediately.
            ds_info = self.get_datastack_info(datastack_name=datastack_name)
            self._aligned_volume_name = ds_info["aligned_volume"]["name"]
            self._aligned_volume_id = ds_info["aligned_volume"]["id"]
        else:
            self._aligned_volume_name = None
            self._aligned_volume_id = None
    @property
    def aligned_volume_name(self):
        # Name of the aligned volume for the configured datastack (or None).
        return self._aligned_volume_name
    @property
    def aligned_volume_id(self):
        # Numeric id of the aligned volume for the configured datastack (or None).
        return self._aligned_volume_id
    def get_datastacks(self):
        """Query which datastacks are available at the info service
        Returns
        -------
        list
            List of datastack names
        """
        endpoint_mapping = self.default_url_mapping
        url = self._endpoints["datastacks"].format_map(endpoint_mapping)
        response = self.session.get(url)
        return handle_response(response)
    def get_datastack_info(self, datastack_name=None, use_stored=True):
        """Gets the info record for a datastack
        Parameters
        ----------
        datastack_name : str, optional
            datastack to look up. If None, uses the one specified by the client. By default None
        use_stored : bool, optional
            If True and the information has already been queried for that datastack, then uses the cached version. If False, re-queries the information. By default True
        Returns
        -------
        dict or None
            The complete info record for the datastack
        """
        if datastack_name is None:
            datastack_name = self.datastack_name
        if datastack_name is None:
            raise ValueError("No Dataset set")
        if (not use_stored) or (datastack_name not in self.info_cache):
            endpoint_mapping = self.default_url_mapping
            endpoint_mapping["datastack_name"] = datastack_name
            url = self._endpoints["datastack_info"].format_map(endpoint_mapping)
            response = self.session.get(url)
            # NOTE(review): raise_for_status and handle_response both appear to
            # validate the response; presumably redundant — confirm in base.py.
            self.raise_for_status(response)
            self.info_cache[datastack_name] = handle_response(response)
        return self.info_cache.get(datastack_name, None)
    def _get_property(
        self,
        info_property,
        datastack_name=None,
        use_stored=True,
        format_for="raw",
        output_map=output_map_raw,
    ):
        # Shared helper: fetch the (possibly cached) info record and format one
        # field through output_map, keyed by format_for (falls back to format_raw).
        if datastack_name is None:
            datastack_name = self.datastack_name
        if datastack_name is None:
            raise ValueError("No Dataset set")
        self.get_datastack_info(datastack_name=datastack_name, use_stored=use_stored)
        value = self.info_cache[datastack_name].get(info_property, None)
        return output_map.get(format_for, format_raw)(value)
    def get_aligned_volumes(self):
        # List all aligned volumes known to the info service.
        endpoint_mapping = self.default_url_mapping
        url = self._endpoints["aligned_volumes"].format_map(endpoint_mapping)
        response = self.session.get(url)
        return handle_response(response)
    def get_aligned_volume_info(self, datastack_name: str = None, use_stored=True):
        """Gets the info record for a aligned_volume
        Parameters
        ----------
        datastack_name : str, optional
            datastack_name to look up. If None, uses the one specified by the client. By default None
        use_stored : bool, optional
            If True and the information has already been queried for that dataset, then uses the cached version. If False, re-queries the information. By default True
        Returns
        -------
        dict or None
            The complete info record for the aligned_volume
        """
        return self._get_property(
            "aligned_volume", datastack_name=datastack_name, use_stored=use_stored
        )
    def get_aligned_volume_info_by_id(
        self, aligned_volume_id: int = None, use_stored=True
    ):
        """Get the info record for an aligned volume by its numeric id.

        Note: use_stored is accepted but this method always queries the server.
        """
        if aligned_volume_id is None:
            aligned_volume_id = self._aligned_volume_id
        if aligned_volume_id is None:
            raise ValueError(
                "Must specify aligned_volume_id or provide datastack_name in init"
            )
        endpoint_mapping = self.default_url_mapping
        endpoint_mapping["aligned_volume_id"] = aligned_volume_id
        url = self._endpoints["aligned_volume_by_id"].format_map(endpoint_mapping)
        response = self.session.get(url)
        return handle_response(response)
    def local_server(self, datastack_name=None, use_stored=True):
        """URL of the datastack's local (per-datastack) server."""
        return self._get_property(
            "local_server",
            datastack_name=datastack_name,
            use_stored=use_stored,
            output_map=output_map_raw,
        )
    def annotation_endpoint(self, datastack_name=None, use_stored=True):
        """AnnotationEngine endpoint for a dataset.
        Parameters
        ----------
        datastack_name : str or None, optional
            Name of the datastack to look up. If None, uses the value specified by the client. Default is None.
        use_stored : bool, optional
            If True, uses the cached value if available. If False, re-queries the InfoService. Default is True.
        Returns
        -------
        str
            Location of the AnnotationEngine
        """
        local_server = self.local_server(
            datastack_name=datastack_name, use_stored=use_stored
        )
        return local_server + "/annotation"
    def image_source(self, datastack_name=None, use_stored=True, format_for="raw"):
        """Cloud path to the imagery for the dataset
        Parameters
        ----------
        datastack_name : str or None, optional
            Name of the datastack to look up. If None, uses the value specified by the client. Default is None.
        use_stored : bool, optional
            If True, uses the cached value if available. If False, re-queries the InfoService. Default is True.
        format_for : 'raw', 'cloudvolume', or 'neuroglancer', optional
            Formats the path for different uses.
            If 'raw' (default), the path in the InfoService is passed along.
            If 'cloudvolume', a "precomputed://gs://" type path is converted to a full https URL.
            If 'neuroglancer', a full https URL is converted to a "precomputed://gs://" type path.
        Returns
        -------
        str
            Formatted cloud path to the flat segmentation
        """
        # NOTE(review): format_for is accepted but not applied here — the raw
        # info value is returned regardless. Confirm whether formatting via
        # output_map_precomputed was intended.
        av_info = self.get_aligned_volume_info(
            datastack_name=datastack_name, use_stored=use_stored
        )
        return av_info["image_source"]
    def synapse_segmentation_source(
        self, datastack_name=None, use_stored=True, format_for="raw"
    ):
        """Cloud path to the synapse segmentation for a dataset
        Parameters
        ----------
        datastack_name : str or None, optional
            Name of the dataset to look up. If None, uses the value specified by the client. Default is None.
        use_stored : bool, optional
            If True, uses the cached value if available. If False, re-queries the InfoService. Default is True.
        format_for : 'raw', 'cloudvolume', or 'neuroglancer', optional
            Formats the path for different uses.
            If 'raw' (default), the path in the InfoService is passed along.
            If 'cloudvolume', a "precomputed://gs://" type path is converted to a full https URL.
            If 'neuroglancer', a full https URL is converted to a "precomputed://gs://" type path.
        Returns
        -------
        str
            Formatted cloud path to the synapse segmentation
        """
        return self._get_property(
            "synapse_segmentation_source",
            datastack_name=datastack_name,
            use_stored=use_stored,
            format_for=format_for,
            output_map=output_map_precomputed,
        )
    def segmentation_source(
        self, datastack_name=None, format_for="raw", use_stored=True
    ):
        """Cloud path to the chunkgraph-backed Graphene segmentation for a dataset
        Parameters
        ----------
        datastack_name : str or None, optional
            Name of the datastack to look up. If None, uses the value specified by the client. Default is None.
        use_stored : bool, optional
            If True, uses the cached value if available. If False, re-queries the InfoService. Default is True.
        format_for : 'raw', 'cloudvolume', or 'neuroglancer', optional
            Formats the path for different uses.
            If 'raw' (default), the path in the InfoService is passed along.
            If 'cloudvolume', a "graphene://https://" type path is used
            If 'neuroglancer', a "graphene://https://" type path is used, as needed by Neuroglancer.
        Returns
        -------
        str
            Formatted cloud path to the Graphene segmentation
        """
        # NOTE(review): output_map_raw is passed here even though format_for is
        # forwarded nowhere, and output_map_graphene is imported but unused at
        # module level — confirm whether graphene formatting was intended.
        return self._get_property(
            "segmentation_source",
            datastack_name=datastack_name,
            use_stored=use_stored,
            output_map=output_map_raw,
        )
    def refresh_stored_data(self):
        """Reload the stored info values from the server."""
        for ds in self.info_cache.keys():
            self.get_datastack_info(datastack_name=ds, use_stored=False)
    def viewer_site(self, datastack_name=None, use_stored=True):
        """Get the base Neuroglancer URL for the dataset"""
        return self._get_property(
            "viewer_site",
            datastack_name=datastack_name,
            use_stored=use_stored,
            format_for="raw",
        )
# Maps a requested API version to the client class implementing it;
# "latest" currently tracks version 2.
client_mapping = {
    2: InfoServiceClientV2,
    "latest": InfoServiceClientV2,
}
<file_sep>/caveclient/materializationengine.py
import logging
from cachetools import cached, TTLCache
from typing import ValuesView
from numpy.lib.function_base import iterable
from numpy.lib.twodim_base import _trilu_indices_form_dispatcher
import caveclient
from .base import (
ClientBaseWithDataset,
ClientBaseWithDatastack,
ClientBase,
_api_versions,
_api_endpoints,
handle_response,
)
from .auth import AuthClient
from .endpoints import materialization_api_versions, materialization_common
from .infoservice import InfoServiceClientV2
import requests
import time
import json
import numpy as np
from datetime import date, datetime, timezone, tzinfo
import pyarrow as pa
import itertools
from collections.abc import Iterable
from typing import Union
from .timeit import TimeIt
from IPython.display import HTML
import pandas as pd
import pytz
import warnings
# Config key under which the materialization server address is stored.
SERVER_KEY = "me_server_address"
def concatenate_position_columns(df, inplace=False):
    """Collapse x/y/z position columns into single array-valued columns.

    Consecutive columns sharing a prefix and ending in ``_x``, ``_y``, ``_z``
    (in that order) are replaced by one column named after the prefix whose
    values are ``numpy`` arrays of ``[x, y, z]``.

    Args:
        df (pd.DataFrame): dataframe to alter
        inplace (bool): whether to perform edits in place

    Returns:
        pd.DataFrame: dataframe with position columns concatenated
    """
    result = df if inplace else df.copy()
    # group the column index (captured once here) by its name minus the
    # trailing two characters, e.g. "pt_position_x" -> "pt_position"
    for prefix, cols in itertools.groupby(result.columns, key=lambda name: name[:-2]):
        col_names = list(cols)
        axis_suffix = "".join(name[-1:] for name in col_names)
        if axis_suffix == "xyz":
            result[prefix] = [np.array(row) for row in result[col_names].values.tolist()]
            if inplace:
                result.drop(col_names, axis=1, inplace=inplace)
            else:
                result = result.drop(col_names, axis=1, inplace=inplace)
    return result
class MEEncoder(json.JSONEncoder):
    """JSON encoder that serializes numpy arrays/64-bit scalars and dates.

    Arrays become nested lists, ``np.uint64``/``np.int64`` become plain ints,
    and ``datetime``/``date`` values become ISO-8601 strings.
    """

    def default(self, obj):
        # numpy arrays serialize as (nested) Python lists
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        # 64-bit numpy scalars are not native JSON ints
        if isinstance(obj, (np.uint64, np.int64)):
            return int(obj)
        # dates/datetimes serialize as ISO-8601 strings
        if isinstance(obj, (datetime, date)):
            return obj.isoformat()
        return json.JSONEncoder.default(self, obj)
def convert_timestamp(ts: datetime):
    """Coerce a timestamp to a timezone-aware UTC datetime.

    Parameters
    ----------
    ts : datetime.datetime or str
        Either a datetime (naive datetimes are interpreted as UTC; aware
        datetimes are converted to UTC) or an ISO-like string of the form
        ``YYYY-MM-DDTHH:MM:SS[.ffffff]``.

    Returns
    -------
    datetime.datetime
        Timezone-aware datetime in UTC.
    """
    if isinstance(ts, datetime):
        if ts.tzinfo is None:
            # Naive timestamps are interpreted as already being UTC.
            # stdlib timezone.utc replaces the previous pytz.UTC.localize,
            # which is equivalent for the fixed-offset UTC zone.
            return ts.replace(tzinfo=timezone.utc)
        else:
            return ts.astimezone(timezone.utc)
    try:
        dt = datetime.strptime(ts, "%Y-%m-%dT%H:%M:%S.%f")
    except ValueError:
        # Robustness: accept timestamps without fractional seconds, which the
        # ".%f"-requiring format would otherwise reject.
        dt = datetime.strptime(ts, "%Y-%m-%dT%H:%M:%S")
    return dt.replace(tzinfo=timezone.utc)
def MaterializationClient(
    server_address,
    datastack_name=None,
    auth_client=None,
    cg_client=None,
    synapse_table=None,
    api_version="latest",
    version=None,
    verify=True,
):
    """Factory for returning a MaterializationClient of the requested API version.

    Parameters
    ----------
    server_address : str
        server_address to use to connect to (i.e. https://minniev1.microns-daf.com)
    datastack_name : str
        Name of the datastack.
    auth_client : AuthClient or None, optional
        Authentication client to use to connect to server. If None, a default
        AuthClient is constructed.
    api_version : str or int (default: latest)
        What version of the api to use, 0: Legacy client (i.e www.dynamicannotationframework.com)
        2: new api version, (i.e. minniev1.microns-daf.com)
        'latest': default to the most recent (current 2)
    cg_client : caveclient.chunkedgraph.ChunkeGraphClient
        chunkedgraph client for live materializations
    synapse_table : str
        default synapse table for queries
    version : int or None
        default version to query; if None will default to latest version
    verify : bool
        whether to verify TLS certificates on requests

    Returns
    -------
    MaterializatonClientV2
        Materialization client instance for the requested API version.
    """
    if auth_client is None:
        auth_client = AuthClient()
    auth_header = auth_client.request_header
    # resolve the concrete endpoint table and numeric api version
    endpoints, api_version = _api_endpoints(
        api_version,
        SERVER_KEY,
        server_address,
        materialization_common,
        materialization_api_versions,
        auth_header,
    )
    # client_mapping (defined at module level) selects the class per version
    MatClient = client_mapping[api_version]
    return MatClient(
        server_address,
        auth_header,
        api_version,
        endpoints,
        SERVER_KEY,
        datastack_name,
        cg_client=cg_client,
        synapse_table=synapse_table,
        version=version,
        verify=verify,
    )
class MaterializatonClientV2(ClientBase):
    def __init__(
        self,
        server_address,
        auth_header,
        api_version,
        endpoints,
        server_name,
        datastack_name,
        cg_client=None,
        synapse_table=None,
        version=None,
        verify=True,
    ):
        # Initialize shared HTTP session / endpoint state in the base client.
        super(MaterializatonClientV2, self).__init__(
            server_address,
            auth_header,
            api_version,
            endpoints,
            server_name,
            verify=verify,
        )
        self._datastack_name = datastack_name
        if version is None:
            # Default to the newest materialization available on the server.
            version = self.most_recent_version()
        self._version = version
        # Chunkedgraph client used by live_query/map_filters; may be None,
        # in which case live queries raise.
        self.cg_client = cg_client
        # Default synapse table used by synapse_query when none is supplied.
        self.synapse_table = synapse_table
    @property
    def datastack_name(self):
        # Datastack this client was configured for (read-only).
        return self._datastack_name
    @property
    def version(self):
        # Materialization version used by default for queries.
        return self._version
    @property
    def homepage(self):
        # IPython HTML link to this datastack's materialization web view.
        url = (
            f"{self._server_address}/materialize/views/datastack/{self._datastack_name}"
        )
        return HTML(f'<a href="{url}" target="_blank">Materialization Engine</a>')
@version.setter
def version(self, x):
if int(x) in self.get_versions():
self._version = int(x)
else:
raise ValueError("Version not in materialized database")
def most_recent_version(self, datastack_name=None):
"""get the most recent version of materialization
for this datastack name
Args:
datastack_name (str, optional): datastack name to find most
recent materialization of.
If None, uses the one specified in the client.
"""
versions = self.get_versions(datastack_name=datastack_name)
return np.max(np.array(versions))
def get_versions(self, datastack_name=None):
"""get versions available
Args:
datastack_name ([type], optional): [description]. Defaults to None.
"""
if datastack_name is None:
datastack_name = self.datastack_name
endpoint_mapping = self.default_url_mapping
endpoint_mapping["datastack_name"] = datastack_name
url = self._endpoints["versions"].format_map(endpoint_mapping)
response = self.session.get(url)
self.raise_for_status(response)
return response.json()
def get_tables(self, datastack_name=None, version=None):
"""Gets a list of table names for a datastack
Parameters
----------
datastack_name : str or None, optional
Name of the datastack, by default None.
If None, uses the one specified in the client.
Will be set correctly if you are using the framework_client
version: int or None, optional
the version to query, else get the tables in the most recent version
Returns
-------
list
List of table names
"""
if datastack_name is None:
datastack_name = self.datastack_name
if version is None:
version = self.version
endpoint_mapping = self.default_url_mapping
endpoint_mapping["datastack_name"] = datastack_name
endpoint_mapping["version"] = version
# TODO fix up latest version
url = self._endpoints["tables"].format_map(endpoint_mapping)
response = self.session.get(url)
self.raise_for_status(response)
return response.json()
def get_annotation_count(self, table_name: str, datastack_name=None, version=None):
"""Get number of annotations in a table
Parameters
----------
table_name (str):
name of table to mark for deletion
datastack_name: str or None, optional,
Name of the datastack_name. If None, uses the one specified in the client.
version: int or None, optional
the version to query, else get the tables in the most recent version
Returns
-------
int
number of annotations
"""
if datastack_name is None:
datastack_name = self.datastack_name
endpoint_mapping = self.default_url_mapping
endpoint_mapping["datastack_name"] = datastack_name
endpoint_mapping["table_name"] = table_name
url = self._endpoints["table_count"].format_map(endpoint_mapping)
response = self.session.get(url)
self.raise_for_status(response)
return response.json()
def get_version_metadata(self, version: int = None, datastack_name: str = None):
"""get metadata about a version
Args:
version (int, optional): version number to get metadata about. Defaults to client default version.
datastack_name (str, optional): datastack to query. Defaults to client default datastack.
"""
if datastack_name is None:
datastack_name = self.datastack_name
if version is None:
version = self.version
endpoint_mapping = self.default_url_mapping
endpoint_mapping["datastack_name"] = datastack_name
endpoint_mapping["version"] = version
url = self._endpoints["version_metadata"].format_map(endpoint_mapping)
response = self.session.get(url)
d = handle_response(response)
d["time_stamp"] = convert_timestamp(d["time_stamp"])
d["expires_on"] = convert_timestamp(d["expires_on"])
return d
def get_timestamp(self, version: int = None, datastack_name: str = None):
"""Get datetime.datetime timestamp for a materialization version.
Parameters
----------
version : int or None, optional
Materialization version, by default None. If None, defaults to the value set in the client.
datastack_name : str or None, optional
Datastack name, by default None. If None, defaults to the value set in the client.
Returns
-------
datetime.datetime
Datetime when the materialization version was frozen.
"""
meta = self.get_version_metadata(version=version, datastack_name=datastack_name)
return convert_timestamp(meta["time_stamp"])
    @cached(cache=TTLCache(maxsize=100, ttl=60 * 60 * 12))
    def get_versions_metadata(self, datastack_name=None):
        """get the metadata for all the versions that are presently available and valid

        Results are cached for 12 hours (TTLCache above); the cache key
        includes all arguments, so None vs an explicit datastack name are
        cached separately.

        Args:
            datastack_name (str, optional): datastack to query. If None, defaults to the value set in the client.

        Returns:
            list[dict]: a list of metadata dictionaries; ``time_stamp`` and
                ``expires_on`` are converted to timezone-aware datetimes.
        """
        if datastack_name is None:
            datastack_name = self.datastack_name
        endpoint_mapping = self.default_url_mapping
        endpoint_mapping["datastack_name"] = datastack_name
        url = self._endpoints["versions_metadata"].format_map(endpoint_mapping)
        response = self.session.get(url)
        d = handle_response(response)
        for md in d:
            md["time_stamp"] = convert_timestamp(md["time_stamp"])
            md["expires_on"] = convert_timestamp(md["expires_on"])
        return d
    def get_table_metadata(
        self, table_name: str, datastack_name=None, version: int = None
    ):
        """Get metadata about a table

        Parameters
        ----------
        table_name (str):
            name of table to query
        datastack_name: str or None, optional,
            Name of the datastack_name.
            If None, uses the one specified in the client.
        version: int or None, optional
            version to query; if None, uses the client's default version.

        Returns
        -------
        dict
            metadata about the table; the server's separate
            voxel_resolution_x/y/z fields are repackaged into a single
            ``voxel_resolution`` list of [x, y, z].
        """
        if datastack_name is None:
            datastack_name = self.datastack_name
        if version is None:
            version = self.version
        endpoint_mapping = self.default_url_mapping
        endpoint_mapping["datastack_name"] = datastack_name
        endpoint_mapping["table_name"] = table_name
        endpoint_mapping["version"] = version
        url = self._endpoints["metadata"].format_map(endpoint_mapping)
        response = self.session.get(url)
        metadata_d = handle_response(response)
        # collapse the per-axis resolution fields into one list
        vx = metadata_d.pop("voxel_resolution_x", None)
        vy = metadata_d.pop("voxel_resolution_y", None)
        vz = metadata_d.pop("voxel_resolution_z", None)
        metadata_d["voxel_resolution"] = [vx, vy, vz]
        return metadata_d
# def get_annotation(self, table_name, annotation_ids,
# materialization_version=None,
# datastack_name=None):
# """ Retrieve an annotation or annotations by id(s) and table name.
# Parameters
# ----------
# table_name : str
# Name of the table
# annotation_ids : int or iterable
# ID or IDS of the annotation to retreive
# materialization_version: int or None
# materialization version to use
# If None, uses the one specified in the client
# datastack_name : str or None, optional
# Name of the datastack_name.
# If None, uses the one specified in the client.
# Returns
# -------
# list
# Annotation data
# """
# if materialization_version is None:
# materialization_version = self.version
# if datastack_name is None:
# datastack_name = self.datastack_name
# endpoint_mapping = self.default_url_mapping
# endpoint_mapping["datastack_name"] = datastack_name
# endpoint_mapping["table_name"] = table_name
# endpoint_mapping["version"] = materialization_version
# url = self._endpoints["annotations"].format_map(endpoint_mapping)
# try:
# iter(annotation_ids)
# except TypeError:
# annotation_ids = [annotation_ids]
# params = {
# 'annotation_ids': ",".join([str(a) for a in annotation_ids])
# }
# response = self.session.get(url, params=params)
# self.raise_for_status(response)
# return response.json()
    def _format_query_components(
        self,
        datastack_name,
        version,
        tables,
        select_columns,
        suffixes,
        filter_in_dict,
        filter_out_dict,
        filter_equal_dict,
        filter_spatial_dict,
        return_pyarrow,
        split_positions,
        offset,
        limit,
    ):
        """Assemble the URL, JSON body, query params and Accept-Encoding
        header value for a simple or join query.

        Filter dicts are expected in {table_name: {column: value(s)}} form.
        Returns a tuple (url, data, query_args, encoding).
        """
        endpoint_mapping = self.default_url_mapping
        endpoint_mapping["datastack_name"] = datastack_name
        endpoint_mapping["version"] = version
        data = {}
        query_args = {}
        query_args["return_pyarrow"] = return_pyarrow
        query_args["split_positions"] = split_positions
        # one table -> simple_query endpoint; several -> join_query endpoint
        # with the table list carried in the request body
        if len(tables) == 1:
            endpoint_mapping["table_name"] = tables[0]
            url = self._endpoints["simple_query"].format_map(endpoint_mapping)
        else:
            data["tables"] = tables
            url = self._endpoints["join_query"].format_map(endpoint_mapping)
        # only include keys the server should act on
        if filter_in_dict is not None:
            data["filter_in_dict"] = filter_in_dict
        if filter_out_dict is not None:
            data["filter_notin_dict"] = filter_out_dict
        if filter_equal_dict is not None:
            data["filter_equal_dict"] = filter_equal_dict
        if filter_spatial_dict is not None:
            data["filter_spatial_dict"] = filter_spatial_dict
        if select_columns is not None:
            data["select_columns"] = select_columns
        if offset is not None:
            data["offset"] = offset
        if suffixes is not None:
            data["suffixes"] = suffixes
        if limit is not None:
            assert limit > 0
            data["limit"] = limit
        # pyarrow payloads are requested uncompressed; JSON asks for gzip
        if return_pyarrow:
            encoding = ""
        else:
            encoding = "gzip"
        return url, data, query_args, encoding
def query_table(
self,
table: str,
filter_in_dict=None,
filter_out_dict=None,
filter_equal_dict=None,
filter_spatial_dict=None,
join_args=None,
select_columns=None,
offset: int = None,
limit: int = None,
datastack_name: str = None,
return_df: bool = True,
split_positions: bool = False,
materialization_version: int = None,
timestamp: datetime = None,
):
"""generic query on materialization tables
Args:
table: 'str'
filter_in_dict (dict , optional):
keys are column names, values are allowed entries.
Defaults to None.
filter_out_dict (dict, optional):
keys are column names, values are not allowed entries.
Defaults to None.
filter_equal_dict (dict, optional):
inner layer: keys are column names, values are specified entry.
Defaults to None.
filter_spatial (dict, optional):
inner layer: keys are column names, values are bounding boxes
as [[min_x, min_y,min_z],[max_x, max_y, max_z]]
Expressed in units of the voxel_resolution of this dataset.
offset (int, optional): offset in query result
limit (int, optional): maximum results to return (server will set upper limit, see get_server_config)
select_columns (list of str, optional): columns to select. Defaults to None.
suffixes: (list[str], optional): suffixes to use on duplicate columns
offset (int, optional): result offset to use. Defaults to None.
will only return top K results.
datastack_name (str, optional): datastack to query.
If None defaults to one specified in client.
return_df (bool, optional): whether to return as a dataframe
default True, if False, data is returned as json (slower)
split_positions (bool, optional): whether to break position columns into x,y,z columns
default False, if False data is returned as one column with [x,y,z] array (slower)
materialization_version (int, optional): version to query.
If None defaults to one specified in client.
timestamp (datetime.datetime, optional): timestamp to query
If passsed will do a live query. Error if also passing a materialization version
Returns:
pd.DataFrame: a pandas dataframe of results of query
"""
if timestamp is not None:
if materialization_version is not None:
raise ValueError("cannot specify timestamp and materialization version")
else:
return self.live_query(
table,
timestamp,
filter_in_dict=filter_in_dict,
filter_out_dict=filter_out_dict,
filter_equal_dict=filter_equal_dict,
filter_spatial_dict=filter_spatial_dict,
join_args=join_args,
select_columns=select_columns,
offset=offset,
limit=limit,
datastack_name=datastack_name,
split_positions=split_positions,
post_filter=True,
)
if materialization_version is None:
materialization_version = self.version
if datastack_name is None:
datastack_name = self.datastack_name
url, data, query_args, encoding = self._format_query_components(
datastack_name,
materialization_version,
[table],
select_columns,
None,
{table: filter_in_dict} if filter_in_dict is not None else None,
{table: filter_out_dict} if filter_out_dict is not None else None,
{table: filter_equal_dict} if filter_equal_dict is not None else None,
{table: filter_spatial_dict} if filter_spatial_dict is not None else None,
return_df,
True,
offset,
limit,
)
response = self.session.post(
url,
data=json.dumps(data, cls=MEEncoder),
headers={"Content-Type": "application/json", "Accept-Encoding": encoding},
params=query_args,
stream=~return_df,
)
self.raise_for_status(response)
if return_df:
with warnings.catch_warnings():
warnings.simplefilter(action="ignore", category=FutureWarning)
warnings.simplefilter(action="ignore", category=DeprecationWarning)
df = pa.deserialize(response.content)
if split_positions:
return df
else:
return concatenate_position_columns(df, inplace=True)
else:
return response.json()
def join_query(
self,
tables,
filter_in_dict=None,
filter_out_dict=None,
filter_equal_dict=None,
filter_spatial_dict=None,
join_args=None,
select_columns=None,
offset: int = None,
limit: int = None,
suffixes: list = None,
datastack_name: str = None,
return_df: bool = True,
split_positions: bool = False,
materialization_version: int = None,
):
"""generic query on materialization tables
Args:
tables: list of lists with length 2 or 'str'
list of two lists: first entries are table names, second
entries are the columns used for the join
filter_in_dict (dict of dicts, optional):
outer layer: keys are table names
inner layer: keys are column names, values are allowed entries.
Defaults to None.
filter_out_dict (dict of dicts, optional):
outer layer: keys are table names
inner layer: keys are column names, values are not allowed entries.
Defaults to None.
filter_equal_dict (dict of dicts, optional):
outer layer: keys are table names
inner layer: keys are column names, values are specified entry.
Defaults to None.
filter_spatial (dict of dicts, optional):
outer layer: keys are table names:
inner layer: keys are column names, values are bounding boxes
as [[min_x, min_y,min_z],[max_x, max_y, max_z]]
Expressed in units of the voxel_resolution of this dataset.
Defaults to None
select_columns (list of str, optional): columns to select. Defaults to None.
offset (int, optional): result offset to use. Defaults to None.
will only return top K results.
limit (int, optional): maximum results to return (server will set upper limit, see get_server_config)
suffixes (list[str], optional): suffixes to use for duplicate columns same order as tables
datastack_name (str, optional): datastack to query.
If None defaults to one specified in client.
return_df (bool, optional): whether to return as a dataframe
default True, if False, data is returned as json (slower)
split_positions (bool, optional): whether to break position columns into x,y,z columns
default False, if False data is returned as one column with [x,y,z] array (slower)
materialization_version (int, optional): version to query.
If None defaults to one specified in client.
Returns:
pd.DataFrame: a pandas dataframe of results of query
"""
if materialization_version is None:
materialization_version = self.version
if datastack_name is None:
datastack_name = self.datastack_name
url, data, query_args, encoding = self._format_query_components(
datastack_name,
materialization_version,
tables,
select_columns,
suffixes,
filter_in_dict,
filter_out_dict,
filter_equal_dict,
filter_spatial_dict,
return_df,
True,
offset,
limit,
)
response = self.session.post(
url,
data=json.dumps(data, cls=MEEncoder),
headers={"Content-Type": "application/json", "Accept-Encoding": encoding},
params=query_args,
stream=~return_df,
)
self.raise_for_status(response)
if return_df:
with warnings.catch_warnings():
warnings.simplefilter(action="ignore", category=FutureWarning)
warnings.simplefilter(action="ignore", category=DeprecationWarning)
df = pa.deserialize(response.content)
if split_positions:
return df
else:
return concatenate_position_columns(df, inplace=True)
    def map_filters(self, filters, timestamp, timestamp_past):
        """Translate a list of root-id filter dictionaries from a point in
        the future (timestamp) to a point in the past (timestamp_past).

        Args:
            filters (list[dict]): filter dictionaries whose keys ending in
                "root_id" hold root ID values (scalar or iterable) valid at
                `timestamp`.
            timestamp (datetime.datetime): time at which the given root IDs
                are expected to be valid.
            timestamp_past (datetime.datetime): earlier time to translate
                the root IDs back to.

        Returns:
            tuple: (new_filters, future_id_map) where new_filters mirrors
                `filters` with root IDs replaced by their past equivalents,
                and future_id_map maps past IDs forward again.

        Raises:
            ValueError: if any root ID is expired at `timestamp` or was
                created after it.
        """
        timestamp = convert_timestamp(timestamp)
        timestamp_past = convert_timestamp(timestamp_past)
        new_filters = []
        root_ids = []
        # collect every root id referenced by any *root_id filter column
        for filter_dict in filters:
            if filter_dict is not None:
                for col, val in filter_dict.items():
                    if col.endswith("root_id"):
                        if not isinstance(val, (Iterable, np.ndarray)):
                            root_ids.append([val])
                        else:
                            root_ids.append(val)
        # if they are all None then we can safely return now
        if len(root_ids) == 0:
            return [None, None, None], {}
        root_ids = np.unique(np.concatenate(root_ids))
        # validate every root id is alive in the window [creation, timestamp]
        filter_timed_end = self.cg_client.is_latest_roots(root_ids, timestamp=timestamp)
        filter_timed_start = self.cg_client.get_root_timestamps(root_ids) < timestamp
        filter_timestamp = np.logical_and(filter_timed_start, filter_timed_end)
        if not np.all(filter_timestamp):
            roots_too_old = root_ids[~filter_timed_end]
            roots_too_recent = root_ids[~filter_timed_start]
            if len(roots_too_old) > 0:
                too_old_str = f"{roots_too_old} are expired, "
            else:
                too_old_str = ""
            if len(roots_too_recent) > 0:
                too_recent_str = f"{roots_too_recent} are too recent, "
            else:
                too_recent_str = ""
            raise ValueError(
                f"Timestamp incompatible with IDs: {too_old_str}{too_recent_str}use chunkedgraph client to find valid ID(s)"
            )
        # one future id may map to several past ids
        id_mapping = self.cg_client.get_past_ids(
            root_ids, timestamp_past=timestamp_past, timestamp_future=timestamp
        )
        # rebuild each filter dict with past ids substituted in
        for filter_dict in filters:
            if filter_dict is None:
                new_filters.append(filter_dict)
            else:
                new_dict = {}
                for col, root_ids in filter_dict.items():
                    if col.endswith("root_id"):
                        if not isinstance(root_ids, (Iterable, np.ndarray)):
                            new_dict[col] = id_mapping["past_id_map"][root_ids]
                        else:
                            new_dict[col] = np.concatenate(
                                [id_mapping["past_id_map"][v] for v in root_ids]
                            )
                    else:
                        new_dict[col] = root_ids
                new_filters.append(new_dict)
        return new_filters, id_mapping["future_id_map"]
    def _update_rootids(self, df: pd.DataFrame, timestamp: datetime, future_map: dict):
        """Bring every *_root_id column of `df` up to date at `timestamp`.

        First applies `future_map` (past id -> future id), then re-derives
        root ids from the paired *_supervoxel_id columns for any rows whose
        root id is no longer the latest at `timestamp`.
        Returns the (possibly copied) dataframe.
        """
        # post process the dataframe to update all the root_ids columns
        # with the most up to date get roots
        if len(future_map) == 0:
            future_map = None
        if future_map is not None:
            # pyarrow can make dataframes read only. Copying resets that.
            df = df.copy()
        sv_columns = [c for c in df.columns if c.endswith("supervoxel_id")]
        with TimeIt("is_latest_roots"):
            all_root_ids = np.empty(0, dtype=np.int64)
            # go through the columns and collect all the root_ids to check
            # to see if they need updating
            for sv_col in sv_columns:
                root_id_col = sv_col[: -len("supervoxel_id")] + "root_id"
                # use the future map to update rootIDs
                if future_map is not None:
                    df[root_id_col].replace(future_map, inplace=True)
                all_root_ids = np.append(all_root_ids, df[root_id_col].values.copy())
            uniq_root_ids = np.unique(all_root_ids)
            del all_root_ids
            # root id 0 marks "out of segmentation"; never look it up
            uniq_root_ids = uniq_root_ids[uniq_root_ids != 0]
            logging.info(f"uniq_root_ids {uniq_root_ids}")
            is_latest_root = self.cg_client.is_latest_roots(
                uniq_root_ids, timestamp=timestamp
            )
            latest_root_ids = uniq_root_ids[is_latest_root]
            # re-add 0 so rows with no segmentation count as "already latest"
            latest_root_ids = np.concatenate([[0], latest_root_ids])
            # go through the columns and collect all the supervoxel ids to update
            all_svids = np.empty(0, dtype=np.int64)
            all_is_latest = []
            all_svid_lengths = []
            for sv_col in sv_columns:
                with TimeIt(f"find svids {sv_col}"):
                    root_id_col = sv_col[: -len("supervoxel_id")] + "root_id"
                    svids = df[sv_col].values
                    root_ids = df[root_id_col]
                    is_latest_root = np.isin(root_ids, latest_root_ids)
                    all_is_latest.append(is_latest_root)
                    n_svids = len(svids[~is_latest_root])
                    all_svid_lengths.append(n_svids)
                    logging.info(f"{sv_col} has {n_svids} to update")
                    all_svids = np.append(all_svids, svids[~is_latest_root])
            logging.info(f"num zero svids: {np.sum(all_svids==0)}")
            logging.info(f"all_svids dtype {all_svids.dtype}")
            logging.info(f"all_svid_lengths {all_svid_lengths}")
        with TimeIt("get_roots"):
            # find the up to date root_ids for those supervoxels
            updated_root_ids = self.cg_client.get_roots(all_svids, timestamp=timestamp)
            del all_svids
        # loop through the columns again replacing the root ids with their updated
        # supervoxelids
        # k walks the concatenated updated_root_ids array, one column's
        # stale rows at a time, in the same order they were appended above
        k = 0
        for is_latest_root, n_svids, sv_col in zip(
            all_is_latest, all_svid_lengths, sv_columns
        ):
            with TimeIt(f"replace_roots {sv_col}"):
                root_id_col = sv_col[: -len("supervoxel_id")] + "root_id"
                root_ids = df[root_id_col].values.copy()
                uroot_id = updated_root_ids[k : k + n_svids]
                k += n_svids
                root_ids[~is_latest_root] = uroot_id
                # ran into an isssue with pyarrow producing read only columns
                df[root_id_col] = None
                df[root_id_col] = root_ids
        return df
    def live_query(
        self,
        table: str,
        timestamp: datetime,
        filter_in_dict=None,
        filter_out_dict=None,
        filter_equal_dict=None,
        filter_spatial_dict=None,
        join_args=None,
        select_columns=None,
        offset: int = None,
        limit: int = None,
        datastack_name: str = None,
        split_positions: bool = False,
        post_filter: bool = True,
    ):
        """generic query on materialization tables, recalculated at `timestamp`

        Args:
            table: 'str'
            timestamp (datetime.datetime): time to materialize (in utc)
                pass datetime.datetime.utcnow() for present time
            filter_in_dict (dict , optional):
                keys are column names, values are allowed entries.
                Defaults to None.
            filter_out_dict (dict, optional):
                keys are column names, values are not allowed entries.
                Defaults to None.
            filter_equal_dict (dict, optional):
                inner layer: keys are column names, values are specified entry.
                Defaults to None.
            filter_spatial_dict (dict, optional):
                inner layer: keys are column names, values are bounding boxes
                as [[min_x, min_y,min_z],[max_x, max_y, max_z]]
                Expressed in units of the voxel_resolution of this dataset.
                Defaults to None
            offset (int, optional): offset in query result
            limit (int, optional): maximum results to return (server will set upper limit, see get_server_config)
            select_columns (list of str, optional): columns to select. Defaults to None.
            datastack_name (str, optional): datastack to query.
                If None defaults to one specified in client.
            split_positions (bool, optional): whether to break position columns into x,y,z columns
                default False, if False data is returned as one column with [x,y,z] array (slower)
            post_filter (bool, optional): whether to filter down the result based upon the filters specified
                if false, it will return the query with present root_ids in the root_id columns,
                but the rows will reflect the filters translated into their past IDs.
                So if, for example, a cell had a false merger split off since the last materialization.
                those annotations on that incorrect portion of the cell will be included if this is False,
                but will be filtered down if this is True. (Default=True)

        Returns:
            pd.DataFrame: a pandas dataframe of results of query
        """
        timestamp = convert_timestamp(timestamp)
        return_df = True
        if self.cg_client is None:
            raise ValueError("You must have a cg_client to run live_query")
        if datastack_name is None:
            datastack_name = self.datastack_name
        with TimeIt("find_mat_version"):
            # we want to find the most recent materialization
            # in which the timestamp given is in the future
            mds = self.get_versions_metadata()
            materialization_version = None
            # make sure the materialization's are increasing in ID/time
            for md in sorted(mds, key=lambda x: x["id"]):
                ts = md["time_stamp"]
                if timestamp > ts:
                    materialization_version = md["version"]
                    timestamp_start = ts
            # if none of the available versions are before
            # this timestamp, then we cannot support the query
            if materialization_version is None:
                raise (
                    ValueError(
                        """The timestamp you passed is not recent enough
                        for the materialization versions that are available"""
                    )
                )
        # first we want to translate all these filters into the IDs at the
        # most recent materialization
        with TimeIt("map_filters"):
            past_filters, future_map = self.map_filters(
                [filter_in_dict, filter_out_dict, filter_equal_dict],
                timestamp,
                timestamp_start,
            )
            past_filter_in_dict, past_filter_out_dict, past_equal_dict = past_filters
            if past_equal_dict is not None:
                # when doing a filter equal in the past
                # we translate it to a filter_in, as 1 ID might
                # be multiple IDs in the past.
                # so we want to update the filter_in dict
                cols = [col for col in past_equal_dict.keys()]
                for col in cols:
                    if col.endswith("root_id"):
                        if past_filter_in_dict is None:
                            past_filter_in_dict = {}
                        past_filter_in_dict[col] = past_equal_dict.pop(col)
                if len(past_equal_dict) == 0:
                    past_equal_dict = None
        with TimeIt("package query"):
            url, data, query_args, encoding = self._format_query_components(
                datastack_name,
                materialization_version,
                [table],
                None,
                None,
                {table: past_filter_in_dict}
                if past_filter_in_dict is not None
                else None,
                {table: past_filter_out_dict}
                if past_filter_out_dict is not None
                else None,
                {table: past_equal_dict} if past_equal_dict is not None else None,
                {table: filter_spatial_dict}
                if filter_spatial_dict is not None
                else None,
                True,
                True,
                offset,
                limit,
            )
        logging.debug(f"query_args: {query_args}")
        logging.debug(f"query data: {data}")
        with TimeIt("query materialize"):
            response = self.session.post(
                url,
                data=json.dumps(data, cls=MEEncoder),
                headers={
                    "Content-Type": "application/json",
                    "Accept-Encoding": encoding,
                },
                params=query_args,
                # NOTE(review): `~return_df` is bitwise NOT on a bool (-2,
                # truthy), so this always streams; `not return_df` is likely
                # the intended expression — confirm and fix alongside
                # query_table/join_query.
                stream=~return_df,
                verify=self.verify,
            )
            self.raise_for_status(response)
        with TimeIt("deserialize"):
            with warnings.catch_warnings():
                # pa.deserialize is deprecated; silence its warnings here
                warnings.simplefilter(action="ignore", category=FutureWarning)
                warnings.simplefilter(action="ignore", category=DeprecationWarning)
                df = pa.deserialize(response.content)
            if not split_positions:
                concatenate_position_columns(df, inplace=True)
        # post process the dataframe to update all the root_ids columns
        # with the most up to date get roots
        df = self._update_rootids(df, timestamp, future_map)
        # apply the original filters to remove rows
        # from this result which are not relevant
        if post_filter:
            with TimeIt("post_filter"):
                if filter_in_dict is not None:
                    for col, val in filter_in_dict.items():
                        df = df[df[col].isin(val)]
                if filter_out_dict is not None:
                    for col, val in filter_out_dict.items():
                        df = df[~df[col].isin(val)]
                if filter_equal_dict is not None:
                    for col, val in filter_equal_dict.items():
                        df = df[df[col] == val]
        return df
def synapse_query(
self,
pre_ids: Union[int, Iterable, np.ndarray] = None,
post_ids: Union[int, Iterable, np.ndarray] = None,
bounding_box=None,
bounding_box_column: str = "post_pt_position",
timestamp: datetime = None,
remove_autapses: bool = True,
include_zeros: bool = True,
limit: int = None,
offset: int = None,
split_positions: bool = False,
synapse_table: str = None,
datastack_name: str = None,
materialization_version: int = None,
):
"""query synapses
Args:
pre_ids (Union[int, Iterable, optional): pre_synaptic cell(s) to query. Defaults to None.
post_ids (Union[int, Iterable, optional): post synaptic cell(s) to query. Defaults to None.
timestamp (datetime.datetime, optional): timestamp to query (optional).
If passed recalculate query at timestamp, do not pass with materialization_verison
bounding_box: [[min_x, min_y, min_z],[max_x, max_y, max_z]] bounding box to filter
synapse locations. Expressed in units of the voxel_resolution of this dataset (optional)
bounding_box_column (str, optional): which synapse location column to filter by (Default to "post_pt_position")
remove_autapses (bool, optional): post-hoc filter out synapses. Defaults to True.
include_zeros (bool, optional): whether to include synapses to/from id=0 (out of segmentation). Defaults to True.
limit (int, optional): number of synapses to limit, Defaults to None (server side limit applies)
offset (int, optional): number of synapses to offset query, Defaults to None (no offset).
split_positions (bool, optional): whether to return positions as seperate x,y,z columns (faster)
defaults to False
synapse_table (str, optional): synapse table to query. If None, defaults to self.synapse_table.
datastack_name: (str, optional): datastack to query
materialization_version (int, optional): version to query.
defaults to self.materialization_version if not specified
"""
filter_in_dict = {}
filter_equal_dict = {}
filter_out_dict = None
filter_equal_dict = {}
filter_spatial_dict = None
if synapse_table is None:
if self.synapse_table is None:
raise ValueError(
"Must define synapse table in class init or pass to method"
)
synapse_table = self.synapse_table
if not include_zeros:
filter_out_dict = {"pre_pt_root_id": [0], "post_pt_root_id": [0]}
if pre_ids is not None:
if isinstance(pre_ids, (Iterable, np.ndarray)):
filter_in_dict["pre_pt_root_id"] = pre_ids
else:
filter_equal_dict["pre_pt_root_id"] = pre_ids
if post_ids is not None:
if isinstance(post_ids, (Iterable, np.ndarray)):
filter_in_dict["post_pt_root_id"] = post_ids
else:
filter_equal_dict["post_pt_root_id"] = post_ids
if bounding_box is not None:
filter_spatial_dict = {bounding_box_column: bounding_box}
df = self.query_table(
synapse_table,
filter_in_dict=filter_in_dict,
filter_out_dict=filter_out_dict,
filter_equal_dict=filter_equal_dict,
filter_spatial_dict=filter_spatial_dict,
offset=offset,
limit=limit,
split_positions=split_positions,
materialization_version=materialization_version,
timestamp=timestamp,
)
if remove_autapses:
return df.query("pre_pt_root_id!=post_pt_root_id")
else:
return df
client_mapping = {2: MaterializatonClientV2, "latest": MaterializatonClientV2}
<file_sep>/caveclient/auth.py
import json
import logging
import requests
import webbrowser
import os
from .endpoints import auth_endpoints_v1, default_global_server_address
import urllib
# Directory where CloudVolume-style secret files live.
default_token_location = "~/.cloudvolume/secrets"
# Default secret file name (value redacted in this copy of the source).
default_token_name = "<PASSWORD>"
# Older secret file names that are still checked for backward compatibility.
deprecated_token_names = ["<PASSWORD>"]
# Key under which the token is stored inside the JSON secret file.
default_token_key = "token"
# Full default path and the deprecated fallbacks derived from the names above.
default_token_file = f"{default_token_location}/{default_token_name}"
deprecated_token_files = [
    f"{default_token_location}/{f}" for f in deprecated_token_names
]
class AuthClient(object):
    """Client to find and use auth tokens to access the dynamic annotation framework services.

    Parameters
    ----------
    token_file : str, optional
        Path to a JSON key:value file holding your auth token.
        By default, "~/.cloudvolume/secrets/cave-secret.json"
        (will check deprecated token name "chunkedgraph-secret.json" as well)
    token_key : str, optional
        Key for the token in the token_file.
        By default, "token"

    token : str or None, optional
        Direct entry of the token as a string. If provided, overrides the files.
        If None, attempts to use the file paths.

    server_address : str, optional,
        URL to the auth server. By default, uses a default server address.
    """

    def __init__(
        self,
        token_file=None,
        token_key=None,
        token=None,
        server_address=default_global_server_address,
    ):
        if token_file is None:
            # Prefer a server-specific secret file if one exists, otherwise
            # fall back to the generic default location.
            server = urllib.parse.urlparse(server_address).netloc
            server_file = server + "-cave-secret.json"
            server_file_path = os.path.join(default_token_location, server_file)
            server_file_path = os.path.expanduser(server_file_path)
            if os.path.isfile(server_file_path):
                token_file = server_file_path
            else:
                token_file = default_token_file
        self._token_file = os.path.expanduser(token_file)

        if token_key is None:
            token_key = default_token_key
        self._token_key = token_key

        if token is None:
            token = self._load_token(self._token_file, self._token_key)
            if token is None:
                # then check the deprecated token
                for deprecated_file in deprecated_token_files:
                    _dep_file = os.path.expanduser(deprecated_file)
                    token = self._load_token(_dep_file, self._token_key)
                    if token is not None:
                        logging.warning(
                            f"""file location {deprecated_file} is deprecated,
rename to 'cave-secret.json' or 'SERVER_ADDRESS-cave-secret.json"""
                        )
                        # then we found a token and we should break
                        break
        self._token = token

        self._server_address = server_address
        self._default_endpoint_mapping = {"auth_server_address": self._server_address}

    @property
    def token(self):
        """Secret token used to authenticate yourself to the Connectome Annotation Versioning Engine services."""
        return self._token

    @token.setter
    def token(self, new_token):
        self._token = new_token
        # A directly assigned token did not come from a file key.
        self._token_key = None

    def get_token(
        self,
        token_key=None,
    ):
        """Load a token with a given key from the specified token file

        Parameters
        ----------
        token_key : str or None, optional
            key in the token file JSON, by default None. If None, uses 'token'.
        """
        # BUG FIX: previously a None key was passed straight into the JSON
        # lookup and the method always loaded None. Default the key as the
        # docstring promises.
        if token_key is None:
            token_key = default_token_key
        self._token_key = token_key
        self._token = self._load_token(self._token_file, self._token_key)

    def _load_token(self, token_file, token_key):
        # Return the token stored under token_key, or None if the file or
        # key does not exist.
        if token_file is None:
            return None

        if os.path.exists(token_file):
            with open(token_file, "r") as f:
                token = json.load(f).get(token_key, None)
        else:
            token = None
        return token

    def get_new_token(self, open=False):
        """Currently, returns instructions for getting a new token based on the current settings and saving it to the local environment. New OAuth tokens are currently not able to be retrieved programmatically.

        Parameters
        ----------
        open : bool, optional
            If True, opens a web browser to the web page where you can generate a new token.
        """
        auth_url = auth_endpoints_v1["create_token"].format_map(
            self._default_endpoint_mapping
        )
        txt = f"""New Tokens need to be acquired by hand. Please follow the following steps:
                1) Go to: {auth_url} to create a new token.
                2) Log in with your Google credentials and copy the token shown afterward.
                3a) Save it to your computer with: client.auth.save_token(token="<PASSWORD>")
                or
                3b) Set it for the current session only with client.auth.token = "<PASSWORD>"
                Note: If you need to save or load multiple tokens, please read the documentation for details.
                Warning! Creating a new token by finishing step 2 will invalidate the previous token!"""
        print(txt)
        if open is True:
            webbrowser.open(auth_url)
        return None

    def save_token(
        self,
        token=None,
        token_key=default_token_key,
        overwrite=False,
        token_file=None,
        switch_token=True,
    ):
        """Conveniently save a token in the correct format.

        After getting a new token by following the instructions in `authclient.get_new_token()`, you can save it with a fully default configuration by running:

        token = '<PASSWORD>'

        authclient.save_token(token=token)

        Now on next load, authclient=AuthClient() will make an authclient instance using this token.

        If you would like to specify more information about the json file where the token will be stored, see the parameters below.

        Parameters
        ----------
        token : str, optional
            New token to save, by default None
        token_key : str, optional
            Key for the token in the token_file json, by default "token"
        overwrite : bool, optional
            Allow an existing token to be changed, by default False
        token_file : str, optional
            Path to the token file, by default None. If None, uses the default file location specified above.
        switch_token : bool, optional
            If True, switch the auth client over into using the new token, by default True
        """
        if token is None:
            token = self.token

        if token_file is not None:
            save_token_file = token_file
        else:
            save_token_file = self._token_file

        if save_token_file is None:
            raise ValueError("No token file is set")
        # BUG FIX: expand '~' up front so the existence check, makedirs and
        # open below all operate on the same real path (previously only the
        # makedirs path was expanded).
        save_token_file = os.path.expanduser(save_token_file)

        if os.path.exists(save_token_file):
            with open(save_token_file, "r") as f:
                secrets = json.load(f)

            if overwrite is False and token_key in secrets:
                raise ValueError(
                    f'Key "{token_key}" already exists in token file "{save_token_file}"'
                )
        else:
            secrets = {}

        secrets[token_key] = token

        secret_dir, _ = os.path.split(save_token_file)
        if not os.path.exists(secret_dir):
            os.makedirs(secret_dir)

        with open(save_token_file, "w") as f:
            json.dump(secrets, f)

        if switch_token:
            self._token = token
            self._token_key = token_key
            self._token_file = save_token_file

    @property
    def request_header(self):
        """Formatted request header with the specified token"""
        if self.token is not None:
            auth_header = {"Authorization": f"Bearer {self.token}"}
            return auth_header
        else:
            return {}
<file_sep>/caveclient/timeit.py
import builtins
import logging
from time import time
# Global nesting depth shared by all TimeIt context managers (debug bookkeeping).
indent = 0


class TimeIt:
    """Context manager that logs the wall-clock duration of the enclosed block.

    Any extra positional or keyword arguments given to the constructor are
    printed on entry, for ad-hoc tracing.
    """

    def __init__(self, message="", *args, **kwargs):
        self._message = message
        self._args = args
        self._kwargs = kwargs
        self._start = None

    def __enter__(self):
        logging.debug(f"start {self._message}")
        global indent
        # BUG FIX: balance the decrement in __exit__; previously indent was
        # only ever decremented and drifted negative.
        indent += 2
        if self._args:
            args_str = " ".join(str(x) for x in self._args)
            print(args_str)
        if self._kwargs:
            # BUG FIX: iterate items(); iterating the dict itself yields only
            # keys, so the k, v unpacking raised ValueError for any kwargs.
            kwargs_str = " ".join(f"{k}:{v}" for k, v in self._kwargs.items())
            print(kwargs_str)
        self._start = time()

    def __exit__(self, *args):
        global indent
        indent -= 2
        logging.debug(f"end {self._message} -- {time()-self._start}")
<file_sep>/caveclient/annotationengine.py
from .base import (
ClientBaseWithDataset,
ClientBaseWithDatastack,
ClientBase,
_api_versions,
_api_endpoints,
handle_response,
)
from .auth import AuthClient
from .endpoints import annotation_common, annotation_api_versions
from .infoservice import InfoServiceClientV2
import requests
import time
import json
import numpy as np
from datetime import date, datetime
import pandas as pd
from typing import Iterable, Mapping
SERVER_KEY = "ae_server_address"
class AEEncoder(json.JSONEncoder):
    """JSON encoder that handles numpy arrays, numpy uint64 scalars, and dates."""

    def default(self, obj):
        # Convert numpy containers/scalars to plain Python equivalents and
        # dates/datetimes to ISO-8601 strings; defer everything else to the
        # base class (which raises TypeError for unserializable objects).
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        elif isinstance(obj, np.uint64):
            return int(obj)
        elif isinstance(obj, (datetime, date)):
            return obj.isoformat()
        return super().default(obj)
def AnnotationClient(
    server_address,
    dataset_name=None,
    aligned_volume_name=None,
    auth_client=None,
    api_version="latest",
    verify=True,
):
    """Factory for returning AnnotationClient

    Parameters
    ----------
    server_address : str
        server_address to use to connect to (i.e. https://minniev1.microns-daf.com)
    dataset_name : str, optional
        Name of the dataset (used by the legacy api_version <= 1 clients).
    aligned_volume_name : str, optional
        Name of the aligned volume (used by api_version >= 2 clients).
    auth_client : AuthClient or None, optional
        Authentication client to use to connect to server. If None, do not use authentication.
    api_version : str or int (default: latest)
        What version of the api to use, 0: Legacy client (i.e www.dynamicannotationframework.com)
        2: new api version, (i.e. minniev1.microns-daf.com)
        'latest': default to the most recent (current 2)
    verify : str (default : True)
        whether to verify https

    Returns
    -------
    ClientBaseWithDatastack
        List of datastack names for available datastacks on the annotation engine
    """
    if auth_client is None:
        auth_client = AuthClient()
    auth_header = auth_client.request_header

    endpoints, api_version = _api_endpoints(
        api_version,
        SERVER_KEY,
        server_address,
        annotation_common,
        annotation_api_versions,
        auth_header,
    )
    AnnoClient = client_mapping[api_version]
    # v2+ clients are keyed by aligned volume name; the legacy client takes
    # a dataset name in the same positional slot.
    scope_name = aligned_volume_name if api_version > 1 else dataset_name
    return AnnoClient(
        server_address,
        auth_header,
        api_version,
        endpoints,
        SERVER_KEY,
        scope_name,
        verify=verify,
    )
class AnnotationClientV2(ClientBase):
    """Client for the v2 AnnotationEngine REST API, scoped to one aligned volume."""

    def __init__(
        self,
        server_address,
        auth_header,
        api_version,
        endpoints,
        server_name,
        aligned_volume_name,
        verify=True,
    ):
        super(AnnotationClientV2, self).__init__(
            server_address,
            auth_header,
            api_version,
            endpoints,
            server_name,
            verify=verify,
        )
        self._aligned_volume_name = aligned_volume_name

    @property
    def aligned_volume_name(self):
        """Default aligned volume used when a method is not given one explicitly."""
        return self._aligned_volume_name

    def get_tables(self, aligned_volume_name=None):
        """Gets a list of table names for a aligned_volume_name

        Parameters
        ----------
        aligned_volume_name : str or None, optional
            Name of the aligned_volume, by default None.
            If None, uses the one specified in the client.
            Will be set correctly if you are using the framework_client

        Returns
        -------
        list
            List of table names
        """
        if aligned_volume_name is None:
            aligned_volume_name = self.aligned_volume_name
        endpoint_mapping = self.default_url_mapping
        endpoint_mapping["aligned_volume_name"] = aligned_volume_name
        url = self._endpoints["tables"].format_map(endpoint_mapping)
        # (removed a stray debug print(url) left over from development)
        response = self.session.get(url)
        return handle_response(response)

    def get_annotation_count(self, table_name: str, aligned_volume_name=None):
        """Get number of annotations in a table

        Parameters
        ----------
        table_name (str):
            name of table to query
        aligned_volume_name: str or None, optional,
            Name of the aligned_volume. If None, uses the one specified in the client.

        Returns
        -------
        int
            number of annotations
        """
        if aligned_volume_name is None:
            aligned_volume_name = self.aligned_volume_name
        endpoint_mapping = self.default_url_mapping
        endpoint_mapping["aligned_volume_name"] = aligned_volume_name
        endpoint_mapping["table_name"] = table_name

        url = self._endpoints["table_count"].format_map(endpoint_mapping)

        response = self.session.get(url)
        return handle_response(response)

    def get_table_metadata(self, table_name: str, aligned_volume_name=None):
        """Get metadata about a table

        Parameters
        ----------
        table_name (str):
            name of table to query
        aligned_volume_name: str or None, optional,
            Name of the aligned_volume. If None, uses the one specified in the client.

        Returns
        -------
        json
            metadata about table
        """
        if aligned_volume_name is None:
            aligned_volume_name = self.aligned_volume_name
        endpoint_mapping = self.default_url_mapping
        endpoint_mapping["aligned_volume_name"] = aligned_volume_name
        endpoint_mapping["table_name"] = table_name

        url = self._endpoints["table_info"].format_map(endpoint_mapping)
        response = self.session.get(url)
        metadata_d = handle_response(response)
        # Repackage the three scalar resolution fields into one [x, y, z] list.
        vx = metadata_d.pop("voxel_resolution_x")
        vy = metadata_d.pop("voxel_resolution_y")
        vz = metadata_d.pop("voxel_resolution_z")
        metadata_d["voxel_resolution"] = [vx, vy, vz]
        return metadata_d

    def delete_table(self, table_name: str, aligned_volume_name=None):
        """Marks a table for deletion
        requires super admin privileges

        Parameters
        ----------
        table_name (str):
            name of table to mark for deletion
        aligned_volume_name: str or None, optional,
            Name of the aligned_volume. If None, uses the one specified in the client.

        Returns
        -------
        json
            Response JSON
        """
        if aligned_volume_name is None:
            aligned_volume_name = self.aligned_volume_name
        endpoint_mapping = self.default_url_mapping
        endpoint_mapping["aligned_volume_name"] = aligned_volume_name
        endpoint_mapping["table_name"] = table_name

        url = self._endpoints["table_info"].format_map(endpoint_mapping)
        response = self.session.delete(url)
        return handle_response(response)

    def create_table(
        self,
        table_name,
        schema_name,
        description,
        voxel_resolution,
        reference_table=None,
        flat_segmentation_source=None,
        user_id=None,
        aligned_volume_name=None,
    ):
        """Creates a new data table based on an existing schema

        Parameters
        ----------
        table_name: str
            Name of the new table. Cannot be the same as an existing table
        schema_name: str
            Name of the schema for the new table.
        description: str
            Human readable description for what is in the table.
            Should include information about who generated the table
            What data it covers, and how it should be interpreted.
            And who should you talk to if you want to use it.
            An Example:
            a manual synapse table to detect chandelier synapses
            on 81 PyC cells with complete AISs
            [created by Agnes - <EMAIL>, uploaded by Forrest]
        voxel_resolution: list[float]
            voxel resolution points will be uploaded in, typically nm, i.e [1,1,1] means nanometers
            [4,4,40] would be 4nm, 4nm, 40nm voxels
        reference_table: str or None
            If the schema you are using is a reference schema
            Meaning it is an annotation of another annotation.
            Then you need to specify what table those annotations are in.
        flat_segmentation_source: str or None
            the source to a flat segmentation that corresponds to this table
            i.e. precomputed:\\gs:\\mybucket\this_tables_annotation
        user_id: int
            If you are uploading this schema on someone else's behalf
            and you want to link this table with their ID, you can specify it here
            Otherwise, the table will be created with your userID in the user_id column.
        aligned_volume_name: str or None, optional,
            Name of the aligned_volume. If None, uses the one specified in the client.

        Returns
        -------
        json
            Response JSON
        """
        if aligned_volume_name is None:
            aligned_volume_name = self.aligned_volume_name
        endpoint_mapping = self.default_url_mapping
        endpoint_mapping["aligned_volume_name"] = aligned_volume_name
        url = self._endpoints["tables"].format_map(endpoint_mapping)

        # Scalar resolution fields are what the server schema expects.
        metadata = {
            "description": description,
            "voxel_resolution_x": float(voxel_resolution[0]),
            "voxel_resolution_y": float(voxel_resolution[1]),
            "voxel_resolution_z": float(voxel_resolution[2]),
        }
        if user_id is not None:
            metadata["user_id"] = user_id
        if reference_table is not None:
            metadata["reference_table"] = reference_table
        if flat_segmentation_source is not None:
            metadata["flat_segmentation_source"] = flat_segmentation_source

        data = {
            "schema_type": schema_name,
            "table_name": table_name,
            "metadata": metadata,
        }

        response = self.session.post(url, json=data)
        return handle_response(response, as_json=False)

    def get_annotation(self, table_name, annotation_ids, aligned_volume_name=None):
        """Retrieve an annotation or annotations by id(s) and table name.

        Parameters
        ----------
        table_name : str
            Name of the table
        annotation_ids : int or iterable
            ID or IDS of the annotation to retreive
        aligned_volume_name : str or None, optional
            Name of the aligned_volume. If None, uses the one specified in the client.

        Returns
        -------
        list
            Annotation data
        """
        if aligned_volume_name is None:
            aligned_volume_name = self.aligned_volume_name
        endpoint_mapping = self.default_url_mapping
        endpoint_mapping["aligned_volume_name"] = aligned_volume_name
        endpoint_mapping["table_name"] = table_name
        url = self._endpoints["annotations"].format_map(endpoint_mapping)
        # Accept either a single id or any iterable of ids.
        try:
            iter(annotation_ids)
        except TypeError:
            annotation_ids = [annotation_ids]

        params = {"annotation_ids": ",".join([str(a) for a in annotation_ids])}
        response = self.session.get(url, params=params)
        return handle_response(response)

    def post_annotation(self, table_name, data, aligned_volume_name=None):
        """Post one or more new annotations to a table in the AnnotationEngine

        Parameters
        ----------
        table_name : str
            Name of the table where annotations will be added
        data : dict or list,
            A list of (or a single) dict of schematized annotation data matching the target table.
        aligned_volume_name : str or None, optional
            Name of the aligned_volume. If None, uses the one specified in the client.

        Returns
        -------
        json
            Response JSON
        """
        if aligned_volume_name is None:
            aligned_volume_name = self.aligned_volume_name
        if isinstance(data, dict):
            data = [data]

        endpoint_mapping = self.default_url_mapping
        endpoint_mapping["aligned_volume_name"] = aligned_volume_name
        endpoint_mapping["table_name"] = table_name
        url = self._endpoints["annotations"].format_map(endpoint_mapping)

        # Defensive: wrap any remaining non-iterable payload in a list.
        try:
            iter(data)
        except TypeError:
            data = [data]

        data = {"annotations": data}

        response = self.session.post(
            url,
            data=json.dumps(data, cls=AEEncoder),
            headers={"Content-Type": "application/json"},
        )
        return handle_response(response)

    @staticmethod
    def process_position_columns(
        df: pd.DataFrame, position_columns: (Iterable[str] or Mapping[str, str] or None)
    ):
        """process a dataframe into a list of dictionaries, nesting position columns

        Args:
            df (pd.DataFrame): dataframe to process
            position_columns (Iterable[str] or Mapping[str, str] or None): see post_annotation_df

        Returns:
            json list of annotations ready for posting
        """
        if position_columns is None:
            # Default: treat every column ending in "_position" as a position.
            position_columns = [c for c in df.columns if c.endswith("_position")]
        if isinstance(position_columns, (list, np.ndarray, pd.Index)):
            position_columns = {c: c.rsplit("_", 1)[0] for c in position_columns}
        # isinstance rather than an exact type() check so dict subclasses work
        if not isinstance(position_columns, dict):
            raise ValueError("position_columns must be a list, dict or None")

        data = df.to_dict(orient="records")
        for d in data:
            for k, v in position_columns.items():
                pos = d.pop(k)
                d[v] = {"position": pos}
        return data

    def post_annotation_df(
        self,
        table_name: str,
        df: pd.DataFrame,
        position_columns: (Iterable[str] or Mapping[str, str] or None),
        aligned_volume_name=None,
    ):
        """Post one or more new annotations to a table in the AnnotationEngine

        Parameters
        ----------
        table_name : str
            Name of the table where annotations will be added
        df : pd.DataFrame
            A pandas dataframe containing the annotations. Columns should be fields in schema,
            position columns need to be called out in position_columns argument.
        position_columns: dict or (list or np.array or pd.Index) or None
            if None, will look for all columns with 'X_position' in the name and assume they go
            in fields called "X".
            if Iterable assumes each column given ends in _position.
            (i.e. ['pt_position'] if 'pt' is the name of the position field in schema)
            if Mapping, keys are names of columns in dataframe, values are the names of the fields
            (i.e. {'pt_column': 'pt'} would be correct if you had one column named 'pt_column'
            which needed to go into a schema with a position column called 'pt')
        aligned_volume_name : str or None, optional
            Name of the aligned_volume. If None, uses the one specified in the client.

        Returns
        -------
        json
            Response JSON
        """
        if aligned_volume_name is None:
            aligned_volume_name = self.aligned_volume_name

        data = self.process_position_columns(df, position_columns)

        return self.post_annotation(
            table_name, data, aligned_volume_name=aligned_volume_name
        )

    def update_annotation(self, table_name, data, aligned_volume_name=None):
        """Update one or more new annotations to a table in the AnnotationEngine
        Note update is implemented by deleting the old annotation
        and inserting a new annotation, which will receive a new ID.

        Parameters
        ----------
        table_name : str
            Name of the table where annotations will be added
        data : dict or list,
            A list of (or a single) dict of schematized annotation data matching the target table.
            each dict must contain an "id" field which is the ID of the annotation to update
        aligned_volume_name : str or None, optional
            Name of the aligned_volume. If None, uses the one specified in the client.

        Returns
        -------
        json
            Response JSON: a list of new annotation IDs.
        """
        if aligned_volume_name is None:
            aligned_volume_name = self.aligned_volume_name
        if isinstance(data, dict):
            data = [data]

        endpoint_mapping = self.default_url_mapping
        endpoint_mapping["aligned_volume_name"] = aligned_volume_name
        endpoint_mapping["table_name"] = table_name
        url = self._endpoints["annotations"].format_map(endpoint_mapping)

        try:
            iter(data)
        except TypeError:
            # BUG FIX: this branch previously assigned to an unrelated name
            # (annotation_ids), so a non-iterable payload was never wrapped.
            data = [data]

        data = {"annotations": data}

        response = self.session.put(url, json=data)
        return handle_response(response)

    def update_annotation_df(
        self,
        table_name: str,
        df: pd.DataFrame,
        position_columns: (Iterable[str] or Mapping[str, str] or None),
        aligned_volume_name=None,
    ):
        """Update one or more annotations to a table in the AnnotationEngine using a dataframe as format

        Parameters
        ----------
        table_name : str
            Name of the table where annotations will be added
        df : pd.DataFrame
            A pandas dataframe containing the annotations. Columns should be fields in schema,
            position columns need to be called out in position_columns argument.
        position_columns: dict or (list or np.array or pd.Index) or None
            if None, will look for all columns with 'X_position' in the name and assume they go
            in fields called "X".
            if Iterable assumes each column given ends in _position.
            (i.e. ['pt_position'] if 'pt' is the name of the position field in schema)
            if Mapping, keys are names of columns in dataframe, values are the names of the fields
            (i.e. {'pt_column': 'pt'} would be correct if you had one column named 'pt_column'
            which needed to go into a schema with a position column called 'pt')
        aligned_volume_name : str or None, optional
            Name of the aligned_volume. If None, uses the one specified in the client.

        Returns
        -------
        json
            Response JSON
        """
        if aligned_volume_name is None:
            aligned_volume_name = self.aligned_volume_name

        data = self.process_position_columns(df, position_columns)

        return self.update_annotation(
            table_name, data, aligned_volume_name=aligned_volume_name
        )

    def delete_annotation(self, table_name, annotation_ids, aligned_volume_name=None):
        """Delete one or more annotations in a table.

        Parameters
        ----------
        table_name : str
            Name of the table where annotations will be deleted
        annotation_ids : int or iterable
            ID or IDs of the annotation(s) to delete
        aligned_volume_name : str or None, optional
            Name of the aligned_volume. If None, uses the one specified in the client.

        Returns
        -------
        json
            Response JSON
        """
        if aligned_volume_name is None:
            aligned_volume_name = self.aligned_volume_name
        endpoint_mapping = self.default_url_mapping
        endpoint_mapping["aligned_volume_name"] = aligned_volume_name
        endpoint_mapping["table_name"] = table_name
        url = self._endpoints["annotations"].format_map(endpoint_mapping)
        try:
            iter(annotation_ids)
        except TypeError:
            annotation_ids = [annotation_ids]

        data = {"annotation_ids": annotation_ids}

        response = self.session.delete(
            url,
            data=json.dumps(data, cls=AEEncoder),
            headers={"Content-Type": "application/json"},
        )
        return handle_response(response)
# Map annotation API version -> client implementation; "latest" tracks the newest.
client_mapping = {
    2: AnnotationClientV2,
    "latest": AnnotationClientV2,
}
<file_sep>/caveclient/endpoints.py
# Default server used when a client is constructed without an explicit address.
default_global_server_address = "https://global.daf-apis.com"

# -------------------------------
# ------ AnnotationEngine endpoints
# -------------------------------
# URL templates below are Python format strings; the {..._server_address} and
# other placeholders are filled in via str.format_map by the clients.

annotation_common = {}
anno_legacy = "{ae_server_address}/annotation"
annotation_endpoints_legacy = {
    "datasets": anno_legacy + "/datasets",
    "table_names": anno_legacy + "/dataset/{dataset_name}",
    "existing_annotation": anno_legacy
    + "/dataset/{dataset_name}/{table_name}/{annotation_id}",
    "new_annotation": anno_legacy + "/dataset/{dataset_name}/{table_name}",
}
anno_v2 = "{ae_server_address}/annotation/api/v2"
annotation_endpoints_v2 = {
    "tables": anno_v2 + "/aligned_volume/{aligned_volume_name}/table",
    "table_info": anno_v2 + "/aligned_volume/{aligned_volume_name}/table/{table_name}",
    "annotations": anno_v2
    + "/aligned_volume/{aligned_volume_name}/table/{table_name}/annotations",
    "table_count": anno_v2
    + "/aligned_volume/{aligned_volume_name}/table/{table_name}/count",
}

# -------------------------------
# ------ Materialization endpoints
# -------------------------------

materialization_common = {}
mat_v2_api = "{me_server_address}/materialize/api/v2"
materialization_endpoints_v2 = {
    "simple_query": mat_v2_api
    + "/datastack/{datastack_name}/version/{version}/table/{table_name}/query",
    "join_query": mat_v2_api + "/datastack/{datastack_name}/version/{version}/query",
    "annotations": mat_v2_api
    + "/datastack/{datastack_name}/version/{version}/table/{table_name}",
    "table_count": mat_v2_api
    + "/datastack/{datastack_name}/version/{version}/table/{table_name}/count",
    "versions": mat_v2_api + "/datastack/{datastack_name}/versions",
    "version_metadata": mat_v2_api + "/datastack/{datastack_name}/version/{version}",
    "tables": mat_v2_api + "/datastack/{datastack_name}/version/{version}/tables",
    "metadata": mat_v2_api
    + "/datastack/{datastack_name}/version/{version}/table/{table_name}/metadata",
    "versions_metadata": mat_v2_api + "/datastack/{datastack_name}/metadata",
}

materialization_api_versions = {2: materialization_endpoints_v2}
annotation_api_versions = {0: annotation_endpoints_legacy, 2: annotation_endpoints_v2}

# -------------------------------
# ------ Infoservice endpoints
# -------------------------------

infoservice_common = {}
info_v1 = "{i_server_address}/info/api"
infoservice_endpoints_v1 = {
    "datasets": info_v1 + "/datasets",
    "dataset_info": info_v1 + "/dataset/{dataset_name}",
}
info_v2 = "{i_server_address}/info/api/v2"
infoservice_endpoints_v2 = {
    "aligned_volumes": info_v2 + "/aligned_volume",
    "aligned_volume_info": info_v2 + "/aligned_volume/{aligned_volume_name}",
    "aligned_volume_by_id": info_v2 + "/aligned_volume/id/{aligned_volume_id}",
    "datastacks": info_v2 + "/datastacks",
    "datastack_info": info_v2 + "/datastack/full/{datastack_name}",
}

infoservice_api_versions = {1: infoservice_endpoints_v1, 2: infoservice_endpoints_v2}

# -------------------------------
# ------ Pychunkedgraph endpoints
# -------------------------------

pcg_common = "{cg_server_address}/segmentation"
chunkedgraph_endpoints_common = {
    "get_api_versions": pcg_common + "/api/versions",
    "info": pcg_common + "/table/{table_id}/info",
}

pcg_legacy = "{cg_server_address}/segmentation/1.0"
chunkedgraph_endpoints_legacy = {
    # "handle_table": "{cg_server_address}/segmentation/1.0/table",
    "handle_root": pcg_legacy + "/{table_id}/graph/root",
    "handle_children": pcg_legacy + "/segment/{node_id}/children",
    # "info": pcg_legacy + "/{table_id}/info",
    "leaves_from_root": pcg_legacy + "/{table_id}/segment/{root_id}/leaves",
    "merge_log": pcg_legacy + "/{table_id}/segment/{root_id}/merge_log",
    "change_log": pcg_legacy + "/{table_id}/segment/{root_id}/change_log",
    "contact_sites": pcg_legacy + "/{table_id}/segment/{root_id}/contact_sites",
    "cloudvolume_path": "graphene://" + pcg_legacy + "/{table_id}",
}

pcg_v1 = "{cg_server_address}/segmentation/api/v1"
pcg_meshing_v1 = "{cg_server_address}/meshing/api/v1"
chunkedgraph_endpoints_v1 = {
    "handle_root": pcg_v1 + "/table/{table_id}/node/{supervoxel_id}/root",
    "handle_roots": pcg_v1 + "/table/{table_id}/roots",
    "handle_children": pcg_v1 + "/table/{table_id}/node/{root_id}/children",
    "leaves_from_root": pcg_v1 + "/table/{table_id}/node/{root_id}/leaves",
    "do_merge": pcg_v1 + "/table/{table_id}/merge",
    "get_roots": pcg_v1 + "/table/{table_id}/roots_binary",
    "merge_log": pcg_v1 + "/table/{table_id}/root/{root_id}/merge_log",
    "change_log": pcg_v1 + "/table/{table_id}/root/{root_id}/change_log",
    "tabular_change_log": pcg_v1 + "/table/{table_id}/tabular_change_log_many",
    "contact_sites": pcg_v1 + "/table/{table_id}/node/{root_id}/contact_sites",
    "contact_sites_pairwise": pcg_v1
    + "/table/{table_id}/contact_sites_pair/{root_id_1}/{root_id_2}",
    "cloudvolume_path": "graphene://" + pcg_v1 + "/{table_id}",
    "find_path": pcg_v1 + "/table/{table_id}/graph/find_path",
    "lvl2_graph": pcg_v1 + "/table/{table_id}/node/{root_id}/lvl2_graph",
    "remesh_level2_chunks": pcg_meshing_v1 + "/table/{table_id}/remeshing",
    "get_subgraph": pcg_v1 + "/table/{table_id}/node/{root_id}/subgraph",
    "handle_lineage_graph": pcg_v1 + "/table/{table_id}/root/{root_id}/lineage_graph",
    "past_id_mapping": pcg_v1 + "/table/{table_id}/past_id_mapping",
    "operation_details": pcg_v1 + "/table/{table_id}/operation_details",
    "is_latest_roots": pcg_v1 + "/table/{table_id}/is_latest_roots",
    "root_timestamps": pcg_v1 + "/table/{table_id}/root_timestamps",
    "delta_roots": pcg_v1 + "/table/{table_id}/delta_roots",
}

chunkedgraph_api_versions = {
    0: chunkedgraph_endpoints_legacy,
    1: chunkedgraph_endpoints_v1,
}

# -------------------------------
# ------ EMAnnotationSchemas endpoints
# -------------------------------

schema_common = "{emas_server_address}/schema"
schema_endpoints_common = {
    "get_api_versions": schema_common + "/versions",
}

schema_v1 = "{emas_server_address}/schema"
schema_endpoints_v1 = {
    "schema": schema_v1 + "/type",
    "schema_definition": schema_v1 + "/type/{schema_type}",
}
schema_v2 = "{emas_server_address}/schema/api/v2"
schema_endpoints_v2 = {
    "schema": schema_v2 + "/type",
    "schema_definition": schema_v2 + "/type/{schema_type}",
}

schema_api_versions = {1: schema_endpoints_v1, 2: schema_endpoints_v2}

# -------------------------------
# ------ StateServer endpoints
# -------------------------------

jsonservice_common = {}
json_v1 = "{json_server_address}/nglstate/api/v1"
jsonservice_endpoints_v1 = {
    "upload_state": json_v1 + "/post",
    "upload_state_w_id": json_v1 + "/post/{state_id}",
    "get_state": json_v1 + "/{state_id}",
    "get_state_raw": json_v1 + "/raw/{state_id}",
}

json_legacy = "{json_server_address}/nglstate"
jsonservice_endpoints_legacy = {
    "upload_state": json_legacy + "/post",
    "get_state": json_legacy + "/{state_id}",
    "get_state_raw": json_legacy + "/raw/{state_id}",
}

jsonservice_api_versions = {
    0: jsonservice_endpoints_legacy,
    1: jsonservice_endpoints_v1,
}

# -------------------------------
# ------ Auth endpoints
# -------------------------------

auth_common = {}
v1_auth = "{auth_server_address}/auth/api/v1"
auth_endpoints_v1 = {
    "refresh_token": v1_auth + "/refresh_token",
    "create_token": v1_auth + "/create_token",
}

auth_api_versions = {
    1: auth_endpoints_v1,
}

# -------------------------------
# ------ L2Cache endpoints
# -------------------------------

# NOTE(review): this base path reuses "/schema" like schema_common above --
# it is currently unused (the common dict below is empty) but confirm it is
# intentional before relying on it.
l2cache_common = "{l2cache_server_address}/schema"
l2cache_endpoints_common = {
    # "get_api_versions": schema_common + "/versions",
}

l2cache_v1 = "{l2cache_server_address}/l2cache/api/v1"
l2cache_endpoints_v1 = {
    "l2cache_data": l2cache_v1 + "/table/{table_id}/attributes",
    "l2cache_meta": l2cache_v1 + "/attribute_metadata",
}

l2cache_api_versions = {1: l2cache_endpoints_v1}
<file_sep>/caveclient/__init__.py
# Package version string (update here on release).
__version__ = "4.4.1"
from .frameworkclient import CAVEclient
<file_sep>/tests/test_materialization.py
import pytest
import requests
from caveclient import CAVEclient, materializationengine
import os
from caveclient.endpoints import (
materialization_endpoints_v2,
chunkedgraph_endpoints_v1,
chunkedgraph_endpoints_common,
)
import pandas as pd
import responses
import pyarrow as pa
from urllib.parse import urlencode
from .conftest import test_info, TEST_LOCAL_SERVER, TEST_DATASTACK
import datetime
import time
import numpy as np
def test_info_d(myclient):
    # Smoke test: the info client should return the datastack-info fixture
    # unchanged (see `test_info` in conftest).
    info = myclient.info.get_datastack_info()
    assert info == test_info
def binary_body_match(body):
    """Build a matcher for the `responses` library that compares the raw
    request body byte-for-byte against `body`.

    Returns a callable of one argument (the request body) that evaluates
    to True only on exact equality.
    """
    expected = body

    def match(request_body):
        return expected == request_body

    return match
class TestChunkedgraphException(Exception):
    """Raised by the mocks when bad values make it through to the chunkedgraph."""
class TestMatclient:
    """End-to-end tests of the materialization client.

    HTTP traffic is intercepted with the `responses` library; chunkedgraph
    lookups (get_roots, past ids, latest-root checks, root timestamps) are
    replaced with deterministic in-memory mocks via pytest-mock.
    """

    # Template values used to fill in the endpoint URL patterns.
    default_mapping = {
        "me_server_address": TEST_LOCAL_SERVER,
        "cg_server_address": TEST_LOCAL_SERVER,
        "table_id": test_info["segmentation_source"].split("/")[-1],
        "datastack_name": TEST_DATASTACK,
        "table_name": test_info["synapse_table"],
        "version": 1,
    }
    endpoints = materialization_endpoints_v2
    @responses.activate
    def test_matclient(self, myclient, mocker):
        # --- mock the version/metadata endpoints the client queries first ---
        endpoint_mapping = self.default_mapping
        api_versions_url = chunkedgraph_endpoints_common["get_api_versions"].format_map(
            endpoint_mapping
        )
        responses.add(responses.GET, url=api_versions_url, json=[0, 1], status=200)
        versionurl = self.endpoints["versions"].format_map(endpoint_mapping)
        responses.add(responses.GET, url=versionurl, json=[1], status=200)
        url = self.endpoints["simple_query"].format_map(endpoint_mapping)
        query_d = {"return_pyarrow": True, "split_positions": True}
        query_string = urlencode(query_d)
        url = url + "?" + query_string
        # The POST body the client is expected to send for this query;
        # the mocked response only fires if the body matches exactly.
        correct_query_data = {
            "filter_in_dict": {test_info["synapse_table"]: {"pre_pt_root_id": [500]}},
            "filter_notin_dict": {
                test_info["synapse_table"]: {"post_pt_root_id": [501]}
            },
            "filter_equal_dict": {test_info["synapse_table"]: {"size": 100}},
            "offset": 0,
            "limit": 1000,
        }
        # Canned server reply: a pickled dataframe re-serialized as pyarrow,
        # matching the content-type the real service uses.
        df = pd.read_pickle("tests/test_data/synapse_query_split.pkl")
        context = pa.default_serialization_context()
        serialized = context.serialize(df)
        responses.add(
            responses.POST,
            url=url,
            body=serialized.to_buffer().to_pybytes(),
            headers={"content-type": "x-application/pyarrow"},
            match=[responses.json_params_matcher(correct_query_data)],
        )
        df = myclient.materialize.query_table(
            test_info["synapse_table"],
            filter_in_dict={"pre_pt_root_id": [500]},
            filter_out_dict={"post_pt_root_id": [501]},
            filter_equal_dict={"size": 100},
            limit=1000,
            offset=0,
        )
        assert len(df) == 1000
        assert type(df) == pd.DataFrame
        # --- version metadata used to derive the materialization timestamp ---
        correct_metadata = [
            {
                "version": 1,
                "expires_on": "2021-04-19T08:10:00.255735",
                "id": 84,
                "valid": True,
                "time_stamp": "2021-04-12T08:10:00.255735",
                "datastack": "minnie65_phase3_v1",
            }
        ]
        past_timestamp = materializationengine.convert_timestamp(
            datetime.datetime.strptime(
                correct_metadata[0]["time_stamp"], "%Y-%m-%dT%H:%M:%S.%f"
            )
        )
        md_url = self.endpoints["versions_metadata"].format_map(endpoint_mapping)
        responses.add(responses.GET, url=md_url, json=correct_metadata, status=200)
        # bad_time predates the materialization; live_query must reject it.
        bad_time = materializationengine.convert_timestamp(
            datetime.datetime(
                year=2020, month=4, day=19, hour=0, tzinfo=datetime.timezone.utc
            )
        )
        good_time = materializationengine.convert_timestamp(
            datetime.datetime(
                year=2021, month=4, day=19, hour=0, tzinfo=datetime.timezone.utc
            )
        )
        with pytest.raises(ValueError):
            df = myclient.materialize.live_query(
                test_info["synapse_table"],
                bad_time,
                filter_in_dict={"pre_pt_root_id": [600]},
                filter_out_dict={"post_pt_root_id": [601]},
                filter_equal_dict={"size": 100},
                limit=1000,
                offset=0,
            )
        ### live query test
        # Mocked supervoxel->root lookup; the id mapping differs between the
        # materialization timestamp (past_timestamp) and "now" (good_time).
        def my_get_roots(self, supervoxel_ids, timestamp=None, stop_layer=None):
            if 0 in supervoxel_ids:
                raise TestChunkedgraphException(
                    ("should not call get roots on svid =0")
                )
            if timestamp == good_time:
                sv_lookup = {
                    1: 200,
                    2: 200,
                    3: 201,
                    4: 201,
                    5: 203,
                    6: 203,
                    7: 203,
                    8: 103,
                    9: 103,
                    10: 103,
                    11: 200,
                    12: 103,
                    13: 203,
                    14: 201,
                    15: 201,
                }
            elif timestamp == past_timestamp:
                sv_lookup = {
                    1: 100,
                    2: 100,
                    3: 100,
                    4: 100,
                    5: 101,
                    6: 102,
                    7: 102,
                    8: 103,
                    9: 103,
                    10: 103,
                    11: 100,
                    12: 103,
                    13: 102,
                    14: 100,
                    15: 100,
                }
            else:
                raise ValueError("Mock is not defined at this time")
            return np.array([sv_lookup[sv] for sv in supervoxel_ids])
        # Mocked future->past root id mapping (e.g. 203 merged from 101+102).
        def mocked_get_past_ids(
            self, root_ids, timestamp_past=None, timestamp_future=None
        ):
            if 0 in root_ids:
                raise TestChunkedgraphException(("should not past_ids on svid =0"))
            id_map = {201: [100], 103: [103], 203: [101, 102]}
            return {
                "future_id_map": {},
                "past_id_map": {k: id_map[k] for k in root_ids},
            }
        # Mocked latest-root check, consistent with the lookups above.
        def mock_is_latest_roots(self, root_ids, timestamp=None):
            if 0 in root_ids:
                raise TestChunkedgraphException(
                    ("should not call is_latest on svid =0")
                )
            if timestamp == good_time:
                is_latest = {
                    100: False,
                    101: False,
                    102: False,
                    103: True,
                    200: True,
                    201: True,
                    202: True,
                    203: True,
                    303: True,
                }
            elif timestamp == past_timestamp:
                is_latest = {
                    100: True,
                    101: True,
                    102: True,
                    103: True,
                    200: False,
                    201: False,
                    202: False,
                    203: False,
                    303: True,
                }
            else:
                raise ValueError("Mock is not defined at this time")
            return np.array([is_latest[root_id] for root_id in root_ids])
        # Mocked creation timestamps; 303 is created AFTER good_time so
        # querying it at good_time must raise (checked at the end).
        def mock_get_root_timestamps(self, root_ids):
            timestamp_dict = {
                100: bad_time - datetime.timedelta(days=1),
                101: bad_time - datetime.timedelta(days=1),
                102: bad_time - datetime.timedelta(days=1),
                103: bad_time - datetime.timedelta(days=1),
                200: good_time - datetime.timedelta(days=1),
                201: good_time - datetime.timedelta(days=1),
                202: good_time - datetime.timedelta(days=1),
                203: good_time - datetime.timedelta(days=1),
                303: good_time + datetime.timedelta(days=1),
            }
            return np.array([timestamp_dict[root_id] for root_id in root_ids])
        mocker.patch(
            "caveclient.chunkedgraph.ChunkedGraphClientV1.get_roots",
            my_get_roots,
        )
        mocker.patch(
            "caveclient.chunkedgraph.ChunkedGraphClientV1.get_past_ids",
            mocked_get_past_ids,
        )
        mocker.patch(
            "caveclient.chunkedgraph.ChunkedGraphClientV1.is_latest_roots",
            mock_is_latest_roots,
        )
        mocker.patch(
            "caveclient.chunkedgraph.ChunkedGraphClientV1.get_root_timestamps",
            mock_get_root_timestamps,
        )
        # The live_query path translates the requested (current) root ids into
        # their past equivalents before hitting the server; each responses.add
        # below pins the exact translated filter the client should send.
        df = pd.read_pickle("tests/test_data/live_query_before.pkl")
        context = pa.default_serialization_context()
        serialized = context.serialize(df)
        correct_query_data = {
            "filter_in_dict": {
                test_info["synapse_table"]: {"pre_pt_root_id": [100, 103]}
            }
        }
        responses.add(
            responses.POST,
            url=url,
            body=serialized.to_buffer().to_pybytes(),
            headers={"content-type": "x-application/pyarrow"},
            match=[responses.json_params_matcher(correct_query_data)],
        )
        correct_query_data = {
            "filter_in_dict": {
                test_info["synapse_table"]: {"post_pt_root_id": [100, 101, 102]}
            }
        }
        responses.add(
            responses.POST,
            url=url,
            body=serialized.to_buffer().to_pybytes(),
            headers={"content-type": "x-application/pyarrow"},
            match=[responses.json_params_matcher(correct_query_data)],
        )
        correct_query_data = {
            "filter_in_dict": {
                test_info["synapse_table"]: {"post_pt_root_id": [101, 102]}
            }
        }
        responses.add(
            responses.POST,
            url=url,
            body=serialized.to_buffer().to_pybytes(),
            headers={"content-type": "x-application/pyarrow"},
            match=[responses.json_params_matcher(correct_query_data)],
        )
        dfq = myclient.materialize.live_query(
            test_info["synapse_table"],
            good_time,
            filter_in_dict={"pre_pt_root_id": [201, 103]},
        )
        dfr = pd.read_pickle("tests/test_data/live_query_after1.pkl")
        assert np.all(dfq.pre_pt_root_id == dfr.pre_pt_root_id)
        assert np.all(dfq.post_pt_root_id == dfr.post_pt_root_id)
        dfq = myclient.materialize.live_query(
            test_info["synapse_table"],
            good_time,
            filter_in_dict={"post_pt_root_id": [201, 203]},
        )
        dfr = pd.read_pickle("tests/test_data/live_query_after2.pkl")
        assert np.all(dfq.pre_pt_root_id == dfr.pre_pt_root_id)
        assert np.all(dfq.post_pt_root_id == dfr.post_pt_root_id)
        dfq = myclient.materialize.live_query(
            test_info["synapse_table"],
            good_time,
            filter_equal_dict={"post_pt_root_id": 203},
        )
        dfr = pd.read_pickle("tests/test_data/live_query_after3.pkl")
        assert np.all(dfq.pre_pt_root_id == dfr.pre_pt_root_id)
        assert np.all(dfq.post_pt_root_id == dfr.post_pt_root_id)
        # --- cell_types table: split vs combined position columns ---
        df_ct = pd.read_pickle("tests/test_data/cell_types.pkl")
        context = pa.default_serialization_context()
        serialized = context.serialize(df_ct)
        endpoint_mapping["table_name"] = "cell_types"
        url = self.endpoints["simple_query"].format_map(endpoint_mapping)
        query_d = {"return_pyarrow": True, "split_positions": True}
        query_string = urlencode(query_d)
        url = url + "?" + query_string
        correct_query_data = {}
        responses.add(
            responses.POST,
            url=url,
            body=serialized.to_buffer().to_pybytes(),
            headers={"content-type": "x-application/pyarrow"},
            match=[responses.json_params_matcher(correct_query_data)],
        )
        dfq = myclient.materialize.live_query(
            "cell_types", good_time, split_positions=True
        )
        correct_ct = pd.read_pickle("tests/test_data/cell_types_live.pkl")
        assert np.all(correct_ct.pt_root_id == dfq.pt_root_id)
        correct_query_data = {"filter_equal_dict": {"cell_types": {"cell_type": "BC"}}}
        responses.add(
            responses.POST,
            url=url,
            body=serialized.to_buffer().to_pybytes(),
            headers={"content-type": "x-application/pyarrow"},
            match=[responses.json_params_matcher(correct_query_data)],
        )
        dfq = myclient.materialize.live_query(
            "cell_types",
            good_time,
            filter_equal_dict={"cell_type": "BC"},
            split_positions=True,
        )
        cdf = correct_ct.query('cell_type=="BC"')
        assert np.all(cdf.pt_root_id == dfq.pt_root_id)
        assert np.all(cdf.cell_type == dfq.cell_type)
        dfq = myclient.materialize.live_query(
            "cell_types",
            good_time,
            filter_equal_dict={"cell_type": "BC"},
            split_positions=False,
        )
        cdf = correct_ct.query('cell_type=="BC"')
        assert np.all(cdf.pt_root_id == dfq.pt_root_id)
        assert np.all(cdf.cell_type == dfq.cell_type)
        # With split_positions=False the _x/_y/_z columns are recombined
        # into a single array-valued pt_position column.
        x = cdf.iloc[0]
        pos = np.array([x.pt_position_x, x.pt_position_y, x.pt_position_z])
        assert np.all(dfq.pt_position.iloc[0] == pos)
        # Root 303 does not exist yet at good_time, so the query must fail.
        with pytest.raises(ValueError):
            dfq = myclient.materialize.live_query(
                test_info["synapse_table"],
                good_time,
                filter_in_dict={"pre_pt_root_id": [303]},
            )
<file_sep>/docs/guide/chunkedgraph.rst
ChunkedGraph
============
The ChunkedGraph client allows one to interact with the ChunkedGraph,
which stores and updates the supervoxel agglomeration graph. This is
most often useful for looking up an object root id of a supervoxel or
looking up supervoxels belonging to a root id. The ChunkedGraph client
is at ``client.chunkedgraph``.
Look up a supervoxel
^^^^^^^^^^^^^^^^^^^^
Usually in Neuroglancer, one never notices supervoxel ids, but they are
important for programmatic work. In order to look up the root id for a
location in space, one needs to use the supervoxel segmentation to get
the associated supervoxel id. The ChunkedGraph client makes this easy
using the :func:`~caveclient.chunkedgraph.ChunkedGraphClientV1.get_root_id` method.
.. code:: python
sv_id = 104200755619042523
client.chunkedgraph.get_root_id(supervoxel_id=sv_id)
However, as proofreading occurs, the root id that a supervoxel belongs
to can change. By default, this function returns the current state,
however one can also provide a UTC timestamp to get the root id at a
particular moment in history. This can be useful for reproducible
analysis. Note below that the root id for the same supervoxel is
different than it is now.
.. code:: python
import datetime
# I looked up the UTC POSIX timestamp from a day in early 2019.
timestamp = datetime.datetime.utcfromtimestamp(1546595253)
sv_id = 104200755619042523
client.chunkedgraph.get_root_id(supervoxel_id=sv_id, timestamp=timestamp)
If you are doing this across lots of supervoxels (or any nodes)
then you can do it more efficiently in one request with
:func:`~caveclient.chunkedgraph.ChunkedGraphClientV1.get_roots`
.. code:: python
node_ids = [104200755619042523, 104200755619042524,104200755619042525]
root_ids = client.chunkedgraph.get_roots(node_ids)
Getting supervoxels for a root id
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
A root id is associated with a particular agglomeration of supervoxels,
which can be found with the :func:`~caveclient.chunkedgraph.ChunkedGraphClientV1.get_leaves` method. A new root id is
generated for every new change in the chunkedgraph, so time stamps do
not apply.
.. code:: python
root_id = 648518346349541252
client.chunkedgraph.get_leaves(root_id)
You can also query the chunkedgraph not all the way to the bottom, using the stop_layer
option
.. code:: python
root_id = 648518346349541252
client.chunkedgraph.get_leaves(root_id,stop_layer=2)
This will get all the level 2 IDs for this root, which correspond to the lowest chunks of the hierarchy.
An analogous option exists for :func:`~caveclient.chunkedgraph.ChunkedGraphClientV1.get_roots`.
Other functions
^^^^^^^^^^^^^^^
There are a variety of other interesting functions to explore in the :class:`~caveclient.chunkedgraph.ChunkedGraphClientV1`
<file_sep>/docs/guide/info.rst
Info Service
============
A datastack has a number of complex paths to various data sources that
together comprise a datastack. Rather than hardcode these paths, the
InfoService allows one to query the location of each data source. This
is also convenient in case data sources change.
An InfoClient is accessed at ``client.info``.
.. code:: python
client = CAVEclient(datastack_name)
print(f"This is an info client for {client.info.datastack_name} on {client.info.server_address}")
Accessing datastack information
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
All of the information accessible for the datastack can be seen as a dict
using ``get_datastack_info()``.
.. code:: python
info.get_datastack_info()
Individual entries can be found as well. Use tab autocomplete to see the
various possibilities.
.. code:: python
info.graphene_source()
Adjusting formatting
^^^^^^^^^^^^^^^^^^^^
Because of the way neuroglancer looks up data versus cloudvolume,
sometimes one needs to convert from ``gs://`` style paths to
``https://storage.googleapis.com/`` style paths. All of the path sources
in the info client accept a ``format_for`` argument that can handle
this, and correctly adapts to graphene vs precomputed data sources.
.. code:: python
neuroglancer_style_source = info.image_source(format_for='neuroglancer')
print(f"With gs-style: { neuroglancer_style_source }")
cloudvolume_style_source = info.image_source(format_for='cloudvolume')
print(f"With https-style: { cloudvolume_style_source }")
<file_sep>/test_requirements.txt
pytest
pytest-cov
pytest-env
responses
pytest-mock<file_sep>/README.rst
.. image:: https://readthedocs.org/projects/caveclient/badge/?version=latest
:target: https://caveclient.readthedocs.io/en/latest/?badge=latest
:alt: Documentation Status
.. image:: https://github.com/seung-lab/CAVEclient/workflows/Annotation%20Framework%20Client/badge.svg
:target: https://github.com/seung-lab/CAVEclient/actions
:alt: Actions Status
CAVEclient
###########################
This repository supplies client side code to interact with microservices
in the Connectome Annotation Versioning Engine (CAVE).
<file_sep>/docs/guide/materialization.rst
Materialization
================
The Materialization client allows one to interact with the materialized
annotation tables, that were posted to the annotation service (see
:doc:`annotation`).
To see the entire class visit the API doc :class:`~caveclient.materializationengine.MaterializatonClientV2`
The service regularly looks up all annotations and the segids underneath
all the boundspatialpoints. You can then query these tables to find out
the IDs that underlie the annotations, or the annotations that now intersect
with certain IDs.
For example, one common pattern is that you have identified a cell based on
the location of its cell body, and you have an annotation there.
You want to know what are the inputs onto the cell, so you first query the
annotation table with your soma annotation, asking for the current ID underneath
that soma. Then you query a synapse table for all synapse annotations that
have a post-synaptic ID equal to the ID from your soma annotation.
In this way your code stays the same, as the proofreading changes and you can
track the connectivity of your cell over time.
Initializing the client
^^^^^^^^^^^^^^^^^^^^^^^
By default when you initialize the overall client, it will choose the most recent
materialization version available. This may or may not be desirable depending on your
use case. If your code involves using specific IDs then you should be using a
specific version that is tied to a timepoint where those IDs are valid.
To see what versions are available, use the :func:`~caveclient.materializationengine.MaterializatonClientV2.get_versions`
.. code:: python
client.materialize.get_versions()
Each version has a timestamp it was run on as well as a date when it will expire.
You can query all this metadata for a specific version using
:func:`~caveclient.materializationengine.MaterializatonClientV2.get_version_metadata`
or all versions using
:func:`~caveclient.materializationengine.MaterializatonClientV2.get_versions_metadata`
To change the default version, alter the .version property of the materialization client.
.. code:: python
client.materialize.version = 9
or specify the version when making a particular call.
Browsing versions
^^^^^^^^^^^^^^^^^
To see what tables are available in a version you can use
:func:`~caveclient.materializationengine.MaterializatonClientV2.get_tables`
If you want to read about the description of what that table is, use the annotationengine client
:func:`~caveclient.annotationengine.AnnotationClientV2.get_table_metadata`
If you want to read more about the schema for the annotation table use the schema service
:func:`~caveclient.emannotationschemas.SchemaClientLegacy.schema_definition`
Note, the materialization service has a human readable webpage that links to the other services
that might be more convenient for you to browse,
to get a link there in ipython display ``client.materialize.homepage``
for some important tables, the info service has a pointer to which table you should use in
the metadata for the datastack. ```client.info.get_datastack_info()['synapse_table']```
and ```client.info.get_datastack_info()['soma_table']```.
To see how many annotations are in a particular table use
.. code:: python
nannotations=client.materialize.get_annotation_count('my_table')
Querying tables
^^^^^^^^^^^^^^^
To query a small table, you can just download the whole thing using
:func:`~caveclient.materializationengine.MaterializatonClientV2.query_table`
which will return a dataframe of the table.
Note however, some tables, such as the synapse table might be very large 200-300 million rows
and the service will only return the first 200,000 results.
To just get a preview, use the limit argument
.. code:: python
df=client.materialize.query_table('my_table', limit=10)
For many applications, you will want to filter the query in some way.
We offer three kinds of filters you can apply: filter_equal, filter_in and filter_not_in.
For query_table each is specified as a dictionary where the keys are column names,
and the values are a list of values (or single value in the case of filter_equal).
So for example to query a synapse table for all synapses onto a neuron in flywire you would use
.. code:: python
synapse_table = client.info.get_datastack_info('synapse_table')
df=client.materialize.query_table(synapse_table,
filter_equal_dict = {'post_pt_root_id': MYID})
The speed of querying is affected by a number of factors, including the size of the data.
To improve the performance of results, you can reduce the number of columns returned using
select_columns.
So for example, if you are only interested in the root_ids and locations of pre_synaptic terminals
you might limit the query with select_columns. Also, it is convenient to return
positions as a column of np.array([x,y,z]) coordinates for many purposes.
However, sometimes you might prefer to have them split out as separate _x, _y, _z columns.
To enable this option use split_columns=True. split_columns=True is faster, as combining them is an extra step.
You can recombine split-out position columns using :func:`~caveclient.materializationengine.concatenate_position_columns`
.. code:: python
synapse_table = client.info.get_datastack_info('synapse_table')
df=client.materialize.query_table(synapse_table,
filter_equal_dict = {'post_pt_root_id': MYID},
select_columns=['id','pre_pt_root_id', 'pre_pt_position'],
split_columns=True)
Spatial Filters
^^^^^^^^^^^^^^^
You can also filter columns that are associated with spatial locations based upon being within a 3d bounding box.
This is done by adding a filter_spatial_dict argument to query_table.
The units of the bounding box should be in the units of the voxel_resolution of the table
(which can be obtained from :func:`~caveclient.materializationengine.MaterializatonClientV2.get_table_metadata`).
.. code:: python
bounding_box = [[min_x, min_y, min_z], [max_x, max_y, max_z]]
synapse_table = client.info.get_datastack_info('synapse_table')
df=client.materialize.query_table(synapse_table,
filter_equal_dict = {'post_pt_root_id': MYID},
filter_spatial_dict = {'post_pt_position': bounding_box})
Synapse Query
^^^^^^^^^^^^^
For synapses in particular, we have a simplified method for querying them with a reduced syntax.
:func:`~caveclient.materializationengine.MaterializatonClientV2.synapse_query`
lets you specify pre and post synaptic partners as keyword arguments and bounding boxes.
The defaults make reasonable assumptions about what you want to query, namely that the synapse_table is
the table that the info service advertises, and that if you specify a bounding box, that you want the post_pt_position.
These can be overridden of course, but the above bounding box query is simplified to.
.. code:: python
bounding_box = [[min_x, min_y, min_z], [max_x, max_y, max_z]]
   df=client.materialize.synapse_query(post_ids = MYID,
                                       bounding_box=bounding_box)
Live Query
^^^^^^^^^^
In order to query the materialized tables above you can only use IDs that were present at the
timestamp of the materialization. If you query the tables with an ID that is not valid during the
time of the materialization you will get empty results.
To check if root_ids are valid at your materialization's timestamp, you can use
:func:`~caveclient.chunkedgraph.ChunkedGraphClientV1.is_latest_roots`
.. code:: python
import numpy as np
mat_time = client.materialize.get_timestamp()
is_latest = client.chunkedgraph.is_latest_roots([MYID], timestamp=mat_time)
assert(np.all(is_latest))
If you need to lookup what happened to that ID, you can use the chunkedgraph lineage tree,
to look into the future or the past, depending on your application you can use
:func:`~caveclient.chunkedgraph.ChunkedGraphClientV1.get_lineage_graph`
Again, the ideal situation is that you have an annotation in the database which refers
to your objects of interest, and querying that table by the id column will return the
object in the most recent materialization.
However, sometimes you might be browsing and proofreading the data and get an ID
that is more recent than the most recent version available. For convenience, you can use
:func:`~caveclient.materializationengine.MaterializatonClientV2.live_query`.
to automatically update the results of your query to a time in the future, such as now.
For example, to pass now, use ```datetime.datetime.utcnow```. Note all timestamps are in UTC
throughout the codebase.
.. code:: python
import datetime
synapse_table = client.info.get_datastack_info('synapse_table')
df=client.materialize.live_query(synapse_table,
datetime.datetime.utcnow(),
filter_equal_dict = {'post_pt_root_id': MYID})
This will raise a ValueError exception if the IDs passed in your filters are not valid at the timestamp given
You can also pass a timestamp directly to query_table and it will call live_query automatically.
.. code:: python
import datetime
synapse_table = client.info.get_datastack_info('synapse_table')
df=client.materialize.query_table(synapse_table,
timestamp=datetime.datetime.utcnow(),
filter_equal_dict = {'post_pt_root_id': MYID})
Also, keep in mind if you run multiple queries and at each time pass ``datetime.datetime.utcnow()``,
there is no guarantee that the IDs will be consistent from query to query, as proofreading might be happening
at any time. For larger scale analysis constraining oneself to a materialized version will ensure consistent results.
Versions have varying expiration times in order to support the tradeoff between recency and consistency,
so before undertaking an analysis project consider what version you want to query and what your plan will be to
update your analysis to future versions.
<file_sep>/caveclient/format_utils.py
from urllib.parse import urlparse
def format_precomputed_neuroglancer(objurl):
    """Format a cloud path as a neuroglancer-style precomputed source.

    ``gs://`` URLs are prefixed directly. ``http(s)`` URLs are rewritten to
    ``precomputed://gs://`` form using only the URL path (this assumes the
    host is Google Cloud Storage). Any other scheme yields ``None``.
    """
    parsed = urlparse(objurl)
    if parsed.scheme == "gs":
        return f"precomputed://{objurl}"
    if parsed.scheme in ("http", "https"):
        return f"precomputed://gs://{parsed.path[1:]}"
    return None
def format_precomputed_https(objurl):
    """Format a cloud path as an https-style precomputed source.

    ``gs://`` URLs are rewritten to ``precomputed://https://storage.googleapis.com/``
    form; ``http(s)`` URLs are prefixed directly. Any other scheme yields ``None``.
    """
    qry = urlparse(objurl)
    if qry.scheme == "gs":
        # Bug fix: urlparse("gs://bucket/x") puts the bucket in .netloc, not
        # .path, so using path alone silently dropped the bucket name.
        objurl_out = f"precomputed://https://storage.googleapis.com/{qry.netloc}{qry.path}"
    elif qry.scheme == "http" or qry.scheme == "https":
        objurl_out = f"precomputed://{objurl}"
    else:
        objurl_out = None
    return objurl_out
def format_graphene(objurl):
    """Ensure a ``graphene://`` prefix on a graphene data source URL.

    ``http(s)`` URLs get the prefix added; URLs already using the
    ``graphene`` scheme pass through unchanged; any other scheme yields
    ``None``.
    """
    parsed = urlparse(objurl)
    if parsed.scheme in ("http", "https"):
        return f"graphene://{objurl}"
    if parsed.scheme == "graphene":
        return objurl
    return None
def format_cloudvolume(objurl):
    """Format a URL for use with cloudvolume.

    Graphene URLs are routed through :func:`format_graphene`; ``gs``/``http``/
    ``https`` URLs through :func:`format_precomputed_https`. Any other scheme
    yields ``None``.
    """
    scheme = urlparse(objurl).scheme
    if scheme == "graphene":
        return format_graphene(objurl)
    if scheme in ("gs", "http", "https"):
        return format_precomputed_https(objurl)
    return None
def format_raw(objurl):
    """Identity formatter: return the URL unchanged."""
    return objurl
# Lookup tables mapping a `format_for` argument value ("raw", "cloudvolume",
# "neuroglancer") to the formatter function appropriate for each source kind.
# No reformatting
output_map_raw = {}
# Use precomputed://gs:// links for neuroglancer, but use precomputed://https://storage.googleapis.com links in cloudvolume
output_map_precomputed = {
    "raw": format_raw,
    "cloudvolume": format_precomputed_https,
    "neuroglancer": format_precomputed_neuroglancer,
}
# Use graphene://https:// links for both neuroglancer and cloudvolume
output_map_graphene = {
    "raw": format_raw,
    "cloudvolume": format_graphene,
    "neuroglancer": format_graphene,
}
<file_sep>/requirements.txt
numpy
pyarrow>=3
requests
pandas
cachetools>=4.2.1
ipython
networkx<file_sep>/tests/test_chunkedgraph.py
from re import match
from .conftest import test_info, TEST_LOCAL_SERVER, TEST_DATASTACK
import pytest
import responses
import numpy as np
from caveclient.endpoints import (
chunkedgraph_endpoints_v1,
chunkedgraph_endpoints_common,
)
import datetime
import time
from urllib.parse import urlencode
def binary_body_match(body):
    """Build a matcher for the `responses` library that compares the raw
    request body byte-for-byte against `body`.
    """
    expected = body

    def match(request_body):
        return expected == request_body

    return match
class TestChunkedgraph:
    """Tests of the chunkedgraph client against mocked HTTP endpoints."""

    # Template values used to fill in the endpoint URL patterns.
    _default_endpoint_map = {
        "cg_server_address": TEST_LOCAL_SERVER,
        "table_id": test_info["segmentation_source"].split("/")[-1],
    }
    @responses.activate
    def test_get_roots(self, myclient):
        # Exercises get_roots (with/without explicit timestamp and with
        # stop_layer) and the single-id get_root_id path.
        endpoint_mapping = self._default_endpoint_map
        url = chunkedgraph_endpoints_v1["get_roots"].format_map(endpoint_mapping)
        svids = np.array([97557743795364048, 75089979126506763], dtype=np.uint64)
        root_ids = np.array([864691135217871271, 864691135566275148], dtype=np.uint64)
        now = datetime.datetime.utcnow()
        query_d = {"timestamp": time.mktime(now.timetuple())}
        qurl = url + "?" + urlencode(query_d)
        responses.add(
            responses.POST,
            url=qurl,
            body=root_ids.tobytes(),
            match=[binary_body_match(svids.tobytes())],
        )
        new_root_ids = myclient.chunkedgraph.get_roots(svids, timestamp=now)
        assert np.all(new_root_ids == root_ids)
        # Setting _default_timestamp makes the timestamp argument optional.
        myclient.chunkedgraph._default_timestamp = now
        new_root_ids = myclient.chunkedgraph.get_roots(svids)
        assert np.all(new_root_ids == root_ids)
        query_d = {"timestamp": time.mktime(now.timetuple()), "stop_layer": 3}
        qurl = url + "?" + urlencode(query_d)
        responses.add(
            responses.POST,
            url=qurl,
            body=root_ids.tobytes(),
            match=[binary_body_match(svids.tobytes())],
        )
        new_root_ids = myclient.chunkedgraph.get_roots(
            svids, timestamp=now, stop_layer=3
        )
        assert np.all(new_root_ids == root_ids)
        endpoint_mapping["supervoxel_id"] = svids[0]
        url = chunkedgraph_endpoints_v1["handle_root"].format_map(endpoint_mapping)
        query_d = {"timestamp": time.mktime(now.timetuple())}
        qurl = url + "?" + urlencode(query_d)
        responses.add(responses.GET, url=qurl, json={"root_id": int(root_ids[0])})
        qroot_id = myclient.chunkedgraph.get_root_id(svids[0], timestamp=now)
        assert qroot_id == root_ids[0]
    @responses.activate
    def test_get_leaves(self, myclient):
        # get_leaves: rejects malformed bounds, then queries with bounds and
        # optional stop_layer.
        endpoint_mapping = self._default_endpoint_map
        root_id = 864691135217871271
        endpoint_mapping["root_id"] = root_id
        url = chunkedgraph_endpoints_v1["leaves_from_root"].format_map(endpoint_mapping)
        bad_bounds = np.array([[0, 0, 0, 2], [100, 100, 100, 0]])
        with pytest.raises(ValueError):
            myclient.chunkedgraph.get_leaves(root_id, bounds=bad_bounds)
        bounds = np.array([[0, 0, 0], [100, 200, 300]]).T
        bounds_str = "0-100_0-200_0-300"
        query_d = {"bounds": bounds_str}
        urlq = url + "?" + urlencode(query_d)
        svlist = [97557743795364048, 75089979126506763]
        svids = np.array(svlist, dtype=np.int64)
        responses.add(responses.GET, json={"leaf_ids": svlist}, url=urlq)
        svids_ret = myclient.chunkedgraph.get_leaves(root_id, bounds=bounds)
        assert np.all(svids == svids_ret)
        query_d = {"bounds": bounds_str, "stop_layer": 2}
        urlq = url + "?" + urlencode(query_d)
        responses.add(responses.GET, json={"leaf_ids": svlist}, url=urlq)
        svids_ret = myclient.chunkedgraph.get_leaves(
            root_id, bounds=bounds, stop_layer=2
        )
        assert np.all(svids == svids_ret)
    @responses.activate
    def test_get_root(self, myclient):
        # NOTE(review): this is a byte-for-byte duplicate of test_get_leaves
        # and does not exercise any get_root-specific behavior — presumably a
        # copy-paste leftover; consider replacing with a real get_root test.
        endpoint_mapping = self._default_endpoint_map
        root_id = 864691135217871271
        endpoint_mapping["root_id"] = root_id
        url = chunkedgraph_endpoints_v1["leaves_from_root"].format_map(endpoint_mapping)
        bad_bounds = np.array([[0, 0, 0, 2], [100, 100, 100, 0]])
        with pytest.raises(ValueError):
            myclient.chunkedgraph.get_leaves(root_id, bounds=bad_bounds)
        bounds = np.array([[0, 0, 0], [100, 200, 300]]).T
        bounds_str = "0-100_0-200_0-300"
        query_d = {"bounds": bounds_str}
        urlq = url + "?" + urlencode(query_d)
        svlist = [97557743795364048, 75089979126506763]
        svids = np.array(svlist, dtype=np.int64)
        responses.add(responses.GET, json={"leaf_ids": svlist}, url=urlq)
        svids_ret = myclient.chunkedgraph.get_leaves(root_id, bounds=bounds)
        assert np.all(svids == svids_ret)
        query_d = {"bounds": bounds_str, "stop_layer": 2}
        urlq = url + "?" + urlencode(query_d)
        responses.add(responses.GET, json={"leaf_ids": svlist}, url=urlq)
        svids_ret = myclient.chunkedgraph.get_leaves(
            root_id, bounds=bounds, stop_layer=2
        )
        assert np.all(svids == svids_ret)
    @responses.activate
    def test_merge_log(self, myclient):
        # get_merge_log should pass the server's JSON payload through intact.
        endpoint_mapping = self._default_endpoint_map
        root_id = 864691135217871271
        endpoint_mapping["root_id"] = root_id
        url = chunkedgraph_endpoints_v1["merge_log"].format_map(endpoint_mapping)
        merge_log = {
            "merge_edge_coords": [
                [[[85785, 68475, 20988]], [[85717, 67955, 20964]]],
                [[[86511, 70071, 20870]], [[86642, 70011, 20913]]],
                [[[80660, 67637, 19735]], [[80946, 67810, 19735]]],
                [[[84680, 63424, 20735]], [[84696, 63464, 20735]]],
                [[[94096, 71152, 19934]], [[94096, 71168, 19937]]],
                [[[89728, 72692, 20008]], [[89668, 72839, 19996]]],
                [[[82492, 71488, 21534]], [[82726, 71281, 21584]]],
                [[[85221, 69913, 20891]], [[85104, 70003, 20856]]],
            ],
            "merge_edges": [
                [[88393382627986340, 88322876444801990]],
                [[88534532433083295, 88604901177276829]],
                [[86985732732043081, 87056170195711450]],
                [[88040164517305351, 88040164517304487]],
                [[90645869502201091, 90645869502200218]],
                [[89450013234655197, 89450081954148949]],
                [[87479345001609186, 87549713745838644]],
                [[88182619992741827, 88182688712176449]],
            ],
        }
        responses.add(responses.GET, json=merge_log, url=url)
        qmerge_log = myclient.chunkedgraph.get_merge_log(root_id)
        assert merge_log == qmerge_log
    @responses.activate
    def test_change_log(self, myclient):
        # get_change_log should pass the server's JSON payload through intact.
        endpoint_mapping = self._default_endpoint_map
        root_id = 864691135217871271
        endpoint_mapping["root_id"] = root_id
        url = chunkedgraph_endpoints_v1["change_log"].format_map(endpoint_mapping)
        change_log = {
            "n_mergers": 2,
            "n_splits": 2,
            "operations_ids": [178060, 178059, 178046, 178050],
            "past_ids": [
                864691135181922050,
                864691135761746230,
                864691135785389764,
                864691135583980920,
            ],
            "user_info": {
                "160": {"n_mergers": 1, "n_splits": 1},
                "161": {"n_mergers": 1},
                "164": {"n_splits": 1},
            },
        }
        responses.add(responses.GET, json=change_log, url=url)
        qchange_log = myclient.chunkedgraph.get_change_log(root_id)
        assert change_log == qchange_log
    @responses.activate
    def test_children(self, myclient):
        # get_children should unwrap the "children_ids" field into an array.
        endpoint_mapping = self._default_endpoint_map
        root_id = 864691135217871271
        endpoint_mapping["node_id"] = root_id
        url = chunkedgraph_endpoints_v1["handle_children"].format_map(endpoint_mapping)
        children_list = [792633534440329101, 828662331442575736, 792633534440186368]
        children_ids = np.array([children_list])
        responses.add(responses.GET, json={"children_ids": children_list}, url=url)
        qchildren_ids = myclient.chunkedgraph.get_children(root_id)
        assert np.all(children_ids == qchildren_ids)
    @responses.activate
    def test_delta_roots(self, myclient):
        """get_delta_roots should return (old_roots, new_roots) for a window.

        Mocks the ``delta_roots`` endpoint with past/future timestamps in the
        query string and checks both returned id sequences.
        """
        endpoint_mapping = self._default_endpoint_map
        url = chunkedgraph_endpoints_v1["delta_roots"].format_map(endpoint_mapping)
        now = datetime.datetime.utcnow()
        timestamp_past = now - datetime.timedelta(days=1)
        # The mocked URL must match the client's own query serialization —
        # assumes the client also encodes timestamps via time.mktime on the
        # local timetuple (TODO confirm against the client implementation).
        query_d = {
            "timestamp_past": time.mktime(timestamp_past.timetuple()),
            "timestamp_future": time.mktime(now.timetuple()),
        }
        urlq = url + "?" + urlencode(query_d)
        old_ids = [
            864691135969138021,
            864691135139373503,
            864691135478492102,
            864691135799951458,
            864691135383121131,
            864691134136949808,
            864691136867461742,
            864691135697607189,
            864691135140967615,
            864691135383120619,
        ]
        new_ids = [
            864691136123991846,
            864691135122597927,
            864691135564887127,
            864691136109117880,
            864691135502092381,
            864691135865876613,
            864691136008781742,
            864691135564887383,
            864691135776866656,
            864691135292110006,
            864691136672918919,
        ]
        responses.add(
            responses.GET, json={"old_roots": old_ids, "new_roots": new_ids}, url=urlq
        )
        # Client returns the two id lists as a pair, in (old, new) order.
        qold_ids, qnew_ids = myclient.chunkedgraph.get_delta_roots(timestamp_past, now)
        assert np.all(qold_ids == old_ids)
        assert np.all(qnew_ids == new_ids)
# waiting for backend fix
# @responses.activate
# def test_contact_sites(self, myclient):
# endpoint_mapping = self._default_endpoint_map
# root_id = 864691135217871271
# endpoint_mapping['node_id']=root_id
# url=chunkedgraph_endpoints_v1['handle_children'].format_map(endpoint_mapping)
# children_list = [792633534440329101, 828662331442575736, 792633534440186368]
# children_ids = np.array([children_list])
# responses.add(responses.GET,
# json={'children_ids':children_list},
# url=url)
# qchildren_ids = myclient.chunkedgraph.get_children(root_id)
# assert(np.all(children_ids==qchildren_ids))
# waiting for backend to fix to finish
# @responses.activate
# def test_find_path(self, myclient):
# endpoint_mapping = self._default_endpoint_map
# root_id = 864691135217871271
# endpoint_mapping['node_id']=root_id
# url=chunkedgraph_endpoints_v1['handle_children'].format_map(endpoint_mapping)
# children_list = [792633534440329101, 828662331442575736, 792633534440186368]
# children_ids = np.array([children_list])
# responses.add(responses.GET,
# json={'children_ids':children_list},
# url=url)
# qchildren_ids = myclient.chunkedgraph.get_children(root_id)
# assert(np.all(children_ids==qchildren_ids))
    @responses.activate
    def test_get_subgraph(self, myclient):
        """get_subgraph should return (nodes, affinities, areas) arrays.

        Mocks the ``get_subgraph`` endpoint with a bounds query string and
        checks the three parallel arrays built from the JSON lists.
        """
        endpoint_mapping = self._default_endpoint_map
        root_id = 864691135776832352
        # Bounds as a 3x2 array of [min, max] per axis; bounds_str is the
        # serialized form the client is expected to put in the query string.
        bounds = np.array([[120241, 120441], [103825, 104025], [21350, 21370]])
        bounds_str = "120241-120441_103825-104025_21350-21370"
        endpoint_mapping["root_id"] = root_id
        url = chunkedgraph_endpoints_v1["get_subgraph"].format_map(endpoint_mapping)
        query_d = {"bounds": bounds_str}
        qurl = url + "?" + urlencode(query_d)
        # Edge list (pairs of supervoxel ids) plus per-edge affinity and area.
        nodes_list = [
            [97832277702483859, 97832277702483868],
            [97832277702483868, 97832277702489688],
            [97832277702505017, 97832277702505025],
        ]
        affinity_list = [2486.50634766, 7.49544525, 18.80846024]
        area_list = [2486, 7, 18]
        nodes = np.array(nodes_list, dtype=np.int64)
        affinities = np.array(affinity_list, dtype=np.float64)
        areas = np.array(area_list, dtype=np.int32)
        responses.add(
            responses.GET,
            json={"nodes": nodes_list, "affinities": affinity_list, "areas": area_list},
            url=qurl,
        )
        qnodes, qaffinities, qareas = myclient.chunkedgraph.get_subgraph(
            root_id, bounds=bounds
        )
        assert np.all(qnodes == nodes)
        assert np.all(affinities == qaffinities)
        assert np.all(areas == qareas)
    @responses.activate
    def test_get_lvl2subgraph(self, myclient):
        """level2_chunk_graph should return the level-2 edge graph as an array.

        Mocks the ``lvl2_graph`` endpoint and compares the returned edges
        against the expected int64 array.
        """
        endpoint_mapping = self._default_endpoint_map
        root_id = 864691135776832352
        endpoint_mapping["root_id"] = root_id
        url = chunkedgraph_endpoints_v1["lvl2_graph"].format_map(endpoint_mapping)
        # Edge list of level-2 chunk id pairs.
        lvl2_graph_list = [
            [164471753114911373, 164471821834388004],
            [164471753114911373, 164542121859089069],
            [164471753114911412, 164542121859089069],
            [164471821834388004, 164542190578565862],
        ]
        lvl2_graph = np.array(lvl2_graph_list, dtype=np.int64)
        responses.add(responses.GET, json={"edge_graph": lvl2_graph_list}, url=url)
        qlvl2_graph = myclient.chunkedgraph.level2_chunk_graph(root_id)
        assert np.all(qlvl2_graph == lvl2_graph)
    @responses.activate
    def test_get_remeshing(self, myclient):
        """remesh_level2_chunks should POST the chunk ids as JSON.

        The mock matches on the exact POST body, so this verifies the client
        serializes both an ndarray and a plain list to the same
        ``{"new_lvl2_ids": [...]}`` payload.
        """
        endpoint_mapping = self._default_endpoint_map
        root_id = 864691135776832352
        endpoint_mapping["root_id"] = root_id
        url = chunkedgraph_endpoints_v1["remesh_level2_chunks"].format_map(
            endpoint_mapping
        )
        chunkid_list = [164471753114911373, 164471821834388004]
        chunk_ids = np.array(chunkid_list, dtype=np.int64)
        responses.add(
            responses.POST,
            status=200,
            url=url,
            # Fails the request unless the POST body matches this JSON exactly.
            match=[responses.json_params_matcher({"new_lvl2_ids": chunkid_list})],
        )
        # Exercise both accepted input types: ndarray and list.
        myclient.chunkedgraph.remesh_level2_chunks(chunk_ids)
        myclient.chunkedgraph.remesh_level2_chunks(chunkid_list)
@responses.activate
def test_is_latest_roots(self, myclient):
endpoint_mapping = self._default_endpoint_map
url = chunkedgraph_endpoints_v1["is_latest_roots"].format_map(endpoint_mapping)
root_id_list = [864691135776832352, 164471821834388001]
root_ids = np.array(root_id_list, dtype=np.int64)
is_latest_list = [True, False]
is_latest = np.array(is_latest_list, np.bool)
responses.add(
responses.POST,
status=200,
url=url,
json={"is_latest": is_latest_list},
match=[responses.json_params_matcher({"node_ids": root_id_list})],
)
qis_latest = myclient.chunkedgraph.is_latest_roots(root_ids)
assert np.all(is_latest == qis_latest)
qis_latest = myclient.chunkedgraph.is_latest_roots(root_id_list)
assert np.all(is_latest == qis_latest)
    @responses.activate
    def test_past_ids(self, myclient):
        """get_past_ids should convert string-keyed JSON maps to int keys.

        The server returns JSON (whose object keys are necessarily strings);
        the client is expected to return the map with integer root-id keys.
        Both ndarray and list inputs are exercised.
        """
        endpoint_mapping = self._default_endpoint_map
        url = chunkedgraph_endpoints_v1["past_id_mapping"].format_map(endpoint_mapping)
        root_id_list = [864691136577570580, 864691135415612346]
        root_ids = np.array(root_id_list, np.int64)
        # Expected client-side result: integer keys.
        id_map = {
            "future_id_map": {},
            "past_id_map": {
                864691135415612346: [864691134989972295, 864691135574118596],
                864691136577570580: [864691136721486702, 864691133958789149],
            },
        }
        # Raw server payload: same mapping but with JSON string keys.
        id_map_str = {
            "future_id_map": {},
            "past_id_map": {
                "864691135415612346": [864691134989972295, 864691135574118596],
                "864691136577570580": [864691136721486702, 864691133958789149],
            },
        }
        now = datetime.datetime.utcnow()
        past_time = now - datetime.timedelta(days=7)
        # Assumes the client serializes timestamps via time.mktime on the
        # local timetuple — the mocked URL must match exactly (TODO confirm).
        query_d = {
            "timestamp_past": time.mktime(past_time.timetuple()),
            "timestamp_future": time.mktime(now.timetuple()),
        }
        qurl = url + "?" + urlencode(query_d)
        responses.add(
            responses.GET,
            status=200,
            url=qurl,
            json=id_map_str,
            match=[responses.json_params_matcher({"root_ids": root_id_list})],
        )
        qid_map = myclient.chunkedgraph.get_past_ids(
            root_ids, timestamp_past=past_time, timestamp_future=now
        )
        assert qid_map == id_map
        qid_map = myclient.chunkedgraph.get_past_ids(
            root_id_list, timestamp_past=past_time, timestamp_future=now
        )
        assert qid_map == id_map
    def test_cloudvolume_path(self, myclient):
        """cloudvolume_path should be the graphene:// URL for this datastack."""
        cvpath = f"graphene://{TEST_LOCAL_SERVER}/segmentation/api/v1/test_v1"
        assert myclient.chunkedgraph.cloudvolume_path == cvpath
    @responses.activate
    def test_lineage_graph(self, myclient):
        """get_lineage_graph should return the node-link graph JSON unchanged.

        Mocks the ``handle_lineage_graph`` endpoint with a past/future
        timestamp window and checks the payload passthrough.
        """
        endpoint_mapping = self._default_endpoint_map
        root_id = 864691136089107255
        endpoint_mapping["root_id"] = root_id
        url = chunkedgraph_endpoints_v1["handle_lineage_graph"].format_map(
            endpoint_mapping
        )
        now = datetime.datetime.utcnow()
        past_time = now - datetime.timedelta(days=7)
        # Assumes the client serializes timestamps via time.mktime on the
        # local timetuple — the mocked URL must match exactly (TODO confirm).
        query_d = {
            "timestamp_past": time.mktime(past_time.timetuple()),
            "timestamp_future": time.mktime(now.timetuple()),
        }
        qurl = url + "?" + urlencode(query_d)
        # networkx-style node-link payload: directed edit lineage of the root.
        lineage_graph = {
            "directed": True,
            "graph": {},
            "links": [
                {"source": 864691136089107255, "target": 864691135490360423},
                {"source": 864691135348456151, "target": 864691136089107255},
            ],
            "multigraph": False,
            "nodes": [
                {
                    "id": 864691136089107255,
                    "operation_id": 225368,
                    "timestamp": 1616699178.177,
                },
                {
                    "id": 864691135348456151,
                    "operation_id": 217696,
                    "timestamp": 1608622183.079,
                },
                {
                    "id": 864691135490360423,
                    "operation_id": 225368,
                    "timestamp": 1618255909.638,
                },
            ],
        }
        responses.add(responses.GET, status=200, url=qurl, json=lineage_graph)
        qlineage_graph = myclient.chunkedgraph.get_lineage_graph(
            root_id, timestamp_past=past_time, timestamp_future=now
        )
        assert lineage_graph == qlineage_graph
    @responses.activate
    def test_operatin_details(self, myclient):
        """get_operation_details should return the per-operation JSON unchanged.

        Exercises both ndarray and list inputs for the operation ids.
        NOTE(review): method name has a typo ("operatin"); left unchanged
        because the name is the pytest-discovered identifier.
        """
        endpoint_mapping = self._default_endpoint_map
        url = chunkedgraph_endpoints_v1["operation_details"].format_map(
            endpoint_mapping
        )
        operation_id_list = [225368, 217696, 225368]
        operation_ids = np.array(operation_id_list, np.int32)
        qurl = url + "?" + urlencode({"operation_ids": operation_id_list})
        # Per-operation records keyed by operation-id string: a split (217696,
        # with removed_edges) and a merge (225368, with added_edges).
        operation_details = {
            "217696": {
                "bb_offset": [240, 240, 24],
                "operation_exception": "",
                "operation_status": 0,
                "operation_ts": "2021-03-25 19:06:18.177000+00:00",
                "removed_edges": [
                    [99450140208950705, 99450140208951689],
                    [99450140208950705, 99450140208952912],
                    [99450140208950716, 99450140208951689],
                    [99450140208950716, 99450140208952912],
                ],
                "roots": [864691135100007968, 864691136089107255],
                "sink_coords": [[126012, 101764, 20218], [126179, 101638, 20215]],
                "sink_ids": [99450208928441939, 99450140208946136],
                "source_coords": [[125981, 101650, 20232], [126209, 101690, 20210]],
                "source_ids": [99450140208968497, 99450140208944278],
                "timestamp": "2021-03-25 19:06:27.532000+00:00",
                "user": "121",
            },
            "225368": {
                "added_edges": [[99093279832296446, 99093279832302829]],
                "operation_exception": "",
                "operation_status": 0,
                "operation_ts": "2021-04-12 19:31:49.638000+00:00",
                "roots": [864691135490360423],
                "sink_coords": [[124760, 82888, 19383]],
                "source_coords": [[124700, 82887, 19377]],
                "timestamp": "2021-04-12 19:31:58.329000+00:00",
                "user": "121",
            },
        }
        responses.add(responses.GET, status=200, url=qurl, json=operation_details)
        # test that it works as np.array or list
        qoperation_details = myclient.chunkedgraph.get_operation_details(operation_ids)
        assert operation_details == qoperation_details
        qoperation_details = myclient.chunkedgraph.get_operation_details(
            operation_id_list
        )
        assert operation_details == qoperation_details
    @responses.activate
    def test_get_info(self, myclient):
        """segmentation_info should fetch once, cache, and expose resolution.

        Mocks the common ``info`` endpoint with a full segmentation info
        payload, accesses the property twice (the single registered mock
        implies the second access must hit the cache), and checks that
        ``base_resolution`` reflects the mip-0 scale's resolution.
        """
        endpoint_mapping = self._default_endpoint_map
        url = chunkedgraph_endpoints_common["info"].format_map(endpoint_mapping)
        # Full precomputed/graphene info blob: graph layout, mesh settings,
        # and the mip pyramid under "scales" (mip 0 first, at [8, 8, 40]).
        test_info = {
            "app": {"supported_api_versions": [0, 1]},
            "chunks_start_at_voxel_offset": True,
            "data_dir": "gs://cave_test/ws",
            "data_type": "uint64",
            "graph": {
                "bounding_box": [2048, 2048, 512],
                "chunk_size": [256, 256, 512],
                "cv_mip": 0,
                "n_bits_for_layer_id": 8,
                "n_layers": 12,
                "spatial_bit_masks": {
                    "1": 10,
                    "10": 2,
                    "11": 1,
                    "12": 1,
                    "2": 10,
                    "3": 9,
                    "4": 8,
                    "5": 7,
                    "6": 6,
                    "7": 5,
                    "8": 4,
                    "9": 3,
                },
            },
            "mesh": "cave_test_meshes",
            "mesh_metadata": {
                "uniform_draco_grid_size": 21,
                "unsharded_mesh_dir": "dynamic",
            },
            "num_channels": 1,
            "scales": [
                {
                    "chunk_sizes": [[256, 256, 32]],
                    "compressed_segmentation_block_size": [8, 8, 8],
                    "encoding": "compressed_segmentation",
                    "key": "8_8_40",
                    "locked": True,
                    "resolution": [8, 8, 40],
                    "size": [192424, 131051, 13008],
                    "voxel_offset": [26385, 30308, 14850],
                },
                {
                    "chunk_sizes": [[256, 256, 32]],
                    "compressed_segmentation_block_size": [8, 8, 8],
                    "encoding": "compressed_segmentation",
                    "key": "16_16_40",
                    "locked": True,
                    "resolution": [16, 16, 40],
                    "size": [96212, 65526, 13008],
                    "voxel_offset": [13192, 15154, 14850],
                },
                {
                    "chunk_sizes": [[256, 256, 32]],
                    "compressed_segmentation_block_size": [8, 8, 8],
                    "encoding": "compressed_segmentation",
                    "key": "32_32_40",
                    "locked": True,
                    "resolution": [32, 32, 40],
                    "size": [48106, 32763, 13008],
                    "voxel_offset": [6596, 7577, 14850],
                },
                {
                    "chunk_sizes": [[256, 256, 32]],
                    "compressed_segmentation_block_size": [8, 8, 8],
                    "encoding": "compressed_segmentation",
                    "key": "64_64_40",
                    "locked": True,
                    "resolution": [64, 64, 40],
                    "size": [24053, 16382, 13008],
                    "voxel_offset": [3298, 3788, 14850],
                },
                {
                    "chunk_sizes": [[256, 256, 32]],
                    "compressed_segmentation_block_size": [8, 8, 8],
                    "encoding": "compressed_segmentation",
                    "key": "128_128_80",
                    "resolution": [128, 128, 80],
                    "size": [12027, 8191, 6504],
                    "voxel_offset": [1649, 1894, 7425],
                },
                {
                    "chunk_sizes": [[256, 256, 32]],
                    "compressed_segmentation_block_size": [8, 8, 8],
                    "encoding": "compressed_segmentation",
                    "key": "256_256_160",
                    "resolution": [256, 256, 160],
                    "size": [6014, 4096, 3252],
                    "voxel_offset": [824, 947, 3712],
                },
                {
                    "chunk_sizes": [[256, 256, 32]],
                    "compressed_segmentation_block_size": [8, 8, 8],
                    "encoding": "compressed_segmentation",
                    "key": "512_512_320",
                    "resolution": [512, 512, 320],
                    "size": [3007, 2048, 1626],
                    "voxel_offset": [412, 473, 1856],
                },
                {
                    "chunk_sizes": [[256, 256, 32]],
                    "compressed_segmentation_block_size": [8, 8, 8],
                    "encoding": "compressed_segmentation",
                    "key": "1024_1024_640",
                    "resolution": [1024, 1024, 640],
                    "size": [1504, 1024, 813],
                    "voxel_offset": [206, 236, 928],
                },
                {
                    "chunk_sizes": [[256, 256, 32]],
                    "compressed_segmentation_block_size": [8, 8, 8],
                    "encoding": "compressed_segmentation",
                    "key": "2048_2048_1280",
                    "resolution": [2048, 2048, 1280],
                    "size": [752, 512, 407],
                    "voxel_offset": [103, 118, 464],
                },
                {
                    "chunk_sizes": [[256, 256, 32]],
                    "compressed_segmentation_block_size": [8, 8, 8],
                    "encoding": "compressed_segmentation",
                    "key": "4096_4096_2560",
                    "resolution": [4096, 4096, 2560],
                    "size": [376, 256, 204],
                    "voxel_offset": [51, 59, 232],
                },
            ],
            "sharded_mesh": True,
            "skeletons": "test_skeletons",
            "type": "segmentation",
            "verify_mesh": False,
        }
        responses.add(responses.GET, status=200, url=url, json=test_info)
        qinfo = myclient.chunkedgraph.segmentation_info
        assert test_info == qinfo
        # test twice for caching
        qinfo = myclient.chunkedgraph.segmentation_info
        assert test_info == qinfo
        # base_resolution comes from the first (mip 0) entry in "scales".
        base_resolution = myclient.chunkedgraph.base_resolution
        assert np.all(base_resolution == [8, 8, 40])
| 5271d7556fb061cfe831dfd571c34d7caebaffe6 | [
"Markdown",
"Python",
"Text",
"reStructuredText"
] | 33 | reStructuredText | seung-lab/AnnotationFrameworkClient | 795c7ee63b40c54face902679149492e5ec430ab | ef557694f502968d13080b07617b7ec5c019524a |
refs/heads/master | <file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-04T21:29:01.404+08:00
*/
package jken.support.mvc;
import com.google.common.base.Strings;
import jken.support.json.pathfilter.Jackson2Helper;
import jken.support.json.pathfilter.PathFilter;
import org.apache.commons.lang3.builder.Builder;
import org.springframework.core.MethodParameter;
import org.springframework.core.Ordered;
import org.springframework.core.annotation.Order;
import org.springframework.data.domain.Page;
import org.springframework.http.MediaType;
import org.springframework.http.converter.HttpMessageConverter;
import org.springframework.http.converter.json.AbstractJackson2HttpMessageConverter;
import org.springframework.http.converter.json.MappingJacksonValue;
import org.springframework.http.server.ServerHttpRequest;
import org.springframework.http.server.ServerHttpResponse;
import org.springframework.http.server.ServletServerHttpRequest;
import org.springframework.web.bind.annotation.ControllerAdvice;
import org.springframework.web.servlet.mvc.method.annotation.ResponseBodyAdvice;
import javax.servlet.http.HttpServletRequest;
import java.util.HashMap;
import java.util.Map;
/**
 * Response-body advice that wraps every Jackson-serialized controller result
 * in a standard envelope ({@code code} / {@code msg} / {@code data}) and
 * applies JSON path filtering.
 * <p>
 * Filters are taken, in priority order, from the {@code _path_filter} request
 * parameter, then from a {@link PathFilter} annotation on the handler method,
 * and finally default to {@code {"*", "*.*"}}. {@link Page} results are
 * flattened into layui's {@code data}/{@code count} shape.
 */
@ControllerAdvice
@Order(value = Ordered.HIGHEST_PRECEDENCE)
public class JacksonResponseCustomizer implements ResponseBodyAdvice<Object> {
    /** Only intercept responses written by Jackson-based converters. */
    @Override
    public boolean supports(MethodParameter returnType, Class<? extends HttpMessageConverter<?>> converterType) {
        return AbstractJackson2HttpMessageConverter.class.isAssignableFrom(converterType);
    }
    /**
     * Wraps the body in a {@link MappingJacksonValue} container and delegates
     * to {@link #beforeBodyWriteInternal}. A {@code null} body is replaced by
     * the shared success envelope.
     */
    @Override
    public Object beforeBodyWrite(Object body, MethodParameter returnType, MediaType selectedContentType, Class<? extends HttpMessageConverter<?>> selectedConverterType, ServerHttpRequest request, ServerHttpResponse response) {
        if (body == null) {
            return ResultBuilder.SUCCESS;
        }
        MappingJacksonValue container = getOrCreateContainer(body);
        beforeBodyWriteInternal(container, selectedContentType, returnType, request, response);
        return container;
    }
    /** Reuses an existing container or wraps the body in a new one. */
    protected MappingJacksonValue getOrCreateContainer(Object body) {
        return (body instanceof MappingJacksonValue ? (MappingJacksonValue) body : new MappingJacksonValue(body));
    }
    /**
     * Resolves the path filters, installs them on the container, and rewraps
     * the value in the result envelope ({@link Page} and {@link DataWrap}
     * get special handling).
     */
    protected void beforeBodyWriteInternal(MappingJacksonValue bodyContainer, MediaType contentType, MethodParameter returnType, ServerHttpRequest request, ServerHttpResponse response) {
        Object value = bodyContainer.getValue();
        if (value != null) {
            HttpServletRequest httpRequest = ((ServletServerHttpRequest) request).getServletRequest();
            // 1st priority: comma-separated filters from the request itself.
            String filterString = httpRequest.getParameter("_path_filter");
            String[] filters = Strings.isNullOrEmpty(filterString) ? null : filterString.split(",");
            if (filters == null || filters.length == 0) {
                // 2nd priority: @PathFilter on the handler method.
                if (returnType.hasMethodAnnotation(PathFilter.class)) {
                    filters = returnType.getMethodAnnotation(PathFilter.class).value();
                }
            }
            if (filters == null || filters.length == 0) {
                // Default: expose top-level fields and one nesting level.
                filters = new String[]{"*", "*.*"};
            }
            bodyContainer.setFilters(Jackson2Helper.buildFilterProvider(filters));
        }
        if (value instanceof Page) {
            // Pages become layui-style {code, msg, data, count}.
            Page<?> page = (Page<?>) value;
            bodyContainer.setValue(new ResultBuilder(0, "").layuiPage(page).build());
        } else if (value instanceof DataWrap) {
            bodyContainer.setValue(new ResultBuilder(0, "").data(((DataWrap) value).getData()).build());
        } else {
            bodyContainer.setValue(new ResultBuilder(0, "").data(value).build());
        }
    }
    /**
     * Mutable builder for the response envelope map.
     * NOTE(review): {@code newResult} mutates this builder rather than
     * creating a new one, despite its name — confirm intent before reuse.
     */
    static class ResultBuilder implements Builder<Map<String, Object>> {
        /** Shared empty success envelope ({@code code=0, msg=""}). */
        private static final Map<String, Object> SUCCESS = new ResultBuilder(0, "").build();
        private Map<String, Object> map = new HashMap<>();
        public ResultBuilder(Integer code, String msg) {
            put("code", code).put("msg", msg);
        }
        public ResultBuilder newResult(Integer code, String msg) {
            return put("code", code).put("msg", msg);
        }
        public ResultBuilder data(Object data) {
            return put("data", data);
        }
        public ResultBuilder put(String key, Object value) {
            map.put(key, value);
            return this;
        }
        /** Flattens a Spring Data page into layui's data/count fields. */
        public ResultBuilder layuiPage(Page<?> page) {
            return data(page.getContent()).put("count", page.getTotalElements());
        }
        @Override
        public Map<String, Object> build() {
            return map;
        }
    }
}
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-01T20:59:46.474+08:00
*/
package jken.module.core.entity;
import jken.support.data.Disabledable;
import jken.support.data.jpa.DataEntity;
import javax.persistence.*;
import javax.validation.constraints.NotNull;
/**
 * Tenant/company entity mapped to {@code tbl_core_corp}.
 * <p>
 * {@code code} is the unique business identifier; {@code disabled} implements
 * the {@link Disabledable} soft-disable contract.
 */
@Entity
@Table(name = "tbl_core_corp")
public class Corp extends DataEntity<User, Long> implements Disabledable {
    // Display name of the corporation.
    @NotNull
    @Column(length = 100, nullable = false)
    private String name;
    // Unique business code used to look up the tenant.
    @NotNull
    @Column(unique = true, length = 100, nullable = false)
    private String code;
    // Account status, stored as the enum name string.
    @Column(length = 20)
    @Enumerated(EnumType.STRING)
    private Status status;
    // URL/path of the logo image.
    @Column(length = 200)
    private String logo;
    @Column(length = 200)
    private String website;
    @Column(length = 1000)
    private String introduction;
    // Soft-disable flag (Disabledable contract); defaults to enabled.
    private boolean disabled = false;
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getCode() {
        return code;
    }
    public void setCode(String code) {
        this.code = code;
    }
    public Status getStatus() {
        return status;
    }
    public void setStatus(Status status) {
        this.status = status;
    }
    public String getLogo() {
        return logo;
    }
    public void setLogo(String logo) {
        this.logo = logo;
    }
    public String getWebsite() {
        return website;
    }
    public void setWebsite(String website) {
        this.website = website;
    }
    public String getIntroduction() {
        return introduction;
    }
    public void setIntroduction(String introduction) {
        this.introduction = introduction;
    }
    @Override
    public boolean isDisabled() {
        return disabled;
    }
    @Override
    public void setDisabled(boolean disabled) {
        this.disabled = disabled;
    }
    /** Account status: NORMAL = in good standing, ARREARAGE = payment overdue. */
    public enum Status {
        NORMAL,
        ARREARAGE
    }
}
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-01T20:59:46.421+08:00
*/
package jken.support.data.jpa;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.repository.NoRepositoryBean;
import java.io.Serializable;
/**
 * Base repository contract for all entities; not instantiated as a Spring
 * Data repository bean itself.
 *
 * @param <T> entity type
 * @param <I> entity id type
 */
@NoRepositoryBean
public interface EntityRepository<T, I extends Serializable> extends JpaRepository<T, I> {
    /** Creates a new, unsaved instance of the entity type. */
    T createNew();
}
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-04T15:00:37.478+08:00
*/
package jken.security;
import org.springframework.security.core.annotation.AuthenticationPrincipal;
import java.lang.annotation.*;
/**
 * Meta-annotation for controller parameters that injects the id of the
 * currently authenticated principal (delegates to
 * {@link AuthenticationPrincipal} with SpEL expression {@code "id"}).
 */
@Target(ElementType.PARAMETER)
@Retention(RetentionPolicy.RUNTIME)
@Documented
@AuthenticationPrincipal(expression = "id")
public @interface CurrentUser {
}
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-07T18:21:24.118+08:00
*/
package jken.module.core.service;
import jken.module.core.entity.Dict;
import jken.module.core.entity.DictItem;
import jken.module.core.entity.QDict;
import jken.support.service.CrudService;
import org.springframework.stereotype.Service;
import java.util.List;
/**
 * CRUD service for {@link Dict} dictionaries and their items.
 */
@Service
public class DictService extends CrudService<Dict, Long> {
    /**
     * Saves a dictionary, first back-linking every item to its owning dict so
     * the JPA relationship is consistent.
     * NOTE(review): assumes getItems() never returns null — verify, otherwise
     * this throws NPE before saving.
     */
    @Override
    public <S extends Dict> S save(S entity) {
        List<DictItem> items = entity.getItems();
        items.forEach(item -> item.setDict(entity));
        return super.save(entity);
    }
    /**
     * Finds a dictionary by its unique code.
     *
     * @throws RuntimeException if no dictionary with that code exists
     */
    public Dict findByCode(String code) {
        return getRepository().findOne(QDict.dict.code.eq(code)).orElseThrow(RuntimeException::new);
    }
    /** Returns the items of the dictionary identified by {@code code}. */
    public List<DictItem> getItemsByCode(String code) {
        Dict dict = findByCode(code);
        return dict.getItems();
    }
}
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-07T18:03:18.918+08:00
*/
package jken.module.core.entity;
import com.fasterxml.jackson.annotation.JsonBackReference;
import jken.support.data.jpa.TreeEntity;
import javax.persistence.*;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import java.util.Objects;
/**
 * Single entry of a {@link Dict} dictionary, mapped to
 * {@code tbl_core_dict_item}. Items form a tree (via {@link TreeEntity}) so
 * grouped lists can nest options under group nodes.
 */
@Entity
@Table(name = "tbl_core_dict_item")
public class DictItem extends TreeEntity<DictItem, User, Long> {
    // Owning dictionary; back-reference so JSON serialization does not recurse.
    @ManyToOne(fetch = FetchType.LAZY)
    @JoinColumn(name = "dict_id")
    @JsonBackReference
    private Dict dict;
    @NotNull
    @Size(max = 60)
    @Column(length = 64)
    private String name;
    @NotNull
    @Size(max = 60)
    @Column(length = 64)
    private String value;
    /**
     * Effective item type: inherits the dict's type, except that a child of a
     * LIST_GROUP dictionary is treated as a LIST_OPTION.
     */
    public Dict.ItemType getType() {
        Dict.ItemType itemType = getDict().getType();
        if (Objects.equals(itemType, Dict.ItemType.LIST_GROUP) && getParent() != null) {
            itemType = Dict.ItemType.LIST_OPTION;
        }
        return itemType;
    }
    public Dict getDict() {
        return dict;
    }
    public void setDict(Dict dict) {
        this.dict = dict;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getValue() {
        return value;
    }
    public void setValue(String value) {
        this.value = value;
    }
}
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-06T17:22:17.044+08:00
*/
package jken.module.core.entity;
import jken.support.data.jpa.DataEntity;
import javax.persistence.*;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
/**
 * In-app message (notification or private letter) mapped to
 * {@code tbl_core_message}.
 * <p>
 * NOTE(review): fields {@code from} and {@code to} will default to column
 * names "from"/"to", which are SQL reserved words on many databases —
 * confirm the dialect tolerates this or add explicit {@code @Column} names.
 */
@Entity
@Table(name = "tbl_core_message")
public class Message extends DataEntity<User, Long> {
    @NotNull
    @Size(max = 100)
    @Column(length = 255)
    private String title;
    @Size(max = 1000)
    @Column(length = 2047)
    private String content;
    // Sender of the message.
    @ManyToOne
    private User from;
    // Recipient of the message.
    @ManyToOne
    private User to;
    @Enumerated(EnumType.STRING)
    private Type type;
    // Whether the recipient has read the message.
    private boolean readed = false;
    public String getTitle() {
        return title;
    }
    public void setTitle(String title) {
        this.title = title;
    }
    public String getContent() {
        return content;
    }
    public void setContent(String content) {
        this.content = content;
    }
    public User getFrom() {
        return from;
    }
    public void setFrom(User from) {
        this.from = from;
    }
    public User getTo() {
        return to;
    }
    public void setTo(User to) {
        this.to = to;
    }
    public Type getType() {
        return type;
    }
    public void setType(Type type) {
        this.type = type;
    }
    public boolean isReaded() {
        return readed;
    }
    public void setReaded(boolean readed) {
        this.readed = readed;
    }
    /** Message kind: system NOTIFICATION or user-to-user LETTER. */
    public enum Type {
        NOTIFICATION,
        LETTER
    }
}
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-01T20:59:46.438+08:00
*/
package jken.support.data;
/**
 * Strategy for resolving the tenant (corp) code of the current request or
 * execution context.
 */
public interface CorpDetection {
    /** Returns the code of the currently active corp. */
    String getCurrentCorpCode();
}
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-01T20:59:46.411+08:00
*/
package jken.security;
import org.springframework.context.ApplicationContext;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
/**
 * Lazily-resolving proxy for a {@link UserDetailsService} bean.
 * <p>
 * Defers the bean lookup until the first authentication attempt, which breaks
 * bean-creation-ordering cycles between the security configuration and the
 * user service. Resolution uses double-checked locking on the volatile
 * {@code target} field, so the lookup happens at most once.
 */
public class DelegatingUserDetailsServiceProxy implements UserDetailsService {
    // volatile is required for the double-checked locking below.
    private volatile UserDetailsService target;
    private ApplicationContext applicationContext;
    // Bean type to look up; defaults to UserDetailsService itself.
    private Class<?> userDetailsServiceInterface = UserDetailsService.class;
    public DelegatingUserDetailsServiceProxy(ApplicationContext applicationContext) {
        this.applicationContext = applicationContext;
    }
    public DelegatingUserDetailsServiceProxy(ApplicationContext applicationContext, Class<?> userDetailsServiceInterface) {
        this(applicationContext);
        this.userDetailsServiceInterface = userDetailsServiceInterface;
    }
    /**
     * Resolves the delegate on first use, then forwards the lookup.
     *
     * @throws RuntimeException if no matching bean can be found
     */
    @Override
    public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException {
        if (target == null) {
            synchronized (this) {
                if (target == null) {
                    target = (UserDetailsService) this.applicationContext.getBean(userDetailsServiceInterface);
                    if (target == null) {
                        throw new RuntimeException("userDetailsService not found.");
                    }
                }
            }
        }
        return target.loadUserByUsername(username);
    }
}
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-01T20:59:46.448+08:00
*/
package jken.support.json;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Persistable;
import java.io.IOException;
import java.io.Serializable;
import java.util.List;
/**
 * Jackson serializer that renders a Spring Data {@link Page} as a flat JSON
 * object: {@code current} (1-based page number), {@code rowCount} (page
 * size), {@code rows} (content list), and {@code total} (total elements).
 *
 * @param <P> page type
 * @param <T> element type
 * @param <I> element id type
 */
public class PageSerializer<P extends Page<T>, T extends Persistable<I>, I extends Serializable> extends JsonSerializer<P> {
    public static final String DEFAULT_TOTAL_FIELD = "total";
    public static final String DEFAULT_CURRENT_FIELD = "current";
    public static final String DEFAULT_ROWCOUNT_FIELD = "rowCount";
    public static final String DEFAULT_ROWS_FIELD = "rows";
    @Override
    public void serialize(P ts, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) throws IOException {
        jsonGenerator.writeStartObject();
        // Page.getNumber() is 0-based; clients expect a 1-based page index.
        jsonGenerator.writeNumberField(DEFAULT_CURRENT_FIELD, ts.getNumber() + 1);
        jsonGenerator.writeNumberField(DEFAULT_ROWCOUNT_FIELD, ts.getSize());
        jsonGenerator.writeFieldName(DEFAULT_ROWS_FIELD);
        // Delegate to the registered List serializer so element serializers
        // (and any active filters) still apply to the content.
        serializerProvider.findValueSerializer(List.class, null).serialize(ts.getContent(), jsonGenerator, serializerProvider);
        jsonGenerator.writeNumberField(DEFAULT_TOTAL_FIELD, ts.getTotalElements())
;
        jsonGenerator.writeEndObject();
    }
}
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-01T20:59:46.470+08:00
*/
package jken.module.core.web;
import com.google.common.base.Strings;
import com.querydsl.core.types.Predicate;
import jken.module.core.entity.MenuItem;
import jken.support.web.TreeController;
import org.apache.commons.lang3.StringUtils;
import org.springframework.data.domain.Sort;
import org.springframework.data.querydsl.binding.QuerydslPredicate;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import java.util.List;
import java.util.Map;
/**
 * Tree-style CRUD controller for navigation {@link MenuItem}s under
 * {@code /menu}.
 */
@Controller
@RequestMapping("/menu")
public class MenuController extends TreeController<MenuItem, Long> {
    /** Flat list endpoint; delegates to the base implementation. */
    @Override
    public List<Object> list(@QuerydslPredicate(root = MenuItem.class) Predicate predicate, Sort sort) {
        return super.doList(predicate, sort);
    }
    /** Tree endpoint; delegates to the base implementation. */
    @Override
    public List<Object> tree(@QuerydslPredicate(root = MenuItem.class) Predicate predicate, Sort sort) {
        return super.doTree(predicate, sort);
    }
    /**
     * Adds menu-specific fields to each list row. "javascript:" hrefs are
     * blanked out so they are not rendered as links; iconCls is normalized
     * to an empty string when null.
     */
    @Override
    protected void extraListConvert(Map<String, Object> data, MenuItem entity) {
        data.put("name", entity.getName());
        data.put("href", StringUtils.startsWith(entity.getHref(), "javascript:") ? "" : entity.getHref());
        data.put("code", entity.getCode());
        data.put("iconCls", Strings.nullToEmpty(entity.getIconCls()));
    }
    /** Tree nodes display the menu item's name. */
    @Override
    protected String treeNodeDisplay(MenuItem entity) {
        return entity.getName();
    }
}
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-03T20:13:33.752+08:00
*/
package jken.integration;
/**
 * Extension point implemented by pluggable modules to hook themselves into
 * the host application.
 */
public interface ModuleIntegration {
    /** Unique, human-readable name of the integration. */
    String getName();
    /** Performs the integration against the given module descriptor. */
    void integrate(JkenModule module);
}
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-11T13:49:16.347+08:00
*/
package jken.support.web;
import jken.support.data.jpa.Entity;
import jken.support.service.CrudService;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.ui.Model;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.*;
import javax.validation.Valid;
import java.io.Serializable;
import java.util.Arrays;
public class CrudController<T extends Entity<I>, I extends Serializable> extends BaseController implements InitializingBean {
@Autowired
private CrudService<T, I> service;
private String viewDir;
@Override
public void afterPropertiesSet() {
RequestMapping requestMapping = getClass().getAnnotation(RequestMapping.class);
if (requestMapping != null) {
String path = StringUtils.trimToEmpty(requestMapping.value()[0]);
setViewDir(StringUtils.removeStart(path, "/"));
}
}
/**
* 显示列表页面
*
* @param model
* @return
*/
@GetMapping(produces = "text/html")
public String showList(Model model) {
onShowList(model);
return getViewDir() + "/list";
}
/**
* 显示添加页面
*
* @param model
* @return
*/
@GetMapping(value = "/add", produces = "text/html")
public String showDetailAdd(T entity, Model model) {
if (entity == null) {
entity = getService().createNew();
}
return showDetailEdit(entity, model);
}
/**
* 显示编辑页面
*
* @param entity
* @param model
* @return
*/
@GetMapping(value = "/{id}", produces = "text/html")
public String showDetailEdit(@PathVariable("id") T entity, Model model) {
model.addAttribute("entity", entity);
onShowEdit(entity, model);
return getViewDir() + "/edit";
}
//=====================================================
// 以下是REST操作
//=====================================================
/**
 * Creates a new entity (HTTP POST). Delegates to {@link #update} because the
 * validate/save pipeline is identical for create and update.
 *
 * @param entity        entity bound from form parameters
 * @param bindingResult validation outcome for {@code entity}
 */
@PostMapping
@ResponseBody
public void create(@ModelAttribute @Valid T entity, BindingResult bindingResult) {
update(entity, bindingResult);
}
/**
 * Updates an entity (HTTP PUT): runs {@link #onValidate}, then the
 * before/save/after hooks.
 *
 * NOTE(review): the parameter is annotated {@code @ModelAttribute("id")};
 * "id" here names a model attribute rather than a path binding — confirm the
 * entity is actually resolved from the {id} path variable as intended.
 *
 * @param entity        entity to persist
 * @param bindingResult validation outcome for {@code entity}
 * @throws RuntimeException when validation reports errors
 */
@PutMapping("/{id}")
@ResponseBody
public void update(@ModelAttribute("id") @Valid T entity, BindingResult bindingResult) {
onValidate(entity, bindingResult);
if (bindingResult.hasErrors()) {
throw new RuntimeException("validate error");
}
onBeforeSave(entity);
onSave(entity);
onAfterSave(entity);
}
/**
 * Deletes a single entity (HTTP DELETE). {@link #onBeforeDelete} may veto
 * the deletion by returning false.
 *
 * @param entity entity resolved from the {id} path variable
 */
@DeleteMapping("/{id}")
@ResponseBody
public void delete(@PathVariable("id") T entity) {
if (onBeforeDelete(entity)) {
onDelete(entity);
}
}
/**
 * Deletes several entities in one request; ids are passed as "ids[]".
 *
 * @param entities entities resolved from the submitted ids
 */
@DeleteMapping
@ResponseBody
public void batchDelete(@RequestParam(value = "ids[]") T[] entities) {
if (entities != null) {
onBatchDelete(entities);
}
}
/** Hook: populate the list-view model. No-op by default. */
protected void onShowList(Model model) {
}
/** Hook: populate the edit-view model. No-op by default. */
protected void onShowEdit(T entity, Model model) {
}
/** Hook: extra validation before save; report problems via bindingResult. */
protected void onValidate(T entity, BindingResult bindingResult) {
}
/** Hook: runs just before the entity is saved. No-op by default. */
protected void onBeforeSave(T entity) {
}
/** Persists the entity via the service; override to customize the save. */
protected void onSave(T entity) {
getService().save(entity);
}
/** Hook: runs after a successful save. No-op by default. */
protected void onAfterSave(T entity) {
}
/** Hook: return false to veto deletion. Allows deletion by default. */
protected boolean onBeforeDelete(T entity) {
return true;
}
/** Performs the actual delete via the service; override to customize. */
protected void onDelete(T entity) {
getService().delete(entity);
}
/** Deletes all given entities in a single batch via the service. */
protected void onBatchDelete(T[] entities) {
getService().deleteInBatch(Arrays.asList(entities));
}
/** The CRUD service backing this controller (injected). */
public CrudService<T, I> getService() {
return service;
}
/** View directory prefix used to build view names, e.g. "user" -> "user/list". */
public String getViewDir() {
return viewDir;
}
/** Sets the view directory prefix (normally derived in afterPropertiesSet). */
public void setViewDir(String viewDir) {
this.viewDir = viewDir;
}
}<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-04T21:29:01.400+08:00
*/
package jken.support.json.pathfilter;
import java.lang.annotation.*;
/**
 * Declares property path patterns that restrict which fields are serialized
 * to JSON for the annotated type or handler method. The patterns are
 * presumably consumed by AntPathPropertyFilter (see Jackson2Helper) — they
 * look Ant-style, e.g. "id" or "user.*"; confirm against that filter.
 */
@Target({ElementType.TYPE, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface PathFilter {
// Property path patterns; empty means no explicit filtering declared.
String[] value() default {};
}
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-01T20:59:46.485+08:00
*/
package jken.module.core.service;
import com.google.common.collect.Sets;
import jken.integration.Authority;
import jken.integration.IntegrationService;
import jken.integration.JkenModule;
import jken.module.core.entity.*;
import jken.module.core.repo.CorpRepository;
import jken.support.service.CrudService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.stereotype.Service;
import javax.transaction.Transactional;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
@Service
public class CorpService extends CrudService<Corp, Long> {
@Autowired
private MenuItemService menuItemService;
@Autowired
private RoleService roleService;
@Autowired
private UserService userService;
@Autowired
private DictService dictService;
@Autowired
private PasswordEncoder passwordEncoder;
@Autowired
private CorpRepository corpRepository;
public Corp findByCode(String code) {
return corpRepository.findByCode(code);
}
@Transactional
public Corp createNewCorp(String name, String code, String adminUsername, String adminPassword) {
Corp corp = buildCorp(name, code);
List<JkenModule> modules = IntegrationService.getModules();
List<String> authorities = IntegrationService.getAuthorities().stream().map(Authority::getCode).collect(Collectors.toList());
List<MenuItem> menuItems = buildMenuItems(code, modules);
Role role = buildAdminRole(code, menuItems, authorities);
buildAdmin(code, role, adminUsername, adminPassword);
buildDicts(code, modules);
return corp;
}
protected Corp buildCorp(String name, String code) {
Corp corp = createNew();
corp.setName(name);
corp.setCode(code);
corp.setStatus(Corp.Status.NORMAL);
return save(corp);
}
protected List<MenuItem> buildMenuItems(String corpCode, List<JkenModule> modules) {
final List<MenuItem> menuItems = new ArrayList<>();
Integer sortNo = 1;
for (JkenModule module : modules) {
List<JkenModule.Mi> mis = module.getMenus();
if (mis != null) {
buildMenus(menuItems, mis, null, corpCode, sortNo);
sortNo++;
}
}
return menuItems;
}
private void buildMenus(final List<MenuItem> result, List<JkenModule.Mi> mis, MenuItem parent, String corpCode, Integer sortNo) {
for (JkenModule.Mi mi : mis) {
MenuItem menuItem = buildMenuItem(mi.getName(), mi.getCode(), mi.getHref(), mi.getIconCls(), mi.getAuthorities(), sortNo, corpCode, parent);
sortNo++;
result.add(menuItem);
if (mi.getChildren() != null) {
int subSort = 1;
buildMenus(result, mi.getChildren(), menuItem, corpCode, subSort);
subSort++;
}
}
}
protected Role buildAdminRole(String corpCode, List<MenuItem> menuItems, List<String> authorities) {
Role role = roleService.createNew();
role.setLocked(true);
role.setName("管理员");
role.setCode(Authority.AUTHORITY_ADMIN);
role.setMenuItems(menuItems);
role.setAuthorities(authorities);
role.setCorpCode(corpCode);
roleService.save(role);
return role;
}
protected User buildAdmin(String corpCode, Role adminRole, String username, String password) {
User user = userService.createNew();
user.setName("管理员");
user.setUsername(username);
user.setPassword(passwordEncoder.encode(password));
user.setLocked(true);
user.setRoles(Sets.newHashSet(adminRole));
user.setCorpCode(corpCode);
return userService.save(user);
}
private MenuItem buildMenuItem(String name, String code, String href, String iconCls, String authorities, Integer sortNo, String corpCode, MenuItem parent) {
MenuItem mi = menuItemService.createNew();
mi.setName(name);
mi.setCode(code);
mi.setHref(href);
mi.setIconCls(iconCls);
mi.setCorpCode(corpCode);
mi.setSortNo(sortNo * 100);
mi.setAuthorities(authorities);
mi.setParent(parent);
menuItemService.save(mi);
return mi;
}
private void buildDicts(String corpCode, List<JkenModule> modules) {
modules.forEach(module -> {
if (module.getDicts() != null) {
module.getDicts().forEach(dict -> {
Dict dictEntity = dictService.createNew();
dictEntity.setName(dict.getName());
dictEntity.setCode(dict.getCode());
dictEntity.setLocked(true);
dictEntity.setCorpCode(corpCode);
dictEntity.setItems(dict.getItems().stream().map(item -> {
DictItem dictItem = new DictItem();
dictItem.setName(item.getName());
dictItem.setValue(item.getValue());
return dictItem;
}).collect(Collectors.toList()));
dictService.save(dictEntity);
});
}
});
}
}
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-04T15:00:37.481+08:00
*/
package jken.support.web;
import com.querydsl.core.types.Predicate;
import jken.support.data.jpa.Entity;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import java.io.Serializable;
/**
 * Base controller for entities backed by a Querydsl-capable service: adds a
 * paged JSON list endpoint on top of {@link CrudController}.
 */
public abstract class EntityController<T extends Entity<I>, I extends Serializable> extends CrudController<T, I> {
/**
 * Returns one page of entities as JSON.
 *
 * @param predicate Querydsl filter built from request parameters
 * @param pageable  page, size and sort information
 * @return the requested page
 */
@GetMapping(produces = "application/json")
@ResponseBody
public abstract Page<T> list(Predicate predicate, Pageable pageable);
/** Default paging implementation subclasses can delegate to. */
protected Page<T> doInternalPage(Predicate predicate, Pageable pageable) {
return getService().findAll(predicate, pageable);
}
}
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-08T12:28:42.204+08:00
*/
package jken;
import com.google.common.collect.Lists;
import org.springframework.boot.context.properties.ConfigurationProperties;
import java.util.List;
/**
 * Application configuration bound from the "jken" property prefix
 * (e.g. jken.name, jken.owner-corp, jken.security.ignore-patterns).
 */
@ConfigurationProperties("jken")
public class JkenProperties {
// Application display name.
private String name = "jken";
// Application version string.
private String version = "1.0";
// Code of the owner corp (tenant); admins of this corp are promoted to
// super admin in AbstractUserDetailsService.
private String ownerCorp = "wl";
private final Security security = new Security();
public static class Security {
// URL patterns excluded from security filtering (static assets, H2 console).
private List<String> ignorePatterns = Lists.newArrayList("/js/**", "/images/**", "/css/**", "/layuiadmin/**", "/favicon.ico", "/h2/**");
public List<String> getIgnorePatterns() {
return ignorePatterns;
}
public void setIgnorePatterns(List<String> ignorePatterns) {
this.ignorePatterns = ignorePatterns;
}
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getVersion() {
return version;
}
public void setVersion(String version) {
this.version = version;
}
public String getOwnerCorp() {
return ownerCorp;
}
public void setOwnerCorp(String ownerCorp) {
this.ownerCorp = ownerCorp;
}
public Security getSecurity() {
return security;
}
}
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-04T21:29:01.399+08:00
*/
package jken.support.json.pathfilter;
import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
/**
* This is just a help for a start.
*/
public class Jackson2Helper {
/**
 * Builds a Jackson FilterProvider that registers the given path patterns
 * under the filter id "antPathFilter" — presumably the id referenced by
 * AntPathFilterMixin (TODO confirm against that mixin).
 *
 * @param filters property path patterns to apply
 * @return the configured FilterProvider
 */
public static SimpleFilterProvider buildFilterProvider(final String... filters) {
return new SimpleFilterProvider().addFilter("antPathFilter", new AntPathPropertyFilter(filters));
}
}<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-01T20:59:46.444+08:00
*/
package jken.support.data;
/**
 * Contract for objects that carry an explicit sort number used to order
 * siblings (e.g. menu items; see CorpService.buildMenuItem).
 */
public interface Sortable {
Integer getSortNo();
void setSortNo(Integer sortNo);
}
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-04T21:29:01.402+08:00
*/
package jken.support.json;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.module.SimpleModule;
import jken.support.data.jpa.Entity;
import jken.support.json.pathfilter.AntPathFilterMixin;
import org.springframework.boot.autoconfigure.jackson.Jackson2ObjectMapperBuilderCustomizer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.Ordered;
import org.springframework.data.domain.Page;
import org.springframework.http.converter.json.Jackson2ObjectMapperBuilder;
/**
 * Jackson configuration: null-suppressing serialization, a custom Page
 * serializer, and the Ant-path property-filter mixin for all entities.
 */
@Configuration
public class JsonConfig {
// Registers the customizer below with Spring Boot's ObjectMapper builder.
@Bean
public Jackson2ObjectMapperBuilderCustomizer jackson2ObjectMapperBuilderCustomizer() {
return new JkenJackson2ObjectMapperBuilderCustomizer();
}
// Serializes Spring Data Page objects with the project's PageSerializer.
@Bean
public SimpleModule simpleModule() {
SimpleModule module = new SimpleModule();
module.addSerializer(Page.class, new PageSerializer<>());
return module;
}
static final class JkenJackson2ObjectMapperBuilderCustomizer implements Jackson2ObjectMapperBuilderCustomizer, Ordered {
@Override
public void customize(Jackson2ObjectMapperBuilder builder) {
// Omit null fields from JSON output.
builder.serializationInclusion(JsonInclude.Include.NON_NULL);
// Tolerate self-referencing object graphs (e.g. bidirectional relations).
builder.featuresToDisable(SerializationFeature.FAIL_ON_SELF_REFERENCES);
// Apply the Ant-path property filter mixin to every Entity.
builder.mixIn(Entity.class, AntPathFilterMixin.class);
}
@Override
public int getOrder() {
// Run last so these settings win over earlier customizers.
return Ordered.LOWEST_PRECEDENCE;
}
}
}
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-05T19:35:47.655+08:00
*/
package jken.module.core.service;
import jken.module.core.entity.User;
import jken.module.core.repo.RoleRepository;
import jken.module.core.repo.UserRepository;
import jken.security.AbstractUserDetailsService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.transaction.Transactional;
/**
 * Tenant-aware UserDetailsService backed by the core User repository.
 */
@Service
@Transactional
public class CorpUserDetailsService extends AbstractUserDetailsService<User, Long> {
@Autowired
private UserRepository userRepository;
// NOTE(review): roleRepository is not used anywhere in this class.
@Autowired
private RoleRepository roleRepository;
// Resolves the current auditor's full entity by primary key.
// NOTE(review): getOne() returns a lazy reference that only fails on first
// access when the id does not exist — confirm callers tolerate that.
@Override
protected User loadRepoUserDetails(Long id) {
return userRepository.getOne(id);
}
// Tenant-scoped username lookup; may return null (handled by the base class).
@Override
protected User loadUserByUsernameAndCorpCode(String username, String corpCode) {
return userRepository.findByUsernameAndCorpCode(username, corpCode);
}
}
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-06T17:28:25.790+08:00
*/
package jken.module.core.repo;
import com.querydsl.core.types.dsl.StringExpression;
import jken.module.core.entity.Message;
import jken.module.core.entity.QMessage;
import jken.support.data.jpa.QuerydslEntityRepository;
import org.springframework.data.querydsl.binding.QuerydslBinderCustomizer;
import org.springframework.data.querydsl.binding.QuerydslBindings;
/**
 * Repository for Message entities with Querydsl support. The binder
 * customization below makes the title request parameter a substring match.
 */
public interface MessageRepository extends QuerydslEntityRepository<Message, Long>, QuerydslBinderCustomizer<QMessage> {
@Override
default void customize(QuerydslBindings querydslBindings, QMessage qMessage) {
// ?title=foo matches any message whose title contains "foo".
querydslBindings.bind(qMessage.title).first(StringExpression::contains);
}
}
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-05T19:35:47.652+08:00
*/
package jken.module.core.entity;
import jken.support.data.Lockedable;
import jken.support.data.jpa.CorpableEntity;
import javax.persistence.*;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import java.util.Collection;
import java.util.List;
/**
 * A tenant-scoped security role: groups menu items and authority codes and
 * is assigned to users via a many-to-many relation.
 *
 * NOTE(review): the CorpableEntity type parameter is User, not Role —
 * confirm whether that is intentional.
 */
@Entity
@Table(name = "tbl_core_role")
public class Role extends CorpableEntity<User, Long> implements Lockedable {
// Human-readable role name.
@NotNull
@Size(max = 20)
@Column(length = 63)
private String name;
// Machine role code (e.g. the admin authority code).
@NotNull
@Size(max = 20)
@Column(length = 31)
private String code;
// Free-text description.
@Size(max = 1000)
@Column(length = 1023)
private String description;
// Locked roles are system-managed (e.g. the provisioned admin role).
private boolean locked = false;
// Users holding this role; owning side is User.roles.
@ManyToMany(mappedBy = "roles")
private Collection<User> users;
// Menu items visible to holders of this role.
@ManyToMany(fetch = FetchType.LAZY)
@JoinTable(name = "tbl_core_role_menu",
joinColumns = @JoinColumn(name = "role_id", referencedColumnName = "id"), inverseJoinColumns = @JoinColumn(name = "menu_id", referencedColumnName = "id"))
private List<MenuItem> menuItems;
// Authority codes granted by this role.
@ElementCollection(fetch = FetchType.LAZY)
@CollectionTable(name = "tbl_core_role_authority")
private List<String> authorities;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getCode() {
return code;
}
public void setCode(String code) {
this.code = code;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
@Override
public boolean isLocked() {
return locked;
}
@Override
public void setLocked(boolean locked) {
this.locked = locked;
}
public Collection<User> getUsers() {
return users;
}
public void setUsers(Collection<User> users) {
this.users = users;
}
public List<MenuItem> getMenuItems() {
return menuItems;
}
public void setMenuItems(List<MenuItem> menuItems) {
this.menuItems = menuItems;
}
public List<String> getAuthorities() {
return authorities;
}
public void setAuthorities(List<String> authorities) {
this.authorities = authorities;
}
}
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-01T20:59:46.428+08:00
*/
package jken.support.data.jpa;
import org.springframework.data.querydsl.QuerydslPredicateExecutor;
import org.springframework.data.repository.NoRepositoryBean;
import java.io.Serializable;
/**
 * Base repository contract combining the project's EntityRepository with
 * Querydsl predicate execution; never instantiated directly by Spring Data.
 */
@NoRepositoryBean
public interface QuerydslEntityRepository<T, I extends Serializable> extends EntityRepository<T, I>, QuerydslPredicateExecutor<T> {
}
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-04T15:00:37.510+08:00
*/
package jken.endpoint;
import jken.module.core.entity.User;
import jken.module.core.service.MenuItemService;
import jken.support.data.TreeHelper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.AuditorAware;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.GetMapping;
/**
 * Serves the application shell: the index page (with the current user's menu
 * tree) and the default home pane.
 */
@Controller
public class IndexEndpoint {
// NOTE(review): menuItemService is not used anywhere in this class.
@Autowired
private MenuItemService menuItemService;
@Autowired
private AuditorAware<User> auditorAware;
// Renders the main layout with the logged-in user and their menu tree.
@GetMapping({"", "/"})
public String indexPage(Model model) {
// Fails fast when no authenticated user is available.
User currentUser = auditorAware.getCurrentAuditor().orElseThrow(RuntimeException::new);
model.addAttribute("currentUser", currentUser);
model.addAttribute("mis", TreeHelper.toTree(currentUser.getMenuItems()));
return "index";
}
// Default content pane shown inside the layout.
@GetMapping({"/home"})
public String homePage() {
return "home";
}
}
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-05T19:35:47.647+08:00
*/
package jken.security;
import com.google.common.collect.Lists;
import jken.JkenProperties;
import jken.integration.Authority;
import jken.support.data.jpa.Entity;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.data.domain.AuditorAware;
import org.springframework.orm.jpa.EntityManagerFactoryUtils;
import org.springframework.orm.jpa.EntityManagerHolder;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import java.io.Serializable;
import java.util.Collection;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
/**
 * Base UserDetailsService that loads users per corp (tenant), binds a
 * short-lived EntityManager during authentication so lazy associations can be
 * read, and promotes owner-corp admins to super admin. Also serves as the
 * AuditorAware source for JPA auditing.
 */
public abstract class AbstractUserDetailsService<U extends UserDetails, I extends Serializable> implements UserDetailsService, AuditorAware<U>, ApplicationContextAware {

    @Autowired
    private JkenProperties properties;

    private ApplicationContext applicationContext;

    /** Loads the full repository entity for the given id (current-auditor resolution). */
    protected abstract U loadRepoUserDetails(I id);

    /** Looks up a user by username within the given corp. May return null. */
    protected abstract U loadUserByUsernameAndCorpCode(String username, String corpCode);

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) {
        this.applicationContext = applicationContext;
    }

    @Override
    public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException {
        // Authentication runs outside a @Transactional boundary, so bind a
        // short-lived EntityManager to the current thread to allow lazy
        // loading of the user's authorities while we copy them out.
        EntityManagerFactory entityManagerFactory = applicationContext.getBean(EntityManagerFactory.class);
        EntityManager entityManager = entityManagerFactory.createEntityManager();
        TransactionSynchronizationManager.bindResource(entityManagerFactory, new EntityManagerHolder(entityManager));
        String corpCode = obtainCorpCode();
        U user;
        I id = null;
        List<GrantedAuthority> authorities;
        try {
            user = loadUserByUsernameAndCorpCode(username, corpCode);
            if (user == null) {
                throw new UsernameNotFoundException("Not found user " + username);
            }
            if (user instanceof Entity) {
                id = ((Entity<I>) user).getId();
            }
            // Materialize (possibly lazy) authorities while the EntityManager is open.
            Collection<? extends GrantedAuthority> authorityCollection = user.getAuthorities();
            authorities = Lists.newArrayList(authorityCollection);
        } finally {
            // Always unbind and close, even when the lookup throws or the user
            // is missing; the original code leaked the EntityManager and left
            // it bound to this (pooled) thread on those paths.
            TransactionSynchronizationManager.unbindResource(entityManagerFactory);
            EntityManagerFactoryUtils.closeEntityManager(entityManager);
        }
        if (Objects.equals(properties.getOwnerCorp(), corpCode)) {
            // Owner-corp users holding the admin role are promoted to super admin.
            if (authorities.stream().anyMatch(authority -> Objects.equals(authority.getAuthority(), Authority.ROLE_ADMIN))) {
                authorities.add(Authority.SUPER_ADMIN);
            }
        }
        return new CustomUserDetails<>(corpCode, authorities.contains(Authority.SUPER_ADMIN), id, user.getUsername(), user.getPassword(), user.isEnabled(), user.isAccountNonExpired(), user.isCredentialsNonExpired(), user.isAccountNonLocked(),
                authorities);
    }

    @Override
    public Optional<U> getCurrentAuditor() {
        // Resolves the authenticated principal back to its repository entity.
        Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
        if (authentication == null) {
            return Optional.empty();
        }
        Object principal = authentication.getPrincipal();
        if (principal instanceof CustomUserDetails) {
            return Optional.of(loadRepoUserDetails(((CustomUserDetails<I>) principal).getId()));
        }
        return Optional.empty();
    }

    /** The corp (tenant) code of the current request. */
    protected String obtainCorpCode() {
        return CorpCodeHolder.obtainCorpCode();
    }
}
<file_sep># smartnet
SmartNet
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-01T21:44:55.234+08:00
*/
package jken;
import org.junit.Test;
// Spring context bootstrapping is disabled; re-enable the annotations below
// to run this as a full integration test against a random port.
//@RunWith(SpringRunner.class)
//@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
public class CommonTest {
// Placeholder test; intentionally empty.
@Test
public void test() {
}
}
<file_sep>package jken.module.core.multipart;
import com.google.common.base.Joiner;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestPart;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.multipart.MultipartFile;
import java.util.Arrays;
import java.util.stream.Collectors;
/**
 * Multipart upload endpoints. Each handler stores every uploaded part via
 * UploadService and returns the results joined with commas.
 */
@Controller
@RequestMapping("/upload")
public class UploadEndpoint {
@Autowired
private UploadService uploadService;
// Stores parts via uploadService.uploadPublic ("public" area per the service's naming).
@RequestMapping(value = "/public", method = RequestMethod.POST)
@ResponseBody
public String uploadPublic(@RequestPart("file") MultipartFile[] parts) {
return Joiner.on(",").join(Arrays.stream(parts).map(uploadService::uploadPublic).collect(Collectors.toList()));
}
// Stores parts via uploadService.uploadPrivate ("private" area per the service's naming).
@RequestMapping(value = "/private", method = RequestMethod.POST)
@ResponseBody
public String uploadPrivate(@RequestPart("file") MultipartFile[] parts) {
return Joiner.on(",").join(Arrays.stream(parts).map(uploadService::uploadPrivate).collect(Collectors.toList()));
}
}
<file_sep>package jken.module.core.support.thymeleaf;
import jken.module.core.entity.DictItem;
import jken.module.core.service.DictService;
import jken.support.thymeleaf.ModuleExpressionObject;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.List;
/**
 * Thymeleaf expression object registered under the name "dict", exposing
 * dictionary lookups to templates.
 */
@ModuleExpressionObject(objectName = "dict")
public class DictExpressionObject {
@Autowired
private DictService dictService;
// Returns the items of the dictionary identified by the given code.
public List<DictItem> items(String code) {
return dictService.getItemsByCode(code);
}
}
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-03T20:13:33.753+08:00
*/
package jken.support.mvc;
/**
 * Minimal envelope for returning an arbitrary payload as {"data": ...}
 * from MVC endpoints.
 */
public class DataWrap {

    private Object data;

    /** Creates an empty wrapper; the payload stays null until set. */
    public DataWrap() {
    }

    /** Creates a wrapper around the given payload. */
    public DataWrap(Object data) {
        this.data = data;
    }

    /** Static factory, convenient for one-line controller returns. */
    public static DataWrap of(Object data) {
        DataWrap wrap = new DataWrap();
        wrap.setData(data);
        return wrap;
    }

    public Object getData() {
        return data;
    }

    public void setData(Object data) {
        this.data = data;
    }
}
<file_sep><?xml version="1.0" encoding="UTF-8"?>
<!--
~ Copyright (c) 2020.
~ @Link: http://jken.site
~ @Author: <NAME>
~ @LastModified: 2020-02-04T15:00:37.474+08:00
-->
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://maven.apache.org/POM/4.0.0"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>jken</groupId>
<artifactId>jken-admin</artifactId>
<version>1.0</version>
<modules>
<module>jken-common</module>
<module>jken-module-core</module>
<module>jken-webapp</module>
</modules>
<packaging>pom</packaging>
<name>Jken Admin</name>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<java.version>8</java.version>
<servlet-api.version>4.0.3</servlet-api.version>
<springboot.version>2.2.5.RELEASE</springboot.version>
<thymeleaf-springsecurity5.version>3.0.4.RELEASE</thymeleaf-springsecurity5.version>
<xstream.version>1.4.11.1</xstream.version>
<querydsl.version>4.2.2</querydsl.version>
<wechat-sdk.version>3.7.0</wechat-sdk.version>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-dependencies</artifactId>
<version>${springboot.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
</dependencies>
</dependencyManagement>
<build>
<resources>
<resource>
<directory>src/main/resources</directory>
<filtering>true</filtering>
<excludes>
<exclude>**/*.java</exclude>
</excludes>
</resource>
<resource>
<directory>src/main/java</directory>
<excludes>
<exclude>**/*.java</exclude>
</excludes>
</resource>
</resources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
<version>2.7</version>
<configuration>
<delimiters>
<delimiter>@</delimiter>
</delimiters>
<useDefaultDelimiters>false</useDefaultDelimiters>
</configuration>
<dependencies>
<dependency>
<groupId>org.apache.maven.shared</groupId>
<artifactId>maven-filtering</artifactId>
<version>1.3</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.1</version>
<configuration>
<skip>true</skip>
<source>${java.version}</source>
<target>${java.version}</target>
<encoding>${project.build.sourceEncoding}</encoding>
<compilerVersion>${java.version}</compilerVersion>
<verbose>true</verbose>
<optimize>true</optimize>
</configuration>
</plugin>
</plugins>
</build>
<developers>
<developer>
<id>ken</id>
<name>孔祥溪</name>
<email><EMAIL></email>
</developer>
</developers>
</project><file_sep>package jken.module.core.support.exception;
import org.springframework.boot.web.server.ErrorPage;
import org.springframework.boot.web.server.ErrorPageRegistrar;
import org.springframework.boot.web.server.ErrorPageRegistry;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.HttpStatus;
/**
 * Maps common HTTP error statuses (and the generic error case) to the
 * application's error views.
 */
@Configuration
public class ExceptionConfig {

    /**
     * Registers the error pages with the embedded servlet container. The
     * registrar is expressed as a lambda over the functional interface.
     */
    @Bean
    public ErrorPageRegistrar errorPageRegistrar() {
        return registry -> registry.addErrorPages(
                new ErrorPage(HttpStatus.NOT_FOUND, "/error/404"),
                new ErrorPage(HttpStatus.FORBIDDEN, "/error/403"),
                new ErrorPage(HttpStatus.INTERNAL_SERVER_ERROR, "/error/500"),
                new ErrorPage("/error")
        );
    }
}
<file_sep>/*
* Copyright (c) 2020.
* @Link: http://jken.site
* @Author: <NAME>
* @LastModified: 2020-02-01T20:59:46.446+08:00
*/
package jken.support.data;
import com.google.common.base.Objects;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
public class TreeHelper {

    /**
     * Convenience overload; the explicit root argument is ignored and the
     * forest is rebuilt purely from {@code all}.
     */
    public static <T extends Hierarchical<T>> List<T> toTree(T root, List<T> all) {
        return toTree(all);
    }

    /**
     * Links every node in {@code all} to its children (each child list sorted
     * by sort number) and returns the sorted list of roots — nodes that are
     * not the child of any other node in the list.
     */
    public static <T extends Hierarchical<T>> List<T> toTree(List<T> all) {
        boolean[] claimedAsChild = new boolean[all.size()];
        for (int parentIdx = 0; parentIdx < all.size(); parentIdx++) {
            T candidateParent = all.get(parentIdx);
            List<T> children = new ArrayList<>();
            for (int childIdx = 0; childIdx < all.size(); childIdx++) {
                if (childIdx == parentIdx) {
                    continue;
                }
                T candidateChild = all.get(childIdx);
                // Null-safe equality (Guava): matches the candidate parent.
                if (Objects.equal(candidateChild.getParent(), candidateParent)) {
                    children.add(candidateChild);
                    claimedAsChild[childIdx] = true;
                }
            }
            if (!children.isEmpty()) {
                Collections.sort(children, SortNoComparator.COMPARATOR);
            }
            candidateParent.setChildren(children);
        }
        List<T> roots = new ArrayList<>();
        for (int i = 0; i < claimedAsChild.length; i++) {
            if (!claimedAsChild[i]) {
                roots.add(all.get(i));
            }
        }
        if (!roots.isEmpty()) {
            Collections.sort(roots, SortNoComparator.COMPARATOR);
        }
        return roots;
    }
}
| ba92daac2d8ca0179c0a556cfbf1f10e46c7a289 | [
"Markdown",
"Java",
"Maven POM"
] | 34 | Java | jken-site/smartnet | 428e55feaffae6b5da582119a7a5e269c7d252c2 | 144cd2e7e2189fced5a3023dddbabad2ab5f27ab |
refs/heads/master | <repo_name>VasilioRuzanni/framework-comparison-article-hyperapp<file_sep>/README.md
# hyperapp example app
A `hyperapp`-based example app. This repo and its [live demo](https://vasilioruzanni-framework-comparison-article-hyperapp.glitch.me/) are published as part of the corresponding blog article at https://www.toptal.com/javascript/whats-better-than-vue-js-react-angular
Visit https://www.toptal.com/blog and subscribe to our newsletter to read great posts!
* * *
## Quickstart
1) First, install everything with `yarn`:
yarn
2) Then start the app (with [parcel](https://parceljs.org/)):
yarn start
3) There's no step 3. Open up a browser at [`localhost:1234`](http://localhost:1234).
<file_sep>/src/index.jsx
import { h, app } from 'hyperapp';
import { Http } from 'hyperapp-fx';
import hyperappLogo from '../public/hyperapp-2-logo-sign.svg';
// NOTE: Once the package is published, it would be imported with
// import { preventDefault } from '@hyperapp/events';
const preventDefault = [
function(_, props, event) {
if (props == null || props === true) event.preventDefault();
}
];
function fetchPostList(search) {
return Http({
url: `https://codingthat-quick-json-back-end-2.glitch.me/posts?q=${search}`,
action: (state, responseData) => ({
...state,
posts: responseData,
isLoading: false
})
});
}
function getTargetValue(event) {
return event.target.value;
}
function inputChange(state, search) {
return { ...state, search };
}
function formSubmit(state) {
return [
{ ...state, isLoading: true },
preventDefault,
fetchPostList(state.search)
];
}
function init() {
return [
{
isLoading: true,
search: 'engineering',
posts: []
},
fetchPostList('engineering')
];
}
const PostList = props => (
<div className="post-list">
{props.posts.map(post => (
<a className="post-list-item" href={post.url}>
{post.title}
</a>
))}
</div>
);
const NoContentPlaceholder = ({ message, note }) => (
<div className="list-empty-placeholder">
<div className="list-empty-placeholder-message">{message}</div>
{note && <div className="list-empty-placeholder-note">{note}</div>}
</div>
);
app({
init,
view: state => (
<div className="layout">
<header className="app-header">
<div className="layout-container">
<div className="app-header-content">
<div className="header-image">
<img src={hyperappLogo} />
</div>
<form
className="search-form"
noValidate={true}
onSubmit={formSubmit}
>
<input
type="text"
className="search-input"
value={state.search}
oninput={[inputChange, getTargetValue]}
/>
<button
type="submit"
className="search-btn"
disabled={state.isLoading}
>
{state.isLoading ? 'Searching...' : 'Search'}
</button>
</form>
</div>
</div>
</header>
<div className="layout-container">
<article className="app-main">
{state.isLoading ? (
<div className="list-loader">
<div className="spinner" />
</div>
) : state.posts.length ? (
<PostList posts={state.posts} />
) : (
<NoContentPlaceholder
message="Nothing found :("
note="Don't give up! Type in something different."
/>
)}
</article>
<footer className="app-footer">
Made with ❤️and{' '}
<a href="https://github.com/jorgebucaran/hyperapp">hyperapp</a>
</footer>
</div>
</div>
),
node: document.getElementById('app')
});
| bfc2d4e28baa8276293c3b9cc7a56b78f079dd39 | [
"Markdown",
"JavaScript"
] | 2 | Markdown | VasilioRuzanni/framework-comparison-article-hyperapp | b7186d2e59fb3be488c37320157cc6acd86474de | c5412c95fb174d6e42e5f72338f967cd43f9429d |
refs/heads/main | <file_sep># Clase3
Tarea Clase3
<file_sep>var miRouter = require('express').Router()
var suma_service = require('../services/suma_service');
miRouter.get('/', (req, res) => {
res.send('Deme valores para sumar')
});
miRouter.get('/:vlr1/:vlr2', (req, res) => {
var vlr1 = req.params.vlr1;
var vlr2 = req.params.vlr2;
var resultado = suma_service.obtenerSumaDeValores(vlr1, vlr2);
//res.send(JSON.stringify(resultado.toString()));
res.send(resultado.toString());
});
module.exports = miRouter<file_sep>const suma_infrastruture = require('../infrastructure/suma_infrastructure')
function obtenerSumaDeValores(vlr1, vlr2) {
if (vlr1 !== null && vlr2 !==null) {
return suma_infrastruture.obtenerSumaDeValores(vlr1, vlr2);
} else {
return { error: 'No me dio valores correctos' }
}
}
module.exports = {
obtenerSumaDeValores
}
| 9861650a781f807bde874f2b8641b18ce49c4fc1 | [
"Markdown",
"JavaScript"
] | 3 | Markdown | kike2068/Clase3 | 0678e9f5c9b8919ebf16b35236776a0a26c90584 | cb46079af24f7aa73f4295d6e70f1d889b8584b6 |
refs/heads/master | <file_sep># Title : TODO
# Objective : TODO
# Created by: chi
# Created on: 2/22/20
# Load the IBM stock dataset (Date and StockPrice columns, per the calls below).
IBM = read.csv("unit-1/datasets/IBMStock.csv")
str(IBM)
# Dates are read as strings; convert to Date objects (month/day/2-digit year).
IBM$Date = as.Date(IBM$Date, "%m/%d/%y")
str(IBM)
# Time-series line plot of the stock price.
plot(IBM$Date, IBM$StockPrice, type = "l", col = "red")
abline(h = 1, v = 0)<file_sep># ocw-15.071-spring_2017
AA
AAA
AAA
A
<file_sep>poll = read.csv("unit-1/datasets/AnonymityPoll.csv")
# Exploratory analysis of the AnonymityPoll dataset (loaded above as `poll`).

# Overall structure and summaries of the poll.
summary(poll)
str(poll)
# Smartphone ownership: summary shows NA counts; table drops NAs.
summary(poll$Smartphone)
str(poll$Smartphone)
table(poll$Smartphone)
# Cross-tabulate state against region membership flags.
table(poll$State, poll$Region == 'Midwest')
table(poll$State, poll$Region == 'South')
# Interviewees from the South, and the state with the most of them.
SouthInterview = subset(poll, poll$Region == 'South')
sort(table(SouthInterview$State))
which.max(table(SouthInterview$State))
# Relationship between internet use and smartphone ownership.
table(poll$Internet.Use, poll$Smartphone)
summary(poll$Internet.Use)
summary(poll$Smartphone)
# Restrict to respondents who use the internet or own a smartphone.
limited = subset(poll, poll$Internet.Use == 1 | poll$Smartphone == 1)
summary(limited)
# Average amount of personal info on the internet (NAs removed).
mean(poll$Info.On.Internet, na.rm = T)
str(poll$Info.On.Internet)
table(poll$Info.On.Internet)
# Attitudes among the limited group.
table(limited$Worry.About.Info)
summary(limited$Worry.About.Info)
table(limited$Anonymity.Possible)
# Age distribution and its relation to online information;
# jitter() separates overplotted integer points.
hist(limited$Age)
plot(limited$Age, limited$Info.On.Internet)
max(table(limited$Age, limited$Info.On.Internet))
plot(jitter(limited$Age), jitter(limited$Info.On.Internet))
# Proportion who tried masking identity, split by smartphone ownership.
tapply(limited$Tried.Masking.Identity, limited$Smartphone, mean, na.rm = T)
| a37185ca26a2ce6f7aeb1ef072686979083cb685 | [
"Markdown",
"R"
] | 3 | R | lchi91/ocw-15.071-spring_2017 | 2a37a147caef41fa9990b5bc771e367659a273b9 | e3b3bee54a5b9eade64d6af1a8127ad243cc4920 |
refs/heads/master | <repo_name>gchevanel/rocketelevator<file_sep>/Rocket/HTML_BS4/assets/js/Quote.js
$(document).ready(function() {
$("#sum, #naparts, #nfloors, #nbasements, #nbusinesses, #nparkings, #ncages, #noccperfloor, #ndha, .buttoncenter").hide();
//hide everything that is not input for residential
// Every quote-form field group; all are hidden before the group for the
// chosen building type is revealed.
var allFields = "#naparts, #nfloors, #nbasements, #nbusinesses, #nparkings, #ncages, #noccperfloor, #ndha, .buttoncenter";
// Field groups to reveal for each building-type button.
var typeFields = {
    "#btnresidential": "#sum, #naparts, #nfloors, #nbasements, .buttoncenter",
    "#btncommercial": "#sum, #nbusinesses, #nfloors, #nbasements, #nparkings, #ncages, .buttoncenter",
    "#btncorporate": "#sum, #nbusinesses, #nfloors, #nbasements, #nparkings, #noccperfloor, .buttoncenter",
    "#btnhybrid": "#sum, #nbusinesses, #nfloors, #nbasements, #nparkings, #noccperfloor, #ndha, .buttoncenter"
};
var typeButtons = Object.keys(typeFields);
// One click handler per building-type button: mark the clicked button as
// selected, hide every field group, then reveal the relevant one.
// (Replaces four near-identical copy-pasted handlers.)
typeButtons.forEach(function (selector) {
    $(selector).on('click', function () {
        typeButtons.forEach(function (other) {
            $(other).removeClass('type_clicked');
        });
        $(selector).addClass('type_clicked');
        $(allFields).hide();
        $(typeFields[selector]).show(1000);
    });
});
// Base unit price per service plan (button ids match the plan names).
var planPrices = { standard: 7565, premium: 12345, excelium: 15400 };
// One click handler per plan button: gather the current form inputs and
// recompute the displayed quote for that plan.
Object.keys(planPrices).forEach(function (plan) {
    $("#" + plan).on('click', function () {
        compute_price(planPrices[plan], collect_vars(), plan);
    });
});
// Live recalculation (standard plan) whenever a residential input changes.
$(".residential1 :input").on("change keyup", function () {
    compute_price(7565, collect_vars(), 'standard');
});
/**
 * Computes the elevator quote for the selected building type and service
 * plan, then writes the breakdown (cages, subtotal, fees, total) into the
 * summary panel.
 *
 * @param price          Unit price per cage for the chosen plan.
 * @param collected_vars Form values gathered by collect_vars().
 * @param calc_type      Plan name: 'standard', 'premium' or 'excelium'.
 */
function compute_price(price, collected_vars, calc_type) {
    // Installation-fee rate per plan. An unknown plan yields undefined,
    // which propagates NaN through the arithmetic and lands in the
    // zero-out branch below — same outcome as the original code.
    var feeRates = { standard: 0.10, premium: 0.13, excelium: 0.16 };
    var cages = estimate_cages(collected_vars);
    var subtotal = price * cages;
    var fees = subtotal * feeRates[calc_type];
    var total = subtotal + fees;
    // Incomplete form input produces NaN; display zeros instead.
    // (The original called isNaN(total, fees, ...) but isNaN takes a single
    // argument, so only `total` was ever checked; since total is derived
    // from all the other values, checking it alone is sufficient.)
    if (isNaN(total)) {
        total = 0;
        fees = 0;
        subtotal = 0;
        cages = 0;
    }
    document.getElementById("cage1").style.display = "block";
    document.getElementById("cage2").innerHTML = "Numbers of cages : " + cages;
    document.getElementById("totsfees1").style.display = "block";
    document.getElementById("totsfees2").innerHTML = "Price without fees : " + subtotal + "$";
    document.getElementById("totalPrice1").style.display = "block";
    document.getElementById("fees").innerHTML = "Installation fees : " + fees + "$";
    document.getElementById("totalPrice").style.display = "block";
    document.getElementById("total").innerHTML = "Total : " + total + "$";
}

/**
 * Number of elevator cages required for the building described by the
 * collected form values. Helper for compute_price().
 *
 * @param vars Object returned by collect_vars().
 * @returns Cage count (may be NaN when required inputs are missing).
 */
function estimate_cages(vars) {
    var type = vars.business_type.toLowerCase();
    if (type === 'residential') {
        return vars.cagesPerFloors * vars.nbcolumns;
    }
    if (type === 'commercial') {
        return vars.nbcages;
    }
    // Corporate and hybrid share the occupancy-based formula. (The original
    // condition `type === 'corporate' || 'hybrid'` was always truthy, so
    // this branch already caught every remaining case.)
    var occPerThousand = Math.ceil((vars.nbocperfloors * (vars.nbfloors + vars.nbbassements)) / 1000);
    var cagesPerColumn = Math.ceil(occPerThousand / vars.nbcolumns);
    return Math.ceil(cagesPerColumn * vars.nbcolumns);
}
/**
 * Reads every quote-form field and derives the per-building figures the
 * price calculation needs. Fields that are hidden for the current building
 * type simply parse to NaN and are ignored downstream.
 *
 * @returns Object with the raw field values plus the derived
 *          apartments-per-floor, cages-per-floor and column counts.
 */
function collect_vars() {
    // The selected building-type button carries the `type_clicked` class.
    var business_type = $('.type_clicked').html();
    // Explicit radix 10 avoids legacy octal parsing surprises.
    var nbapartments = parseInt(document.getElementById("numbers_apartments").value, 10);
    var nbfloors = parseInt(document.getElementById("numbers_floors").value, 10);
    var nbbassements = parseInt(document.getElementById("numbers_basements").value, 10);
    var nbbusinesses = parseInt(document.getElementById("numbers_businesses").value, 10);
    var nbparkings = parseInt(document.getElementById("numbers_parkings").value, 10);
    var nbcages = parseInt(document.getElementById("numbers_cages").value, 10);
    var nbocperfloors = parseInt(document.getElementById("numbers_occupantsPerFloors").value, 10);
    var nbhouractivity = parseInt(document.getElementById("numbers_hourActivity").value, 10);
    // Derived sizing figures: up to 6 apartments per cage, 20 floors per column.
    var apartPerFloors = Math.ceil(nbapartments / nbfloors);
    var cagesPerFloors = Math.ceil(apartPerFloors / 6);
    var nbcolumns = Math.ceil(nbfloors / 20);
    return {
        business_type: business_type,
        nbapartments: nbapartments,
        nbfloors: nbfloors,
        nbbassements: nbbassements,
        nbbusinesses: nbbusinesses,
        nbparkings: nbparkings,
        nbcages: nbcages,
        nbocperfloors: nbocperfloors,
        nbhouractivity: nbhouractivity,
        apartPerFloors: apartPerFloors,
        cagesPerFloors: cagesPerFloors,
        nbcolumns: nbcolumns
    };
}
// function calcresidential(price, nbapartments, nbfloors) {
// document.getElementById('numbers_apartments').setAttribute('onchange','calcresidential();');
// document.getElementById('numbers_floors').setAttribute('onchange','calcresidential();');
// document.getElementById('numbers_basements').setAttribute('onchange','calcresidential();');
// nbapartments = parseInt(document.getElementById("numbers_apartments").value);
// console.log('patate' + nbapartments)
// nbfloors = parseInt(document.getElementById("numbers_floors").value);
// nbbassements = parseInt(document.getElementById("numbers_basements").value);
// apartPerFloors = nbapartments / nbfloors;
// cagesPerFloors = Math.ceil(apartPerFloors / 6);
// nbcolumns = Math.ceil(nbfloors /20);
// var cageunit = cagesPerFloors;
// var total1 = cagesPerFloors * nbcolumns * price;
// var total2 = total1 *0.1;
// var total = total1 + total2;
// document.getElementById("totalPrice").style.display = "block";
// document.getElementById("total").innerHTML = total + "$";
// }
// // Montrer le prix
// document.getElementById("totalPrice").style.display = "block";
// document.getElementById("total").innerHTML = `s`;
// console.log("lol");
// };
// function total123(){
// var residentialprice
// }
// function calculateTotal(){
// var residentialprice = nbapartments + nbfloors + nbbassements;
// var divTot = document.getElementById("totalPrice");
// divTot.style.display='block';
// divTot.innerHTML = "$"+residentialprice;
// }
// id="numbers_apartments">
// id="numbers_floors">
// id="numbers_basements">
// id="numbers_businesses">
// id="numbers_parkings">
// id="numbers_cages">
// id="numbers_occupantsPerFloors">
// id="numbers_hourActivity">
});
| 6abdfeb163ed369800d0f133900083f27c8ab6f7 | [
"JavaScript"
] | 1 | JavaScript | gchevanel/rocketelevator | 81c0e348783fe8739113e2865c66fbf573d37a4c | b3bda5df7071f8f22b0c3a2ba3269cf303170587 |
refs/heads/master | <file_sep>/* File: Controller.java - March 2014 */
//package raw_sim;
import java.util.*;
/**
* This is the contoller class for the raw hearts simulator for my COSC490
* project. The program simulates an environment of the card game hearts
* and passes this information to the player modules. This program itself
* does not do any of the algorithms, it merely sets up the play space.
*
* @author <NAME>
*/
public class Controller {
ArrayList<Card> currentTrick;
private boolean heartsBroken;
private int[] scores = new int[4];
int startPlayer;
public Player[] players = new Player[4];
private boolean trickInPlay = false;
private int currentSuit;
private State state;
/**
* The main function, sets up the player space / hearts environment.
*
*/
public Controller (State state, ArrayList<Player> playerTypes) {
this.state = state;
// Sets up the scores for each player
for (int x = 0; x < scores.length; x++) {
scores[x] = 0;
}
// Default value for the current suit
currentSuit = -1;
// Determines whether or not hearts is broken
if (state != null) {
heartsBroken = state.isHeartsBroken();
} else {
heartsBroken = false;
}
// Generates and assigns hands to the players based on what has already been played
ArrayList<ArrayList> hands = GenerateHands(state);
// Creates the players
players[0] = playerTypes.get(0);
players[1] = playerTypes.get(1);
players[2] = playerTypes.get(2);
players[3] = playerTypes.get(3);
players[0].setUp(0, hands.get(0), this);
players[1].setUp(1, hands.get(1), this);
players[2].setUp(2, hands.get(2), this);
players[3].setUp(3, hands.get(3), this);
// Finds the player with the two of clubs, and records that for start player,
// unless otherwise specified in the state.
startPlayer = 0;
boolean foundStart = false; // Flag that shows whether the 2 of clubs was present.
if (state == null || state.getStartPlayer() == -1) {
for (int i = 0; i < 4; i++) {
if (players[i].getStart() == true) {
startPlayer = i;
foundStart = true;
}
}
} else {
startPlayer = state.getStartPlayer();
foundStart = true;
}
// If we could not find a start player
if (!foundStart) {
Random rand = new Random();
startPlayer = rand.nextInt(4);
}
// Initiates the trick holding array
currentTrick = new ArrayList<Card>(4);
}
public static ArrayList<ArrayList> GenerateHands(State state) {
ArrayList<Card> deck = new ArrayList<Card>();
if (state != null) {
deck = state.getRemainingCards();
} else {
// Generate an ArrayList of 52 cards
int suit = 0, rank = 0;
while (suit < 4) {
while (rank < 13) {
Card tempCard = new Card(suit, rank);
deck.add(tempCard);
rank++;
}
suit++;
rank = 0;
}
}
// Randomises the array
Random rand = new Random();
int randomNum;
Card tempCard;
// For each index in the deck starting at the last and decrementing
for(int i = deck.size() - 1; i > 0; i--) {
// Pick a card from the remainding deck
randomNum = rand.nextInt(i + 1);
// Swap the card at the end for the random number card
tempCard = deck.get(i);
deck.set(i, deck.get(randomNum));
deck.set(randomNum, tempCard);
}
// Assign the deck to the players
ArrayList<ArrayList> splitDeck = new ArrayList<ArrayList>();
int handSize = deck.size() / 4;
for (int i = 0; i < 4; i++) {
ArrayList<Card> temp = new ArrayList<Card>();
for (int z = 0; z < handSize; z++) {
temp.add(deck.get(i * handSize + z));
}
splitDeck.add(temp);
}
return splitDeck;
}
private static int GetWinningCardIndex(ArrayList<Card> trick, int startPlayer) {
// Gets the winning card index
Card candidate = trick.get(startPlayer);
int trumpSuit = trick.get(startPlayer).getSuit();
// Compares cards to see which one wins
for (Card i : trick) {
if (i.getSuit() == trumpSuit && i.getRank() > candidate.getRank()) {
candidate = i;
}
}
//System.out.println("WINNER = " + candidate);
return trick.indexOf(candidate);
}
// Returns an array of cards which a player could play
public ArrayList<Card> getLegalHands (ArrayList<Card> currentHand) {
ArrayList<Card> legalPlays = new ArrayList<Card>();
// If current hand is of size 1, return the current hand
if (currentHand.size() == 1) return currentHand;
// If a suit is already in play
if (trickInPlay) {
// Scan through the list of cards in the current hand, adding any of the suit to a different hand
for (Card i : currentHand) {
if (i.getSuit() == currentSuit) {
legalPlays.add(i);
}
}
// Return the legal plays if not empty, else return entire hand
if (legalPlays.size() > 0) {
return legalPlays;
} else {
return currentHand;
}
}
// If there are no hands in play
// If hearts broken
if (heartsBroken) {
return currentHand;
}
// Otherwise return the current hand except for all the hearts
ArrayList<Card> heartsCaught = new ArrayList<Card>();
for (Card i : currentHand) {
if (i.getSuit() == 0) {
heartsCaught.add(i);
}
}
// If the size of the hearts caught equals the hand, it means we can only play hearts, we are forced to break hearts early
if (heartsCaught.size() == currentHand.size()) return currentHand;
//System.out.println("HEARTS CAUGHT = " + heartsCaught);
for (Card i : heartsCaught) {
currentHand.remove(i);
}
//System.out.println("LEGAL PLAYS = " + currentHand);
return currentHand;
}
/* Plays a certain number of games from beginning to end, and records
* the scores in the State class it returns.
*/
public State playGames (int gameNumber, int reporting, int handSize) {
int[] scores = new int[4];
for (int i = 0; i < gameNumber; i++) {
// Plays the first games
State tempState = play(handSize, reporting);
// Assign scores
for (int x = 0; x < 4; x++) {
scores[x] += tempState.getScores()[x];
}
// Generates new hands
ArrayList<ArrayList> hands = GenerateHands(state);
// Assigns new hands to the players
players[0].setUp(0, hands.get(0), this);
players[1].setUp(1, hands.get(1), this);
players[2].setUp(2, hands.get(2), this);
players[3].setUp(3, hands.get(3), this);
//System.out.println("END OF GAME " + i + "\n\n\n\n");
}
return new State(scores, new int[13]);
}
/* Initiates the playing of a round. Returns a State class, which is the state of the game
* world following the play of the given number of tricks.
*/
public State play (int tricks, int reporting) {
// Variables for recording game info
int[] scores = new int[4];
int[] trickRecord = new int[13];
// Plays out the tricks specified
for (int i = 0; i < tricks; i++) {
trickInPlay = false;
for (int x = 0; x < 4; x++) {
currentTrick.add(new Card(0, 0));
}
if(reporting != 0) System.out.println("STARTING TRICK NUMBER " + i + " --------------");
//System.out.println(players[0] + "\n" +players[1] + "\n" +players[2] + "\n" +players[3]);
// Each player plays a card
currentTrick.set(startPlayer, players[startPlayer].getPlay(state));
trickInPlay = true;
currentSuit = currentTrick.get(startPlayer).getSuit();
state.setCurrentTrick(currentTrick);
currentTrick.set(players[(startPlayer+1)%4].getPosition(), players[(startPlayer+1)%4].getPlay(state));
state.setCurrentTrick(currentTrick);
currentTrick.set(players[(startPlayer+2)%4].getPosition(), players[(startPlayer+2)%4].getPlay(state));
state.setCurrentTrick(currentTrick);
currentTrick.set(players[(startPlayer+3)%4].getPosition(), players[(startPlayer+3)%4].getPlay(state));
state.setCurrentTrick(currentTrick);
// If a heart is in play that is not the first card
if (!heartsBroken) {
for (Card z : currentTrick) {
if (z.getSuit() == 0) {
heartsBroken = true;
if (reporting != 0) System.out.println("HEARTS BROKEN");
}
}
}
// Calculate who wins the trick
int oldStartPlayer = startPlayer;
startPlayer = GetWinningCardIndex(currentTrick, oldStartPlayer);
if (reporting != 0) System.out.println("Winner = " + startPlayer);
// Record results of the trick
// Calculate how many hearts were won
int scoreAddition = 0;
for (int x = 0; x < 4; x++) {
if (currentTrick.get(x).getSuit() == 0) scoreAddition++;
if (currentTrick.get(x).getSuit() == 1 && currentTrick.get(x).getRank() == 10) scoreAddition += 13; // Queen of Spades
}
if (reporting != 0) System.out.println("adding a score of " + scoreAddition);
scores[startPlayer] += scoreAddition;
trickRecord[i] = startPlayer;
if (reporting != 0) System.out.println(currentTrick + "\n");
currentTrick.clear();
state.setCurrentTrick(new ArrayList<Card>());
}
// Creates a new State representing the game state as it is after the tricks were played out
State currentState = new State(scores, trickRecord);
return currentState;
}
public String toString() {
for (Player i : players) {
System.out.println(i);
}
return "";
}
}
<file_sep>/* File: CowardPlayer.java - March 2014 */
import java.util.*;

/**
 * This is the CowardPlayer class.
 *
 * The play in this class will always be the lowest card possible. This
 * player will aim to never win a trick and always play the lowest card
 * in their hand that follows the rules.
 *
 * NOTE(review): getLead() below actually plays the FIRST card in hand, not
 * the lowest — confirm whether that method is still used.
 *
 * @author <NAME>
 */
public class CowardPlayer implements Player {

    // This player's current hand (private copy of the dealt cards).
    private ArrayList<Card> hand = new ArrayList<Card>();
    // Seat index (0-3) assigned by the controller.
    private int position;
    // Controller used to query which plays are legal.
    private Controller controller;

    public CowardPlayer () {
    }

    /**
     * Stores the seat position and controller, and copies the dealt hand
     * into this player's own list.
     */
    public void setUp (int position, ArrayList<Card> hand, Controller controller) {
        this.position = position;
        this.controller = controller;
        // Sets up the ArrayList of Cards
        for (Card i : hand) {
            this.hand.add(i);
        }
    }

    /** Debug description: seat, hand size, and each card on its own line. */
    public String toString() {
        String info = "";
        info += "I am COWARD player " + position + " and my hand is " + hand.size() + "\n\n";
        int i = 0;
        while (i < hand.size()) {
            info += hand.get(i).toString() + "\n";
            i++;
        }
        return info;
    }

    /**
     * Plays and removes a card to lead a trick.
     * NOTE(review): despite the comments, this plays the first card in the
     * hand, with no check of rank or legality.
     */
    public Card getLead() {
        // Find out what is legal to play
        // Play a card randomly from this subset
        Card tempCard = hand.get(0);
        hand.remove(0);
        return tempCard;
    }

    /**
     * Plays the lowest-ranked legal card (ranks compared across suits),
     * removes it from the hand, and returns it.
     */
    public Card getPlay(State state) {
        // Have to deep copy the hand for this to work, because
        // Controller.getLegalHands may remove cards from the list it is
        // given. (This copies the list structure; the Card objects
        // themselves are shared.)
        ArrayList<Card> tempHand = new ArrayList<Card>();
        for (Card i : hand) {
            tempHand.add(i);
        }
        // Find out what is legal to play
        ArrayList<Card> legalHands = controller.getLegalHands(tempHand);
        // Play the LOWEST CARD in the group (first-found wins rank ties).
        Card tempCard = legalHands.get(0);
        for (Card i : legalHands) {
            if (i.getRank() < tempCard.getRank()) {
                tempCard = i;
            }
        }
        // Find the card to remove
        int index = 0;
        for (int i = 0; i < hand.size(); i++) {
            if (hand.get(i).match(tempCard)) index = i;
        }
        hand.remove(index);
        return tempCard;
    }

    /** True when this hand holds the two of clubs (suit 3, rank 0). */
    public boolean getStart() {
        // Finds out if we have the 2 of clubs and therefore can start play
        for (Card i : hand) {
            if (i.getRank() == 0 && i.getSuit() == 3) {
                return true;
            }
        }
        return false;
    }

    public int getPosition() {
        return position;
    }

    public ArrayList<Card> getHand() {
        return hand;
    }
}
<file_sep>public class Sandbox {
/**
 * Scratch/benchmark entry point: busy-waits for roughly ten seconds,
 * printing the elapsed time along the way.
 */
public static void main (String[]args) {
int count = 0;
long startTime = System.nanoTime();
long endTime = System.nanoTime();
// While there is still time remaining, keep searching
// (System.nanoTime() returns nanoseconds, so dividing the difference
// by 1_000_000_000 does give whole seconds.)
while (((endTime - startTime)/1000000000) < 10) { // Is this seconds?
// NOTE(review): count is never incremented, so this guard is always
// true and the elapsed time is printed on every loop iteration.
if (count % 100000 == 0) {
System.out.println((endTime - startTime) / 1000000000);
}
endTime = System.nanoTime();
}
}
}<file_sep>/* File: Misc.java - March 2014 */
//package raw_sim;

import java.util.*;

/**
 * Static helper routines shared by the hearts simulator: legal-play
 * filtering, deep copies of tricks and deals, and removal of cards that a
 * search-tree path has already played.
 */
public class Misc {

    // Enables debug printing inside RemovePlayedCards.
    private static boolean verbose = false;

    /**
     * Legal plays for a hand given the led (trump) suit: copies of all
     * cards following suit, or the original hand when none can follow.
     * Note the asymmetry: the on-suit list holds fresh Card copies, while
     * the fall-through returns the caller's own list.
     */
    // Based on a hand and a trump suit
    public static ArrayList<Card> GetLegalPlays (ArrayList<Card> hand, int trumpSuit) {
        ArrayList<Card> legal = new ArrayList<Card>();
        for (Card i : hand) {
            if (i.getSuit() == trumpSuit) {
                legal.add(new Card(i.getSuit(), i.getRank()));
            }
        }
        if (legal.size() == 0) return hand;
        return legal;
    }

    /** Count of non-null elements in the list. */
    // Returns how many non null elements are in an arraylist
    public static int RealSize (ArrayList<Card> list) {
        int count = 0;
        for (int i = 0; i < list.size(); i++) {
            if (list.get(i) != null) count++;
        }
        return count;
    }

    /** Deep copy of a trick; null slots are preserved as null. */
    // Returns a deep copy of the array passed
    public static ArrayList<Card> DeepCopyTrick (ArrayList<Card> trick) {
        // if (trick == null) return null;
        ArrayList<Card> copy = new ArrayList<Card>();
        for (Card i : trick) {
            if (i == null) {
                copy.add(null);
            } else {
                copy.add(new Card(i.getSuit(), i.getRank()));
            }
        }
        return copy;
    }

    /** Deep copy of a full deal (list of hands); Cards are cloned. */
    // Returns a deep copy of the array passed
    public static ArrayList<ArrayList<Card>> DeepCopy (ArrayList<ArrayList<Card>> deck) {
        ArrayList<ArrayList<Card>> copy = new ArrayList<ArrayList<Card>>();
        int count = 0;
        for (ArrayList<Card> i : deck) {
            copy.add(new ArrayList<Card>());
            for (Card x : i) {
                copy.get(count).add(new Card(x.getSuit(), x.getRank()));
            }
            count++;
        }
        return copy;
    }

    /**
     * Index of the first card in hand matching cardToRemove, or -1 when
     * absent.
     */
    // If the card is within the hand, it returns the array where it is.
    // Otherwise returns -1.
    public static int RemoveIndex (Card cardToRemove, ArrayList<Card> hand) {
        int count = 0;
        for (Card i : hand) {
            if (i.match(cardToRemove)) return count;
            count++;
        }
        return -1;
    }

    /**
     * Returns a copy of the deal with every card removed that was played
     * along the tree path from the given node back up to (but excluding)
     * the root. Each non-root node's trick holds one card per hand, which
     * is deleted from the matching hand.
     */
    public static ArrayList<ArrayList<Card>> RemovePlayedCards (ArrayList<ArrayList<Card>> deal, MCTSNode node) {
        // Deep copy the deal
        ArrayList<ArrayList<Card>> hands = Misc.DeepCopy(deal);
        //System.out.println("HANDS " + hands.size());
        // WE NEVER REMOVE FROM ROOT NODE!
        //if (node.getParent() == null && (node.getTrick() == null || node.getTrick().size() == 0)) return hands;
        if (node.getParent() == null) return hands;
        // If the node contains null values, it is the root node and therefore will not be removed,
        // since the trick is repeated in the immeidate leaves
        // Set up the game state - remove all cards that have already been played
        MCTSNode currentNode = node;
        int count = 0;
        //while (currentNode != null && (currentNode.getTrick() != null && currentNode.getTrick().size() != 0)) {
        while (currentNode.getParent() != null) {
            if (verbose) System.out.println("Count: " + count + "Node: " + currentNode.getTrick());
            count++;
            // Remove a card from each hand per trick
            for (int i = 0; i < hands.size(); i++) {
                //System.out.println("Hand: " + hands.get(i));
                for (int x = 0; x < hands.get(i).size(); x++) {
                    //System.out.println("TRICK " + currentNode.getTrick() + " i VALUE: " + i);
                    //System.out.println("Comparing: " + hands.get(i).get(x) + " with " + currentNode.getTrick().get(i));
                    if (hands.get(i).get(x).match(currentNode.getTrick().get(i))) {
                        hands.get(i).remove(x);
                        break;
                    }
                }
            }
            currentNode = currentNode.getParent();
            // CHECK TO SEE IF WE HAVE REACHED A ROOT NODE CASE WITH NO / HALF TRICK
            // NOTE(review): this re-check tests `node`, not `currentNode`,
            // and `node` was already checked before the loop — so this line
            // appears to be dead code; was `currentNode` intended?
            if (node.getParent() == null) return hands;
        }
        return hands;
    }
}
<file_sep>/*import javax.swing.JFrame;
public class TreeGUI {
// Creates and displays the main program frame
public static void main (String[]args) {
JFrame frame = new JFrame ("Tree GUI");
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
TreePanel panel = new TreePanel();
frame.getContentPane().add(panel);
frame.pack();
frame.setVisible(true);
}
}*/<file_sep>TODO:
-> Check out the datacom task
-> Get started on it
-> Send email tonight or tomorrow confirming that you have started it and are going to work on it
for the next few weeks
-> Set your testing environment up tonight before you head into work!
-> REPORT
-> While running tests on your programs, do some report stuff...
-> FIRST... Get new document up and running and put everything into order!!!!
-> Then work through from the start?
-> Red for old stuff that has not been gone over
-> Upto section 5 can be completed before you even get any results back
-> Basic diagrams of each implementation working in an example case?
-> 1) Fix up what Michael suggested in your implementations write up.
-> 2) Add a write up for implementation 2
-> 3) Fix your interim report
-> 4) Put everything together into a final report doc
-> 5) Write detailed notes / plans for other sections
-> Specific sections of the report to complete
-> UCT write up
-> Further research questions
-> IMPLEMENTATION ONE
-> IMPLEMENTATION TWO
-> Would adding logic to node expansion be beneficial? Rather than choosing tricks at random,
choose tricks based on optimal play from opponents?
-> Read through paper "an analysis of UCT in multiplayer games" - especially the Hearts section
-------------------------------------------------------------------------------
Current Goals:
-> Improving implementation 2
-> Make it so that the Select method doesn't always choose a leaf node
-> Ensure that the assign children is adding, not overwriting
-> Maybe leave till later? Get other things up and running first, since this algorithm is working well.
-> Implement the BASIC PLAYER
-> Just play highest card possible that will not win the trick?
-> Compare basic player and advanced player
-> Maybe make a bit worse? Performing similar to the Advanced player
Notes:
State records information about the game (e.g. what has been played)
The MCTS will record information about the deal it has chosen, since
this is specific to the MCTS. The methods will be given this information
when called. A deal will be assigned at the start and remove all cards
that have already been played, recorded in the state.
TRICKS RECORDED AS (P1, O1, O2, O3) AND THE STARTPLAYER STORED
Currently expansions are based on a simply random algorithm, the idea being that we randomly
generate possible moves and then test them via random simulation.
-------------------------------------------------------------------------------
List of things to do on the hearts game:
1) Investigate whether there is a bias for order of Hearts players
-> There should be no bias at all, all spots should perform equally well
2) Do some checks on the backpropagation to ensure that the scores are correct.
-> Remeber that a node has its own score as well as the score of the nodes below it.
6) REDO THE APP FOR TESTING PURPOSES
-> Should be testing all values on same hands
-> Should be recording results so you can find SD
-> Could be useful to practice getting these results into excel
-> If you build a prototype results collector, you'll have less work to do later on
7) Test for coefficient and threshold in UCT
-> Talk to Michael about these results
-> Write them up!
8) Tests on how long we should let the MCST develop for? e.g. value of timer
9) Continue to implement DoubleDummyModular
-> Do we need to even do this? Maybe just comparing the different methods of Selection and Simulation are enough?
-> Maybe come back and do it after third implementation?
10) Write up explanation of implementations
-------------------------------------------------------------------------------
SCHEDULE:
8th September - All implementations / coding done
15th September - First draft of final report submitted
22nd September - Second draft of final report submitted
3rd October - Final copy of report submitted
-------------------------------------------------------------------------------
NOTES ON TIMING AND INCONSISTENCY:
The program right now is inconsistent. This can be fixed in a couple of ways.
1) Increase timer for MCTS - this will also ensure UCT is implemented correctly.
-> Each game can take upto 10 seconds, or perhaps even longer.
-> For the current state of the first implementation (14/08), having the timer set to 200,000
will have the cards executing at a rate of 9.37 seconds per game.
2) Increase the amount of runs we use as a sample
-> Should satisfy condition of 2SD/sqrt(n) = 0.2
3) Introduce diminishing returns to the MCTS - e.g. the less nodes it can explore, the less time it has
-------------------------------------------------------------------------------
REPORT NOTES:
Include a paragraph of some general findings / decisions regarding...
-> UCT Parameters
-> Coeefficient
-> Thresehold
-> Number of tests (found with SD)
-> Amount of time given to the MCTS algorithm
Include in future work a discussion on the advantages of card counting / more soft information, since your focus was not on that.
-------------------------------------------------------------------------------
IDEAS FOR SECOND IMPLEMENTATION:
1) Currently, one MCTS is drawn and at each node a simulate play method is completed, each with a different random set of cards.
-> Instead of having one tree, have multiple
-> The tree assigns hands at the start, then traverses
-> Caluclate the best trees as opposed to best nodes
2) Implement the UCT algorithm for the Select method.
3) Improve the DoubleDummy playout with more advanced logic
-> E.g. play highest card unless winning trick
-> Maybe try to get rid of one suit first
4) Add an option for the DoubleDummy simulation to have preset players as the logic
-> Make modular
-------------------------------------------------------------------------------
IDEAS FOR THIRD IMPLEMENTATION:
1) Introduce more hard and soft information
-> e.g. card counting and prediction
-------------------------------------------------------------------------------
SOME NOTES:
-------------------------------------------------------------------------------
IMPLEMENTATION 0: Current improvement over random 3-4%
SELECT METHOD:
Currently, the select method works in a very naive fashion.
For each node, we calculate the average score of that node,
and then we subtract this from 13 to get X. We then add
X + 1 of the node to an array.
Once we have done this for all nodes, we randomly select from
this array. This is a very simple approach for biasing nodes
with a low average score.
EXPANSION METHOD:
Creates child nodes based on the card that can be played at the node.
-> Represents a PLAY, not the whole trick
SIMULATE METHOD:
Upon reaching a node to begin a random simulation...
-> Remove previously played cards from hand
-> Randomly remove the equivalent number of opponents cards from remaining deck
-> Randomly deal hands to the opponents
-> Begin a "DoubleDumy" playout under these conditions
-> DoubleDummy very naive, based on some simple rules
-> Will try avoid trick if Hearts, otherwise win
-> Does not support half made tricks
-> How significant will improvements be on this?
-> Simple algorithm - if no Hearts, play highest card, else lowest
-> Record results
BACKPROPAGATE METHOD:
Basic backprop, adding score and visit count to previous nodes.
-------------------------------------------------------------------------------
IMPLEMENTATION 1: Current improvement over random 9-10%
SELECT METHOD:
-> UCT with chosen parameters
EXPANSION METHOD:
SIMULATE METHOD:
Number of improvements to the DoubleDummy playouts:
-> Randomised the start player (when no current trick passed) rather than assuming the player would start
-> Added support for halfway through trick
-> Potential TODO: change how the algorithm responds to playing different players
BACKPROPAGATE METHOD:
-------------------------------------------------------------------------------
IMPLEMENTATION IDEAS:
* Make a hand represent a trick as opposed to just a hand
* Swap out the tree approach - instead of one tree with random deals at nodes, have multiple
trees that define a permanent deal at the root. Calculate the best move from the most common
move returned from the trees.
-------------------------------------------------------------------------------
MISC NOTES / OBSERVATIONS:
* Had to randomise the start player for low trick games
-> It seems that the start player has a disadvantage, assumed to be
because other players could not follow suit.
* Simply improving the DoubleDummy playout method had a huge improvement over performance,
suggesting that a great deal of the MCTS capability relies on the DoubleDummy method itself.
-------------------------------------------------------------------------------
EVALUATION METHODS
There are three main methods that are to be used in the evaluation of the algorithms.
Each of these methods assumes 6 player types.
1) Random
2) Basic Player (Rule based)
3) Advanced Player (Rule based)
4) MCTS Implementation 0
5) MCTS Implementation 1
6) MCTS Implementation 2
Also to note, we need to retain the most information possible - probably results of each game
copied into a program like Excel.
METHOD 1
Say we wish to compare our players.
We playout some number of games with random deals (say 1000). Each player plays against
3 other random opponents, and we look at the average number of Hearts collected. The same
hands / games need to be played out by all players (not a different random game for each player).
Once we have this information, we can then look at the difference in average Hearts collected
in a graph format.
We can then place the difference in a table like this...
1 2 3
1 0.2 1 0.6 ...
2 1 2 0.8 ...
3 3.4 5 0.7 ...
METHOD 2:
From the above table, we may choose to look at some of the bigger differences in more detail.
We can then play a player against X Y and Z, where XYZ are players from our player pool, and we
can investigate how well they do against other player types.
METHOD 3:
When looking at MCTS specifically, we can make a graph of performance against the timer variable - as we increase
how much time we are giving the algorithm, our results should also improve.
OTHER METHODS:
We can consider other methods, for example comparing components of MCTS, as we wish. But we should start
with the main methods above.
<file_sep>/* File: MCTSNode_two.java - August 2014 */
//package raw_sim;
import java.util.*;
// Common interface for a node in the Monte-Carlo search tree. A node records
// the card played to reach it, an accumulated score and visit count, the trick
// it belongs to, and links to its parent and child nodes.
public interface MCTSNode {
// Accumulated (back-propagated) score for this node.
public int getScore ();
// How many times the search has visited this node.
public int getVisitCount ();
// The card this node represents playing.
public Card getPlay ();
public int getNumberChildren ();
public MCTSNode getParent();
public ArrayList<MCTSNode> getChildren();
// Despite the "set" names, MCTS.search calls these during back-propagation
// with increments ("backprop(R)", "visit count ++"), so implementations are
// expected to ACCUMULATE the amount rather than overwrite — confirm.
public void setScore (int amount);
public void setVisit (int amount);
// NEEDS TO ADD TO THE CHILDREN NOT REPLACE
public void assignChildren (ArrayList<MCTSNode> children);
public String toString ();
// Subtree length/width bookkeeping used by the tree visualiser.
public int getLength ();
public void setLength (int length);
// The (possibly partial) trick associated with this node.
public ArrayList<Card> getTrick ();
public int getStartPlayer();
public int getWinner ();
// Presumably score divided by visit count — verify in the implementing class.
public double getAverageScore();
}<file_sep>/* File: DoubleDummyModular.java - August 2014 */
import java.util.*;
/**
* An expansion on the DoubleDummy method - the idea is to base the results of the
* simulation on the type of opponents we are playing.
*
 * The idea is to determine whether knowledge about your basic opponents' moves has an impact on how well
* the MCTS algorithm works.
*
* Assumes every player wants to win.
*
* @author <NAME>
*/
public class DoubleDummyModular {
// When true, dumps the dealt hands and trick progress to stdout.
private static boolean verbose = false;
// Returns the number of hearts collected by the player during the play out
// (index 0 of players = us). Both input hands are deep-copied, so the
// caller's lists are never mutated. The leader of the first trick is random.
public static int PlayOut(ArrayList<Card> hand, ArrayList<ArrayList<Card>> opponentsHands) {
// Stores points
int[] playerScores = new int[4];
// Puts the players together
ArrayList<ArrayList<Card>> players = new ArrayList<ArrayList<Card>>();
// Deep copy the hands
for (int i = 0; i < 4; i++) {
players.add(new ArrayList<Card>());
}
for (Card i : hand) {
players.get(0).add(new Card(i.getSuit(), i.getRank()));
}
for (int x = 1; x < 4; x++) {
for (Card i : opponentsHands.get(x-1)) {
players.get(x).add(new Card (i.getSuit(), i.getRank()));
}
}
/*
players.add(hand);
players.add(opponentsHands.get(0));
players.add(opponentsHands.get(1));
players.add(opponentsHands.get(2));
*/
// Randomise the starting trick
Random rand = new Random();
int startPlayer = rand.nextInt(4);
// While there are still cards to be played
while (players.get(0).size() > 0) {
// currentTrick is built in PLAY ORDER: index 0 is the leader's card.
ArrayList<Card> currentTrick = new ArrayList<Card>();
// Play out the trick
for (int i = 0; i < 4; i++) {
int currentPlayer = (startPlayer + i) % 4;
//System.out.println(players.get(currentPlayer));
Card cardToPlay = playCard(players.get(currentPlayer), currentTrick);
currentTrick.add(cardToPlay);
// Remove card played from players hand
int cardIndex = 0;
for (int z = 0; z < players.get(currentPlayer).size(); z++) {
if (players.get(currentPlayer).get(z).match(cardToPlay)) cardIndex = z;
}
players.get(currentPlayer).remove(cardIndex);
}
// Calculate the winner of the trick and how many points they receive
// NOTE(review): getWinner's second argument is used as an index into the
// trick, but here the leader's SEAT number is passed, and the returned
// value (a play-order index) is then reused as the next leader's seat and
// as an index into playerScores. This mixes seat and play-order indexing —
// verify against the intended semantics.
int previousStartPlayer = startPlayer;
startPlayer = getWinner(currentTrick, previousStartPlayer);
//System.out.println(startPlayer);
playerScores[startPlayer] += getPoints(currentTrick);
}
// Return player score
return playerScores[0];
}
// Play out the round FROM HALFWAY THROUGH A TRICK
// Since a state is passed, we assume that we should start the playout halfway through the trick.
// Returns the number of hearts player 0 collects. Inputs are deep-copied.
public static int PlayOut(ArrayList<Card> preHand, ArrayList<ArrayList<Card>> preOpponentsHands, State state) {
//System.out.println("IM BEING PASSED A HAND : " + preOpponentsHands.get(0));
ArrayList<Card> hand = new ArrayList<Card>();
ArrayList<ArrayList<Card>> opponentsHands = new ArrayList<ArrayList<Card>>();
for (Card i : preHand) {
hand.add(new Card(i.getSuit(), i.getRank()));
}
opponentsHands.add(new ArrayList<Card>());
opponentsHands.add(new ArrayList<Card>());
opponentsHands.add(new ArrayList<Card>());
for (int x = 0; x < 3; x++) {
for (Card i : preOpponentsHands.get(x)) {
opponentsHands.get(x).add(new Card(i.getSuit(), i.getRank()));
}
}
// Stores points
int[] playerScores = new int[4];
// Puts the players together
ArrayList<ArrayList<Card>> players = new ArrayList<ArrayList<Card>>();
// No trick in progress: fall back to the simple full-round playout.
if (state.getCurrentTrick() == null) {
return PlayOut(hand, opponentsHands);
}
// If there is a state with a current trick being played
if (state.getCurrentTrick().size() != 4) {
// Remove the currently played cards from the opponents hands
// For each card already played
for (Card z : state.getCurrentTrick()) {
// Scan through opponents and remove if found
for (int i = 0; i < 3; i++) {
for (int x = 0; x < opponentsHands.get(i).size(); x++) {
if (opponentsHands.get(i).get(x).match(z)) opponentsHands.get(i).remove(x);
}
}
}
}
// Redeal the opponents hands based on how many cards were played
ArrayList<Card> opponentsHandsPool = new ArrayList<Card>();
for (int i = 0; i < 3; i++) {
for (Card x : opponentsHands.get(i)) {
opponentsHandsPool.add(new Card(x.getSuit(), x.getRank()));
}
}
int dealCount = 0;
int deckIndex = 0;
// Opponents who have already played this trick hold one card fewer than us,
// hence hand.size() - 1 below.
while (dealCount < Misc.RealSize(state.getCurrentTrick())) {
opponentsHands.get(dealCount).clear();
for (int i = 0; i < hand.size() - 1; i++) {
opponentsHands.get(dealCount).add(new Card(opponentsHandsPool.get(deckIndex).getSuit(), opponentsHandsPool.get(deckIndex).getRank()));
deckIndex++;
}
dealCount++;
}
// For the remaining hands just deal normal number, since no cards have been played
while (dealCount < 3) {
opponentsHands.get(dealCount).clear();
for (int i = 0; i < hand.size(); i++) {
opponentsHands.get(dealCount).add(new Card(opponentsHandsPool.get(deckIndex).getSuit(), opponentsHandsPool.get(deckIndex).getRank()));
deckIndex++;
}
dealCount++;
}
players.add(hand);
players.add(opponentsHands.get(0));
players.add(opponentsHands.get(1));
players.add(opponentsHands.get(2));
if (verbose) {
System.out.println("PLAYING OUT WITH THESE HANDS");
System.out.println(players.get(0));
System.out.println(players.get(1));
System.out.println(players.get(2));
System.out.println(players.get(3));
System.out.println("------------------------------");
}
// Opponents hands should now correctly reflect what has been played
// NOTE(review): this assumes the next seat to act is RealSize + 1, i.e.
// that player 0 has NOT yet played this trick — verify for the case where
// player 0's card is already in the trick.
int startPlayer = Misc.RealSize(state.getCurrentTrick()) + 1;
// Deep copy the states current trick
ArrayList<Card> currentTrick = new ArrayList<Card>();
for (Card i : state.getCurrentTrick()) {
if(i != null) currentTrick.add(new Card(i.getSuit(), i.getRank()));
}
if (verbose) System.out.println("Starting with = " + currentTrick);
boolean resetTrick = false;
// While there are still cards to be played
while (players.get(0).size() > 0) {
if (resetTrick) {
currentTrick = new ArrayList<Card>();
}
int beginningCardsInTrick = currentTrick.size();
// Play out the trick
for (int i = 0; currentTrick.size() < 4; i++) {
int currentPlayer = (startPlayer + i) % 4;
if (verbose) {
System.out.println(players.get(currentPlayer));
System.out.println("Current Players Hand = " + players.get(currentPlayer));
System.out.println("Current Trick = " + currentTrick);
System.out.println("--------------");
}
// If an opponent has already played, it should not play again
if (beginningCardsInTrick > 0 && currentPlayer != 0 && currentPlayer <= beginningCardsInTrick) {
} else {
Card cardToPlay = playCard(players.get(currentPlayer), currentTrick);
currentTrick.add(cardToPlay);
// Remove card played from players hand
int cardIndex = 0;
for (int z = 0; z < players.get(currentPlayer).size(); z++) {
if (players.get(currentPlayer).get(z).match(cardToPlay)) cardIndex = z;
}
players.get(currentPlayer).remove(cardIndex);
}
}
// Calculate the winner of the trick and how many points they receive
// NOTE(review): if startPlayer < beginningCardsInTrick this goes negative
// and getWinner would index out of bounds — verify it cannot happen.
int previousStartPlayer = startPlayer - beginningCardsInTrick;
if (verbose) {
System.out.println("END TRICK = " + currentTrick);
System.out.println("Player who started trick = " + previousStartPlayer);
System.out.println("** NEW TRICK **");
}
startPlayer = getWinner(currentTrick, previousStartPlayer);
//System.out.println(startPlayer);
playerScores[startPlayer] += getPoints(currentTrick);
resetTrick = true;
}
// Return player score
return playerScores[0];
}
// Plays the highest card it can provided there are no hearts currently being played. If there are, it
// will play the lowest card.
private static Card playCard (ArrayList<Card> hand, ArrayList<Card> currentTrick) {
// Finds the trump suit
// NOTE(review): with exactly ONE card already in the trick this leaves
// trumpSuit at -1, ignoring the led suit — suspect this should be
// currentTrick.size() > 0; confirm intended.
int trumpSuit = -1;
if (currentTrick.size() > 1) {
trumpSuit = currentTrick.get(0).getSuit();
}
// If we have a trump suit, they are the legal hand, otherwise everything is legal
ArrayList<Card> candidatePlays = new ArrayList<Card>();
if (trumpSuit != -1) {
for (Card i : hand) {
if (i.getSuit() == trumpSuit) candidatePlays.add(i);
}
if (candidatePlays.size() == 0) candidatePlays = hand;
} else {
candidatePlays = hand;
}
// Candidate plays now contains all the cards we could play
// See if there are any hearts present in the trick
boolean heartsPresent = false;
for (Card i : currentTrick) {
if (i.getSuit() == 0) heartsPresent = true;
}
// If heart is present, play lowest card, else play highest
Card play = candidatePlays.get(0);
if (heartsPresent) {
for (Card i : candidatePlays) {
if (play.getRank() > i.getRank()) play = i;
}
} else {
for (Card i : candidatePlays) {
if (play.getRank() < i.getRank()) play = i;
}
}
return play;
}
// Returns the index (within the trick list) of the winning card.
// NOTE(review): startPlayer is used as an index into the trick here, but the
// tricks built above are ordered by play order (leader at index 0), so
// trick.get(startPlayer) may not be the led card — verify.
private static int getWinner (ArrayList<Card> trick, int startPlayer) {
// Get trump suit
int trumpSuit = trick.get(startPlayer).getSuit();
// See what trump suit wins
int winningIndex = startPlayer;
for (int i = 0; i < trick.size(); i++) {
// If suits match
if (trumpSuit == trick.get(i).getSuit()) {
// If rank is greater than, change it
if (trick.get(winningIndex).getRank() < trick.get(i).getRank()) winningIndex = i;
}
}
return winningIndex;
}
// Calculates score of the trick
private static int getPoints (ArrayList<Card> trick) {
int score = 0;
for (Card i : trick) {
// A heart
if (i.getSuit() == 0) score++;
// Queen of spades (suit 1, rank 10) is worth 13 penalty points
if (i.getSuit() == 1 && i.getRank() == 10) score += 13;
}
return score;
}
}
<file_sep>/* File: AdvancedPlayer.java - March 2014 */
import java.util.*;
/**
* This is the AdvancedPlayer class.
*
* The play in this class will attempt to be as advanced as possible. A rule based approach is used.
* -> If last to play in trick
* -> Play highest card of the suit ELSE
* -> Play Queen of Spades ELSE
 * -> Play highest card of suit with fewest remaining cards
* -> If leading the trick
* -> Open with the lowest card...?
* -> If 2nd or 3rd in trick
 * -> Play highest card of suit that will not win trick ELSE
* -> Play Queen of Spades ELSE
 * -> Play highest card of suit with fewest remaining cards
*
* @author <NAME>
*/
public class BasicPlayer implements Player {
// NOTE(review): the file header names this AdvancedPlayer.java, but the class
// is declared BasicPlayer — confirm which rule set this file represents.
// Cards currently held by this player.
private ArrayList<Card> hand = new ArrayList<Card>();
// Seat number at the table.
private int position;
// Game controller, used to query legal plays.
private Controller controller;
public BasicPlayer () {
}
// Initialises the player with its seat, starting hand and controller.
public void setUp (int position, ArrayList<Card> hand, Controller controller) {
this.position = position;
this.controller = controller;
// Sets up the ArrayList of Cards
for (Card i : hand) {
this.hand.add(i);
}
}
public String toString() {
String info = "";
info += "I am an BASIC PLAYER player " + position + " and my hand is " + hand.size() + "\n\n";
int i = 0;
while (i < hand.size()) {
info += hand.get(i).toString() + "\n";
i++;
}
return info;
}
// NOTE(review): no legality check here — simply leads with the first card held.
public Card getLead() {
// Find out what is legal to play
// Play a card randomly from this subset
Card tempCard = hand.get(0);
hand.remove(0);
return tempCard;
}
// Consider getting rid of one suit
// Chooses a card for the current trick: leads low when first to act;
// otherwise plays the lowest card of the led suit, or a random legal card
// when unable to follow suit. Removes the chosen card from the hand.
public Card getPlay(State state) {
//return hand.get(0);
// Have to deep copy the hand for this to work
ArrayList<Card> tempHand = new ArrayList<Card>();
for (Card i : hand) {
tempHand.add(i);
}
// Find out what is legal to play
ArrayList<Card> legalHands = controller.getLegalHands(tempHand);
Card tempCard = legalHands.get(0);
// Find out the player we are in the trick
int player = 0;
if (state.getCurrentTrick() != null) player = Misc.RealSize(state.getCurrentTrick());
if (player == 0) {
// If starting player, open with lowest card
for (Card i : legalHands) {
if (i.getRank() < tempCard.getRank()) {
tempCard = i;
}
}
} else {
// Last player
// Check to see if we still have the suit
boolean playingToSuit = false;
//System.out.println(state.getCurrentTrick());
/*
// Gets the trump suit
int trumpSuit = -1;
for (Card i : state.getCurrentTrick()) {
if (i != null) {
trumpSuit = i.getSuit();
break;
}
}
*/
// NOTE(review): this assumes the trick list is seat-indexed with the lead
// sitting at index 4 - N when N cards have been played — verify against
// how State lays out getCurrentTrick().
int trumpSuit;
if (Misc.RealSize(state.getCurrentTrick()) == 1) {
trumpSuit = state.getCurrentTrick().get(3).getSuit();
} else if (Misc.RealSize(state.getCurrentTrick()) == 2) {
trumpSuit = state.getCurrentTrick().get(2).getSuit();
} else {
trumpSuit = state.getCurrentTrick().get(1).getSuit();
}
for (Card i : legalHands) {
if (i.getSuit() == trumpSuit) {
playingToSuit = true;
break;
}
}
if (playingToSuit) {
// Play lowest card possible
// Play the LOWEST CARD in the group
tempCard = legalHands.get(0);
for (Card i : legalHands) {
if (i.getRank() < tempCard.getRank()) {
tempCard = i;
}
}
} else {
// If we are not playing to suit, play random
Random rand = new Random();
tempCard = legalHands.get(rand.nextInt(legalHands.size()));
}
}
// Tempcard is now the card we are going to play
// Find the card to remove
int index = 0;
for (int i = 0; i < hand.size(); i++) {
if (hand.get(i).match(tempCard)) index = i;
}
hand.remove(index);
return tempCard;
}
// True when this player holds the 2 of clubs (rank 0, suit 3) and so starts.
public boolean getStart() {
// Finds out if we have the 2 of clubs and therefore can start play
for (Card i : hand) {
if (i.getRank() == 0 && i.getSuit() == 3) {
return true;
}
}
return false;
}
public int getPosition() {
return position;
}
public ArrayList<Card> getHand() {
return hand;
}
}
<file_sep>/* File: GameApp.java - March 2014 */
//package raw_sim;
import java.awt.*;
import java.awt.event.*;
import javax.swing.*;
import java.util.*;
/**
* This is the app class for the hearts project. It contains the main method
* and calls upon the other classes to create a game state and then begins
* playing the game.
*
* @author <NAME>
*/
public class GameApp {
/**
* The main function, sets up the player space and begins the play
* loop.
*
*/
// Runs a batch experiment: one MCTS player against three random players over
// numberOfGames deals, then prints each seat's average score. The large
// commented-out sections below are retained ad-hoc tests of individual
// MCTS components (Select, Expand, SimulatePlay, DoubleDummy).
public static void main(String[]args) {
// Options array - 0 = Select coefficient
int[] options = new int[1];
options[0] = 20; // threshold for mcts select
// MAIN GAME CODE HERE
int numberOfGames = 3000;
int[] totalScores = {0,0,0,0};
for (int x = 0; x < numberOfGames; x++) {
// randomRemove(0) yields an empty "cards already played" list.
ArrayList<Card> cardsPlayed = randomRemove(0);
ArrayList<Player> playerType = new ArrayList<Player>();
playerType.add(new MCTSPlayer(options));
playerType.add(new RandomPlayer());
playerType.add(new RandomPlayer());
playerType.add(new RandomPlayer());
State testState = new State(false, -1, cardsPlayed);
Controller control = new Controller(testState, playerType, null);
State results = control.playGames(1, 0, 13);
//State results = control.play(3, 1);
int[] scores = results.getScores();
for (int i = 0; i < 4; i++) {
totalScores[i] += scores[i];
}
System.out.println("END OF GAME " + x);
}
// Report the mean score per seat across all games.
for (int i = 0; i < 4; i++) {
System.out.println((double) totalScores[i] / numberOfGames);
}
// END MAIN GAME CODE
//System.out.println(MCTS.search(testState, control.players[0].getHand()));
//Visualise("");
// Testing visualisation method
//Visualise("{c6,0,1,0,140;}");
/*
// Testing the Select method
MCTSNode root = new MCTSNode(null, null);
ArrayList<MCTSNode> children = new ArrayList<MCTSNode>();
children.add(new MCTSNode(new Card(0, 0), root));
children.add(new MCTSNode(new Card(0, 1), root));
children.add(new MCTSNode(new Card(0, 2), root));
children.add(new MCTSNode(new Card(0, 3), root));
children.get(0).setScore(1);
children.get(0).setVisit(1);
children.get(1).setScore(1);
children.get(1).setVisit(1);
int[] scores = {0,0,0,0};
root.assignChildren(children);
for (int i = 0; i < 10000; i++) {
scores[MCTS.Select(root).getPlay().getRank()] += 1;
}
for (int i = 0; i < 4; i++) {
System.out.println(scores[i]);
}
*/
/*
// Testing the Expand method
ArrayList<Card> tempHand = new ArrayList<Card>();
tempHand.add(new Card(0,0));
tempHand.add(new Card(1,0));
// Creates a MCTS tree with a root node and 4 possible plays - 1, 2, 3, 4 of Hearts
MCTSNode root = new MCTSNode(null, null);
ArrayList<MCTSNode> children = new ArrayList<MCTSNode>();
children.add(new MCTSNode(new Card(0, 0), root));
root.assignChildren(children);
ArrayList<MCTSNode> children2 = new ArrayList<MCTSNode>();
children2.add(new MCTSNode(new Card(1, 0), root.getChildren().get(0)));
root.getChildren().get(0).assignChildren(children2);
ArrayList<MCTSNode> expansion = MCTS.Expand(root.getChildren().get(0).getChildren().get(0), testState, tempHand);
for (MCTSNode i : expansion) {
System.out.println(i);
}
*/
/*
// Testing the SimulatePlay method
System.out.println(control.players[0].getHand() + "\n\n");
MCTSNode root = new MCTSNode(null, null);
ArrayList<MCTSNode> children = new ArrayList<MCTSNode>();
children.add(new MCTSNode(control.players[0].getHand().get(0), root));
root.assignChildren(children);
ArrayList<MCTSNode> children2 = new ArrayList<MCTSNode>();
children2.add(new MCTSNode(control.players[0].getHand().get(1), root.getChildren().get(0)));
root.getChildren().get(0).assignChildren(children2);
System.out.println(MCTS.SimulatePlay(root.getChildren().get(0).getChildren().get(0), testState, control.players[0].getHand()));
*/
/*
int max = 0;
// Testing the DoubleDummy method
for (int i = 0; i < 5000; i++) {
ArrayList<ArrayList> hands = Controller.GenerateHands(null);
ArrayList<Card> player = hands.get(0);
ArrayList<ArrayList<Card>> opponents = new ArrayList<ArrayList<Card>>();
opponents.add(hands.get(1));
opponents.add(hands.get(2));
opponents.add(hands.get(3));
int result = DoubleDummy.PlayOut(player, opponents);
if (result > max) max = result;
}
System.out.println(max);
*/
/*
ArrayList<Player> playerType = new ArrayList<Player>();
playerType.add(new RandomPlayer());
playerType.add(new CowardPlayer());
playerType.add(new BullyPlayer());
playerType.add(new RandomPlayer());
Controller control = new Controller(null, playerType);
System.out.println(control);
control.play(13);
*/
}
// Opens a Swing window rendering the given serialized MCTS tree structure
// inside a scroll pane. treeStructure uses the brace-delimited format shown
// in the commented examples below; totalNodes sizes the panel.
public static void Visualise (String treeStructure, int totalNodes) {
JFrame frame = new JFrame ("Tree GUI");
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
//TreePanel panel = new TreePanel("{A,0,6,7,-1;}{B,0,2,5,6;C,0,7,1,6;}{D,0,1,1,2;E,0,4,3,2;}{F,0,3,1,4;G,0,5,1,4;}");
//{0,A,0,6,7,-1;}{1,B,0,2,5,6;2,C,0,7,1,6;}{3,D,0,1,1,2;4,E,0,4,3,2;}{5,F,0,3,1,4;6,G,0,5,1,4;}
TreePanel panel = new TreePanel(treeStructure, totalNodes);
panel.setLayout(null);
JScrollPane scrollFrame = new JScrollPane(panel);
scrollFrame.setPreferredSize(new Dimension(panel.getWidth(), panel.getHeight()));
frame.getContentPane().add(scrollFrame);
//frame.getContentPane().add(panel);
//frame.setLayout(null);
frame.pack();
frame.setVisible(true);
}
// Returns a random assortment of cards played
// Builds a full 52-card deck, shuffles it (Fisher-Yates), and returns the
// first 4*tricks cards. tricks == 0 therefore yields an empty list.
public static ArrayList<Card> randomRemove (int tricks) {
ArrayList<Card> deck = new ArrayList<Card>();
// Generate an ArrayList of 52 cards
int suit = 0, rank = 0;
while (suit < 4) {
while (rank < 13) {
Card tempCard = new Card(suit, rank);
deck.add(tempCard);
rank++;
}
suit++;
rank = 0;
}
// Randomises the array
Random rand = new Random();
int randomNum;
Card tempCard;
// For each index in the deck starting at the last and decrementing
for(int i = deck.size() - 1; i > 0; i--) {
// Pick a card from the remainding deck
randomNum = rand.nextInt(i + 1);
// Swap the card at the end for the random number card
tempCard = deck.get(i);
deck.set(i, deck.get(randomNum));
deck.set(randomNum, tempCard);
}
// Return a portion of this randomised deck
ArrayList<Card> temp = new ArrayList<Card>();
for (int i = 0; i < 4*tricks; i++) {
temp.add(deck.get(i));
}
return temp;
}
}<file_sep>/* File: ComponentTester.java - March 2014 */
//package raw_sim;
import java.awt.*;
import java.awt.event.*;
import javax.swing.*;
import java.util.*;
/**
* This is the app class for the hearts project. It contains the main method
* and calls upon the other classes to create a game state and then begins
* playing the game.
*
* @author <NAME>
*/
public class ComponentTester {

    /**
     * Ad-hoc driver for exercising the DoubleDummy playout in isolation.
     * Deals a small random game (four hands of `tricks` cards each), seeds
     * the state with a partially-played trick, and prints the number of
     * points player 0 collects in the playout.
     */
    public static void main(String[] args) {
        final int tricks = 4;

        // Draw enough shuffled cards for four hands of `tricks` cards each.
        ArrayList<Card> pool = GameApp.randomRemove(tricks);
        int cursor = 0;

        // Player 0 takes the first `tricks` cards from the pool.
        ArrayList<Card> hand = new ArrayList<Card>();
        for (int c = 0; c < tricks; c++) {
            Card src = pool.get(cursor++);
            hand.add(new Card(src.getSuit(), src.getRank()));
        }

        // The three opponents take the remaining cards, in pool order.
        ArrayList<ArrayList<Card>> opponentsHands = new ArrayList<ArrayList<Card>>();
        for (int opp = 0; opp < 3; opp++) {
            opponentsHands.add(new ArrayList<Card>());
            for (int c = 0; c < tricks; c++) {
                Card src = pool.get(cursor++);
                opponentsHands.get(opp).add(new Card(src.getSuit(), src.getRank()));
            }
        }

        // Seed a state whose current trick already contains the 2 of diamonds.
        State testState = new State(false, -1, null);
        ArrayList<Card> openingTrick = new ArrayList<Card>();
        openingTrick.add(new Card(2, 0));
        testState.setCurrentTrick(openingTrick);

        System.out.println(DoubleDummy.PlayOut(hand, opponentsHands, testState));
    }
}
<file_sep>HowToPlayWell
=============
COSC490 2014
<file_sep>/* File: RandomPlayer.java - March 2014 */
import java.util.*;
/**
* This is the RandomPlayer class.
*
* The play in this class is determined entirely by randomisation.
 * As long as a group of cards satisfies the rules of play, a random
* card will be picked.
*
* @author <NAME>
*/
public class RandomPlayer implements Player {

    // Cards currently held by this player.
    private ArrayList<Card> hand = new ArrayList<Card>();
    // Seat number at the table.
    private int position;
    // Game controller, used to query which plays are legal.
    private Controller controller;

    public RandomPlayer () {
    }

    /** Initialises the player with its seat, starting hand and controller. */
    public void setUp (int position, ArrayList<Card> hand, Controller controller) {
        this.position = position;
        this.controller = controller;
        // Copy the dealt card references into our own list.
        this.hand.addAll(hand);
    }

    /** Debug description of this player and every card it holds. */
    public String toString() {
        StringBuilder info = new StringBuilder();
        info.append("I am RANDOM player ").append(position)
            .append(" and my hand is ").append(hand.size()).append("\n\n");
        for (Card card : hand) {
            info.append(card.toString()).append("\n");
        }
        return info.toString();
    }

    /** Leads a trick: removes and returns the first card held. */
    public Card getLead() {
        return hand.remove(0);
    }

    /** Picks a uniformly random legal card and removes it from the hand. */
    public Card getPlay(State state) {
        // Hand the controller a copy so it cannot mutate our real hand.
        ArrayList<Card> handCopy = new ArrayList<Card>(hand);
        ArrayList<Card> legalHands = controller.getLegalHands(handCopy);

        // Choose one of the legal cards at random.
        Random rand = new Random();
        Card chosen = legalHands.get(rand.nextInt(legalHands.size()));

        // Remove the matching card from the real hand (last match wins,
        // mirroring the original full scan).
        int index = 0;
        for (int i = 0; i < hand.size(); i++) {
            if (hand.get(i).match(chosen)) {
                index = i;
            }
        }
        hand.remove(index);
        return chosen;
    }

    /** True when this player holds the 2 of clubs (rank 0, suit 3). */
    public boolean getStart() {
        for (Card card : hand) {
            if (card.getSuit() == 3 && card.getRank() == 0) {
                return true;
            }
        }
        return false;
    }

    public int getPosition() {
        return position;
    }

    public ArrayList<Card> getHand() {
        return hand;
    }
}
<file_sep>/* File: Card.java - March 2014 */
//package raw_sim;
import java.util.*;
/**
* This is the Card class which represents a single card. This class has
* methods to determine suit and rank of a card.
*
* Basics taken from http://www.dreamincode.net/forums/topic/110380-deck-of-cards-using-various-methods/
*
* @author <NAME>
*/
public class Card {

    // Suit names indexed by the numeric suit code used throughout the project.
    private static String[] suits = {"hearts", "spades", "diamonds", "clubs"};
    // Rank names indexed by the numeric rank code (0 = "2" ... 12 = "ace").
    private static String[] ranks = {"2", "3", "4", "5", "6", "7", "8", "9", "10", "jack", "queen", "king", "ace"};

    // This card's suit and rank codes.
    private int suitCode;
    private int rankCode;

    Card (int suit, int rank) {
        this.suitCode = suit;
        this.rankCode = rank;
    }

    /** Human-readable name, e.g. "queen of spades". */
    public String toString() {
        return String.format("%s of %s", ranks[rankCode], suits[suitCode]);
    }

    public int getRank() {
        return rankCode;
    }

    public int getSuit() {
        return suitCode;
    }

    /** The suit name as text, e.g. "hearts". */
    public String getSuitString() {
        return suits[suitCode];
    }

    /** True when the candidate has the same suit and rank as this card. */
    public boolean match (Card candidate) {
        return suitCode == candidate.getSuit() && rankCode == candidate.getRank();
    }
}
<file_sep>/* File: MCTS.java - August 2014 */
//package raw_sim;
import java.util.*;
/**
* Second implementation of MCTS. This version is intended to have less time
* for each tree, since multiple trees will be made.
*
* Each node contains trick info (as opposed to just a play), and therefore
* some of the component methods change accordingly.
*
* @author <NAME>
*/
public class MCTS {
private static boolean verbose = false;
private static boolean visualise = false;
// Given a State and a Player, search the state space and return the
// best card available to be played.
/**
* OPTIONS:
* [0] Select method thresehold criteria
* [1] Max nodes expanded
* [2] Timer variable
*/
// Runs Monte-Carlo tree search from the given game state and returns the
// card judged best for us to play. options[0] tunes Select, options[1] caps
// children per expansion, options[2] is the iteration budget.
public static Card search (State state, ArrayList<Card> hand, int[] options) {
// THE MCTS ALGORITHM
MCTSNode currentNode, rootNode, prevNode = null;
// CREATES THE BASE DEAL - REMOVING ALL CARDS ALREADY PLAYED AT PLAY CALL
// Creates the deal based on remaining cards - NOTE: should only do this for opponents,
// and our hand will remain the same.
ArrayList<ArrayList<Card>> deal;
if (state.getCurrentTrick() != null && state.getCurrentTrick().size() != 0) {
deal = Controller.GenerateHandsTrick(state, hand);
} else {
deal = Controller.GenerateHands(state, hand);
}
/*
for (ArrayList<Card> player : deal) {
System.out.println("Deal Player Size: " + player.size());
}
*/
// Check to see if there is a trick in play
ArrayList<Card> currentTrick = Misc.DeepCopyTrick(state.getCurrentTrick());
//System.out.println(currentTrick);
if (currentTrick != null && currentTrick.size() != 0) {
// Calculate start player based on the trick - it will be earliest card played
int rootNodeStartPlayer = 0;
for (int i = 0; i < 4; i++) {
if (currentTrick.get(i) != null) {
rootNodeStartPlayer = i;
break;
}
}
// Creates a half full root node
rootNode = new MCTSNode(null, null, currentTrick, rootNodeStartPlayer);
} else {
// Creates an empty root node, ready for expansion
// If this is the case, we must be starting the trick, so we are the start player
rootNode = new MCTSNode(null, null, null, 0);
}
// CONSTRUCTS AND SEARCHES THE TREE
// while (has time)
// NOTE(review): despite the name, 'timer' is an iteration counter, not
// wall-clock time; options[2] bounds the number of expansion rounds.
int timer = 0;
while (timer < options[2]) {
int depth = 0;
// current node <-- root node
currentNode = rootNode;
// Assign prevNode to currentNode, used in the case of examining the root
prevNode = currentNode;
// while (current node is within the State aka has children)
while (currentNode.getNumberChildren() != 0) {
// prev node <-- current node
prevNode = currentNode;
// current node <-- Select(current_node)
currentNode = currentNode.getChildren().get(Select(currentNode, options[0]));
depth++;
}
if (verbose) System.out.println("Expanding at depth: " + depth);
if (verbose) System.out.println("Cards played: " + state.getCardsPlayed().size());
// prev node <-- expand(prev node)
// Assign children will have to append to the children array, not replace it
currentNode.assignChildren(Expand(currentNode, state, deal, options[1]));
/*
System.out.println("PREV " + prevNode);
System.out.println("CURR " + currentNode);
System.out.println("Children = " + Expand(currentNode, state, hand));
*/
// R <-- play_simulated_game(prev node)
// Must take into account the current trick from the state!
if (verbose) System.out.println("\n\nExploring a node with count: " + timer + " \n\n");
if (verbose) System.out.println("Current node represents : " + currentNode.getPlay());
if (verbose) {
for (ArrayList<Card> i : deal) {
System.out.println(i);
}
}
// Avoid simulating play / backprop at root node
// NOTE(review): the first iteration (timer == 0) expands the root but
// skips simulation and back-propagation entirely — confirm intended.
if (timer != 0) {
int doesWin = SimulatePlay(currentNode, state, deal);
// BACK PROPAGATE METHOD
// while (current node is within the State aka is not root node)
while (currentNode != null) {
// current node.backprop(R)
currentNode.setScore(doesWin);
// current node.visit count ++
currentNode.setVisit(1);
// current node <-- current node.parent
currentNode = currentNode.getParent();
}
}
timer++;
}
/*
// Iterate through the tree (only first layer at the moment)
for (MCTSNode i : rootNode.getChildren()) {
System.out.println(i);
}
System.out.println(rootNode.getChildren().size());
System.out.println(rootNode);*/
// Return the best move specified in tree
// To do this, simply look at the LOWEST score on the first depth
// Note: 'max' actually tracks the MINIMUM average score — hearts are
// penalty points, so lower is better.
MCTSNode max = rootNode.getChildren().get(0);
for (MCTSNode i : rootNode.getChildren()) {
if (i.getAverageScore() < max.getAverageScore()) {
max = i;
}
}
// Max is now the node with the best score. Lets check if any are the same, and if so, change if visit count higher
for (MCTSNode i : rootNode.getChildren()) {
if (i.getAverageScore() == max.getAverageScore()) {
if (i.getVisitCount() > max.getVisitCount()) {
max = i;
}
}
}
//System.out.println("The best move is : " + max.getPlay());
// Find midway point for root node
ArrayList<MCTSNode> children = rootNode.getChildren();
// Find the total length
int totalLength = 0;
for (MCTSNode i : children) {
totalLength += i.getLength();
}
// Find the midway point
int tempLength = 0;
// NOTE(review): leftNodeLength is declared but never updated or read —
// looks vestigial.
int leftNodeLength = 0;
for (MCTSNode i : children) {
tempLength += i.getLength();
if (tempLength > totalLength / 2) break;
}
// VISUALISE METHOD
if (visualise) {
//System.out.println("Total Length = " + CalculateLengths(rootNode));
String treeStructure = "{";
treeStructure += CalculatePositions(rootNode, tempLength, -1, 1);
treeStructure += "}";
GameApp.Visualise(treeStructure, rootNode.getLength());
}
// Iterates down the tree
//MCTS.Iterate(rootNode.getChildren(), 0);
return max.getPlay();
}
/**
 * Debug helper: prints every node in the subtrees rooted at the given
 * children, depth-first, annotating each with its parent and depth, and
 * prints a divider after each sibling group.
 */
public static void Iterate (ArrayList<MCTSNode> childNodes, int depth) {
    for (int idx = 0; idx < childNodes.size(); idx++) {
        MCTSNode node = childNodes.get(idx);
        System.out.println(node + " Parent: " + node.getParent() + " at depth: " + depth);
        // Recurse only when the node's child list has been initialised.
        if (node.children != null) {
            Iterate(node.getChildren(), depth + 1);
        }
    }
    System.out.println("------------------------------------------------------------\n");
}
/**
 * Chooses which child of the given node the tree descent should visit next,
 * returning that child's index.
 *
 * Selection is delegated to the UCT formula in the Select class; `option`
 * tunes its exploration threshold. (An earlier hand-rolled scheme that
 * weighted each child by 13 minus its score — favouring children with few
 * average hearts collected — has been retired in favour of UCT.)
 */
public static int Select(MCTSNode node, int option) {
    return Select.UCT(node, option);
}
/*
 * MCTS expansion step: produces up to numChildren candidate child nodes, each
 * holding one randomly sampled complete trick that could follow this node.
 *
 * node        - the node being expanded
 * state       - game state (NOTE(review): not referenced anywhere in this body)
 * deal        - the four players' hands, ordered { Player, O1, O2, O3 }
 * numChildren - number of random candidate tricks to generate
 *
 * Sampling is uniform with no de-duplication, so duplicate tricks can appear.
 */
// Expands the node, creating child nodes based on following tricks.
// Returns a list of child nodes
public static ArrayList<MCTSNode> Expand(MCTSNode node, State state, ArrayList<ArrayList<Card>> deal, int numChildren) {
    ArrayList<MCTSNode> candidateChildren = new ArrayList<MCTSNode>();
    // Root node case - TRICK IS IN PLAY
    if (node.getParent() == null && node.getTrick() != null) {
        // We are in the root node and have to deal with a current trick
        // All tricks must begin with what is currently in the trick already - assume this information in the node
        // Assumes trick will have null entries if no card has been played
        ArrayList<Card> trick = new ArrayList<Card>(); // NOTE(review): unused after creation
        // No cards to remove from deal (due to root node) but we still must deep copy
        ArrayList<ArrayList<Card>> revisedDeal = Misc.DeepCopy(deal);
        // Remove cards already played in the trick from the specific players -- DONT NEED TO DO THIS, BUT ACTS AS A CHECK!
        for (int i = 0; i < 4; i++) {
            // If we find a card in the trick
            if (node.getTrick().get(i) != null) {
                // Remove from hand that played it
                int index = Misc.RemoveIndex(node.getTrick().get(i), revisedDeal.get(i));
                revisedDeal.get(i).remove(index);
            }
        }
        // Make new tricks - WE CAN ONLY PLAY WHAT IS LEGAL, AND SO CAN THE OPPONENT
        Random rand = new Random();
        // Find the trump suit of the trick
        // NOTE(review): this is the suit of the first non-null card, i.e. the
        // lead suit of the trick; "trump" is used loosely here.
        int trumpSuit = -1;
        for (int i = 0; i < node.getTrick().size(); i++) {
            if (node.getTrick().get(i) != null) {
                trumpSuit = node.getTrick().get(i).getSuit();
                break;
            }
        }
        for (int i = 0; i < numChildren; i++) {
            // Start from the partially played trick and randomly fill each empty
            // seat with a legal play from that seat's remaining hand.
            ArrayList<Card> newTrick = Misc.DeepCopyTrick(node.getTrick());
            for (int x = 0; x < 4; x++) {
                if (newTrick.get(x) == null) {
                    ArrayList<Card> legalPlays = Misc.GetLegalPlays(revisedDeal.get(x), trumpSuit);
                    int selected = rand.nextInt(legalPlays.size());
                    newTrick.set(x, new Card(legalPlays.get(selected).getSuit(), legalPlays.get(selected).getRank()));
                    //int selected = rand.nextInt(revisedDeal.get(x).size());
                    //newTrick.set(x, new Card(revisedDeal.get(x).get(selected).getSuit(), revisedDeal.get(x).get(selected).getRank()));
                }
            }
            // Creates the node with the startPlayer and parent based on the parent node
            MCTSNode temp = new MCTSNode(newTrick.get(0), node, newTrick, node.getStartPlayer());
            // Add to candidate children
            candidateChildren.add(temp);
        }
    }
    // EASY CASE - NO TRICK IN PLAY
    else {
        // Remove already played cards from deal
        //System.out.println("TRACE: " + node.getTrick());
        //System.out.println(deal.get(0));
        //System.out.println(deal.get(1));
        ArrayList<ArrayList<Card>> revisedDeal = Misc.RemovePlayedCards(deal, node);
        // If we have no plays left to expand, return empty
        if (revisedDeal.get(0).size() == 0) return new ArrayList<MCTSNode>();
        //System.out.println(revisedDeal.get(0));
        // Expanding possible nodes based on all the remaining tricks available
        // Will expand until numChildren reached
        for (int i = 0; i < numChildren; i++) {
            // Generate a trick
            // NOTE(review): each seat plays a uniformly random card from its hand
            // here, with no legality filtering (unlike the root-node branch above)
            // - confirm whether that asymmetry is intended.
            ArrayList<Card> trick = new ArrayList<Card>();
            Random rand = new Random();
            for (ArrayList<Card> player : revisedDeal) {
                int selected = rand.nextInt(player.size());
                trick.add(new Card(player.get(selected).getSuit(), player.get(selected).getRank()));
            }
            // Creates the node with the startPlayer and parent based on the parent node
            MCTSNode temp = new MCTSNode(trick.get(0), node, trick, node.getWinner());
            // Add to candidate children
            candidateChildren.add(temp);
        }
    }
    return candidateChildren;
}
/* Plays through the game world, beginning with current node selected.
*
* Basic procedure is as follows...
* -> Calculate winner of the trick passed.
* -> Set up the game state
* -> Recursively remove cards already played
* -> Set up remaining hands / cards
* -> Pass this info to DoubleDummy play
* -> Return winner
*
* We assume players are set up like... { Player, O1, O2, O3}
*
*/
/*
 * Replays the path from this node back to the root to reconstruct the current
 * game state (remaining hands + running scores), then hands the position to
 * DoubleDummy.PlayOut, whose result is returned as the simulation outcome.
 *
 * Scoring while replaying: every card of suit 0 adds 1 point and Card(1, 10)
 * adds 13 points to the trick winner's score - presumably Hearts scoring
 * (hearts = 1 each, queen of spades = 13); confirm the Card suit/rank encoding.
 * NOTE(review): the 'state' parameter is never referenced in this body.
 */
public static int SimulatePlay(MCTSNode node, State state, ArrayList<ArrayList<Card>> deal) {
    // Deep copy the deal
    ArrayList<ArrayList<Card>> hands = Misc.DeepCopy(deal);
    // Calculate winner of the trick passed
    int winner = node.getWinner();
    int[] scores = {0, 0, 0, 0};
    /*
    for (ArrayList<Card> player : deal) {
    System.out.println("Deal Player Size: " + player.size());
    }
    */
    // Set up the game state - remove all cards that have already been played
    // Walk ancestors up to (but not including) the root, removing every card of
    // every ancestor trick from the corresponding hand and accumulating points.
    MCTSNode currentNode = node;
    //while (currentNode != null && (currentNode.getTrick() != null && currentNode.getTrick().size() != 0)) {
    while (currentNode.getParent() != null) {
        // Remove a card from each hand per trick
        for (int i = 0; i < hands.size(); i++) {
            for (int x = 0; x < hands.get(i).size(); x++) {
                if (hands.get(i).get(x).match(currentNode.getTrick().get(i))) {
                    // Point card of suit 0: one point to the trick winner.
                    if (hands.get(i).get(x).getSuit() == 0) {
                        scores[currentNode.getWinner()] += 1;
                    }
                    // The 13-point penalty card.
                    if (hands.get(i).get(x).match(new Card(1, 10))) {
                        scores[currentNode.getWinner()] += 13;
                    }
                    hands.get(i).remove(x);
                    break;
                }
            }
        }
        currentNode = currentNode.getParent();
    }
    /*
    for (ArrayList<Card> player : hands) {
    System.out.println("Hands Player Size Post Removal: " + player.size());
    }
    */
    // We now have a current game state, current scores, and the winner of the last trick
    // Time to pass all this information to the doubledummy player and play it out
    return DoubleDummy.PlayOut(hands, scores, winner);
    /*
    for (ArrayList<Card> i : hands) {
    System.out.println(i);
    }
    for (int i : scores) {
    System.out.println(i);
    }
    */
}
/* ----------------------- VISUALISE METHODS ----------------------- */
/*
 * Depth-first pass over the tree that computes, stores (via setLength) and
 * returns the "length" of each subtree: 1 for a leaf, otherwise the sum over
 * all children of (child subtree length + 1).
 */
public static int CalculateLengths (MCTSNode childNode) {
    // A node with no child list, or an empty one, is a leaf of size 1.
    boolean isLeaf = childNode.children == null || childNode.getChildren().size() == 0;
    if (isLeaf) {
        childNode.setLength(1);
        //System.out.println("1");
        return 1;
    }
    // Internal node: accumulate every child's subtree plus one for its edge.
    int total = 0;
    for (MCTSNode child : childNode.getChildren()) {
        total += CalculateLengths(child) + 1;
    }
    childNode.setLength(total);
    //System.out.println(length);
    return total;
}
/*
 * Depth-first layout pass for visualising the tree. For each node it appends a
 * record to the returned string of the form
 *   "<label>,<score>,<xPos>,<depth>,<parentXPos>;"
 * where the label is "ROOT" for the root (parentPos == -1) or the first letter
 * of the played card's suit plus its rank. A node's x position is
 * leftBarrier + (sum of the subtree lengths placed to its left) + 1, computed
 * from the lengths stored earlier by CalculateLengths. The record string is
 * also printed to stdout before returning.
 */
// A depth first search through the tree printing out the length of the subtree
// Each node returns a string that is a representation of the node on the graph
public static String CalculatePositions (MCTSNode childNode, int leftBarrier, int parentPos, int depth) {
    String nodeString = "";
    if (parentPos == -1) {
        // If root node
        // Add labels to the return string
        nodeString += "ROOT,";
        nodeString += childNode.getScore() + ",";
    } else {
        // Add labels to the return string
        nodeString += childNode.getPlay().getSuitString().charAt(0);
        nodeString += childNode.getPlay().getRank() + ",";
        nodeString += childNode.getScore() + ",";
    }
    // Place the node you are currently on
    // Do you have child nodes?
    if (childNode.children != null) {
        // Yes?
        // Split into two groups, left and right
        ArrayList<MCTSNode> children = childNode.getChildren();
        /* FIX THIS MORE ADVANCE SPLITTING BASED ON LENGTH
        NOTE(review): if this block is ever revived, 'if (left = true)' below is
        an assignment, not a comparison - it must become 'if (left)' or
        'if (left == true)' or the right-hand branch is unreachable.
        // Find the total length
        int totalLength = 0;
        for (MCTSNode i : children) {
        totalLength += i.getLength();
        }
        // Find the midway point
        int tempLength = 0;
        int leftNodeLength = 0;
        for (MCTSNode i : children) {
        tempLength += i.getLength();
        if (tempLength > totalLength / 2) break;
        }
        // Place yourself at sum(left node lengths) + leftBarrier + 1
        int myPos = tempLength + leftBarrier + 1;
        nodeString += myPos + ",";
        // Add width and parent info
        nodeString += depth + ",";
        nodeString += parentPos + ";";
        int prevLengths = 0;
        boolean left = true;
        for (MCTSNode i : children) {
        // Call Calculate on all left nodes, sending leftBarrier + sum(prevChildNode.length)
        if (left = true) {
        nodeString += CalculatePositions(i, leftBarrier + prevLengths, myPos, depth + 1);
        prevLengths += i.getLength();
        if (prevLengths >= tempLength) {
        left = false;
        prevLengths = 0;
        }
        }
        // Call calculate on all right nodes, sending your position as leftBarrier + prevChildNode.length
        else {
        nodeString += CalculatePositions(i, myPos + prevLengths, myPos, depth + 1);
        prevLengths += i.getLength();
        }
        }
        */
        // SPLITTING BASED ON NUMBER OF NODES
        // Find the total length
        int midway = children.size() / 2;
        // Find the midway point (length)
        // Sum the lengths of the children that will sit to this node's left
        // (the first midway + 1 children, because of the post-increment below).
        int tempLength = 0;
        int count = 0;
        for (MCTSNode i : children) {
            tempLength += i.getLength();
            if (count > midway) break;
            count++;
        }
        // Place yourself at sum(left node lengths) + leftBarrier + 1
        int myPos = tempLength + leftBarrier + 1;
        nodeString += myPos + ",";
        // Add width and parent info
        nodeString += depth + ",";
        nodeString += parentPos + ";";
        count = 0;
        boolean resetFlag = true;
        int prevLengths = 0;
        for (MCTSNode i : children) {
            // Call Calculate on all left nodes, sending leftBarrier + sum(prevChildNode.length)
            if (count <= midway) {
                nodeString += CalculatePositions(i, leftBarrier + prevLengths, myPos, depth + 1);
                prevLengths += i.getLength();
            }
            // Call calculate on all right nodes, sending your position as leftBarrier + prevChildNode.length
            else {
                // First right-hand child: restart the running offset at zero,
                // since right children are placed relative to myPos.
                if (resetFlag) {
                    prevLengths = 0;
                    resetFlag = false;
                }
                nodeString += CalculatePositions(i, myPos + prevLengths, myPos, depth + 1);
                prevLengths += i.getLength();
            }
            count++;
        }
    } else {
        // No?
        // Place yourself at 1 + leftBarrier
        nodeString += (1 + leftBarrier) + ",";
        // Add width and parent info
        nodeString += depth + ",";
        nodeString += parentPos + ";";
    }
    System.out.println(nodeString);
    return nodeString;
}
}
<file_sep>import java.util.*;
/**
 * Contract for a trick-taking card game participant. Implementations receive
 * their seat, hand and controller via setUp and are then asked for plays.
 */
public interface Player {
    /** Human-readable identification of this player. */
    public String toString();
    // Plays a card to begin the trick
    public Card getLead();
    // Plays a card in the trick
    public Card getPlay(State state);
    // Determines whether or not they have the 2 of clubs
    public boolean getStart();
    // Finds position of player
    public int getPosition();
    // Sets up the player state
    public void setUp (int position, ArrayList<Card> hand, Controller controller);
    // Returns the players current hand
    public ArrayList<Card> getHand();
}
| 9330f80de566e12e3f0e1338cc33914a4f7e3d40 | [
"Markdown",
"Java",
"Text"
] | 16 | Java | LewisCarey/HowToPlayWell | a1070f1344edce1dd3c360006d611391de17ccdb | 92e47a2074b8d2eac39f8c2b93e92b249191933e |
refs/heads/master | <file_sep>import React, { Component } from 'react';
import './App.css';
import MessageList from '../MessageList/MessageList'
import Toolbar from '../Toolbar/Toolbar'
import ComposeForm from '../ComposeForm/ComposeForm'
const API = 'gschool-api.herokuapp.com/api/messages'
class App extends Component {
constructor() {
super()
this.state = {
messages: [],
compose: false
}
}
async componentDidMount() {
const response = await fetch(`${API}`)
const json = await response.json()
this.setState({
...this.state,
messages: json
})
}
onStarClick = async (id) => {
const response = await fetch(`${API}`, {
method: 'PATCH',
headers: {'Content-Type': 'application/json; charset=utf-8'},
body: JSON.stringify({
messageIds: [id],
command: 'star'
})
})
this.setState({
...this.state,
messages: this.state.messages.map(message => {
if (message.id === id) {
message.starred = !message.starred
}
return message
})
})
}
onBoxCheck = (id) => {
this.setState({
...this.state,
messages: this.state.messages.map(message => {
if (message.id === id) {
message.selected ? delete message.selected : message.selected = true
}
return message
})
})
}
selectAll = () => {
let selected = this.state.messages.filter(message => { return message.selected}).length
if (this.state.messages.length === selected) {
this.setState({
...this.state,
messages: this.state.messages.map(message => {
delete message.selected
return message
})
})
}
else {
this.setState({
...this.state,
messages: this.state.messages.map(message => {
message.selected = true
return message
})
})
}
}
markAsRead = async () => {
const ids = this.state.messages.filter(message => {
return message.selected === true
}).map(message => {
return message.id
})
const response = await fetch(`${API}`, {
method: 'PATCH',
headers: {'Content-Type': 'application/json; charset=utf-8'},
body: JSON.stringify({
messageIds: ids,
command: 'read',
read: true
})
})
const newList = await response.json()
this.setState({
...this.state,
messages: newList
})
}
markAsUnread = async () => {
const ids = this.state.messages.filter(message => {
return message.selected === true
}).map(message => {
return message.id
})
const response = await fetch(`${API}`, {
method: 'PATCH',
headers: {'Content-Type': 'application/json; charset=utf-8'},
body: JSON.stringify({
messageIds: ids,
command: 'read',
read: false
})
})
const newList = await response.json()
this.setState({
...this.state,
messages: newList
})
}
deleteMessage = async () => {
const ids = this.state.messages.filter(message => {
return message.selected === true
}).map(message => {
return message.id
})
const response = await fetch(`${API}`, {
method: 'PATCH',
headers: {'Content-Type': 'application/json; charset=utf-8'},
body: JSON.stringify({
messageIds: ids,
command: 'delete'
})
})
const newList = await response.json()
this.setState({
...this.state,
messages: newList
})
}
addLabel = async(label) => {
const ids = this.state.messages.filter(message => {
return message.selected === true
}).map(message => {
return message.id
})
const response = await fetch(`${API}`, {
method: 'PATCH',
headers: {'Content-Type': 'application/json; charset=utf-8'},
body: JSON.stringify({
messageIds: ids,
command: 'addLabel',
label: label
})
})
/// STOP HERE
this.setState({
...this.state,
messages: this.state.messages.map(message => {
if (message.selected === true) {
if (label === 'Apply label') {
return message
}
else if (!message.labels.includes(label)) {
message.labels.push(label)
return message
}
}
return message
})
})
}
addLabel2 = async (label2) => {
const ids = this.state.messages.filter(message => {
return message.selected === true
}).map(message => {
return message.id
})
const response = await fetch(`${API}`, {
method: 'PATCH',
headers: {'Content-Type': 'application/json; charset=utf-8'},
body: JSON.stringify({
messageIds: ids,
command: 'removeLabel',
label: label2
})
})
this.setState({
...this.state,
messages: this.state.messages.map(message => {
if (message.selected === true) {
if (message.labels.includes(label2)) {
let i = message.labels.indexOf(label2)
message.labels.splice(i,i+1)
return message
}
}
return message
})
})
}
onComposeClick = () => {
this.setState({
...this.state,
compose: !this.state.compose
})
}
onSendForm = async (sub, bod) => {
console.log(this.state.messages);
const response = await fetch(`${API}`, {
method: 'POST',
headers: {'Content-Type': 'application/json; charset=utf-8'},
body: JSON.stringify({
subject: sub,
body: bod,
read: false,
starred: false
})
})
const newItem = await response.json()
this.setState({
...this.state,
compose: !this.state.compose,
messages: [...this.state.messages, newItem]
})
}
render() {
return (
<div className="App">
<Toolbar messages={this.state.messages} selectAll={this.selectAll}
selected={this.state.messages.filter(message => { return message.selected}).length}
unselected={this.state.messages.filter(message => { return !message.selected}).length}
markAsRead={this.markAsRead}
markAsUnread={this.markAsUnread}
deleteMessage={this.deleteMessage}
addLabel={this.addLabel}
addLabel2={this.addLabel2}
onComposeClick={this.onComposeClick}
/>
<ComposeForm onComposeClick={this.onComposeClick} composing={this.state.compose} onSendForm={this.onSendForm}/>
<MessageList messages={this.state.messages} onStarClick={this.onStarClick} onBoxCheck={this.onBoxCheck}/>
</div>
);
}
}
export default App;
<file_sep>import React, { Component } from 'react'
class ComposeForm extends Component {
constructor(props){
super(props)
this.state = {
...this.state,
formSubject: '',
body: ''
}
}
onButtonClick = (e) => {
e.preventDefault()
let sub = document.getElementById('subject').value
let bod = document.getElementById('body').value
this.setState({
...this.state,
formSubject: sub,
body: bod
})
this.props.onSendForm(sub, bod)
}
render () {
const {composing, onComposeClick, onSendForm} = this.props
return (
<form className="form-horizontal well" className={`${composing ? '' : 'hidden'}`}>
<div className="form-group">
<div className="col-sm-8 col-sm-offset-2">
<h4>Compose Message</h4>
</div>
</div>
<div className="form-group">
<label for="subject" className="col-sm-2 control-label">Subject</label>
<div className="col-sm-8">
<input type="text" className="form-control" id="subject" placeholder="Enter a subject" name="subject">
</input>
</div>
</div>
<div className="form-group">
<label for="body" className="col-sm-2 control-label">Body</label>
<div className="col-sm-8">
<textarea name="body" id="body" className="form-control"></textarea>
</div>
</div>
<div className="form-group">
<div className="col-sm-8 col-sm-offset-2">
<input type="submit" value="Send" className="btn btn-primary" onClick={this.onButtonClick}>
</input>
</div>
</div>
</form>
)
}
}
export default ComposeForm
<file_sep>import React from 'react'
const Message = ({ message, onStarClick, onBoxCheck }) => {
console.log("message: ", message);
return (
<div className={`row message ${message.read ? 'read' : 'unread'} ${message.selected ? 'selected' : ''}`}>
<div className="col-xs-1">
<div className="row">
<div className="col-xs-2">
<input type="checkbox" checked={message.selected || false} onClick={function() {onBoxCheck(message.id)}} />
</div>
<div className="col-xs-2">
<i onClick={function() {onStarClick(message.id)}} className={`${message.starred ? 'star fa fa-star' : 'star fa fa-star-o'}`}></i>
</div>
</div>
</div>
<div className="col-xs-11">
{message.labels.map((label, idx) => {
return <span key={idx} className="label label-warning">{label}</span>
})}
<a href="#">
{message.subject}
</a>
</div>
</div>
)
}
export default Message
| 57506f0a7058a8bccd5e24615a137f177f62dab4 | [
"JavaScript"
] | 3 | JavaScript | rileyburns345/react-inbox | 3a02e296c29352fcfacea0d28656c60d6f7f9fec | 78ce7ae6248a8e703d7ac6b3dfd687925b2811cb |
refs/heads/master | <file_sep>package com.cisco.cmad.blog.test;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import com.cisco.cmad.blog.api.Blog;
import com.cisco.cmad.blog.api.Category;
import com.cisco.cmad.blog.api.Comment;
import com.cisco.cmad.blog.api.User;
/**
 * Manual smoke test for the blog persistence layer. The direct-JPA path
 * (insertTest / insertTestComment / readCreatedBlog with an EntityManager) is
 * currently commented out in main; main instead drives DatabaseFeeder, which
 * goes through BlogService. The three private helpers are therefore unused at
 * the moment but kept for the raw-JPA variant.
 */
public class Jpatest {
    /** Persists one demo blog (author + two categories) inside a transaction. */
    private static void insertTest(EntityManager em) {
        System.out.println("StartBlog Injection !");
        User newUser = new User();
        newUser.setUserId("avi0100");
        newUser.setUserName("Avinash");
        newUser.setAboutUser("Udev");
        newUser.setEmailId("<EMAIL>");
        Category tag = new Category();
        tag.setCategoryName("Travel");
        Category tag2 = new Category();
        tag2.setCategoryName("Food");
        Category tag3 = new Category(); // NOTE(review): created but never attached to the blog
        tag3.setCategoryName("Science");
        Blog newBlog = new Blog();
        newBlog.setAuthor(newUser);
        newBlog.setTitle("My First Blog");
        newBlog.setBlogMessage("Blog Message for Travel 1");
        newBlog.getCategories().add(tag);
        newBlog.getCategories().add(tag2);
        // The id printed before persist should be the default; after persist it
        // should be generated - that is what the two prints verify.
        System.out.println("Test Before Persist BlogID: "+newBlog.getBlogId()+" TITLE: "+newBlog.getTitle());
        em.getTransaction().begin();
        em.persist(newBlog);
        em.getTransaction().commit();
        System.out.println("Test After Persist BlogID: "+newBlog.getBlogId()+" TITLE: "+newBlog.getTitle());
        System.out.println("Done Blog Injection !");
    }
    /** Attaches two guest comments to the blog with id 1 (assumed to exist). */
    private static void insertTestComment(EntityManager em) {
        System.out.println("Start Comment Injection !");
        User newUser2 = new User();
        newUser2.setUserId("guest");
        newUser2.setUserName("GuestUser");
        newUser2.setAboutUser("Anonymous");
        newUser2.setEmailId("<EMAIL>");
        Comment newComment = new Comment();
        newComment.setCommentText("First comment");
        Comment newComment2 = new Comment();
        newComment2.setCommentText("Second comment");
        em.getTransaction().begin();
        Blog blog = em.find(Blog.class,1L); // NOTE(review): NPE below if id 1 is absent
        System.out.println("BlogID: "+blog.getBlogId()+" TITLE: "+blog.getTitle());
        blog.getCommentList().add(newComment);
        blog.getCommentList().add(newComment2);
        newComment.setAuthor(newUser2);
        newComment.setCommentedBlog(blog);
        newComment2.setAuthor(newUser2);
        newComment2.setCommentedBlog(blog);
        em.persist(blog);
        em.getTransaction().commit();
        System.out.println("Done Comment Injection !");
    }
    /** Reads blog id 1 back and prints its fields and first related entities. */
    private static void readCreatedBlog(EntityManager em) {
        System.out.println("Start read Entities !");
        em.getTransaction().begin();
        Blog blog = em.find(Blog.class,1L);
        System.out.println("BlogID: "+blog.getBlogId()+" TITLE: "+blog.getTitle()+" CONTENT: "+blog.getBlogMessage()+" CATEGORY: "+blog.getCategories().get(0).getCategoryName()+" OWNER: "+blog.getAuthor().getUserName()+" COMMENT: "+blog.getCommentList().get(0).getCommentText());
        em.getTransaction().commit();
        System.out.println("Done read Entities !");
    }
    /** Entry point: currently exercises the service layer via DatabaseFeeder. */
    public static void main(String[] args) {
        // TODO Auto-generated method stub
        System.out.println("Test JPA!");
        /*
        EntityManagerFactory factory = Persistence
        .createEntityManagerFactory("blogPu");
        EntityManager em = factory.createEntityManager();
        insertTest(em);
        insertTestComment(em);
        readCreatedBlog(em);
        em.close();
        EntityManager em2 = factory.createEntityManager();
        readCreatedBlog(em2);
        em2.close();
        // factory.close();
        */
        DatabaseFeeder df= new DatabaseFeeder();
        df.testInsertMethod();
        df.testReadMethod();
    }
}
<file_sep>package com.cisco.cmad.blog.rs;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.core.Response;
import com.cisco.cmad.blog.api.User;
/**
 * REST root resource for {@link User} CRUD, mounted at /users. Every
 * operation is currently a stub: it answers HTTP 200, echoing the payload
 * back where one was supplied, without touching any persistence layer.
 */
@Path("/users")
public class UsersRootResource {

    /** POST /users - echoes the submitted user back with HTTP 200. */
    @POST
    public Response create(User user) {
        return Response.ok(user).build();
    }

    /** GET /users/{userId} - stub; always an empty HTTP 200. */
    @GET
    @Path("/{userId}")
    public Response read(@PathParam("userId") String userId) {
        return Response.ok().build();
    }

    /** PUT /users - echoes the submitted user back with HTTP 200. */
    @PUT
    public Response update(User user) {
        return Response.ok(user).build();
    }

    /** DELETE /users/{userId} - stub; always an empty HTTP 200. */
    @DELETE
    @Path("/{userId}")
    public Response delete(@PathParam("userId") String userId) {
        return Response.ok().build();
    }
}
<file_sep># cmad-blog
Full-Stack Architecture
-----------------------

REST APIs
---------

UI Mockup Screen
--------------

Class Diagrams
--------------

<file_sep>package com.cisco.cmad.blog.api;
import java.util.ArrayList;
import java.util.List;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.ManyToMany;
import javax.persistence.Table;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
/**
 * JPA entity for a blog category/tag, persisted in CATEGORY_TABLE. The inverse
 * side of the Blog <-> Category many-to-many (mapped by Blog.categories). The
 * blog list is @XmlTransient to avoid a Blog <-> Category cycle during JAXB
 * serialisation.
 *
 * NOTE(review): the field 'CategoryName' is capitalised, unlike the other
 * fields; JPA derives the attribute/column name from it, so renaming it would
 * change the schema - confirm before touching.
 */
@Entity
@XmlRootElement
@Table(name="CATEGORY_TABLE")
public class Category {
    // Auto-generated primary key.
    @Id
    @Column(name="CATEGORY_ID")
    @GeneratedValue(strategy = GenerationType.AUTO)
    private long categoryId;
    private String CategoryName;
    // Presumably a denormalised count of blogs in this category - confirm who updates it.
    private long blogCount;
    @ManyToMany(mappedBy="categories")
    @XmlTransient
    private List<Blog> blogList = new ArrayList<Blog>();
    @XmlTransient
    public List<Blog> getBlogList() {
        return blogList;
    }
    @XmlTransient
    public void setBlogList(List<Blog> blogList) {
        this.blogList = blogList;
    }
    public long getCategoryId() {
        return categoryId;
    }
    public void setCategoryId(long categoryId) {
        this.categoryId = categoryId;
    }
    public String getCategoryName() {
        return CategoryName;
    }
    public void setCategoryName(String categoryName) {
        CategoryName = categoryName;
    }
    public long getBlogCount() {
        return blogCount;
    }
    public void setBlogCount(long blogCount) {
        this.blogCount = blogCount;
    }
}
<file_sep>package com.cisco.cmad.blog.test;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import javax.servlet.annotation.WebListener;
import com.cisco.cmad.blog.api.Blog;
import com.cisco.cmad.blog.api.Category;
import com.cisco.cmad.blog.api.Comment;
import com.cisco.cmad.blog.api.User;
import com.cisco.cmad.blog.biz.BlogService;
@WebListener
public class DatabaseFeeder implements ServletContextListener {
@Override
public void contextInitialized(ServletContextEvent arg0) {
    // Seed the database with demo data on webapp startup, then read it back
    // as a smoke test.
    testInsertMethod();
    testReadMethod();
}// end contextInitialized method
@Override
public void contextDestroyed(ServletContextEvent arg0) {
    // Intentionally empty: no cleanup is performed on webapp shutdown.
}// end contextDestroyed method
/**
 * Builds one fully wired demo blog (author, two categories, two guest
 * comments) and persists the whole graph through BlogService.create. The
 * prints before/after show whether the generated id was assigned.
 */
public void testInsertMethod() {
    System.out.println("StartBlog Injection !");
    User newUser = new User();
    newUser.setUserId("avi0100");
    newUser.setUserName("Avinash");
    newUser.setAboutUser("Udev");
    newUser.setEmailId("<EMAIL>");
    User newUser2 = new User();
    newUser2.setUserId("guest");
    newUser2.setUserName("GuestUser");
    newUser2.setAboutUser("Anonymous");
    newUser2.setEmailId("<EMAIL>");
    Category tag = new Category();
    tag.setCategoryName("Travel");
    Category tag2 = new Category();
    tag2.setCategoryName("Food");
    Category tag3 = new Category(); // NOTE(review): created but never attached to the blog
    tag3.setCategoryName("Science");
    Blog newBlog = new Blog();
    newBlog.setAuthor(newUser);
    newBlog.setTitle("My First Blog");
    newBlog.setBlogMessage("Blog Message for Travel 1");
    newBlog.getCategories().add(tag);
    newBlog.getCategories().add(tag2);
    Comment newComment = new Comment();
    newComment.setCommentText("First comment");
    Comment newComment2 = new Comment();
    newComment2.setCommentText("Second comment");
    // Wire both sides of the Comment <-> Blog relationship before persisting.
    newComment.setAuthor(newUser2);
    newComment.setCommentedBlog(newBlog);
    newComment2.setAuthor(newUser2);
    newComment2.setCommentedBlog(newBlog);
    newBlog.getCommentList().add(newComment);
    newBlog.getCommentList().add(newComment2);
    BlogService blogService = new BlogService();
    blogService.create(newBlog);
    System.out.println("PersistInit BlogID: " + newBlog.getBlogId()
    + " TITLE: " + newBlog.getTitle());
}
/**
 * Reads the blog with id 1 back through BlogService and prints its fields
 * plus its first category, author and comment.
 * NOTE(review): assumes id 1 exists (i.e. testInsertMethod ran first on an
 * empty database); otherwise the dereferences below throw NPE.
 */
public void testReadMethod() {
    System.out.println("Start read Entities !");
    BlogService blogService = new BlogService();
    Blog blog = blogService.read(1L);
    System.out.println(" BlogID: " + blog.getBlogId()
    + " TITLE: " + blog.getTitle());
    System.out.println("BlogID: " + blog.getBlogId() + " TITLE: "
    + blog.getTitle() + " CONTENT: " + blog.getBlogMessage()
    + " CATEGORY: " + blog.getCategories().get(0).getCategoryName()
    + " OWNER: " + blog.getAuthor().getUserName() + " COMMENT: "
    + blog.getCommentList().get(0).getCommentText());
    System.out.println("Done read Entities !");
}
}<file_sep>package com.cisco.cmad.blog.data;
import com.cisco.cmad.blog.api.Blog;
/**
 * Data-access contract for {@link Blog} persistence: basic CRUD keyed by the
 * blog's numeric id.
 */
public interface BlogDAO {
    /** Persists a new blog and returns the persisted instance. */
    public Blog create(Blog blog);
    /** Loads the blog with the given id. */
    public Blog read(long id);
    /** Saves changes to an existing blog. */
    public void update(Blog blog);
    /** Removes the blog with the given id. */
    public void delete(long id);
}
<file_sep>package com.cisco.cmad.blog.api;
import java.util.Date;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import org.hibernate.annotations.CreationTimestamp;
/**
 * JPA entity for a blog comment, persisted in COMMENT_TABLE. Each comment
 * belongs to one author (USER_ID) and one blog (BLOG_ID); both relations
 * cascade PERSIST and REMOVE. The creation timestamp is filled by Hibernate's
 * @CreationTimestamp on insert.
 *
 * NOTE(review): setCommentId takes a boxed Long while the field is a primitive
 * long, so passing null would throw NullPointerException on unboxing.
 */
@Entity
@Table(name="COMMENT_TABLE")
public class Comment {
    // Auto-generated primary key.
    @Id
    @Column(name="COMMENT_ID")
    @GeneratedValue(strategy = GenerationType.AUTO)
    private long commentId;
    private String commentText;
    // Set automatically by Hibernate when the row is first inserted.
    @CreationTimestamp
    @Temporal(TemporalType.TIMESTAMP)
    private Date createTimeStamp;
    @ManyToOne(cascade = {CascadeType.REMOVE,CascadeType.PERSIST})
    @JoinColumn(name="USER_ID")
    private User author;
    @JoinColumn(name="BLOG_ID")
    @ManyToOne(cascade = {CascadeType.REMOVE,CascadeType.PERSIST})
    private Blog commentedBlog;
    // Threaded replies were sketched but are not enabled:
    //@OneToOne
    //private Comment parentComment;
    public long getCommentId() {
        return commentId;
    }
    public void setCommentId(Long commentId) {
        this.commentId = commentId;
    }
    public String getCommentText() {
        return commentText;
    }
    public void setCommentText(String commentText) {
        this.commentText = commentText;
    }
    public User getAuthor() {
        return author;
    }
    public void setAuthor(User author) {
        this.author = author;
    }
    public Blog getCommentedBlog() {
        return commentedBlog;
    }
    public void setCommentedBlog(Blog commentedBlog) {
        this.commentedBlog = commentedBlog;
    }
    public Date getCreateTimeStamp() {
        return createTimeStamp;
    }
    public void setCreateTimeStamp(Date createTimeStamp) {
        this.createTimeStamp = createTimeStamp;
    }
}
<file_sep>package com.cisco.cmad.blog.api;
import java.util.Date;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.ManyToOne;
import javax.persistence.OneToOne;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import org.hibernate.annotations.UpdateTimestamp;
/**
 * JPA entity for a blog comment (default table name; no @Table mapping).
 * References its author and the commented blog, supports one level of
 * threading via parentComment, and tracks the last-modified time through
 * Hibernate's @UpdateTimestamp.
 *
 * NOTE(review): this appears to be an alternative/earlier variant of the
 * COMMENT_TABLE-mapped Comment entity elsewhere in this dump - confirm which
 * one is live before editing either.
 * NOTE(review): setCommentId takes a boxed Long while the field is a primitive
 * long, so passing null would throw NullPointerException on unboxing.
 */
@Entity
public class Comment {
    // Auto-generated primary key.
    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private long commentId;
    private String commentText;
    // Refreshed by Hibernate on every update of the row.
    @UpdateTimestamp
    @Temporal(TemporalType.TIMESTAMP)
    private Date lastUpdatedOn;
    @ManyToOne
    private User author;
    @ManyToOne
    private Blog commentedBlogPost;
    // Optional parent for threaded replies.
    @OneToOne
    private Comment parentComment;
    public long getCommentId() {
        return commentId;
    }
    public void setCommentId(Long commentId) {
        this.commentId = commentId;
    }
    public String getCommentText() {
        return commentText;
    }
    public void setCommentText(String commentText) {
        this.commentText = commentText;
    }
    public Date getLastUpdatedOn() {
        return lastUpdatedOn;
    }
    public void setLastUpdatedOn(Date lastUpdatedOn) {
        this.lastUpdatedOn = lastUpdatedOn;
    }
    public User getAuthor() {
        return author;
    }
    public void setAuthor(User author) {
        this.author = author;
    }
    public Comment getParentComment() {
        return parentComment;
    }
    public void setParentComment(Comment parentComment) {
        this.parentComment = parentComment;
    }
    public Blog getCommentedBlogPost() {
        return commentedBlogPost;
    }
    public void setCommentedBlogPost(Blog commentedBlogPost) {
        this.commentedBlogPost = commentedBlogPost;
    }
}
<file_sep>/**
*
*/
/* Front-end glue for the blog list page: loads all existing blogs from the
 * REST API into #blogs, and posts a new blog built from the form fields,
 * rendering each entry with the #blog-template Mustache template. */
var $blogs = $('#blogs');
var $title = $('#title');
var $Message = $('#Message');
var blogTemplate = $('#blog-template').html();

// Render one blog object into the list.
function showBlog(blog) {
	$blogs.append(Mustache.render(blogTemplate, blog));
};

// Initial load of every blog.
$.ajax({
	type : 'GET',
	url : 'http://localhost:8080/cmadblog/blogsite/blogs/',
	success : function(data) {
		console.log('success', data);
		$.each(data, function(i, blog) {
			console.log('item"', blog);
			showBlog(blog);
		});
	},
	error : function() {
		alert('error loading Blogs');
	}
});

// Submit a new blog built from the form fields.
$('#add-blog').on('click', function() {
	var blog = {
		title : $title.val(),
		blogMessage : $Message.val()
	};
	$.ajax({
		type : 'POST',
		headers: {
			'Accept': 'application/json',
			'Content-Type': 'application/json'
		},
		url : 'http://localhost:8080/cmadblog/blogsite/blogs/',
		data : JSON.stringify(blog),
		dataType : 'json',
		success : function(blog) {
			// Fix: this handler previously called addOrder(newOrder) - both
			// names are undefined in this file (copy-paste from another module)
			// and threw a ReferenceError on every successful POST. Rendering
			// the echoed blog is the evident intent; confirm against the
			// module this was copied from.
			showBlog(blog);
		},
		error : function() {
			alert('error Posting Blog');
		}
	});
});
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.core.Response;
import com.cisco.cmad.blog.api.Blog;
import com.cisco.cmad.blog.api.Comment;
/**
 * REST root resource for blogs and their comments, mounted at /blogs. Every
 * operation is currently a stub: it answers HTTP 200 (echoing the payload
 * where one was supplied) without touching any persistence layer.
 */
@Path("/blogs")
public class BlogsRootResource {
    /** GET /blogs/{blogId} - stub; returns an empty, newly constructed Blog. */
    @GET
    @Path("/{blogId}")
    public Response read(@PathParam("blogId") int blogId) {
        Blog blog = new Blog();
        return Response.ok().entity(blog).build();
    }
    /** POST /blogs - echoes the submitted blog back with HTTP 200. */
    @POST
    public Response create(Blog blog) {
        return Response.ok().entity(blog).build();
    }
    /** DELETE /blogs/{blogId} - stub; always an empty HTTP 200. */
    @DELETE
    @Path("/{blogId}")
    public Response delete(@PathParam("blogId") long blogId) {
        return Response.ok().build();
    }
    /** POST /blogs/comments - echoes the submitted comment back with HTTP 200. */
    @POST
    @Path("/comments")
    public Response createComment(Comment comment) {
        return Response.ok().entity(comment).build();
    }
    /** GET /blogs/comments/{commentId} - stub; always an empty HTTP 200. */
    @GET
    @Path("/comments/{commentId}")
    public Response readComment(@PathParam("commentId") long commentId) {
        return Response.ok().build();
    }
    /** DELETE /blogs/comments/{commentId} - stub; always an empty HTTP 200. */
    @DELETE
    @Path("comments/{commentId}")
    public Response deleteComment(@PathParam("commentId") long commentId) {
        return Response.ok().build();
    }
}
<file_sep>package com.cisco.cmad.blog.api;
import java.util.Date;
import java.util.List;
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import org.hibernate.annotations.UpdateTimestamp;
/**
 * JPA entity for a blog post (default table name). Holds the author, title,
 * body text, tags, comments and simple counters. updateDate is refreshed by
 * Hibernate's @UpdateTimestamp; createDate is only @Temporal, so it must be
 * set by application code.
 *
 * NOTE(review): setViewCount takes an int although the field and getter use
 * long - an asymmetry JavaBean introspection may dislike; confirm before use.
 * NOTE(review): commentList has no cascade configured, unlike author/tags.
 */
@Entity
public class Blog {
    // Auto-generated primary key.
    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private long blogId;
    @ManyToOne(cascade = CascadeType.ALL)
    private User author;
    private String title;
    private String blogMessage;
    @OneToMany(cascade = CascadeType.ALL)
    private List<Tag> tags;
    @Temporal(TemporalType.TIMESTAMP)
    private Date createDate;
    @UpdateTimestamp
    @Temporal(TemporalType.TIMESTAMP)
    private Date updateDate;
    private long viewCount;
    private long likes;
    @OneToMany
    private List<Comment> commentList;
    public long getBlogId() {
        return blogId;
    }
    public void setBlogId(long blogId) {
        this.blogId = blogId;
    }
    public User getAuthor() {
        return author;
    }
    public void setAuthor(User author) {
        this.author = author;
    }
    public String getBlogMessage() {
        return blogMessage;
    }
    public void setBlogMessage(String blogMessage) {
        this.blogMessage = blogMessage;
    }
    public String getTitle() {
        return title;
    }
    public void setTitle(String title) {
        this.title = title;
    }
    public List<Tag> getTags() {
        return tags;
    }
    public void setTags(List<Tag> tags) {
        this.tags = tags;
    }
    public Date getCreateDate() {
        return createDate;
    }
    public void setCreateDate(Date createDate) {
        this.createDate = createDate;
    }
    public Date getUpdateDate() {
        return updateDate;
    }
    public void setUpdateDate(Date updateDate) {
        this.updateDate = updateDate;
    }
    public long getViewCount() {
        return viewCount;
    }
    public void setViewCount(int viewCount) {
        this.viewCount = viewCount;
    }
    public List<Comment> getCommentList() {
        return commentList;
    }
    public void setCommentList(List<Comment> commentList) {
        this.commentList = commentList;
    }
    public long getLikes() {
        return likes;
    }
    public void setLikes(long likes) {
        this.likes = likes;
    }
}
| 89b589857d11049f43e57d87f960f7aab0b171a8 | [
"Markdown",
"Java",
"JavaScript"
] | 11 | Java | avramach/cmad-blog | 3c2cb01ac5fc4018dc5b5f5a378791134c0268ad | ad99433607146a6e228dd6ba1460d26f1eae6a86 |
refs/heads/master | <file_sep>package net.yan.kotlin.promoterapp.promoorigin.venda.optionTwo
import android.content.res.Resources
import android.util.Log
import androidx.lifecycle.MutableLiveData
import androidx.lifecycle.ViewModel
import com.google.firebase.FirebaseException
import com.google.firebase.database.DataSnapshot
import com.google.firebase.database.DatabaseError
import com.google.firebase.database.ValueEventListener
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.Job
import kotlinx.coroutines.launch
import net.yan.kotlin.promoterapp.promoorigin.data.FirebaseHelper
import net.yan.kotlin.promoterapp.promoorigin.model.Cidade
/**
 * ViewModel for the city-selection step: loads all cities from the
 * Firebase "Cidade" node and exposes the user's one-shot selection.
 */
class OptionTwoViewModel(resources: Resources) : ViewModel() {

    // NOTE(review): job is never cancelled in onCleared — consider adding it.
    val job = Job()
    val uiScope = CoroutineScope(Dispatchers.IO + job)

    // One-shot selection event; reset via selecionarNomeExit().
    val cidade = MutableLiveData<Cidade>()

    // Full city list exposed to the adapter.
    val nomesLista = MutableLiveData<Array<Cidade>>()
    val lista = mutableListOf<Cidade>()

    init {
        if (nomesLista.value == null) {
            uiScope.launch {
                try {
                    val firebase = FirebaseHelper()
                    val ref = firebase.database!!.child("Cidade")
                    ref.addValueEventListener(object : ValueEventListener {
                        override fun onCancelled(p0: DatabaseError) {
                        }

                        override fun onDataChange(p0: DataSnapshot) {
                            // BUG FIX: this listener fires on EVERY remote
                            // change; without clearing first, the whole set
                            // was appended again and the list accumulated
                            // duplicates.
                            lista.clear()
                            for (data in p0.children) {
                                val item = data.getValue(
                                    Cidade::class.java
                                )!!
                                item.id = data.key.toString()
                                lista.add(item)
                            }
                            nomesLista.value = lista.toTypedArray()
                        }
                    })
                } catch (e: FirebaseException) {
                    // NOTE(review): tag/message look copy-pasted from the
                    // client loader.
                    Log.i("ERRO", "CLIENTES DATABASE")
                }
            }
        }
    }

    /** Publishes the tapped city; observed by the fragment to navigate. */
    fun selecionarNome(name: Cidade) {
        cidade.value = name
    }

    /** Clears the selection so re-observation doesn't re-trigger navigation. */
    fun selecionarNomeExit() {
        cidade.value = null
    }
}
<file_sep>package net.yan.kotlin.promoterapp.promoorigin.model
import com.google.firebase.database.Exclude
import com.google.firebase.database.IgnoreExtraProperties
@IgnoreExtraProperties
class Cidade {
var id: String = ""
@Exclude
get
var local: String = ""
}<file_sep>package net.yan.kotlin.promoterapp.promoorigin.venda.optionOne
import android.content.res.Resources
import android.util.Log
import androidx.lifecycle.MutableLiveData
import androidx.lifecycle.ViewModel
import com.google.firebase.FirebaseException
import com.google.firebase.database.DataSnapshot
import com.google.firebase.database.DatabaseError
import com.google.firebase.database.ValueEventListener
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.Job
import kotlinx.coroutines.launch
import net.yan.kotlin.promoterapp.promoorigin.data.FirebaseHelper
import net.yan.kotlin.promoterapp.promoorigin.model.Cliente
// ViewModel for the first step of the sale flow: loads the client list
// from Firebase and exposes the user's one-shot selection.
class OptionOneViewModel(resources: Resources) : ViewModel() {
// NOTE(review): job is never cancelled — consider doing so in onCleared().
val job = Job()
val uiScope = CoroutineScope(Dispatchers.Default + job)
// One-shot selection event; reset via selecionarNomeExit().
val nome = MutableLiveData<Cliente>()
// Full client list exposed to the adapter.
val nomesLista = MutableLiveData<Array<Cliente>>()
val lista = mutableListOf<Cliente>()
init {
    // Load the client list; the listener keeps firing on every remote
    // change for as long as this ViewModel is alive.
    if (nomesLista.value == null) {
        uiScope.launch {
            try {
                val firebase = FirebaseHelper()
                val ref = firebase.database!!.child("Clientes")
                ref.addValueEventListener(object : ValueEventListener {
                    override fun onCancelled(p0: DatabaseError) {
                    }

                    override fun onDataChange(p0: DataSnapshot) {
                        // BUG FIX: clear before re-filling — the callback
                        // runs on every data change and previously appended
                        // the full set again, producing duplicates.
                        lista.clear()
                        for (data in p0.children) {
                            val cliente: Cliente = data.getValue(
                                Cliente::class.java
                            )!!
                            cliente.id = data.key.toString()
                            lista.add(cliente)
                        }
                        nomesLista.value = lista.toTypedArray()
                    }
                })
            } catch (e: FirebaseException) {
                Log.i("ERRO", "CLIENTES DATABASE")
            }
        }
    }
}
// Publishes the tapped client; observed by the fragment to navigate on.
fun selecionarNome(name: Cliente) {
nome.value = name
}
// Clears the selection so re-observation doesn't re-trigger navigation.
fun selecionarNomeExit() {
nome.value = null
}
}<file_sep>package net.yan.kotlin.promoterapp.promoorigin.venda
import android.content.res.Resources
import androidx.lifecycle.ViewModel
import androidx.lifecycle.ViewModelProvider
import net.yan.kotlin.promoterapp.promoorigin.data.FirebaseHelper
/**
 * Factory that builds [VendaViewModel] with its [Resources] and
 * [FirebaseHelper] dependencies injected.
 */
class VendaViewModelFactory(
    private val resources: Resources,
    private val dataSource: FirebaseHelper
) : ViewModelProvider.Factory {

    @Suppress("unchecked_cast")
    override fun <T : ViewModel?> create(modelClass: Class<T>): T {
        // Guard clause: this factory only knows how to build VendaViewModel.
        if (!modelClass.isAssignableFrom(VendaViewModel::class.java)) {
            throw IllegalArgumentException("Unknown ViewModel class")
        }
        return VendaViewModel(resources, dataSource) as T
    }
}
<file_sep>package net.yan.kotlin.promoterapp.promoorigin.venda.optionOne
import android.content.res.Resources
import androidx.lifecycle.ViewModel
import androidx.lifecycle.ViewModelProvider
/**
 * Factory that builds [OptionOneViewModel] with [Resources] injected.
 */
class OptionOneViewModelFactory(val resources: Resources) : ViewModelProvider.Factory {

    @Suppress("unchecked_cast")
    override fun <T : ViewModel?> create(modelClass: Class<T>): T {
        // Guard clause: only OptionOneViewModel is supported.
        if (!modelClass.isAssignableFrom(OptionOneViewModel::class.java)) {
            throw IllegalArgumentException("Unknown ViewModel class")
        }
        return OptionOneViewModel(resources) as T
    }
}
<file_sep>package net.yan.kotlin.promoterapp.promoorigin.venda
import android.content.res.Resources
import android.graphics.Bitmap
import android.util.Log
import androidx.lifecycle.MutableLiveData
import androidx.lifecycle.ViewModel
import com.google.firebase.FirebaseException
import com.google.firebase.storage.FirebaseStorage
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.Job
import kotlinx.coroutines.launch
import net.yan.kotlin.promoterapp.promoorigin.data.FirebaseHelper
import net.yan.kotlin.promoterapp.promoorigin.model.PromPontos
import net.yan.kotlin.promoterapp.promoorigin.model.Promoter
import java.io.ByteArrayOutputStream
import java.text.DateFormat
import java.text.SimpleDateFormat
import java.util.*
// ViewModel for the final sale screen: uploads the captured photo to
// Firebase Storage, then writes a Promoter_Ponto record to the database.
class VendaViewModel(val resouces: Resources, val dataSource: FirebaseHelper) : ViewModel() {
val job = Job()
val coroutine = CoroutineScope(Dispatchers.IO + job)
val lista = MutableLiveData<Array<Promoter>>()
// Event: open the camera.
val foto = MutableLiveData<Boolean>()
// Event: show the loading spinner while the upload runs.
val verifica = MutableLiveData<Boolean>()
// Event: persistence finished successfully -> navigate back home.
val isFim = MutableLiveData<Boolean>()
fun tirarFoto() {
foto.value = true
}
fun alert() {
verifica.value = true
}
fun alertExit() {
verifica.value = false
}
fun exit() {
isFim.value = false
}
/**
 * Persists a promoter/point-of-sale record under "Promoter_Ponto",
 * stamped with the current Firebase user id and a formatted timestamp.
 * On success, hides the spinner and fires the navigation event.
 */
fun onFire(pontos: PromPontos) {
    coroutine.launch {
        try {
            val prom = PromPontos()
            // FIX: pin the locale so the persisted timestamp text does not
            // vary with the device locale (SimpleDateFormat lint warning).
            val dateFormat: DateFormat = SimpleDateFormat("yyyy/MM/dd HH:mm:ss", Locale.US)
            val date = Date()
            val strDate: String = dateFormat.format(date)
            prom.fk_id_promoter = dataSource.auth!!.currentUser!!.uid
            prom.fk_id_cidade = pontos.fk_id_cidade
            prom.fk_id_pontos = pontos.fk_id_pontos
            prom.data = strDate
            prom.foto = pontos.foto
            val ref =
                dataSource.database!!.child("Promoter_Ponto").push()
                    .setValue(prom)
            ref.addOnCompleteListener {
                // Task listeners run on the main thread, so touching
                // LiveData.value here is safe.
                alertExit()
                if (it.isSuccessful) {
                    isFim.value = true
                }
            }
        } catch (e: FirebaseException) {
            // FIX: e.message is nullable — guard against a null message.
            Log.i("ERRO", e.message ?: "unknown error")
        }
    }
}
// Compresses the captured bitmap to JPEG, uploads it to Firebase Storage
// under imagens/vendas/<uuid>.jpeg, resolves the download URL, and chains
// into onFire() to persist the record referencing that URL.
fun gravarFoto(cidade: String, cliente: String, imageBitmap: Bitmap) {
alert()
coroutine.launch {
try {
val nomeImagem = UUID.randomUUID().toString()
val storageRef =
FirebaseStorage.getInstance().reference.child("imagens").child("vendas").child(
"${nomeImagem}.jpeg"
)
val baos = ByteArrayOutputStream()
// Quality 100: effectively no additional JPEG compression.
imageBitmap.compress(Bitmap.CompressFormat.JPEG, 100, baos)
val data = baos.toByteArray()
var uploadTask = storageRef.putBytes(data)
uploadTask.addOnFailureListener {
// NOTE(review): on failure the spinner is never hidden —
// alertExit() only runs on the success path inside onFire().
Log.i("FALHOU", it.message)
}.addOnSuccessListener {
val dow = storageRef.downloadUrl
dow.addOnSuccessListener {
val link = it.toString()
val p = PromPontos()
p.foto = link
p.fk_id_pontos = cliente
p.fk_id_cidade = cidade
onFire(p)
}
}
} catch (e: FirebaseException) {
Log.i("Exception", "UPLOAD FOTO " + e.message)
}
}
}
// Resets the camera event so it doesn't re-fire on re-observation.
fun tirarFotoClose() {
foto.value = false
}
override fun onCleared() {
super.onCleared()
// Cancel any in-flight coroutines when the ViewModel is destroyed.
job.cancel()
}
}<file_sep>package net.yan.kotlin.promoterapp.promoorigin.model
// Plain data holder for a sale record as stored in Firebase.
class Venda {
// Database key (push id).
var id: String = ""
// Foreign key to the promoter/point-of-sale record this sale belongs to.
var fk_id_prom_pontos: String = ""
// Photo reference — presumably a Storage download URL; confirm against writers.
var foto: String = ""
// Timestamp; null until set. NOTE(review): other models store dates as
// formatted strings — confirm which convention is intended here.
var data: Long? = null
}<file_sep>package net.yan.kotlin.promoterapp.promoorigin.venda.optionTwo
import android.os.Bundle
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.widget.SearchView
import androidx.databinding.DataBindingUtil
import androidx.fragment.app.Fragment
import androidx.lifecycle.Observer
import androidx.lifecycle.ViewModelProviders
import androidx.navigation.fragment.findNavController
import androidx.navigation.fragment.navArgs
import net.yan.kotlin.promoterapp.promoorigin.R
import net.yan.kotlin.promoterapp.promoorigin.databinding.OptionTwoFragmentBinding
/**
 * Step two of the sale flow: shows the city list, supports live text
 * filtering, and navigates to VendaFragment with the chosen city plus
 * the client carried over from step one via safe-args.
 */
class OptionTwoFragment : Fragment() {
private lateinit var viewModel: OptionTwoViewModel
private lateinit var binding: OptionTwoFragmentBinding
override fun onCreateView(
inflater: LayoutInflater, container: ViewGroup?,
savedInstanceState: Bundle?
): View? {
binding = DataBindingUtil.inflate(inflater, R.layout.option_two_fragment, container, false)
val viewModelFactory = OptionTwoViewModelFactory(resources)
viewModel =
ViewModelProviders.of(this, viewModelFactory).get(OptionTwoViewModel::class.java)
// Client id/name chosen on the previous screen.
val argments by navArgs<OptionTwoFragmentArgs>()
// Row taps publish the city through the ViewModel.
val adapter = AdapterTwo(Clique {
viewModel.selecionarNome(it)
})
// Re-submit the city list whenever Firebase pushes an update.
viewModel.nomesLista.observe(viewLifecycleOwner, Observer {
it.let {
adapter.adicionarLista(it)
}
})
// One-shot navigation: non-null means a city was tapped; the selection
// is reset immediately so re-observation doesn't re-navigate.
viewModel.cidade.observe(viewLifecycleOwner, Observer {
if (it != null) {
findNavController().navigate(
OptionTwoFragmentDirections.actionOptionTwoFragmentToVendaFragment(
cidade = it.id,
cliente = argments.cliente,
cidadeNome = it.local,
clienteNome = argments.clienteNome
)
)
viewModel.selecionarNomeExit()
}
})
// Live filtering of the city list as the user types.
binding.search2.setOnQueryTextListener(object : SearchView.OnQueryTextListener,
androidx.appcompat.widget.SearchView.OnQueryTextListener {
override fun onQueryTextSubmit(query: String?): Boolean {
return false
}
override fun onQueryTextChange(newText: String?): Boolean {
adapter.filter.filter(newText)
return false
}
})
binding.recycler2.adapter = adapter
// Toolbar back arrow pops back to the client list.
binding.toolbar2.setNavigationOnClickListener {
findNavController().popBackStack()
}
return binding.root
}
}
<file_sep>package net.yan.kotlin.promoterapp.promoorigin.venda.optionTwo
import android.content.res.Resources
import androidx.lifecycle.ViewModel
import androidx.lifecycle.ViewModelProvider
// Factory that builds OptionTwoViewModel with Resources injected.
class OptionTwoViewModelFactory(val resources: Resources) : ViewModelProvider.Factory {
@Suppress("unchecked_cast")
override fun <T : ViewModel?> create(modelClass: Class<T>): T {
if (modelClass.isAssignableFrom(OptionTwoViewModel::class.java)) {
return OptionTwoViewModel(resources) as T
}
// Any other ViewModel type is a programming error.
throw IllegalArgumentException("Unknown ViewModel class")
}
}<file_sep>package net.yan.kotlin.promoterapp.promoorigin.data
import android.util.Base64
object Base64Custom {
/**
 * Encodes text as Base64 without line breaks.
 *
 * BUG FIX: the previous `.replace("(\\n|\\r)", "")` used the *literal*
 * String overload of replace, so it searched for the literal text
 * "(\n|\r)" and never stripped anything — encoded output longer than 76
 * chars kept embedded newlines. Base64.NO_WRAP makes the encoder omit
 * line terminators in the first place, which is what was intended.
 */
fun codificarBase64(texto: String?): String {
    return Base64.encodeToString(texto!!.toByteArray(), Base64.NO_WRAP)
}
// Decodes a Base64 string back into text using the platform default charset.
fun decodificarBase64(textoCodificado: String): String {
return String(Base64.decode(textoCodificado, Base64.DEFAULT))
}
}<file_sep>package net.yan.kotlin.promoterapp.promoorigin.venda
import android.content.Intent
import android.graphics.Bitmap
import android.graphics.drawable.BitmapDrawable
import android.os.Bundle
import android.provider.MediaStore
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.appcompat.app.AppCompatActivity
import androidx.databinding.DataBindingUtil
import androidx.fragment.app.Fragment
import androidx.lifecycle.Observer
import androidx.lifecycle.ViewModelProviders
import androidx.navigation.fragment.findNavController
import androidx.navigation.fragment.navArgs
import com.google.android.material.snackbar.Snackbar
import net.yan.kotlin.promoterapp.promoorigin.R
import net.yan.kotlin.promoterapp.promoorigin.data.FirebaseHelper
import net.yan.kotlin.promoterapp.promoorigin.databinding.FragmentVendaBinding
/**
 * Final sale screen: requires a photo of the location (captured via the
 * camera intent), then hands it to the ViewModel for upload + persistence.
 */
class VendaFragment : Fragment() {
private lateinit var binding: FragmentVendaBinding
private lateinit var viewModel: VendaViewModel
private lateinit var firebaseHelper: FirebaseHelper
// Last captured photo; null until the camera returns successfully.
private var foto: Bitmap? = null
// true once a photo has been captured in this session.
private var verificador: Boolean = false
override fun onCreateView(
inflater: LayoutInflater, container: ViewGroup?,
savedInstanceState: Bundle?
): View? {
binding = DataBindingUtil.inflate(inflater, R.layout.fragment_venda, container, false)
setHasOptionsMenu(true)
binding.lifecycleOwner = this
firebaseHelper = FirebaseHelper()
val viewModelFactory = VendaViewModelFactory(resources, firebaseHelper)
viewModel = ViewModelProviders.of(this, viewModelFactory).get(VendaViewModel::class.java)
binding.viewModel = viewModel
// City/client chosen in the previous steps (safe-args).
val arguments by navArgs<VendaFragmentArgs>()
binding.cidadeNome = arguments.cidadeNome
binding.clienteNome = arguments.clienteNome
// Submit only when a photo was taken; otherwise prompt the user.
binding.cadastrar.setOnClickListener {
if (verificador == true && foto != null) {
viewModel.gravarFoto(
arguments.cidade.toString(), arguments.cliente.toString(),
foto!!
)
} else {
Snackbar.make(binding.root, "Tire uma foto do local", Snackbar.LENGTH_LONG).show()
}
}
// NOTE(review): empty observer — dead code?
viewModel.lista.observe(viewLifecycleOwner, Observer {
it.let {
}
})
// Show the spinner while the upload is in flight.
viewModel.verifica.observe(viewLifecycleOwner, Observer {
if (it == true) {
binding.loadingSpinner.visibility = View.VISIBLE
}
})
// Camera event: fire the capture intent, then reset the flag.
viewModel.foto.observe(viewLifecycleOwner, Observer {
if (it == true) {
Intent(MediaStore.ACTION_IMAGE_CAPTURE).also { takePictureIntent ->
takePictureIntent.resolveActivity(requireActivity().packageManager)?.also {
startActivityForResult(takePictureIntent, 100)
}
}
viewModel.tirarFotoClose()
}
})
(activity as AppCompatActivity).setSupportActionBar(binding.toolbar)
binding.toolbar.setNavigationOnClickListener {
findNavController().popBackStack()
}
// Upload finished -> back to the home screen.
viewModel.isFim.observe(viewLifecycleOwner, Observer {
if (it == true) {
findNavController().navigate(VendaFragmentDirections.actionVendaFragmentToHomeFragment())
}
})
return binding.root
}
// NOTE(review): -1 is Activity.RESULT_OK, and requestCode (100) is never
// checked — any successful child-activity result is treated as a camera
// capture. Confirm no other startActivityForResult is used here.
override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
super.onActivityResult(requestCode, resultCode, data)
if (resultCode == -1) {
val imageBitmap = data?.extras?.get("data") as Bitmap
// NOTE(review): drawable is created but never used.
val drawable = BitmapDrawable(resources, imageBitmap)
verificador = true
binding.im.setImageBitmap(imageBitmap)
foto = imageBitmap
} else {
foto = null
verificador = false
}
}
}<file_sep>package net.yan.kotlin.promoterapp.promoorigin.venda.optionOne.adapter
import android.view.LayoutInflater
import android.view.ViewGroup
import android.widget.Filter
import android.widget.Filterable
import androidx.recyclerview.widget.DiffUtil
import androidx.recyclerview.widget.ListAdapter
import androidx.recyclerview.widget.RecyclerView
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.launch
import kotlinx.coroutines.withContext
import net.yan.kotlin.promoterapp.promoorigin.databinding.CardOneBinding
import net.yan.kotlin.promoterapp.promoorigin.model.Cliente
import java.util.*
private val ITEM_VIEW_TYPE_ITEM = 1
/**
 * Client-list adapter with DiffUtil and live text filtering on the
 * client address (endereco).
 */
class Adapter(val clickListener: ClienteListener) : ListAdapter<DataItem,
    RecyclerView.ViewHolder>(ClientCallBack()), Filterable {

    private val adapterScope = CoroutineScope(Dispatchers.Default)

    // Unfiltered backing list; the ListAdapter's current list may be a
    // filtered subset of this.
    private var itens: List<DataItem.ClienteItem>? = null

    /** Wraps the raw clients and submits them from the main thread. */
    fun addHeaderAndSubmitList(list: Array<Cliente>?) {
        adapterScope.launch {
            itens = list?.map { DataItem.ClienteItem(it) }
            withContext(Dispatchers.Main) {
                submitList(itens)
            }
        }
    }

    override fun onBindViewHolder(holder: RecyclerView.ViewHolder, position: Int) {
        when (holder) {
            is ViewHolder -> {
                val item = getItem(position) as DataItem.ClienteItem
                holder.bind(clickListener, item.cliente)
            }
        }
    }

    override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): RecyclerView.ViewHolder {
        return when (viewType) {
            ITEM_VIEW_TYPE_ITEM -> ViewHolder.from(parent)
            else -> throw ClassCastException("Unknown viewType ${viewType}")
        }
    }

    override fun getItemViewType(position: Int): Int {
        // FIX: the previous `else -> null }!!` could only ever crash with an
        // opaque NPE; the sealed hierarchy has a single subtype, so the when
        // is exhaustive without an else branch.
        return when (getItem(position)) {
            is DataItem.ClienteItem -> ITEM_VIEW_TYPE_ITEM
        }
    }

    /** Row holder binding one client and its click handler. */
    class ViewHolder private constructor(val binding: CardOneBinding) :
        RecyclerView.ViewHolder(binding.root) {

        fun bind(clickListener: ClienteListener, nome: Cliente) {
            binding.click = clickListener
            binding.cliente = nome
            binding.executePendingBindings()
        }

        companion object {
            fun from(parent: ViewGroup): ViewHolder {
                val layoutInflater = LayoutInflater.from(parent.context)
                val binding = CardOneBinding.inflate(layoutInflater, parent, false)
                return ViewHolder(binding)
            }
        }
    }

    override fun getFilter(): Filter {
        return object : Filter() {
            override fun performFiltering(constraint: CharSequence?): FilterResults {
                val charSearch = constraint.toString().toLowerCase(Locale.ROOT).trim()
                // FIX: guard against filtering before any list was submitted —
                // `itens!!` crashed with an NPE in that window.
                val source = itens.orEmpty()
                val lista = mutableListOf<DataItem.ClienteItem>()
                if (charSearch.isEmpty()) {
                    lista.addAll(source)
                } else {
                    for (row in source) {
                        // charSearch is already lower-cased above.
                        if (row.cliente.endereco.toLowerCase(Locale.ROOT)
                                .contains(charSearch)
                        ) {
                            lista.add(row)
                        }
                    }
                }
                val filterResults = FilterResults()
                filterResults.values = lista
                return filterResults
            }

            @Suppress("UNCHECKED_CAST")
            override fun publishResults(constraint: CharSequence?, results: FilterResults?) {
                submitList(results?.values as MutableList<DataItem>?)
            }
        }
    }
}
/**
 * DiffUtil callback: rows are the same item when they wrap the same
 * [Cliente]; contents match on full equality.
 */
class ClientCallBack : DiffUtil.ItemCallback<DataItem>() {

    override fun areItemsTheSame(oldItem: DataItem, newItem: DataItem) =
        oldItem.cliente == newItem.cliente

    override fun areContentsTheSame(oldItem: DataItem, newItem: DataItem) =
        oldItem == newItem
}
/** Adapter click callback that forwards the tapped [Cliente] to a lambda. */
class ClienteListener(val clickListener: (sleepId: Cliente) -> Unit) {
    fun onClick(client: Cliente) {
        clickListener(client)
    }
}
// Sealed wrapper for adapter rows; currently a single item type, but the
// shape allows headers/footers to be added later.
sealed class DataItem {
data class ClienteItem(val cli: Cliente) : DataItem() {
override val cliente = cli
}
// Every row exposes the client it represents.
abstract val cliente: Cliente
}<file_sep>package net.yan.kotlin.promoterapp.promoorigin
/*
val lista = initializeListPopupMenu(binding.clientes)
@SuppressLint("ResourceType")
private fun initializeListPopupMenu(v: View): ListPopupWindow? {
val listPopupWindow = context?.let {
ListPopupWindow(
it,
null,
R.attr.listPopupWindowStyle
)
}
val adapter = context?.let {
ArrayAdapter(
it,
R.layout.chip,
resources.getStringArray(R.array.promotores)
)
}
listPopupWindow?.setAdapter(adapter)
listPopupWindow?.anchorView = v
listPopupWindow?.setOnItemClickListener { parent, view, position, id ->
listPopupWindow.dismiss()
}
return listPopupWindow
}
val chip = Chip(binding.scrollGroup.context)
chip.text = text
chip.isCheckable = true
chip.isClickable = true
binding.scrollGroup.addView(chip)
fun gravarFoto(bitmap: Bitmap, id: String): String {
var uri = ""
uiScope.launch {
try{
val storageRef = dataSource.storage?.child("imagens")?.child("perfil")?.child(
"$id.jpeg"
)
val baos = ByteArrayOutputStream()
bitmap.compress(Bitmap.CompressFormat.JPEG, 70, baos)
val data = baos.toByteArray()
var uploadTask = storageRef?.putBytes(data)
uploadTask?.addOnFailureListener {
Log.i("URI", it.message)
}?.addOnSuccessListener {
uri = it.metadata!!.toString()
}
}catch (e: FirebaseException){
Log.i("Exception", "UPLOAD FOTO "+ e.message)
}
}
Log.i("URI","KKKKKKKKKKKKKKK "+ uri)
return uri
}
*/
/*
<androidx.appcompat.widget.SearchView
android:id="@+id/search_prom"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:textCursorDrawable="@null"
app:iconifiedByDefault="false"
app:queryBackground="@null"/>
<androidx.recyclerview.widget.RecyclerView
android:id="@+id/rec"
app:layoutManager="androidx.recyclerview.widget.LinearLayoutManager"
android:layout_width="match_parent"
android:layout_height="match_parent"/>
selectionTracker = SelectionTracker.Builder<Long>(
"mySelection",
binding.rec,
AdapterVenda.KeyProvider(adapter),
AdapterVenda.DetailsLookup(binding.rec),
StorageStrategy.createLongStorage()
).withSelectionPredicate(
object : SelectionPredicate<Long>() {
override fun canSetStateForKey(
key: Long,
nextState: Boolean
): Boolean {
return true
}
override fun canSetStateAtPosition(
position: Int,
nextState: Boolean
): Boolean {
return true
}
override fun canSelectMultiple(): Boolean {
return false // Set to false to allow single selecting
}
}
).build()
adapter.setSelection(selectionTracker)
selectionTracker!!.addObserver(
object : SelectionTracker.SelectionObserver<Long?>() {
override fun onItemStateChanged(key: Long, selected: Boolean) {
super.onItemStateChanged(key, selected)
if (selected){
selectPromotor = lista?.get(key.toInt())
binding.promotor = selectPromotor
binding.floating.show()
}else{
binding.promotor = null
selectPromotor = null
binding.floating.hide()
}
}
override fun onSelectionChanged() {
}
})
val lista = resources.getStringArray(R.array.clientes)
for (da in lista){
val cliente = Cliente()
cliente.endereco = da
val fire = FirebaseDatabase.getInstance().reference.child("Clientes").push().setValue(cliente)
}
*/<file_sep>package net.yan.kotlin.promoterapp.promoorigin.venda.optionOne
import android.os.Bundle
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.widget.SearchView
import androidx.databinding.DataBindingUtil
import androidx.fragment.app.Fragment
import androidx.lifecycle.Observer
import androidx.lifecycle.ViewModelProviders
import androidx.navigation.fragment.findNavController
import net.yan.kotlin.promoterapp.promoorigin.R
import net.yan.kotlin.promoterapp.promoorigin.databinding.FragmentOptionOneBinding
import net.yan.kotlin.promoterapp.promoorigin.venda.optionOne.adapter.Adapter
import net.yan.kotlin.promoterapp.promoorigin.venda.optionOne.adapter.ClienteListener
/**
 * Step one of the sale flow: lists clients loaded from Firebase, supports
 * live text filtering, and forwards the chosen client to the city picker.
 */
class OptionOneFragment : Fragment() {
private lateinit var binding: FragmentOptionOneBinding
private lateinit var viewModel: OptionOneViewModel
override fun onCreateView(
inflater: LayoutInflater, container: ViewGroup?,
savedInstanceState: Bundle?
): View? {
binding = DataBindingUtil.inflate(inflater, R.layout.fragment_option_one, container, false)
val viewModelFactory = OptionOneViewModelFactory(resources)
viewModel =
ViewModelProviders.of(this, viewModelFactory).get(OptionOneViewModel::class.java)
// Row taps publish the client through the ViewModel.
val adapter = Adapter(ClienteListener { nome ->
viewModel.selecionarNome(nome)
})
// Live filtering of the client list as the user types.
binding.search.setOnQueryTextListener(object : SearchView.OnQueryTextListener,
androidx.appcompat.widget.SearchView.OnQueryTextListener {
override fun onQueryTextSubmit(query: String?): Boolean {
return false
}
override fun onQueryTextChange(newText: String?): Boolean {
adapter.filter.filter(newText)
return false
}
})
// One-shot navigation: non-null selection -> go to the city picker,
// then reset so re-observation doesn't re-navigate.
viewModel.nome.observe(viewLifecycleOwner, Observer {
if (it != null) {
findNavController().navigate(
OptionOneFragmentDirections.actionOptionOneFragmentToOptionTwoFragment(
it.id,
it.endereco
)
)
viewModel.selecionarNomeExit()
}
})
// Re-submit the list whenever Firebase pushes an update.
viewModel.nomesLista.observe(viewLifecycleOwner, Observer {
it.let {
adapter.addHeaderAndSubmitList(it)
}
})
binding.toolbar.setNavigationOnClickListener {
findNavController().popBackStack()
}
binding.recycler.adapter = adapter
return binding.root
}
}
<file_sep>package net.yan.kotlin.promoterapp.promoorigin.principal
import android.os.Bundle
import android.view.LayoutInflater
import android.view.MenuItem
import android.view.View
import android.view.ViewGroup
import android.widget.FrameLayout
import androidx.activity.OnBackPressedCallback
import androidx.databinding.DataBindingUtil
import androidx.fragment.app.Fragment
import androidx.lifecycle.Observer
import androidx.lifecycle.ViewModelProviders
import androidx.navigation.fragment.findNavController
import com.google.android.material.bottomsheet.BottomSheetBehavior
import com.google.android.material.navigation.NavigationView
import net.yan.kotlin.promoterapp.promoorigin.R
import net.yan.kotlin.promoterapp.promoorigin.databinding.FragmentHomeBinding
/**
 * Landing screen: redirects to login when no Firebase session exists,
 * hosts the bottom-drawer navigation, and launches the sale flow.
 */
class HomeFragment : Fragment() {
private lateinit var bottomSheetBehavior: BottomSheetBehavior<FrameLayout>
override fun onCreateView(
inflater: LayoutInflater, container: ViewGroup?,
savedInstanceState: Bundle?
): View? {
val viewModelFactory = HomeViewModelFactory(resources)
val viewModel = ViewModelProviders.of(this, viewModelFactory).get(HomeViewModel::class.java)
// true here means "NOT logged in" (see HomeViewModel) -> go to login.
viewModel.estaLogado.observe(viewLifecycleOwner, Observer {
if (it == true) {
findNavController().navigate(HomeFragmentDirections.actionHomeFragmentToLoginFragment())
}
})
val binding = DataBindingUtil.inflate<FragmentHomeBinding>(
inflater,
R.layout.fragment_home,
container,
false
)
// Back press closes the bottom drawer first; only exits the app when
// the drawer is already hidden.
activity?.onBackPressedDispatcher?.addCallback(
viewLifecycleOwner,
object : OnBackPressedCallback(true) {
override fun handleOnBackPressed() {
if (bottomSheetBehavior.state != BottomSheetBehavior.STATE_HIDDEN) {
bottomSheetBehavior.state = BottomSheetBehavior.STATE_HIDDEN
} else {
activity?.finish()
}
}
})
// One-shot event: start the sale flow, then reset the flag.
viewModel.newPhoto.observe(viewLifecycleOwner, Observer {
if (it == true) {
findNavController().navigate(HomeFragmentDirections.actionHomeFragmentToOptionOneFragment())
viewModel.addLocalAndVendaClose()
}
})
binding.lifecycleOwner = this
binding.viewModel = viewModel
// Drawer menu: only the "sair" (sign out) item is handled.
binding.navigationView.setNavigationItemSelectedListener(object :
NavigationView.OnNavigationItemSelectedListener {
override fun onNavigationItemSelected(item: MenuItem): Boolean {
if (item.itemId == R.id.sair) {
viewModel.sair()
}
return true
}
})
val linear = binding.bottomDrawer
bottomSheetBehavior = BottomSheetBehavior.from(linear)
// Hamburger icon opens the bottom drawer half-expanded.
binding.bar.setNavigationOnClickListener {
bottomSheetBehavior.state = BottomSheetBehavior.STATE_HALF_EXPANDED
}
binding.bar.setNavigationIcon(R.drawable.ic_menu_black_24dp)
return binding.root
}
}
<file_sep>package net.yan.kotlin.promoterapp.promoorigin.principal
import android.content.res.Resources
import android.util.Log
import androidx.lifecycle.MutableLiveData
import androidx.lifecycle.ViewModel
import com.google.firebase.FirebaseException
import com.google.firebase.auth.FirebaseAuth
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.Job
// Home-screen ViewModel: tracks auth state and one-shot navigation events.
class HomeViewModel(
val resources: Resources
) : ViewModel() {
val job = Job()
// NOTE(review): uiScope is never used in this class.
val uiScope = CoroutineScope(Dispatchers.IO + job)
// NOTE(review): name is misleading — true means the user is NOT logged
// in and must be sent to the login screen.
val estaLogado = MutableLiveData<Boolean>()
// true -> navigate to the "new photo" (sale) flow.
val newPhoto = MutableLiveData<Boolean>()
init {
// Decide the initial screen from the cached Firebase session.
try {
val user = FirebaseAuth.getInstance().currentUser
estaLogado.value = user == null
} catch (e: FirebaseException) {
// NOTE(review): e.message is nullable; Log.i may throw on null.
Log.i("DADO", e.message)
}
}
// Signs out and triggers navigation to login.
fun sair() {
FirebaseAuth.getInstance().signOut()
estaLogado.value = true
}
fun addLocalAndVenda() {
newPhoto.value = true
}
// Resets the event so it doesn't re-fire on re-observation.
fun addLocalAndVendaClose() {
newPhoto.value = false
}
fun novaTela() {
estaLogado.value = false
}
override fun onCleared() {
super.onCleared()
job.cancel()
}
}<file_sep>@file:Suppress("DEPRECATED_IDENTITY_EQUALS")
package net.yan.kotlin.promoterapp.promoorigin
import android.Manifest
import android.app.Activity
import android.os.Build
import androidx.core.app.ActivityCompat
// Runtime-permission helper.
object Permissao {
// Requests storage / network-state / camera permissions on API >= 23.
// NOTE(review): requestPermissions() is asynchronous — the Boolean
// return is always true and does NOT mean the permissions were granted;
// callers must handle onRequestPermissionsResult() themselves.
fun validarPermissoes(
activity: Activity
): Boolean {
if (Build.VERSION.SDK_INT >= 23) {
ActivityCompat.requestPermissions(
activity,
arrayOf(
Manifest.permission.READ_EXTERNAL_STORAGE,
Manifest.permission.ACCESS_NETWORK_STATE,
Manifest.permission.CAMERA
),
1
)
}
return true
}
}<file_sep>package net.yan.kotlin.promoterapp.promoorigin.venda.optionTwo
import android.view.LayoutInflater
import android.view.ViewGroup
import android.widget.Filter
import android.widget.Filterable
import androidx.recyclerview.widget.DiffUtil
import androidx.recyclerview.widget.ListAdapter
import androidx.recyclerview.widget.RecyclerView
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.launch
import kotlinx.coroutines.withContext
import net.yan.kotlin.promoterapp.promoorigin.databinding.CardForTwoScreenBinding
import net.yan.kotlin.promoterapp.promoorigin.model.Cidade
import java.util.*
/**
 * City-list adapter with DiffUtil and live text filtering on the city
 * name (local).
 */
class AdapterTwo(val click: Clique) : ListAdapter<Data, RecyclerView.ViewHolder>(ClienteCallBack()),
    Filterable {

    val uiScope = CoroutineScope(Dispatchers.Default)

    // Unfiltered backing list; the ListAdapter's current list may be a
    // filtered subset of this.
    private var lista: List<Data.DataItem>? = null

    /** Wraps the raw cities and submits them from the main thread. */
    fun adicionarLista(array: Array<Cidade>?) {
        uiScope.launch {
            lista = array?.map { Data.DataItem(it) }
            withContext(Dispatchers.Main) {
                submitList(lista)
            }
        }
    }

    override fun onBindViewHolder(holder: RecyclerView.ViewHolder, position: Int) {
        when (holder) {
            is MyViewHolder -> {
                val item = getItem(position) as Data.DataItem
                holder.bind(click, item.cidade)
            }
        }
    }

    override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): RecyclerView.ViewHolder {
        return when (viewType) {
            1 -> MyViewHolder.from(parent)
            else -> throw ClassCastException("Unknown viewType ${viewType}")
        }
    }

    override fun getItemViewType(position: Int): Int {
        // FIX: the previous `else -> null }!!` could only ever crash with an
        // opaque NPE; the sealed hierarchy has a single subtype, so the when
        // is exhaustive without an else branch.
        return when (getItem(position)) {
            is Data.DataItem -> 1
        }
    }

    /** Row holder binding one city and its click handler. */
    class MyViewHolder private constructor(val binding: CardForTwoScreenBinding) :
        RecyclerView.ViewHolder(binding.root) {

        fun bind(click: Clique, cidade: Cidade) {
            binding.cidade = cidade
            binding.click = click
            binding.executePendingBindings()
        }

        companion object {
            fun from(parent: ViewGroup): MyViewHolder {
                val inflater = LayoutInflater.from(parent.context)
                val binding = CardForTwoScreenBinding.inflate(inflater, parent, false)
                return MyViewHolder(binding)
            }
        }
    }

    override fun getFilter(): Filter {
        return object : Filter() {
            override fun performFiltering(constraint: CharSequence?): FilterResults {
                val const = constraint.toString().toLowerCase(Locale.ROOT).trim()
                // FIX: guard against filtering before any list was submitted —
                // `lista!!` crashed with an NPE in that window.
                val source = lista.orEmpty()
                val fLista = mutableListOf<Data.DataItem>()
                if (const.isEmpty()) {
                    fLista.addAll(source)
                } else {
                    for (row in source) {
                        if (row.cidade.local.toLowerCase(Locale.ROOT).contains(const)) {
                            fLista.add(row)
                        }
                    }
                }
                val filter = FilterResults()
                filter.values = fLista
                return filter
            }

            @Suppress("UNCHECKED_CAST")
            override fun publishResults(constraint: CharSequence?, results: FilterResults?) {
                submitList(results?.values as MutableList<Data>?)
            }
        }
    }
}
/**
 * DiffUtil callback: rows are the same item when they wrap the same
 * [Cidade]; contents match on full equality.
 */
class ClienteCallBack : DiffUtil.ItemCallback<Data>() {

    override fun areItemsTheSame(oldItem: Data, newItem: Data) =
        oldItem.cidade == newItem.cidade

    override fun areContentsTheSame(oldItem: Data, newItem: Data) =
        oldItem == newItem
}
// Adapter click callback that forwards the tapped city to a lambda.
class Clique(val clickListener: (cida: Cidade) -> Unit) {
fun onClick(cidade: Cidade) = clickListener(cidade)
}
// Sealed wrapper for city rows; a single item type today, extensible later.
sealed class Data {
data class DataItem(val name: Cidade) : Data() {
override val cidade = name
}
// Every row exposes the city it represents.
abstract val cidade: Cidade
}
<file_sep>package net.yan.kotlin.promoterapp.promoorigin.model
import com.google.firebase.database.Exclude
// Firebase model for a promoter account.
// NOTE(review): @Exclude on a constructor parameter may not end up on the
// generated getter that Firebase reads; the documented pattern is the
// @get:Exclude use-site target — verify id/senha are really excluded
// from serialization.
data class Promoter(
@Exclude var id: String = "",
var email: String = "",
@Exclude var senha: String = "",
var foto: String = "",
var nome: String = ""
)<file_sep>package net.yan.kotlin.promoterapp.promoorigin.venda
import android.view.LayoutInflater
import android.view.MotionEvent
import android.view.ViewGroup
import android.widget.Filter
import android.widget.Filterable
import androidx.recyclerview.selection.ItemDetailsLookup
import androidx.recyclerview.selection.ItemDetailsLookup.ItemDetails
import androidx.recyclerview.selection.ItemKeyProvider
import androidx.recyclerview.selection.SelectionTracker
import androidx.recyclerview.widget.DiffUtil
import androidx.recyclerview.widget.ListAdapter
import androidx.recyclerview.widget.RecyclerView
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.launch
import kotlinx.coroutines.withContext
import net.yan.kotlin.promoterapp.promoorigin.databinding.CardForPromBinding
import net.yan.kotlin.promoterapp.promoorigin.model.Promoter
import java.util.*
// Promoter-list adapter with single-selection support (SelectionTracker)
// and text filtering.
class AdapterVenda(val click: Clique) :
ListAdapter<Data, RecyclerView.ViewHolder>(ClienteCallBack()),
Filterable {
val uiScope = CoroutineScope(Dispatchers.Default)
// Unfiltered backing list.
private var lista: List<Data.DataItem>? = null
var selectionTracker: SelectionTracker<Long>? = null
// Injected after the tracker is built against the RecyclerView.
fun setSelection(selectionTracker: SelectionTracker<Long>?) {
this.selectionTracker = selectionTracker
}
// Wraps the raw promoters and submits them from the main thread.
fun adicionarLista(array: Array<Promoter>?) {
uiScope.launch {
lista = array?.map { Data.DataItem(it) }
withContext(Dispatchers.Main) {
submitList(lista)
}
}
}
override fun onBindViewHolder(holder: RecyclerView.ViewHolder, position: Int) {
when (holder) {
is MyViewHolder -> {
val nightItem = getItem(position) as Data.DataItem
holder.bind(click, nightItem.promotor, position, selectionTracker)
}
}
}
override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): RecyclerView.ViewHolder {
return when (viewType) {
1 -> MyViewHolder.from(parent)
else -> throw ClassCastException("Unknown viewType ${viewType}")
}
}
override fun getItemViewType(position: Int): Int {
return when (getItem(position)) {
is Data.DataItem -> 1
else -> null
}!!
}
class MyViewHolder private constructor(val binding: CardForPromBinding) :
RecyclerView.ViewHolder(binding.root) {
private val details: Details = Details()
var selectionTrackerr: SelectionTracker<Long>? = null
fun bind(
click: Clique,
nome: Promoter,
position: Int,
selectionTracker: SelectionTracker<Long>?
) {
binding.promoter = nome
binding.click = click
details.position = position.toLong()
selectionTrackerr = selectionTracker
if (selectionTrackerr != null) {
bindSelectedState()
}
binding.executePendingBindings()
}
private fun bindSelectedState() {
binding.card.isChecked = selectionTrackerr!!.isSelected(details.selectionKey)
}
fun getItemDetails(): ItemDetails<Long?>? {
return details
}
companion object {
fun from(parent: ViewGroup): MyViewHolder {
val inflater = LayoutInflater.from(parent.context)
val binding = CardForPromBinding.inflate(inflater, parent, false)
return MyViewHolder(binding)
}
}
}
internal class Details : ItemDetailsLookup.ItemDetails<Long?>() {
var position: Long = 0
override fun getPosition(): Int {
return position.toInt()
}
override fun getSelectionKey(): Long? {
return position
}
override fun inSelectionHotspot(e: MotionEvent): Boolean {
return true
}
override fun inDragRegion(e: MotionEvent): Boolean {
return false
}
}
class DetailsLookup(private val recyclerView: RecyclerView) : ItemDetailsLookup<Long>() {
override fun getItemDetails(e: MotionEvent): ItemDetails<Long?>? {
val view = recyclerView.findChildViewUnder(e.x, e.y)
var cont = 0
if (view != null) {
cont += 1
if (cont == 1) {
val viewHolder = recyclerView.getChildViewHolder(view)
if (viewHolder is AdapterVenda.MyViewHolder) {
return viewHolder.getItemDetails()
}
}
}
return null
}
}
class KeyProvider(adapter: AdapterVenda?) :
ItemKeyProvider<Long?>(SCOPE_MAPPED) {
//Pega as chaves únicas de cada card do recyclerView
override fun getKey(position: Int): Long? {
return position.toLong()
}
//retorna a posição do card
override fun getPosition(key: Long): Int {
return key.toInt()
}
}
override fun getFilter(): Filter {
return object : Filter() {
override fun performFiltering(constraint: CharSequence?): FilterResults {
val const = constraint.toString().toLowerCase(Locale.ROOT).trim()
val fLista = mutableListOf<Data.DataItem>()
if (const.isEmpty()) {
fLista.addAll(lista!!)
} else {
for (row in lista!!) {
if (row.promotor.nome.toLowerCase(Locale.ROOT).contains(const)) {
fLista.add(row)
}
}
}
val filter = FilterResults()
filter.values = fLista
return filter
}
override fun publishResults(constraint: CharSequence?, results: FilterResults?) {
submitList(results?.values as MutableList<Data>?)
}
}
}
}
/**
 * DiffUtil callback for promoter rows.
 * NOTE(review): identity compares the whole [Promoter]; if `id` is reliably
 * populated, comparing ids would give better move/update detection — confirm.
 */
class ClienteCallBack : DiffUtil.ItemCallback<Data>() {
    /** Same row when both wrap an equal [Promoter]. */
    override fun areItemsTheSame(oldItem: Data, newItem: Data): Boolean =
        oldItem.promotor == newItem.promotor

    /** Contents match when the items compare fully equal. */
    override fun areContentsTheSame(oldItem: Data, newItem: Data): Boolean =
        oldItem == newItem
}
/** Thin click wrapper: forwards the tapped [Promoter] to [clickListener]. */
class Clique(val clickListener: (sleepId: Promoter) -> Unit) {
    fun onClick(nome: Promoter) {
        clickListener(nome)
    }
}
/** Sealed list-row wrapper consumed by [AdapterVenda] and its DiffUtil callback. */
sealed class Data {
    abstract val promotor: Promoter

    /** The only concrete row type: a single promoter entry. */
    data class DataItem(val name: Promoter) : Data() {
        override val promotor: Promoter = name
    }
}
<file_sep>rootProject.name='Promo Origin'
include ':app'
<file_sep>package net.yan.kotlin.promoterapp.promoorigin
import android.widget.TextView
import androidx.databinding.BindingAdapter
import com.google.android.material.chip.ChipGroup
// Binding adapter wired to app:textFormater in XML layouts.
// NOTE(review): the loop body is empty, so this adapter currently does nothing
// with `array` — presumably it was meant to add one Chip per entry to
// `chipGroup`; confirm the intent before relying on it.
@BindingAdapter("textFormater")
fun chip(chipGroup: ChipGroup, array: List<String>) {
    for (text in array) {
    }
}
/** Binding adapter for app:itens — sets the TextView text; no-op when null. */
@BindingAdapter("itens")
fun TextView.setNomes(item: String?) {
    if (item != null) {
        text = item
    }
}<file_sep>package net.yan.kotlin.promoterapp.promoorigin.model
import com.google.firebase.database.Exclude
// Firebase Realtime Database model for a client.
class Cliente {
    // Database key; the getter-site @Exclude keeps it out of serialization.
    var id: String = ""
        @Exclude
        get
    // Client address, persisted to the database.
    var endereco: String = ""
}<file_sep>package net.yan.kotlin.promoterapp.promoorigin.login.cadastro
import android.os.Bundle
import android.view.*
import androidx.appcompat.app.AppCompatActivity
import androidx.databinding.DataBindingUtil
import androidx.fragment.app.Fragment
import androidx.lifecycle.Observer
import androidx.lifecycle.ViewModelProviders
import androidx.navigation.fragment.findNavController
import net.yan.kotlin.promoterapp.promoorigin.R
import net.yan.kotlin.promoterapp.promoorigin.data.FirebaseHelper
import net.yan.kotlin.promoterapp.promoorigin.databinding.FragmentCadastroBinding
/**
* A simple [Fragment] subclass.
*/
/**
 * Sign-up screen. Delegates validation/auth to [CadastroViewModel] and
 * navigates to the continuation step once the first step succeeds.
 */
class CadastroFragment : Fragment() {
    override fun onCreateView(
        inflater: LayoutInflater, container: ViewGroup?,
        savedInstanceState: Bundle?
    ): View? {
        val binding = DataBindingUtil.inflate<FragmentCadastroBinding>(
            inflater,
            R.layout.fragment_cadastro,
            container,
            false
        )
        val firebase = FirebaseHelper()
        val viewModelFactory = CadastroViewModelFactory(firebase, binding)
        // NOTE(review): ViewModelProviders is deprecated; the modern form is
        // ViewModelProvider(this, viewModelFactory) — same behavior.
        val viewModel =
            ViewModelProviders.of(this, viewModelFactory).get(CadastroViewModel::class.java)
        binding.viewModel = viewModel
        binding.lifecycleOwner = this

        // Navigate forward once the view model reports success; mudouTela()
        // presumably resets the flag so navigation is not re-triggered —
        // confirm in CadastroViewModel.
        viewModel.isLogado.observe(viewLifecycleOwner, Observer {
            if (it == true) {
                findNavController().navigate(
                    CadastroFragmentDirections.actionCadastroFragmentToContinuacaoFragment(
                        binding.loginCad.text.toString(),
                        binding.senhaCad.text.toString()
                    )
                )
                viewModel.mudouTela()
            }
        })

        // Field errors: first == 1 targets the login field, otherwise senha.
        viewModel.error.observe(viewLifecycleOwner, Observer {
            if (it.first == 1) {
                binding.loginCad.error = it.second
            } else {
                binding.senhaCad.error = it.second
            }
        })

        (activity as AppCompatActivity).setSupportActionBar(binding.toolbar1)
        setHasOptionsMenu(true)
        return binding.root
    }

    override fun onCreateOptionsMenu(menu: Menu, inflater: MenuInflater) {
        inflater.inflate(R.menu.menu_cadastro, menu)
    }

    // FIX: only consume the menu item we actually handle. The original
    // returned true unconditionally, swallowing menu events meant for the
    // host activity and any other fragments.
    override fun onOptionsItemSelected(item: MenuItem): Boolean {
        return if (item.itemId == R.id.back) {
            findNavController().popBackStack()
            true
        } else {
            super.onOptionsItemSelected(item)
        }
    }
}
<file_sep>package net.yan.kotlin.promoterapp.promoorigin.principal
import android.content.res.Resources
import androidx.lifecycle.ViewModel
import androidx.lifecycle.ViewModelProvider
/** Factory that builds [HomeViewModel] with app [Resources] injected. */
class HomeViewModelFactory(
    private val resources: Resources
) : ViewModelProvider.Factory {
    @Suppress("unchecked_cast")
    override fun <T : ViewModel?> create(modelClass: Class<T>): T {
        // Guard clause: reject any request for a different ViewModel type.
        if (!modelClass.isAssignableFrom(HomeViewModel::class.java)) {
            throw IllegalArgumentException("Unknown ViewModel class")
        }
        return HomeViewModel(resources) as T
    }
}
| d48a735031fa050aea0484a02025f4133f43217a | [
"Kotlin",
"Gradle"
] | 25 | Kotlin | YanCamiloDev/Promoter-User | 3b22c2be390a3e5e6fba269c6979e740732e14cd | 57476f58c8582c35c1aa39f30f056e7f3af31997 |
refs/heads/master | <file_sep>int a,b;
int c=a+b;
<file_sep># arithmetic
A minimal C++ example that declares two integers and computes their sum.
| 4a5892446ce637d0ee0d2cdbe2d5c06d1bb18f28 | [
"Markdown",
"C++"
] | 2 | C++ | srijanee/arithmetic | c8f00519465af22f5bfdf1caa5c7d4ecf3b91309 | b7b6476936b647cf50052abc0e469db2f8318996 |
refs/heads/master | <repo_name>tayutaedomo/facebook-js-sdk-trial<file_sep>/README.md
# facebook-js-sdk-trial
Try Facebook Javascript SDK
<file_sep>/routes/index.js
var express = require('express');
var router = express.Router();
var passport = require('passport');
var crypto = require('crypto');
var authorized = require('../middleware/auth');
var title = 'Facebook Javascript SDK Trial';
/* GET home page. */
router.get('/', function(req, res, next) {
  res.render('index', { title: title });
});

// Facebook Like button rendered via the JS SDK.
router.get('/like_jssdk', function(req, res, next) {
  res.render('like_jssdk', { title : 'Like jssdk | ' + title });
});

// Facebook Like button rendered via an iframe embed.
router.get('/like_iframe', function(req, res, next) {
  res.render('like_iframe', { title : 'Like iframe | ' + title });
});

// Refer: http://christina04.hatenablog.com/entry/2015/11/07/225734
router.get('/auth', function(req, res, next) {
  if (!req.session.state) {
    // FIX: the `state` value is a CSRF token, so it must come from a CSPRNG.
    // The previous code hashed Date + Math.random(), both of which are
    // predictable; crypto.randomBytes is cryptographically secure.
    req.session.state = crypto.randomBytes(20).toString('hex');
  }
  passport.authenticate('facebook', {
    state: req.session.state
  })(req, res, next);
});

router.get('/auth/callback', function(req, res, next) {
  if (!req.session.state) {
    return res.status(400).send({err: 'no state parameter'});
  }
  // CSRF verification
  if (req.query.state !== req.session.state) {
    return res.status(400).send({err: 'invalid state parameter'});
  }
  passport.authenticate('facebook', {
    failureRedirect: '/auth',
    successRedirect: '/me'
  })(req, res, next);
});

// Profile page; `authorized` middleware redirects unauthenticated users.
router.get('/me', authorized, function(req, res, next) {
  console.log(req.user);
  res.render('me', { title: 'OAuth', profile: req.user });
});

module.exports = router;
<file_sep>/lib/passport.js
'use strict';
var FacebookStrategy = require('passport-facebook').Strategy;
/**
 * Registers the Facebook OAuth strategy and user (de)serializers on the
 * given passport instance. Credentials come from FACEBOOK_CLIENT_ID /
 * FACEBOOK_CLIENT_SECRET; the callback URL from FACEBOOK_CALLBACK_URL
 * (defaulting to localhost:3000 for development).
 */
var initPassport = function(passport) {
  passport.use(new FacebookStrategy({
    clientID: process.env.FACEBOOK_CLIENT_ID,
    clientSecret: process.env.FACEBOOK_CLIENT_SECRET,
    callbackURL: process.env.FACEBOOK_CALLBACK_URL || 'http://localhost:3000/auth/callback',
    enableProof: true,
    //scope: ['email', 'user_friends', 'user_birthday', 'user_location']
    scope: ['email', 'user_friends', 'public_profile']
  }, function(accessToken, refreshToken, profile, done) {
    // Attach the tokens to the profile so later requests can reuse them.
    profile.accessToken = accessToken;
    profile.refreshToken = refreshToken;
    // asynchronous verification, for effect...
    process.nextTick(function() {
      return done(null, profile);
    });
  }));
  // The whole profile object is stored in the session as-is.
  passport.serializeUser(function(user, done) {
    done(null, user);
  });
  passport.deserializeUser(function(obj, done) {
    done(null, obj);
  });
};
module.exports = initPassport;
| be947f92d24efe5d919f3b6f3fef397ab5978691 | [
"Markdown",
"JavaScript"
] | 3 | Markdown | tayutaedomo/facebook-js-sdk-trial | d49666ad3029ba373e7e66ac4941438d1f7f3692 | 02858ad2ef72512373d35a280851484e3243396c |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.